Compare commits
2 Commits
v0.3.0 ... indent-lam

| Author | SHA1 | Date |
| --- | --- | --- |
|  | d5a18a697c |  |
|  | 3e218fa2ec |  |
@@ -1,3 +1,37 @@
[alias]
dev = "run --package ruff_dev --bin ruff_dev"
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"

[target.'cfg(all())']
rustflags = [
# CLIPPY LINT SETTINGS
# This is a workaround to configure lints for the entire workspace, pending the ability to configure this via TOML.
# See: `https://github.com/rust-lang/cargo/issues/5034`
# `https://github.com/EmbarkStudios/rust-ecosystem/issues/22#issuecomment-947011395`
"-Dunsafe_code",
"-Wclippy::pedantic",
# Allowed pedantic lints
"-Wclippy::char_lit_as_u8",
"-Aclippy::collapsible_else_if",
"-Aclippy::collapsible_if",
"-Aclippy::implicit_hasher",
"-Aclippy::match_same_arms",
"-Aclippy::missing_errors_doc",
"-Aclippy::missing_panics_doc",
"-Aclippy::module_name_repetitions",
"-Aclippy::must_use_candidate",
"-Aclippy::similar_names",
"-Aclippy::too_many_lines",
# Disallowed restriction lints
"-Wclippy::print_stdout",
"-Wclippy::print_stderr",
"-Wclippy::dbg_macro",
"-Wclippy::empty_drop",
"-Wclippy::empty_structs_with_brackets",
"-Wclippy::exit",
"-Wclippy::get_unwrap",
"-Wclippy::rc_buffer",
"-Wclippy::rc_mutex",
"-Wclippy::rest_pat_in_fully_bound_structs",
"-Wunreachable_pub"
]
@@ -1,8 +0,0 @@
[profile.ci]
# Print out output for failing tests as soon as they fail, and also at the end
# of the run (for easy scrollability).
failure-output = "immediate-final"
# Do not cancel the test run on the first failure.
fail-fast = false

status-level = "skip"
.gitattributes (3 changes)
@@ -3,8 +3,5 @@
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf

crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf

ruff.schema.json linguist-generated=true text=auto eol=lf
*.md.snap linguist-language=Markdown
.github/CODEOWNERS (6 changes)
@@ -7,9 +7,3 @@

# Jupyter
/crates/ruff_linter/src/jupyter/ @dhruvmanila
/crates/ruff_formatter/ @MichaReiser
/crates/ruff_python_formatter/ @MichaReiser
/crates/ruff_python_parser/ @MichaReiser

# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood
.github/dependabot.yml (8 changes)
@@ -5,14 +5,6 @@ updates:
schedule:
interval: "weekly"
labels: ["internal"]
groups:
actions:
patterns:
- "*"
ignore:
# The latest versions of these are not compatible with our release workflow
- dependency-name: "actions/upload-artifact"
- dependency-name: "actions/download-artifact"

- package-ecosystem: "cargo"
directory: "/"
.github/release.yml (29 changes, new file)
@@ -0,0 +1,29 @@
# https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes#configuring-automatically-generated-release-notes
changelog:
exclude:
labels:
- internal
- documentation
categories:
- title: Breaking Changes
labels:
- breaking
- title: Rules
labels:
- rule
- title: Settings
labels:
- configuration
- cli
- title: Bug Fixes
labels:
- bug
- title: Formatter
labels:
- formatter
- title: Preview
labels:
- preview
- title: Other Changes
labels:
- "*"
156
.github/workflows/ci.yaml
vendored
156
.github/workflows/ci.yaml
vendored
@@ -23,19 +23,14 @@ jobs:
|
||||
name: "Determine changes"
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
# Flag that is raised when any code that affects linter is changed
|
||||
linter: ${{ steps.changed.outputs.linter_any_changed }}
|
||||
# Flag that is raised when any code that affects formatter is changed
|
||||
formatter: ${{ steps.changed.outputs.formatter_any_changed }}
|
||||
# Flag that is raised when any code is changed
|
||||
# This is superset of the linter and formatter
|
||||
code: ${{ steps.changed.outputs.code_any_changed }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: tj-actions/changed-files@v42
|
||||
- uses: tj-actions/changed-files@v40
|
||||
id: changed
|
||||
with:
|
||||
files_yaml: |
|
||||
@@ -48,7 +43,6 @@ jobs:
|
||||
- "!crates/ruff_dev/**"
|
||||
- "!crates/ruff_shrinking/**"
|
||||
- scripts/*
|
||||
- python/**
|
||||
- .github/workflows/ci.yaml
|
||||
|
||||
formatter:
|
||||
@@ -64,19 +58,11 @@ jobs:
|
||||
- crates/ruff_python_parser/**
|
||||
- crates/ruff_dev/**
|
||||
- scripts/*
|
||||
- python/**
|
||||
- .github/workflows/ci.yaml
|
||||
|
||||
code:
|
||||
- "**/*"
|
||||
- "!**/*.md"
|
||||
- "!docs/**"
|
||||
- "!assets/**"
|
||||
|
||||
cargo-fmt:
|
||||
name: "cargo fmt"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -86,9 +72,6 @@ jobs:
|
||||
cargo-clippy:
|
||||
name: "cargo clippy"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -97,26 +80,41 @@ jobs:
|
||||
rustup target add wasm32-unknown-unknown
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Clippy"
|
||||
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
|
||||
run: cargo clippy --workspace --all-targets --all-features -- -D warnings
|
||||
- name: "Clippy (wasm)"
|
||||
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
|
||||
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings
|
||||
|
||||
cargo-test-linux:
|
||||
name: "cargo test (linux)"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
name: "cargo test (linux)"
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@v1
|
||||
- name: "Install cargo nextest"
|
||||
- name: "Install cargo insta"
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
tool: cargo-insta
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Run tests"
|
||||
run: cargo insta test --all --all-features --unreferenced reject
|
||||
# Check for broken links in the documentation.
|
||||
- run: cargo doc --all --no-deps
|
||||
env:
|
||||
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
|
||||
RUSTDOCFLAGS: "-D warnings"
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ruff
|
||||
path: target/debug/ruff
|
||||
|
||||
cargo-test-windows:
|
||||
runs-on: windows-latest
|
||||
name: "cargo test (windows)"
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install cargo insta"
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
@@ -124,47 +122,12 @@ jobs:
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
env:
|
||||
NEXTEST_PROFILE: "ci"
|
||||
run: cargo insta test --all-features --unreferenced reject --test-runner nextest
|
||||
|
||||
# Check for broken links in the documentation.
|
||||
- run: cargo doc --all --no-deps
|
||||
env:
|
||||
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
|
||||
RUSTDOCFLAGS: "-D warnings"
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ruff
|
||||
path: target/debug/ruff
|
||||
|
||||
cargo-test-windows:
|
||||
name: "cargo test (windows)"
|
||||
runs-on: windows-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
run: |
|
||||
cargo nextest run --all-features --profile ci
|
||||
cargo test --all-features --doc
|
||||
# We can't reject unreferenced snapshots on windows because flake8_executable can't run on windows
|
||||
run: cargo insta test --all --all-features
|
||||
|
||||
cargo-test-wasm:
|
||||
name: "cargo test (wasm)"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 10
|
||||
name: "cargo test (wasm)"
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -182,11 +145,8 @@ jobs:
|
||||
wasm-pack test --node
|
||||
|
||||
cargo-fuzz:
|
||||
name: "cargo fuzz"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 10
|
||||
name: "cargo fuzz"
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -197,15 +157,12 @@ jobs:
|
||||
- name: "Install cargo-fuzz"
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-fuzz@0.11.2
|
||||
tool: cargo-fuzz@0.11
|
||||
- run: cargo fuzz build -s none
|
||||
|
||||
scripts:
|
||||
name: "test scripts"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -227,30 +184,25 @@ jobs:
|
||||
- cargo-test-linux
|
||||
- determine_changes
|
||||
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
|
||||
# Ecosystem check needs linter and/or formatter changes.
|
||||
if: github.event_name == 'pull_request' && ${{
|
||||
needs.determine_changes.outputs.code == 'true'
|
||||
}}
|
||||
timeout-minutes: 20
|
||||
if: github.event_name == 'pull_request'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
- uses: actions/download-artifact@v3
|
||||
name: Download comparison Ruff binary
|
||||
id: ruff-target
|
||||
with:
|
||||
name: ruff
|
||||
path: target/debug
|
||||
|
||||
- uses: dawidd6/action-download-artifact@v3
|
||||
- uses: dawidd6/action-download-artifact@v2
|
||||
name: Download baseline Ruff binary
|
||||
with:
|
||||
name: ruff
|
||||
branch: ${{ github.event.pull_request.base.ref }}
|
||||
workflow: "ci.yaml"
|
||||
check_artifacts: true
|
||||
|
||||
- name: Install ruff-ecosystem
|
||||
@@ -325,13 +277,13 @@ jobs:
|
||||
run: |
|
||||
echo ${{ github.event.number }} > pr-number
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
- uses: actions/upload-artifact@v3
|
||||
name: Upload PR Number
|
||||
with:
|
||||
name: pr-number
|
||||
path: pr-number
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
- uses: actions/upload-artifact@v3
|
||||
name: Upload Results
|
||||
with:
|
||||
name: ecosystem-result
|
||||
@@ -340,9 +292,6 @@ jobs:
|
||||
cargo-udeps:
|
||||
name: "cargo udeps"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install nightly Rust toolchain"
|
||||
@@ -357,10 +306,9 @@ jobs:
|
||||
python-package:
|
||||
name: "python package"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -382,10 +330,9 @@ jobs:
|
||||
pre-commit:
|
||||
name: "pre-commit"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -394,7 +341,7 @@ jobs:
|
||||
- name: "Install pre-commit"
|
||||
run: pip install pre-commit
|
||||
- name: "Cache pre-commit"
|
||||
uses: actions/cache@v4
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: ~/.cache/pre-commit
|
||||
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||
@@ -411,15 +358,14 @@ jobs:
|
||||
docs:
|
||||
name: "mkdocs"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
env:
|
||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@v0.9.0
|
||||
uses: webfactory/ssh-agent@v0.8.0
|
||||
with:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -442,14 +388,13 @@ jobs:
|
||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||
- name: "Build docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.public.yml
|
||||
run: mkdocs build --strict -f mkdocs.generated.yml
|
||||
|
||||
check-formatter-instability-and-black-similarity:
|
||||
name: "formatter instabilities and black similarity"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -466,11 +411,7 @@ jobs:
|
||||
check-ruff-lsp:
|
||||
name: "test ruff-lsp"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
needs:
|
||||
- cargo-test-linux
|
||||
- determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
needs: cargo-test-linux
|
||||
steps:
|
||||
- uses: extractions/setup-just@v1
|
||||
env:
|
||||
@@ -481,11 +422,11 @@ jobs:
|
||||
with:
|
||||
repository: "astral-sh/ruff-lsp"
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
- uses: actions/download-artifact@v3
|
||||
name: Download development ruff binary
|
||||
id: ruff-target
|
||||
with:
|
||||
@@ -508,9 +449,6 @@ jobs:
|
||||
|
||||
benchmarks:
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- name: "Checkout Branch"
|
||||
uses: actions/checkout@v4
|
||||
@@ -529,7 +467,7 @@ jobs:
|
||||
run: cargo codspeed build --features codspeed -p ruff_benchmark
|
||||
|
||||
- name: "Run benchmarks"
|
||||
uses: CodSpeedHQ/action@v2
|
||||
uses: CodSpeedHQ/action@v1
|
||||
with:
|
||||
run: cargo codspeed run
|
||||
token: ${{ secrets.CODSPEED_TOKEN }}
|
||||
|
||||
8
.github/workflows/docs.yaml
vendored
8
.github/workflows/docs.yaml
vendored
@@ -20,10 +20,10 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@v0.9.0
|
||||
uses: webfactory/ssh-agent@v0.8.0
|
||||
with:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -44,10 +44,10 @@ jobs:
|
||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||
- name: "Build docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.public.yml
|
||||
run: mkdocs build --strict -f mkdocs.generated.yml
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@v3.4.1
|
||||
uses: cloudflare/wrangler-action@v3.3.2
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
|
||||
247
.github/workflows/flake8-to-ruff.yaml
vendored
Normal file
247
.github/workflows/flake8-to-ruff.yaml
vendored
Normal file
@@ -0,0 +1,247 @@
|
||||
name: "[flake8-to-ruff] Release"
|
||||
|
||||
on: workflow_dispatch
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
PACKAGE_NAME: flake8-to-ruff
|
||||
CRATE_NAME: flake8_to_ruff
|
||||
PYTHON_VERSION: "3.11"
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
macos-x86_64:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Build wheels - x86_64"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: x86_64
|
||||
args: --release --out dist --sdist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
- name: "Install built wheel - x86_64"
|
||||
run: |
|
||||
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
macos-universal:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Build wheels - universal2"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
args: --release --target universal2-apple-darwin --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
- name: "Install built wheel - universal2"
|
||||
run: |
|
||||
pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
windows:
|
||||
runs-on: windows-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target: [x64, x86]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: ${{ matrix.target }}
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
- name: "Install built wheel"
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
linux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target: [x86_64, i686]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: auto
|
||||
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
- name: "Install built wheel"
|
||||
if: matrix.target == 'x86_64'
|
||||
run: |
|
||||
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
linux-cross:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target: [aarch64, armv7, s390x, ppc64le, ppc64]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: auto
|
||||
args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
- uses: uraimo/run-on-arch-action@v2
|
||||
if: matrix.target != 'ppc64'
|
||||
name: Install built wheel
|
||||
with:
|
||||
arch: ${{ matrix.target }}
|
||||
distro: ubuntu20.04
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apt-get update
|
||||
apt-get install -y --no-install-recommends python3 python3-pip
|
||||
pip3 install -U pip
|
||||
run: |
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
musllinux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-musl
|
||||
- i686-unknown-linux-musl
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
- name: "Install built wheel"
|
||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||
uses: addnab/docker-run-action@v3
|
||||
with:
|
||||
image: alpine:latest
|
||||
options: -v ${{ github.workspace }}:/io -w /io
|
||||
run: |
|
||||
apk add py3-pip
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
musllinux-cross:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-musl
|
||||
arch: aarch64
|
||||
- target: armv7-unknown-linux-musleabihf
|
||||
arch: armv7
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
- uses: uraimo/run-on-arch-action@v2
|
||||
name: Install built wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
distro: alpine_latest
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apk add py3-pip
|
||||
run: |
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
release:
|
||||
name: Release
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- macos-universal
|
||||
- macos-x86_64
|
||||
- windows
|
||||
- linux
|
||||
- linux-cross
|
||||
- musllinux
|
||||
- musllinux-cross
|
||||
steps:
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
- uses: actions/setup-python@v4
|
||||
- name: "Publish to PyPi"
|
||||
env:
|
||||
TWINE_USERNAME: __token__
|
||||
TWINE_PASSWORD: ${{ secrets.FLAKE8_TO_RUFF_TOKEN }}
|
||||
run: |
|
||||
pip install --upgrade twine
|
||||
twine upload --skip-existing *
|
||||
2
.github/workflows/playground.yaml
vendored
2
.github/workflows/playground.yaml
vendored
@@ -40,7 +40,7 @@ jobs:
|
||||
working-directory: playground
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@v3.4.1
|
||||
uses: cloudflare/wrangler-action@v3.3.2
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
|
||||
8
.github/workflows/pr-comment.yaml
vendored
8
.github/workflows/pr-comment.yaml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dawidd6/action-download-artifact@v3
|
||||
- uses: dawidd6/action-download-artifact@v2
|
||||
name: Download pull request number
|
||||
with:
|
||||
name: pr-number
|
||||
@@ -32,7 +32,7 @@ jobs:
|
||||
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- uses: dawidd6/action-download-artifact@v3
|
||||
- uses: dawidd6/action-download-artifact@v2
|
||||
name: "Download ecosystem results"
|
||||
id: download-ecosystem-result
|
||||
if: steps.pr-number.outputs.pr-number
|
||||
@@ -61,7 +61,7 @@ jobs:
|
||||
echo 'EOF' >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Find existing comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
uses: peter-evans/find-comment@v2
|
||||
if: steps.generate-comment.outcome == 'success'
|
||||
id: find-comment
|
||||
with:
|
||||
@@ -71,7 +71,7 @@ jobs:
|
||||
|
||||
- name: Create or update comment
|
||||
if: steps.find-comment.outcome == 'success'
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
uses: peter-evans/create-or-update-comment@v3
|
||||
with:
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
|
||||
191
.github/workflows/release.yaml
vendored
191
.github/workflows/release.yaml
vendored
@@ -36,7 +36,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
@@ -52,9 +52,9 @@ jobs:
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload sdist"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels-sdist
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
macos-x86_64:
|
||||
@@ -63,7 +63,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -73,26 +73,26 @@ jobs:
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: x86_64
|
||||
args: --release --locked --out dist
|
||||
args: --release --out dist
|
||||
- name: "Test wheel - x86_64"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels-macos-x86_64
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz
|
||||
ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries-macos-x86_64
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
@@ -103,7 +103,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -112,26 +112,26 @@ jobs:
|
||||
- name: "Build wheels - universal2"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
args: --release --locked --target universal2-apple-darwin --out dist
|
||||
args: --release --target universal2-apple-darwin --out dist
|
||||
- name: "Test wheel - universal2"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels-aarch64-apple-darwin
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz
|
||||
ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries-aarch64-apple-darwin
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
@@ -151,7 +151,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: ${{ matrix.platform.arch }}
|
||||
@@ -161,7 +161,7 @@ jobs:
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
args: --release --locked --out dist
|
||||
args: --release --out dist
|
||||
- name: "Test wheel"
|
||||
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
||||
shell: bash
|
||||
@@ -170,20 +170,20 @@ jobs:
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip
|
||||
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
|
||||
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
||||
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries-${{ matrix.platform.target }}
|
||||
name: binaries
|
||||
path: |
|
||||
*.zip
|
||||
*.sha256
|
||||
@@ -199,7 +199,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -210,7 +210,7 @@ jobs:
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: auto
|
||||
args: --release --locked --out dist
|
||||
args: --release --out dist
|
||||
- name: "Test wheel"
|
||||
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
||||
run: |
|
||||
@@ -218,19 +218,19 @@ jobs:
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels-${{ matrix.target }}
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
|
||||
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries-${{ matrix.target }}
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
@@ -251,18 +251,14 @@ jobs:
|
||||
arch: s390x
|
||||
- target: powerpc64le-unknown-linux-gnu
|
||||
arch: ppc64le
|
||||
# see https://github.com/astral-sh/ruff/issues/10073
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: powerpc64-unknown-linux-gnu
|
||||
arch: ppc64
|
||||
# see https://github.com/astral-sh/ruff/issues/10073
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
@@ -273,7 +269,7 @@ jobs:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: auto
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
args: --release --locked --out dist
|
||||
args: --release --out dist
|
||||
- uses: uraimo/run-on-arch-action@v2
|
||||
if: matrix.platform.arch != 'ppc64'
|
||||
name: Test wheel
|
||||
@@ -289,19 +285,19 @@ jobs:
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
|
||||
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries-${{ matrix.platform.target }}
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
@@ -317,7 +313,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -328,7 +324,7 @@ jobs:
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
args: --release --out dist
|
||||
- name: "Test wheel"
|
||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||
uses: addnab/docker-run-action@v3
|
||||
@@ -336,24 +332,24 @@ jobs:
|
||||
image: alpine:latest
|
||||
options: -v ${{ github.workspace }}:/io -w /io
|
||||
run: |
|
||||
apk add python3
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/ruff check --help
|
||||
apk add py3-pip
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels-${{ matrix.target }}
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
|
||||
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries-${{ matrix.target }}
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
@@ -373,7 +369,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
@@ -383,7 +379,7 @@ jobs:
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
args: --release --out dist
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
- uses: uraimo/run-on-arch-action@v2
|
||||
name: Test wheel
|
||||
@@ -392,25 +388,24 @@ jobs:
|
||||
distro: alpine_latest
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apk add python3
|
||||
apk add py3-pip
|
||||
run: |
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/ruff check --help
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
ruff check --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
|
||||
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries-${{ matrix.platform.target }}
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
@@ -467,11 +462,10 @@ jobs:
|
||||
# For pypi trusted publishing
|
||||
id-token: write
|
||||
steps:
|
||||
- uses: actions/download-artifact@v4
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
pattern: wheels-*
|
||||
name: wheels
|
||||
path: wheels
|
||||
merge-multiple: true
|
||||
- name: Publish to PyPi
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
@@ -511,11 +505,10 @@ jobs:
|
||||
# For GitHub release publishing
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/download-artifact@v4
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
pattern: binaries-*
|
||||
name: binaries
|
||||
path: binaries
|
||||
merge-multiple: true
|
||||
- name: "Publish to GitHub"
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
@@ -523,62 +516,6 @@ jobs:
|
||||
files: binaries/*
|
||||
tag_name: v${{ inputs.tag }}
|
||||
|
||||
docker-publish:
|
||||
# This action doesn't need to wait on any other task, it's easy to re-tag if something failed and we're validating
|
||||
# the tag here also
|
||||
name: Push Docker image ghcr.io/astral-sh/ruff
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
permissions:
|
||||
# For the docker push
|
||||
packages: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ghcr.io/astral-sh/ruff
|
||||
|
||||
- name: Check tag consistency
|
||||
# Unlike validate-tag we don't check if the commit is on the main branch, but it seems good enough since we can
|
||||
# change docker tags
|
||||
if: ${{ inputs.tag }}
|
||||
run: |
|
||||
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||
if [ "${{ inputs.tag }}" != "${version}" ]; then
|
||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||
echo "${{ inputs.tag }}" >&2
|
||||
echo "${version}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo "Releasing ${version}"
|
||||
fi
|
||||
|
||||
- name: "Build and push Docker image"
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
# Reuse the builder
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
push: ${{ inputs.tag != '' }}
|
||||
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ inputs.tag || 'dry-run' }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
|
||||
# After the release has been published, we update downstream repositories
|
||||
# This is separate because if this fails the release is still fine, we just need to do some manual workflow triggers
|
||||
update-dependents:
|
||||
@@ -587,7 +524,7 @@ jobs:
|
||||
needs: publish-release
|
||||
steps:
|
||||
- name: "Update pre-commit mirror"
|
||||
uses: actions/github-script@v7
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
|
||||
script: |
@@ -4,7 +4,7 @@ exclude: |
(?x)^(
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
crates/ruff/resources/.*|
crates/ruff_cli/resources/.*|
crates/ruff_python_formatter/resources/.*|
crates/ruff_python_formatter/tests/snapshots/.*|
crates/ruff_python_resolver/resources/.*|
@@ -13,12 +13,12 @@ exclude: |

repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.15
rev: v0.12.1
hooks:
- id: validate-pyproject

- repo: https://github.com/executablebooks/mdformat
rev: 0.7.17
rev: 0.7.16
hooks:
- id: mdformat
additional_dependencies:
@@ -26,22 +26,16 @@ repos:
- mdformat-admon
exclude: |
(?x)^(
docs/formatter/black\.md
| docs/\w+\.md
docs/formatter/black.md
)$

- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.37.0
rev: v0.33.0
hooks:
- id: markdownlint-fix
exclude: |
(?x)^(
docs/formatter/black\.md
| docs/\w+\.md
)$

- repo: https://github.com/crate-ci/typos
rev: v1.16.22
rev: v1.14.12
hooks:
- id: typos

@@ -55,7 +49,7 @@ repos:
pass_filenames: false # This makes it a lot faster

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.4
rev: v0.1.3
hooks:
- id: ruff-format
- id: ruff
@@ -70,7 +64,7 @@ repos:

# Prettier
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.0.3
rev: v3.0.0
hooks:
- id: prettier
types: [yaml]
@@ -1,93 +1,5 @@
# Breaking Changes

## 0.3.0

### Ruff 2024.2 style

The formatter now formats code according to the Ruff 2024.2 style guide. Read the [changelog](./CHANGELOG.md#030) for a detailed list of stabilized style changes.

### `isort`: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))

Previously, Ruff used one or two blank lines (or the number configured by `isort.lines-after-imports`) after imports in typing stub files (`.pyi` files).
The [typing style guide for stubs](https://typing.readthedocs.io/en/latest/source/stubs.html#style-guide) recommends using at most 1 blank line for grouping.
As of this release, `isort` now always uses one blank line after imports in stub files, the same as the formatter.
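For illustration, a minimal sketch of the effect in a stub file; the module contents are made up for the example:

```python
# example.pyi — hypothetical stub, illustrative only.

# Before 0.3.0, isort could leave two blank lines after the import block:
import os
import sys


def exists(path: str) -> bool: ...

# As of 0.3.0, isort (like the formatter) keeps exactly one blank line after imports:
import os
import sys

def exists(path: str) -> bool: ...
```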
### `build` is no longer excluded by default ([#10093](https://github.com/astral-sh/ruff/pull/10093))

Ruff maintains a list of directories and files that are excluded by default. This list now consists of the following patterns:

- `.bzr`
- `.direnv`
- `.eggs`
- `.git`
- `.git-rewrite`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `dist`
- `node_modules`
- `site-packages`
- `venv`

Previously, the `build` directory was included in this list. However, `build` is a fairly common directory
name, and excluding it by default caused confusion. Ruff no longer excludes `build` unless it is excluded by a
`.gitignore` file or listed in `extend-exclude`.

### `--format` is no longer a valid `rule` or `linter` command option

Previously, `ruff rule` and `ruff linter` accepted the `--format <FORMAT>` option as an alias for `--output-format`. Ruff no longer
supports this alias. Please use `ruff rule --output-format <FORMAT>` and `ruff linter --output-format <FORMAT>` instead.

## 0.1.9

### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))

Ruff maintains a list of default exclusions, which now consists of the following patterns:

- `.bzr`
- `.direnv`
- `.eggs`
- `.git-rewrite`
- `.git`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `build`
- `dist`
- `node_modules`
- `site-packages`
- `venv`

Previously, the `site-packages` directory was not excluded by default. While `site-packages` tends
to be excluded anyway by virtue of the `.venv` exclusion, this may not be the case when using Ruff
from VS Code outside a virtual environment.

## 0.1.0

### The deprecated `format` setting has been removed
CHANGELOG.md (947 changes)
@@ -1,952 +1,5 @@
# Changelog

## 0.3.0

This release introduces the new Ruff formatter 2024.2 style and adds a new lint rule to
detect invalid formatter suppression comments.

### Preview features

- \[`flake8-bandit`\] Remove suspicious-lxml-import (`S410`) ([#10154](https://github.com/astral-sh/ruff/pull/10154))
- \[`pycodestyle`\] Allow `os.environ` modifications between imports (`E402`) ([#10066](https://github.com/astral-sh/ruff/pull/10066)) (see the sketch after this list)
- \[`pycodestyle`\] Don't warn about a single whitespace character before a comma in a tuple (`E203`) ([#10094](https://github.com/astral-sh/ruff/pull/10094))
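A minimal sketch of the preview `E402` change above; the environment variable is a made-up placeholder:

```python
import os

# Modifying os.environ between imports is a common, legitimate pattern; with this
# preview change it no longer triggers E402 on the import that follows it.
os.environ["EXAMPLE_FLAG"] = "1"

import sys

print(sys.version)
```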
### Rule changes

- \[`eradicate`\] Detect commented out `case` statements (`ERA001`) ([#10055](https://github.com/astral-sh/ruff/pull/10055))
- \[`eradicate`\] Detect single-line code for `try:`, `except:`, etc. (`ERA001`) ([#10057](https://github.com/astral-sh/ruff/pull/10057))
- \[`flake8-boolean-trap`\] Allow boolean positionals in `__post_init__` ([#10027](https://github.com/astral-sh/ruff/pull/10027))
- \[`flake8-copyright`\] Allow © in copyright notices ([#10065](https://github.com/astral-sh/ruff/pull/10065))
- \[`isort`\] Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))
- \[`pylint`\] New Rule `dict-iter-missing-items` (`PLE1141`) ([#9845](https://github.com/astral-sh/ruff/pull/9845))
- \[`pylint`\] Ignore `sys.version` and `sys.platform` (`PLR1714`) ([#10054](https://github.com/astral-sh/ruff/pull/10054))
- \[`pyupgrade`\] Detect literals with unary operators (`UP018`) ([#10060](https://github.com/astral-sh/ruff/pull/10060))
- \[`ruff`\] Expand rule for `list(iterable).pop(0)` idiom (`RUF015`) ([#10148](https://github.com/astral-sh/ruff/pull/10148)) (see the sketch after this list)
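A minimal sketch of the `RUF015` idiom referenced above and the replacement the rule suggests; the variable names are illustrative:

```python
items = ["a", "b", "c"]

# Flagged by RUF015: copies the whole iterable just to take its first element.
first = list(items).pop(0)

# The suggested fix: take the first element without building an intermediate list.
first = next(iter(items))
```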
### Formatter

This release introduces the Ruff 2024.2 style, stabilizing the following changes:

- Prefer splitting the assignment's value over the target or type annotation ([#8943](https://github.com/astral-sh/ruff/pull/8943))
- Remove blank lines before class docstrings ([#9154](https://github.com/astral-sh/ruff/pull/9154))
- Wrap multiple context managers in `with` parentheses when targeting Python 3.9 or newer ([#9222](https://github.com/astral-sh/ruff/pull/9222)) (see the sketch after this list)
- Add a blank line after nested classes with a dummy body (`...`) in typing stub files ([#9155](https://github.com/astral-sh/ruff/pull/9155))
- Reduce vertical spacing for classes and functions with a dummy (`...`) body ([#7440](https://github.com/astral-sh/ruff/issues/7440), [#9240](https://github.com/astral-sh/ruff/pull/9240))
- Add a blank line after the module docstring ([#8283](https://github.com/astral-sh/ruff/pull/8283))
- Parenthesize long type hints in assignments ([#9210](https://github.com/astral-sh/ruff/pull/9210))
- Preserve indent for single multiline-string call-expressions ([#9673](https://github.com/astral-sh/ruff/pull/9637))
- Normalize hex escape and unicode escape sequences ([#9280](https://github.com/astral-sh/ruff/pull/9280))
- Format module docstrings ([#9725](https://github.com/astral-sh/ruff/pull/9725))
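Two of the stabilized style changes above, sketched for illustration; the suppressed exceptions are arbitrary placeholders:

```python
"""Module docstring.

Under the 2024.2 style, the formatter enforces a blank line between this
module docstring and the first statement below.
"""

import contextlib

# When targeting Python 3.9 or newer, multiple context managers are wrapped in a
# single parenthesized `with` statement rather than split with backslashes.
with (
    contextlib.suppress(ValueError),
    contextlib.suppress(KeyError),
):
    pass
```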
### CLI

- Explicitly disallow `extend` as part of a `--config` flag ([#10135](https://github.com/astral-sh/ruff/pull/10135))
- Remove `build` from the default exclusion list ([#10093](https://github.com/astral-sh/ruff/pull/10093))
- Deprecate `ruff <path>`, `ruff --explain`, `ruff --clean`, and `ruff --generate-shell-completion` in favor of `ruff check <path>`, `ruff rule`, `ruff clean`, and `ruff generate-shell-completion` ([#10169](https://github.com/astral-sh/ruff/pull/10169))
- Remove the deprecated CLI option `--format` from `ruff rule` and `ruff linter` ([#10170](https://github.com/astral-sh/ruff/pull/10170))

### Bug fixes

- \[`flake8-bugbear`\] Avoid adding default initializers to stubs (`B006`) ([#10152](https://github.com/astral-sh/ruff/pull/10152))
- \[`flake8-type-checking`\] Respect runtime-required decorators for function signatures ([#10091](https://github.com/astral-sh/ruff/pull/10091))
- \[`pycodestyle`\] Mark fixes overlapping with a multiline string as unsafe (`W293`) ([#10049](https://github.com/astral-sh/ruff/pull/10049))
- \[`pydocstyle`\] Trim whitespace when removing blank lines after section (`D413`) ([#10162](https://github.com/astral-sh/ruff/pull/10162))
- \[`pylint`\] Delete entire statement, including semicolons (`PLR0203`) ([#10074](https://github.com/astral-sh/ruff/pull/10074))
- \[`ruff`\] Avoid f-string false positives in `gettext` calls (`RUF027`) ([#10118](https://github.com/astral-sh/ruff/pull/10118)) (see the sketch after this list)
- Fix `ruff` crashing on PowerPC systems because of too small page size ([#10080](https://github.com/astral-sh/ruff/pull/10080))
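A sketch of the `gettext` case fixed above; the message string is a made-up example:

```python
import gettext

_ = gettext.gettext

# The "{name}" placeholder is filled in later by the translation machinery, so this
# string is intentionally not an f-string; RUF027 no longer flags it here.
greeting = _("Hello, {name}!")
```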
### Performance

- Add cold attribute to less likely printer queue branches in the formatter ([#10121](https://github.com/astral-sh/ruff/pull/10121))
- Skip unnecessary string normalization in the formatter ([#10116](https://github.com/astral-sh/ruff/pull/10116))

### Documentation

- Remove "Beta" Label from formatter documentation ([#10144](https://github.com/astral-sh/ruff/pull/10144))
- `line-length` option: fix link to `pycodestyle.max-line-length` ([#10136](https://github.com/astral-sh/ruff/pull/10136))
## 0.2.2
|
||||
|
||||
Highlights include:
|
||||
|
||||
- Initial support formatting f-strings (in `--preview`).
|
||||
- Support for overriding arbitrary configuration options via the CLI through an expanded `--config`
|
||||
argument (e.g., `--config "lint.isort.combine-as-imports=false"`).
|
||||
- Significant performance improvements in Ruff's lexer, parser, and lint rules.
|
||||
|
||||
### Preview features
|
||||
|
||||
- Implement minimal f-string formatting ([#9642](https://github.com/astral-sh/ruff/pull/9642))
|
||||
- \[`pycodestyle`\] Add blank line(s) rules (`E301`, `E302`, `E303`, `E304`, `E305`, `E306`) ([#9266](https://github.com/astral-sh/ruff/pull/9266))
|
||||
- \[`refurb`\] Implement `readlines_in_for` (`FURB129`) ([#9880](https://github.com/astral-sh/ruff/pull/9880))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`ruff`\] Ensure closing parentheses for multiline sequences are always on their own line (`RUF022`, `RUF023`) ([#9793](https://github.com/astral-sh/ruff/pull/9793))
|
||||
- \[`numpy`\] Add missing deprecation violations (`NPY002`) ([#9862](https://github.com/astral-sh/ruff/pull/9862))
|
||||
- \[`flake8-bandit`\] Detect `mark_safe` usages in decorators ([#9887](https://github.com/astral-sh/ruff/pull/9887))
|
||||
- \[`ruff`\] Expand `asyncio-dangling-task` (`RUF006`) to include `new_event_loop` ([#9976](https://github.com/astral-sh/ruff/pull/9976))
|
||||
- \[`flake8-pyi`\] Ignore 'unused' private type dicts in class scopes ([#9952](https://github.com/astral-sh/ruff/pull/9952))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Docstring formatting: Preserve tab indentation when using `indent-style=tabs` ([#9915](https://github.com/astral-sh/ruff/pull/9915))
|
||||
- Disable top-level docstring formatting for notebooks ([#9957](https://github.com/astral-sh/ruff/pull/9957))
|
||||
- Stabilize quote-style's `preserve` mode ([#9922](https://github.com/astral-sh/ruff/pull/9922))
|
||||
|
||||
### CLI
|
||||
|
||||
- Allow arbitrary configuration options to be overridden via the CLI ([#9599](https://github.com/astral-sh/ruff/pull/9599))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Make `show-settings` filters directory-agnostic ([#9866](https://github.com/astral-sh/ruff/pull/9866))
|
||||
- Respect duplicates when rewriting type aliases ([#9905](https://github.com/astral-sh/ruff/pull/9905))
|
||||
- Respect tuple assignments in typing analyzer ([#9969](https://github.com/astral-sh/ruff/pull/9969))
|
||||
- Use atomic write when persisting cache ([#9981](https://github.com/astral-sh/ruff/pull/9981))
|
||||
- Use non-parenthesized range for `DebugText` ([#9953](https://github.com/astral-sh/ruff/pull/9953))
|
||||
- \[`flake8-simplify`\] Avoid false positive with `async` for loops (`SIM113`) ([#9996](https://github.com/astral-sh/ruff/pull/9996))
|
||||
- \[`flake8-trio`\] Respect `async with` in `timeout-without-await` ([#9859](https://github.com/astral-sh/ruff/pull/9859))
|
||||
- \[`perflint`\] Catch a wider range of mutations in `PERF101` ([#9955](https://github.com/astral-sh/ruff/pull/9955))
|
||||
- \[`pycodestyle`\] Fix `E30X` panics on blank lines with trailing white spaces ([#9907](https://github.com/astral-sh/ruff/pull/9907))
|
||||
- \[`pydocstyle`\] Allow using `parameters` as a subsection header (`D405`) ([#9894](https://github.com/astral-sh/ruff/pull/9894))
|
||||
- \[`pydocstyle`\] Fix blank-line docstring rules for module-level docstrings ([#9878](https://github.com/astral-sh/ruff/pull/9878))
|
||||
- \[`pylint`\] Accept 0.0 and 1.0 as common magic values (`PLR2004`) ([#9964](https://github.com/astral-sh/ruff/pull/9964))
|
||||
- \[`pylint`\] Avoid suggesting set rewrites for non-hashable types ([#9956](https://github.com/astral-sh/ruff/pull/9956))
|
||||
- \[`ruff`\] Avoid false negatives with string literals inside of method calls (`RUF027`) ([#9865](https://github.com/astral-sh/ruff/pull/9865))
|
||||
- \[`ruff`\] Fix panic on with f-string detection (`RUF027`) ([#9990](https://github.com/astral-sh/ruff/pull/9990))
|
||||
- \[`ruff`\] Ignore builtins when detecting missing f-strings ([#9849](https://github.com/astral-sh/ruff/pull/9849))
|
||||
|
||||
### Performance
|
||||
|
||||
- Use `memchr` for string lexing ([#9888](https://github.com/astral-sh/ruff/pull/9888))
|
||||
- Use `memchr` for tab-indentation detection ([#9853](https://github.com/astral-sh/ruff/pull/9853))
|
||||
- Reduce `Result<Tok, LexicalError>` size by using `Box<str>` instead of `String` ([#9885](https://github.com/astral-sh/ruff/pull/9885))
|
||||
- Reduce size of `Expr` from 80 to 64 bytes ([#9900](https://github.com/astral-sh/ruff/pull/9900))
|
||||
- Improve trailing comma rule performance ([#9867](https://github.com/astral-sh/ruff/pull/9867))
|
||||
- Remove unnecessary string cloning from the parser ([#9884](https://github.com/astral-sh/ruff/pull/9884))
|
||||
|
||||
## 0.2.1
|
||||
|
||||
This release includes support for range formatting (i.e., the ability to format specific lines
|
||||
within a source file).
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`refurb`\] Implement `missing-f-string-syntax` (`RUF027`) ([#9728](https://github.com/astral-sh/ruff/pull/9728))
|
||||
- Format module-level docstrings ([#9725](https://github.com/astral-sh/ruff/pull/9725))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Add `--range` option to `ruff format` ([#9733](https://github.com/astral-sh/ruff/pull/9733))
|
||||
- Don't trim last empty line in docstrings ([#9813](https://github.com/astral-sh/ruff/pull/9813))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Skip empty lines when determining base indentation ([#9795](https://github.com/astral-sh/ruff/pull/9795))
|
||||
- Drop `__get__` and `__set__` from `unnecessary-dunder-call` ([#9791](https://github.com/astral-sh/ruff/pull/9791))
|
||||
- Respect generic `Protocol` in ellipsis removal ([#9841](https://github.com/astral-sh/ruff/pull/9841))
|
||||
- Revert "Use publicly available Apple Silicon runners (#9726)" ([#9834](https://github.com/astral-sh/ruff/pull/9834))
|
||||
|
||||
### Performance
|
||||
|
||||
- Skip LibCST parsing for standard dedent adjustments ([#9769](https://github.com/astral-sh/ruff/pull/9769))
|
||||
- Remove CST-based fixer for `C408` ([#9822](https://github.com/astral-sh/ruff/pull/9822))
|
||||
- Add our own ignored-names abstractions ([#9802](https://github.com/astral-sh/ruff/pull/9802))
|
||||
- Remove CST-based fixers for `C400`, `C401`, `C410`, and `C418` ([#9819](https://github.com/astral-sh/ruff/pull/9819))
|
||||
- Use `AhoCorasick` to speed up quote match ([#9773](https://github.com/astral-sh/ruff/pull/9773))
|
||||
- Remove CST-based fixers for `C405` and `C409` ([#9821](https://github.com/astral-sh/ruff/pull/9821))
|
||||
- Add fast-path for comment detection ([#9808](https://github.com/astral-sh/ruff/pull/9808))
|
||||
- Invert order of checks in `zero-sleep-call` ([#9766](https://github.com/astral-sh/ruff/pull/9766))
|
||||
- Short-circuit typing matches based on imports ([#9800](https://github.com/astral-sh/ruff/pull/9800))
|
||||
- Run dunder method rule on methods directly ([#9815](https://github.com/astral-sh/ruff/pull/9815))
|
||||
- Track top-level module imports in the semantic model ([#9775](https://github.com/astral-sh/ruff/pull/9775))
|
||||
- Slight speed-up for lowercase and uppercase identifier checks ([#9798](https://github.com/astral-sh/ruff/pull/9798))
|
||||
- Remove LibCST-based fixer for `C403` ([#9818](https://github.com/astral-sh/ruff/pull/9818))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Update `max-pos-args` example to `max-positional-args` ([#9797](https://github.com/astral-sh/ruff/pull/9797))
|
||||
- Fixed example code in `weak_cryptographic_key.rs` ([#9774](https://github.com/astral-sh/ruff/pull/9774))
|
||||
- Fix references to deprecated `ANN` rules in changelog ([#9771](https://github.com/astral-sh/ruff/pull/9771))
|
||||
- Fix default for `max-positional-args` ([#9838](https://github.com/astral-sh/ruff/pull/9838))
|
||||
|
||||
## 0.2.0
|
||||
|
||||
### Breaking changes
|
||||
|
||||
- The `NURSERY` selector cannot be used anymore
|
||||
- Legacy selection of nursery rules by exact codes is no longer allowed without preview enabled
|
||||
|
||||
See also, the "Remapped rules" section which may result in disabled rules.
|
||||
|
||||
### Deprecations
|
||||
|
||||
The following rules are now deprecated:
|
||||
|
||||
- [`missing-type-self`](https://docs.astral.sh/ruff/rules/missing-type-self/) (`ANN101`)
|
||||
- [`missing-type-cls`](https://docs.astral.sh/ruff/rules/missing-type-cls/) (`ANN102`)
|
||||
|
||||
The following command line options are now deprecated:
|
||||
|
||||
- `--show-source`; use `--output-format full` instead
|
||||
- `--no-show-source`; use `--output-format concise` instead
|
||||
- `--output-format text`; use `full` or `concise` instead
|
||||
|
||||
The following settings have moved and the previous name is deprecated:
|
||||
|
||||
- `ruff.allowed-confusables` → [`ruff.lint.allowed-confusables`](https://docs.astral.sh//ruff/settings/#lint_allowed-confusables)
|
||||
- `ruff.dummy-variable-rgx` → [`ruff.lint.dummy-variable-rgx`](https://docs.astral.sh//ruff/settings/#lint_dummy-variable-rgx)
|
||||
- `ruff.explicit-preview-rules` → [`ruff.lint.explicit-preview-rules`](https://docs.astral.sh//ruff/settings/#lint_explicit-preview-rules)
|
||||
- `ruff.extend-fixable` → [`ruff.lint.extend-fixable`](https://docs.astral.sh//ruff/settings/#lint_extend-fixable)
|
||||
- `ruff.extend-ignore` → [`ruff.lint.extend-ignore`](https://docs.astral.sh//ruff/settings/#lint_extend-ignore)
|
||||
- `ruff.extend-per-file-ignores` → [`ruff.lint.extend-per-file-ignores`](https://docs.astral.sh//ruff/settings/#lint_extend-per-file-ignores)
|
||||
- `ruff.extend-safe-fixes` → [`ruff.lint.extend-safe-fixes`](https://docs.astral.sh//ruff/settings/#lint_extend-safe-fixes)
|
||||
- `ruff.extend-select` → [`ruff.lint.extend-select`](https://docs.astral.sh//ruff/settings/#lint_extend-select)
|
||||
- `ruff.extend-unfixable` → [`ruff.lint.extend-unfixable`](https://docs.astral.sh//ruff/settings/#lint_extend-unfixable)
|
||||
- `ruff.extend-unsafe-fixes` → [`ruff.lint.extend-unsafe-fixes`](https://docs.astral.sh//ruff/settings/#lint_extend-unsafe-fixes)
|
||||
- `ruff.external` → [`ruff.lint.external`](https://docs.astral.sh//ruff/settings/#lint_external)
|
||||
- `ruff.fixable` → [`ruff.lint.fixable`](https://docs.astral.sh//ruff/settings/#lint_fixable)
|
||||
- `ruff.flake8-annotations` → [`ruff.lint.flake8-annotations`](https://docs.astral.sh//ruff/settings/#lint_flake8-annotations)
|
||||
- `ruff.flake8-bandit` → [`ruff.lint.flake8-bandit`](https://docs.astral.sh//ruff/settings/#lint_flake8-bandit)
|
||||
- `ruff.flake8-bugbear` → [`ruff.lint.flake8-bugbear`](https://docs.astral.sh//ruff/settings/#lint_flake8-bugbear)
|
||||
- `ruff.flake8-builtins` → [`ruff.lint.flake8-builtins`](https://docs.astral.sh//ruff/settings/#lint_flake8-builtins)
|
||||
- `ruff.flake8-comprehensions` → [`ruff.lint.flake8-comprehensions`](https://docs.astral.sh//ruff/settings/#lint_flake8-comprehensions)
|
||||
- `ruff.flake8-copyright` → [`ruff.lint.flake8-copyright`](https://docs.astral.sh//ruff/settings/#lint_flake8-copyright)
|
||||
- `ruff.flake8-errmsg` → [`ruff.lint.flake8-errmsg`](https://docs.astral.sh//ruff/settings/#lint_flake8-errmsg)
|
||||
- `ruff.flake8-gettext` → [`ruff.lint.flake8-gettext`](https://docs.astral.sh//ruff/settings/#lint_flake8-gettext)
|
||||
- `ruff.flake8-implicit-str-concat` → [`ruff.lint.flake8-implicit-str-concat`](https://docs.astral.sh//ruff/settings/#lint_flake8-implicit-str-concat)
|
||||
- `ruff.flake8-import-conventions` → [`ruff.lint.flake8-import-conventions`](https://docs.astral.sh//ruff/settings/#lint_flake8-import-conventions)
|
||||
- `ruff.flake8-pytest-style` → [`ruff.lint.flake8-pytest-style`](https://docs.astral.sh//ruff/settings/#lint_flake8-pytest-style)
|
||||
- `ruff.flake8-quotes` → [`ruff.lint.flake8-quotes`](https://docs.astral.sh//ruff/settings/#lint_flake8-quotes)
|
||||
- `ruff.flake8-self` → [`ruff.lint.flake8-self`](https://docs.astral.sh//ruff/settings/#lint_flake8-self)
|
||||
- `ruff.flake8-tidy-imports` → [`ruff.lint.flake8-tidy-imports`](https://docs.astral.sh//ruff/settings/#lint_flake8-tidy-imports)
|
||||
- `ruff.flake8-type-checking` → [`ruff.lint.flake8-type-checking`](https://docs.astral.sh//ruff/settings/#lint_flake8-type-checking)
|
||||
- `ruff.flake8-unused-arguments` → [`ruff.lint.flake8-unused-arguments`](https://docs.astral.sh//ruff/settings/#lint_flake8-unused-arguments)
|
||||
- `ruff.ignore` → [`ruff.lint.ignore`](https://docs.astral.sh//ruff/settings/#lint_ignore)
|
||||
- `ruff.ignore-init-module-imports` → [`ruff.lint.ignore-init-module-imports`](https://docs.astral.sh//ruff/settings/#lint_ignore-init-module-imports)
|
||||
- `ruff.isort` → [`ruff.lint.isort`](https://docs.astral.sh//ruff/settings/#lint_isort)
|
||||
- `ruff.logger-objects` → [`ruff.lint.logger-objects`](https://docs.astral.sh//ruff/settings/#lint_logger-objects)
|
||||
- `ruff.mccabe` → [`ruff.lint.mccabe`](https://docs.astral.sh//ruff/settings/#lint_mccabe)
|
||||
- `ruff.pep8-naming` → [`ruff.lint.pep8-naming`](https://docs.astral.sh//ruff/settings/#lint_pep8-naming)
|
||||
- `ruff.per-file-ignores` → [`ruff.lint.per-file-ignores`](https://docs.astral.sh//ruff/settings/#lint_per-file-ignores)
|
||||
- `ruff.pycodestyle` → [`ruff.lint.pycodestyle`](https://docs.astral.sh//ruff/settings/#lint_pycodestyle)
|
||||
- `ruff.pydocstyle` → [`ruff.lint.pydocstyle`](https://docs.astral.sh//ruff/settings/#lint_pydocstyle)
|
||||
- `ruff.pyflakes` → [`ruff.lint.pyflakes`](https://docs.astral.sh//ruff/settings/#lint_pyflakes)
|
||||
- `ruff.pylint` → [`ruff.lint.pylint`](https://docs.astral.sh//ruff/settings/#lint_pylint)
|
||||
- `ruff.pyupgrade` → [`ruff.lint.pyupgrade`](https://docs.astral.sh//ruff/settings/#lint_pyupgrade)
|
||||
- `ruff.select` → [`ruff.lint.select`](https://docs.astral.sh//ruff/settings/#lint_select)
|
||||
- `ruff.task-tags` → [`ruff.lint.task-tags`](https://docs.astral.sh//ruff/settings/#lint_task-tags)
|
||||
- `ruff.typing-modules` → [`ruff.lint.typing-modules`](https://docs.astral.sh//ruff/settings/#lint_typing-modules)
|
||||
- `ruff.unfixable` → [`ruff.lint.unfixable`](https://docs.astral.sh//ruff/settings/#lint_unfixable)
|
||||
|
||||
### Remapped rules
|
||||
|
||||
The following rules have been remapped to new codes:
|
||||
|
||||
- [`raise-without-from-inside-except`](https://docs.astral.sh/ruff/rules/raise-without-from-inside-except/): `TRY200` to `B904`
|
||||
- [`suspicious-eval-usage`](https://docs.astral.sh/ruff/rules/suspicious-eval-usage/): `PGH001` to `S307`
|
||||
- [`logging-warn`](https://docs.astral.sh/ruff/rules/logging-warn/): `PGH002` to `G010`
|
||||
- [`static-key-dict-comprehension`](https://docs.astral.sh/ruff/rules/static-key-dict-comprehension): `RUF011` to `B035`
|
||||
- [`runtime-string-union`](https://docs.astral.sh/ruff/rules/runtime-string-union): `TCH006` to `TCH010`
|
||||
|
||||
### Stabilizations
|
||||
|
||||
The following rules have been stabilized and are no longer in preview:
|
||||
|
||||
- [`trio-timeout-without-await`](https://docs.astral.sh/ruff/rules/trio-timeout-without-await) (`TRIO100`)
|
||||
- [`trio-sync-call`](https://docs.astral.sh/ruff/rules/trio-sync-call) (`TRIO105`)
|
||||
- [`trio-async-function-with-timeout`](https://docs.astral.sh/ruff/rules/trio-async-function-with-timeout) (`TRIO109`)
|
||||
- [`trio-unneeded-sleep`](https://docs.astral.sh/ruff/rules/trio-unneeded-sleep) (`TRIO110`)
|
||||
- [`trio-zero-sleep-call`](https://docs.astral.sh/ruff/rules/trio-zero-sleep-call) (`TRIO115`)
|
||||
- [`unnecessary-escaped-quote`](https://docs.astral.sh/ruff/rules/unnecessary-escaped-quote) (`Q004`)
|
||||
- [`enumerate-for-loop`](https://docs.astral.sh/ruff/rules/enumerate-for-loop) (`SIM113`)
|
||||
- [`zip-dict-keys-and-values`](https://docs.astral.sh/ruff/rules/zip-dict-keys-and-values) (`SIM911`)
|
||||
- [`timeout-error-alias`](https://docs.astral.sh/ruff/rules/timeout-error-alias) (`UP041`)
|
||||
- [`flask-debug-true`](https://docs.astral.sh/ruff/rules/flask-debug-true) (`S201`)
|
||||
- [`tarfile-unsafe-members`](https://docs.astral.sh/ruff/rules/tarfile-unsafe-members) (`S202`)
|
||||
- [`ssl-insecure-version`](https://docs.astral.sh/ruff/rules/ssl-insecure-version) (`S502`)
|
||||
- [`ssl-with-bad-defaults`](https://docs.astral.sh/ruff/rules/ssl-with-bad-defaults) (`S503`)
|
||||
- [`ssl-with-no-version`](https://docs.astral.sh/ruff/rules/ssl-with-no-version) (`S504`)
|
||||
- [`weak-cryptographic-key`](https://docs.astral.sh/ruff/rules/weak-cryptographic-key) (`S505`)
|
||||
- [`ssh-no-host-key-verification`](https://docs.astral.sh/ruff/rules/ssh-no-host-key-verification) (`S507`)
|
||||
- [`django-raw-sql`](https://docs.astral.sh/ruff/rules/django-raw-sql) (`S611`)
|
||||
- [`mako-templates`](https://docs.astral.sh/ruff/rules/mako-templates) (`S702`)
|
||||
- [`generator-return-from-iter-method`](https://docs.astral.sh/ruff/rules/generator-return-from-iter-method) (`PYI058`)
|
||||
- [`runtime-string-union`](https://docs.astral.sh/ruff/rules/runtime-string-union) (`TCH006`)
|
||||
- [`numpy2-deprecation`](https://docs.astral.sh/ruff/rules/numpy2-deprecation) (`NPY201`)
|
||||
- [`quadratic-list-summation`](https://docs.astral.sh/ruff/rules/quadratic-list-summation) (`RUF017`)
|
||||
- [`assignment-in-assert`](https://docs.astral.sh/ruff/rules/assignment-in-assert) (`RUF018`)
|
||||
- [`unnecessary-key-check`](https://docs.astral.sh/ruff/rules/unnecessary-key-check) (`RUF019`)
|
||||
- [`never-union`](https://docs.astral.sh/ruff/rules/never-union) (`RUF020`)
|
||||
- [`direct-logger-instantiation`](https://docs.astral.sh/ruff/rules/direct-logger-instantiation) (`LOG001`)
|
||||
- [`invalid-get-logger-argument`](https://docs.astral.sh/ruff/rules/invalid-get-logger-argument) (`LOG002`)
|
||||
- [`exception-without-exc-info`](https://docs.astral.sh/ruff/rules/exception-without-exc-info) (`LOG007`)
|
||||
- [`undocumented-warn`](https://docs.astral.sh/ruff/rules/undocumented-warn) (`LOG009`)
|
||||
|
||||
Fixes for the following rules have been stabilized and are now available without preview:
|
||||
|
||||
- [`triple-single-quotes`](https://docs.astral.sh/ruff/rules/triple-single-quotes) (`D300`)
|
||||
- [`non-pep604-annotation`](https://docs.astral.sh/ruff/rules/non-pep604-annotation) (`UP007`)
|
||||
- [`dict-get-with-none-default`](https://docs.astral.sh/ruff/rules/dict-get-with-none-default) (`SIM910`)
|
||||
- [`in-dict-keys`](https://docs.astral.sh/ruff/rules/in-dict-keys) (`SIM118`)
|
||||
- [`collapsible-else-if`](https://docs.astral.sh/ruff/rules/collapsible-else-if) (`PLR5501`)
|
||||
- [`if-with-same-arms`](https://docs.astral.sh/ruff/rules/if-with-same-arms) (`SIM114`)
|
||||
- [`useless-else-on-loop`](https://docs.astral.sh/ruff/rules/useless-else-on-loop) (`PLW0120`)
|
||||
- [`unnecessary-literal-union`](https://docs.astral.sh/ruff/rules/unnecessary-literal-union) (`PYI030`)
|
||||
- [`unnecessary-spread`](https://docs.astral.sh/ruff/rules/unnecessary-spread) (`PIE800`)
|
||||
- [`error-instead-of-exception`](https://docs.astral.sh/ruff/rules/error-instead-of-exception) (`TRY400`)
|
||||
- [`redefined-while-unused`](https://docs.astral.sh/ruff/rules/redefined-while-unused) (`F811`)
|
||||
- [`duplicate-value`](https://docs.astral.sh/ruff/rules/duplicate-value) (`B033`)
|
||||
- [`multiple-imports-on-one-line`](https://docs.astral.sh/ruff/rules/multiple-imports-on-one-line) (`E401`)
|
||||
- [`non-pep585-annotation`](https://docs.astral.sh/ruff/rules/non-pep585-annotation) (`UP006`)
|
||||
|
||||
Fixes for the following rules have been promoted from unsafe to safe:
|
||||
|
||||
- [`unaliased-collections-abc-set-import`](https://docs.astral.sh/ruff/rules/unaliased-collections-abc-set-import) (`PYI025`)
|
||||
|
||||
The following behaviors have been stabilized:
|
||||
|
||||
- [`module-import-not-at-top-of-file`](https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file/) (`E402`) allows `sys.path` modifications between imports
|
||||
- [`reimplemented-container-builtin`](https://docs.astral.sh/ruff/rules/reimplemented-container-builtin/) (`PIE807`) includes lambdas that can be replaced with `dict`
|
||||
- [`unnecessary-placeholder`](https://docs.astral.sh/ruff/rules/unnecessary-placeholder/) (`PIE790`) applies to unnecessary ellipses (`...`)
|
||||
- [`if-else-block-instead-of-dict-get`](https://docs.astral.sh/ruff/rules/if-else-block-instead-of-dict-get/) (`SIM401`) applies to `if-else` expressions
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`refurb`\] Implement `metaclass_abcmeta` (`FURB180`) ([#9658](https://github.com/astral-sh/ruff/pull/9658))
|
||||
- Implement `blank_line_after_nested_stub_class` preview style ([#9155](https://github.com/astral-sh/ruff/pull/9155))
|
||||
- The preview rule [`and-or-ternary`](https://docs.astral.sh/ruff/rules/and-or-ternary) (`PLR1706`) was removed
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-async`\] Take `pathlib.Path` into account when analyzing async functions ([#9703](https://github.com/astral-sh/ruff/pull/9703))
|
||||
- \[`flake8-return`\] - fix indentation syntax error (`RET505`) ([#9705](https://github.com/astral-sh/ruff/pull/9705))
|
||||
- Detect multi-statement lines in else removal ([#9748](https://github.com/astral-sh/ruff/pull/9748))
|
||||
- `RUF022`, `RUF023`: never add two trailing commas to the end of a sequence ([#9698](https://github.com/astral-sh/ruff/pull/9698))
|
||||
- `RUF023`: Don't sort `__match_args__`, only `__slots__` ([#9724](https://github.com/astral-sh/ruff/pull/9724))
|
||||
- \[`flake8-simplify`\] - Fix syntax error in autofix (`SIM114`) ([#9704](https://github.com/astral-sh/ruff/pull/9704))
|
||||
- \[`pylint`\] Show verbatim constant in `magic-value-comparison` (`PLR2004`) ([#9694](https://github.com/astral-sh/ruff/pull/9694))
|
||||
- Removing trailing whitespace inside multiline strings is unsafe ([#9744](https://github.com/astral-sh/ruff/pull/9744))
|
||||
- Support `IfExp` with dual string arms in `invalid-envvar-default` ([#9734](https://github.com/astral-sh/ruff/pull/9734))
|
||||
- \[`pylint`\] Add `__mro_entries__` to known dunder methods (`PLW3201`) ([#9706](https://github.com/astral-sh/ruff/pull/9706))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Removed rules are now retained in the documentation ([#9691](https://github.com/astral-sh/ruff/pull/9691))
|
||||
- Deprecated rules are now indicated in the documentation ([#9689](https://github.com/astral-sh/ruff/pull/9689))
|
||||
|
||||
## 0.1.15
|
||||
|
||||
### Preview features
|
||||
|
||||
- Error when `NURSERY` selector is used with `--preview` ([#9682](https://github.com/astral-sh/ruff/pull/9682))
|
||||
- Preserve indentation around multiline strings in formatter ([#9637](https://github.com/astral-sh/ruff/pull/9637))
|
||||
- \[`flake8-return`\] Add fixes for all rules (`RET505`, `RET506`, `RET507`, `RET508`) ([#9595](https://github.com/astral-sh/ruff/pull/9595))
|
||||
- \[`flake8-simplify`\] Add fix for `if-with-same-arms` (`SIM114`) ([#9591](https://github.com/astral-sh/ruff/pull/9591))
|
||||
- \[`pycodestyle`\] Add fix for `multiple-imports-on-one-line` (`E401`) ([#9518](https://github.com/astral-sh/ruff/pull/9518))
|
||||
- \[`pylint`\] Add fix for `collapsible-else-if` (`PLR5501`) ([#9594](https://github.com/astral-sh/ruff/pull/9594))
|
||||
- \[`pylint`\] Add fix for `useless-else-on-loop` (`PLW0120`) ([#9590](https://github.com/astral-sh/ruff/pull/9590))
|
||||
- \[`pylint`\] Implement `assigning-non-slot` (`E0237`) ([#9623](https://github.com/astral-sh/ruff/pull/9623))
|
||||
- \[`pylint`\] Implement `potential-index-error` (`PLE0643`) ([#9545](https://github.com/astral-sh/ruff/pull/9545))
|
||||
- \[`pylint`\] Implement `too-many-nested-blocks` (`PLR1702`) ([#9172](https://github.com/astral-sh/ruff/pull/9172))
|
||||
- \[`ruff`\] Add rule to sort `__slots__` and `__match_args__` ([#9564](https://github.com/astral-sh/ruff/pull/9564))
|
||||
- \[`ruff`\] Detect unnecessary `dict` comprehensions for iterables (`RUF025`) ([#9613](https://github.com/astral-sh/ruff/pull/9613))
|
||||
- \[`ruff`\] Guard against use of `default_factory` as a keyword argument (`RUF026`) ([#9651](https://github.com/astral-sh/ruff/pull/9651))
|
||||
- \[`ruff`\] Implement `mutable-fromkeys-value` (`RUF024`) ([#9597](https://github.com/astral-sh/ruff/pull/9597))
|
||||
|
||||
### CLI
|
||||
|
||||
- Enable auto-wrapping of `--help` output ([#9633](https://github.com/astral-sh/ruff/pull/9633))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid rendering display-only rules as fixable ([#9649](https://github.com/astral-sh/ruff/pull/9649))
|
||||
- Detect automagic-like assignments in notebooks ([#9653](https://github.com/astral-sh/ruff/pull/9653))
|
||||
- Generate custom JSON schema for dynamic setting ([#9632](https://github.com/astral-sh/ruff/pull/9632))
|
||||
- \[`flake8-no-pep420`\] Include global `--config` when determining namespace packages ([#9603](https://github.com/astral-sh/ruff/pull/9603))
|
||||
- \[`flake8-pie`\] Omit bound tuples passed to `.startswith` or `.endswith` ([#9661](https://github.com/astral-sh/ruff/pull/9661))
|
||||
- \[`flake8-return`\] Avoid panic when fixing inlined else blocks ([#9657](https://github.com/astral-sh/ruff/pull/9657))
|
||||
- \[`flake8-return`\] Consider exception suppression in unnecessary assignment ([#9673](https://github.com/astral-sh/ruff/pull/9673))
|
||||
- \[`flake8-return`\] Take `NoReturn` annotation into account when analyzing implicit returns ([#9636](https://github.com/astral-sh/ruff/pull/9636))
|
||||
- \[`flake8-simplify`\] Support inverted returns in `needless-bool` (`SIM103`) ([#9619](https://github.com/astral-sh/ruff/pull/9619))
|
||||
- \[`flake8-type-checking`\] Add Pydantic's `BaseConfig` to default-copy list ([#9650](https://github.com/astral-sh/ruff/pull/9650))
|
||||
- \[`flake8-type-checking`\] Avoid marking `InitVar` as a typing-only annotation ([#9688](https://github.com/astral-sh/ruff/pull/9688))
|
||||
- \[`pycodestyle`\] Allow `dtype` comparisons in `type-comparison` ([#9676](https://github.com/astral-sh/ruff/pull/9676))
|
||||
- \[`pydocstyle`\] Re-implement `last-line-after-section` (`D413`) ([#9654](https://github.com/astral-sh/ruff/pull/9654))
|
||||
|
||||
### Documentation
|
||||
|
||||
- \[`flake8-pytest-style`\] Add fix safety documentation for `duplicate-parameterize-test-cases` ([#9678](https://github.com/astral-sh/ruff/pull/9678))
|
||||
- \[`pylint`\] Document `literal-membership` fix safety conditions ([#9677](https://github.com/astral-sh/ruff/pull/9677))
|
||||
- \[`isort`\] Fix reference to `isort` rule code ([#9598](https://github.com/astral-sh/ruff/pull/9598))
|
||||
|
||||
## 0.1.14
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bugbear`\] Add fix for `duplicate-value` (`B033`) ([#9510](https://github.com/astral-sh/ruff/pull/9510))
|
||||
- \[`flake8-simplify`\] Implement `enumerate-for-loop` (`SIM113`) ([#7777](https://github.com/astral-sh/ruff/pull/7777))
|
||||
- \[`pygrep_hooks`\] Add fix for `deprecated-log-warn` (`PGH002`) ([#9519](https://github.com/astral-sh/ruff/pull/9519))
|
||||
- \[`pylint`\] Implement `import-private-name` (`C2701`) ([#5920](https://github.com/astral-sh/ruff/pull/5920))
|
||||
- \[`refurb`\] Implement `regex-flag-alias` with fix (`FURB167`) ([#9516](https://github.com/astral-sh/ruff/pull/9516))
|
||||
- \[`ruff`\] Add rule and fix to sort contents of `__all__` (`RUF022`) ([#9474](https://github.com/astral-sh/ruff/pull/9474))
|
||||
- \[`tryceratops`\] Add fix for `error-instead-of-exception` (`TRY400`) ([#9520](https://github.com/astral-sh/ruff/pull/9520))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-pyi`\] Fix `PYI047` false negatives on PEP-695 type aliases ([#9566](https://github.com/astral-sh/ruff/pull/9566))
|
||||
- \[`flake8-pyi`\] Fix `PYI049` false negatives on call-based `TypedDict`s ([#9567](https://github.com/astral-sh/ruff/pull/9567))
|
||||
- \[`pylint`\] Exclude `self` and `cls` when counting method arguments (`PLR0917`) ([#9563](https://github.com/astral-sh/ruff/pull/9563))
|
||||
|
||||
### CLI
|
||||
|
||||
- `--show-settings` displays active settings in a far more readable format ([#9464](https://github.com/astral-sh/ruff/pull/9464))
|
||||
- Add `--extension` support to the formatter ([#9483](https://github.com/astral-sh/ruff/pull/9483))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Ignore preview status for fixable and unfixable selectors ([#9538](https://github.com/astral-sh/ruff/pull/9538))
|
||||
- \[`pycodestyle`\] Use the configured tab size when expanding indents ([#9506](https://github.com/astral-sh/ruff/pull/9506))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Recursively visit deferred AST nodes ([#9541](https://github.com/astral-sh/ruff/pull/9541))
|
||||
- Visit deferred lambdas before type definitions ([#9540](https://github.com/astral-sh/ruff/pull/9540))
|
||||
- \[`flake8-simplify`\] Avoid some more `enumerate-for-loop` false positives (`SIM113`) ([#9515](https://github.com/astral-sh/ruff/pull/9515))
|
||||
- \[`pandas-vet`\] Limit inplace diagnostics to methods that accept inplace ([#9495](https://github.com/astral-sh/ruff/pull/9495))
|
||||
- \[`pylint`\] Add the `__prepare__` method to the list of recognized dunder method ([#9529](https://github.com/astral-sh/ruff/pull/9529))
|
||||
- \[`pylint`\] Ignore unnecessary dunder calls within dunder definitions ([#9496](https://github.com/astral-sh/ruff/pull/9496))
|
||||
- \[`refurb`\] Avoid bailing when `reimplemented-operator` is called on function (`FURB118`) ([#9556](https://github.com/astral-sh/ruff/pull/9556))
|
||||
- \[`ruff`\] Avoid treating named expressions as static keys (`RUF011`) ([#9494](https://github.com/astral-sh/ruff/pull/9494))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add instructions on using `noqa` with isort rules ([#9555](https://github.com/astral-sh/ruff/pull/9555))
|
||||
- Documentation update for URL giving 'page not found' ([#9565](https://github.com/astral-sh/ruff/pull/9565))
|
||||
- Fix admonition in dark mode ([#9502](https://github.com/astral-sh/ruff/pull/9502))
|
||||
- Update contributing docs to use `cargo bench -p ruff_benchmark` ([#9535](https://github.com/astral-sh/ruff/pull/9535))
|
||||
- Update emacs integration section to include `emacs-ruff-format` ([#9403](https://github.com/astral-sh/ruff/pull/9403))
|
||||
- \[`flake8-blind-except`\] Document exceptions to `blind-except` rule ([#9580](https://github.com/astral-sh/ruff/pull/9580))
|
||||
|
||||
## 0.1.13
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Include base pyproject when initializing cache settings ([#9480](https://github.com/astral-sh/ruff/pull/9480))
|
||||
- \[`flake8-simplify`\] Account for possibly-empty f-string values in truthiness logic ([#9484](https://github.com/astral-sh/ruff/pull/9484))
|
||||
- \[`pylint`\] Add the missing period in `unnecessary-dunder-call` ([#9485](https://github.com/astral-sh/ruff/pull/9485))
|
||||
- \[`pylint`\] Fix `__aenter__` message in `unnecessary-dunder-call` ([#9492](https://github.com/astral-sh/ruff/pull/9492))
|
||||
|
||||
## 0.1.12
|
||||
|
||||
### Preview features
|
||||
|
||||
- Formatter: Hug multiline-strings in preview style ([#9243](https://github.com/astral-sh/ruff/pull/9243))
|
||||
- \[`flake8-bandit`\] Add `ssl-with-no-version` (`S504`) ([#9384](https://github.com/astral-sh/ruff/pull/9384))
|
||||
- \[`flake8-bandit`\] Implement `ssl-insecure-version` (`S502`) ([#9390](https://github.com/astral-sh/ruff/pull/9390))
|
||||
- \[`flake8-bandit`\] Implement `ssl-with-bad-defaults` (`S503`) ([#9391](https://github.com/astral-sh/ruff/pull/9391))
|
||||
- \[`flake8-bandit`\] Implement suspicious import rules (`S4XX`) ([#8831](https://github.com/astral-sh/ruff/pull/8831))
|
||||
- \[`flake8-simplify`\] Implement `zip-dict-keys-and-values` (`SIM911`) ([#9460](https://github.com/astral-sh/ruff/pull/9460))
|
||||
- \[`pyflakes`\] Add a fix for `redefined-while-unused` (`F811`) ([#9419](https://github.com/astral-sh/ruff/pull/9419))
|
||||
- \[`pylint`\] Implement `unnecessary-dunder-call` (`C2801`) ([#9166](https://github.com/astral-sh/ruff/pull/9166))
|
||||
- \[`ruff`\] Add `parenthesize-chained-operators` (`RUF021`) to enforce parentheses in `a or b and c` ([#9440](https://github.com/astral-sh/ruff/pull/9440))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-boolean-trap`\] Allow Boolean positional arguments in setters ([#9429](https://github.com/astral-sh/ruff/pull/9429))
|
||||
- \[`flake8-builtins`\] Restrict `builtin-attribute-shadowing` (`A003`) to actual shadowed references ([#9462](https://github.com/astral-sh/ruff/pull/9462))
|
||||
- \[`flake8-pyi`\] Add fix for `generator-return-from-iter-method` (`PYI058`) ([#9355](https://github.com/astral-sh/ruff/pull/9355))
|
||||
- \[`pyflakes`\] Don't flag `redefined-while-unused` (`F811`) in `if` branches ([#9418](https://github.com/astral-sh/ruff/pull/9418))
|
||||
- \[`pyupgrade`\] Add some additional Python 3.12 typing members to `deprecated-import` ([#9445](https://github.com/astral-sh/ruff/pull/9445))
|
||||
- \[`ruff`\] Add fix for `parenthesize-chained-operators` (`RUF021`) ([#9449](https://github.com/astral-sh/ruff/pull/9449))
|
||||
- \[`ruff`\] Include subscripts and attributes in static key rule (`RUF011`) ([#9416](https://github.com/astral-sh/ruff/pull/9416))
|
||||
- \[`ruff`\] Support variable keys in static dictionary key rule (`RUF011`) ([#9411](https://github.com/astral-sh/ruff/pull/9411))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Generate deterministic IDs when formatting notebooks ([#9359](https://github.com/astral-sh/ruff/pull/9359))
|
||||
- Allow `# fmt: skip` with interspersed same-line comments ([#9395](https://github.com/astral-sh/ruff/pull/9395))
|
||||
- Parenthesize breaking named expressions in match guards ([#9396](https://github.com/astral-sh/ruff/pull/9396))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Add cell indexes to all diagnostics ([#9387](https://github.com/astral-sh/ruff/pull/9387))
|
||||
- Avoid infinite loop in constant vs. `None` comparisons ([#9376](https://github.com/astral-sh/ruff/pull/9376))
|
||||
- Handle raises with implicit alternate branches ([#9377](https://github.com/astral-sh/ruff/pull/9377))
|
||||
- Ignore trailing quotes for unclosed l-brace errors ([#9388](https://github.com/astral-sh/ruff/pull/9388))
|
||||
- Respect multi-segment submodule imports when resolving qualified names ([#9382](https://github.com/astral-sh/ruff/pull/9382))
|
||||
- Use `DisplayParseError` for stdin parser errors ([#9409](https://github.com/astral-sh/ruff/pull/9409))
|
||||
- Use `comment_ranges` for isort directive extraction ([#9414](https://github.com/astral-sh/ruff/pull/9414))
|
||||
- Use transformed source code for diagnostic locations ([#9408](https://github.com/astral-sh/ruff/pull/9408))
|
||||
- \[`flake8-pyi`\] Exclude `warnings.deprecated` and `typing_extensions.deprecated` arguments ([#9423](https://github.com/astral-sh/ruff/pull/9423))
|
||||
- \[`flake8-pyi`\] Fix false negative for `unused-private-protocol` (`PYI046`) with unused generic protocols ([#9405](https://github.com/astral-sh/ruff/pull/9405))
|
||||
- \[`pydocstyle`\] Disambiguate argument descriptors from section headers ([#9427](https://github.com/astral-sh/ruff/pull/9427))
|
||||
- \[`pylint`\] Homogenize `PLR0914` message to match other `PLR09XX` rules ([#9399](https://github.com/astral-sh/ruff/pull/9399))
|
||||
- \[`ruff`\] Allow `Hashable = None` in type annotations (`RUF013`) ([#9442](https://github.com/astral-sh/ruff/pull/9442))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix admonition hyperlink colouring ([#9385](https://github.com/astral-sh/ruff/pull/9385))
|
||||
- Add missing preview link ([#9386](https://github.com/astral-sh/ruff/pull/9386))
|
||||
|
||||
## 0.1.11
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pylint`\] Implement `super-without-brackets` (`W0245`) ([#9257](https://github.com/astral-sh/ruff/pull/9257))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Check path string properly in `python -m ruff` invocations ([#9367](https://github.com/astral-sh/ruff/pull/9367))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Tweak `relative-imports` message ([#9365](https://github.com/astral-sh/ruff/pull/9365))
|
||||
- Add fix safety note for `yield-in-for-loop` ([#9364](https://github.com/astral-sh/ruff/pull/9364))
|
||||
|
||||
## 0.1.10
|
||||
|
||||
### Preview features
|
||||
|
||||
- Improve `dummy_implementations` preview style formatting ([#9240](https://github.com/astral-sh/ruff/pull/9240))
|
||||
- Normalise Hex and unicode escape sequences in strings ([#9280](https://github.com/astral-sh/ruff/pull/9280))
|
||||
- Parenthesize long type annotations in annotated assignments ([#9210](https://github.com/astral-sh/ruff/pull/9210))
|
||||
- Parenthesize multi-context managers in `with` statements ([#9222](https://github.com/astral-sh/ruff/pull/9222))
|
||||
- \[`flake8-pyi`\] Implement `generator-return-from-iter-method` (`PYI058`) ([#9313](https://github.com/astral-sh/ruff/pull/9313))
|
||||
- \[`pylint`\] Implement `empty-comment` (`PLR2044`) ([#9174](https://github.com/astral-sh/ruff/pull/9174))
|
||||
- \[`refurb`\] Implement `bit-count` (`FURB161`) ([#9265](https://github.com/astral-sh/ruff/pull/9265))
|
||||
- \[`ruff`\] Add `never-union` rule to detect redundant `typing.NoReturn` and `typing.Never` ([#9217](https://github.com/astral-sh/ruff/pull/9217))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add paths to TOML parse errors ([#9358](https://github.com/astral-sh/ruff/pull/9358))
|
||||
- Add row and column numbers to formatter parse errors ([#9321](https://github.com/astral-sh/ruff/pull/9321))
|
||||
- Improve responsiveness when invoked via Python ([#9315](https://github.com/astral-sh/ruff/pull/9315))
|
||||
- Short rule messages should not end with a period ([#9345](https://github.com/astral-sh/ruff/pull/9345))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Respect runtime-required decorators on functions ([#9317](https://github.com/astral-sh/ruff/pull/9317))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid `asyncio-dangling-task` for nonlocal and global bindings ([#9263](https://github.com/astral-sh/ruff/pull/9263))
|
||||
- Escape trailing placeholders in rule documentation ([#9301](https://github.com/astral-sh/ruff/pull/9301))
|
||||
- Fix continuation detection following multi-line strings ([#9332](https://github.com/astral-sh/ruff/pull/9332))
|
||||
- Fix scoping for generators in named expressions in classes ([#9248](https://github.com/astral-sh/ruff/pull/9248))
|
||||
- Port from obsolete wsl crate to is-wsl ([#9356](https://github.com/astral-sh/ruff/pull/9356))
|
||||
- Remove special pre-visit for module docstrings ([#9261](https://github.com/astral-sh/ruff/pull/9261))
|
||||
- Respect `__str__` definitions from super classes ([#9338](https://github.com/astral-sh/ruff/pull/9338))
|
||||
- Respect `unused-noqa` via `per-file-ignores` ([#9300](https://github.com/astral-sh/ruff/pull/9300))
|
||||
- Respect attribute chains when resolving builtin call paths ([#9309](https://github.com/astral-sh/ruff/pull/9309))
|
||||
- Treat all `typing_extensions` members as typing aliases ([#9335](https://github.com/astral-sh/ruff/pull/9335))
|
||||
- Use `Display` for formatter parse errors ([#9316](https://github.com/astral-sh/ruff/pull/9316))
|
||||
- Wrap subscripted dicts in parens for f-string conversion ([#9238](https://github.com/astral-sh/ruff/pull/9238))
|
||||
- \[`flake8-annotations`\] Avoid adding return types to stub methods ([#9277](https://github.com/astral-sh/ruff/pull/9277))
|
||||
- \[`flake8-annotations`\] Respect mixed `return` and `raise` cases in return-type analysis ([#9310](https://github.com/astral-sh/ruff/pull/9310))
|
||||
- \[`flake8-bandit`\] Don't report violations when `SafeLoader` is imported from `yaml.loader` (`S506`) ([#9299](https://github.com/astral-sh/ruff/pull/9299))
|
||||
- \[`pylint`\] Avoid panic when comment is preceded by Unicode ([#9331](https://github.com/astral-sh/ruff/pull/9331))
|
||||
- \[`pylint`\] Change `PLR0917` error message to match other `PLR09XX` messages ([#9308](https://github.com/astral-sh/ruff/pull/9308))
|
||||
- \[`refurb`\] Avoid false positives for `math-constant` (`FURB152`) ([#9290](https://github.com/astral-sh/ruff/pull/9290))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Expand target name for better rule documentation ([#9302](https://github.com/astral-sh/ruff/pull/9302))
|
||||
- Fix typos found by codespell ([#9346](https://github.com/astral-sh/ruff/pull/9346))
|
||||
- \[`perflint`\] Document `PERF102` fix un-safety ([#9351](https://github.com/astral-sh/ruff/pull/9351))
|
||||
- \[`pyupgrade`\] Document `UP007` fix un-safety ([#9306](https://github.com/astral-sh/ruff/pull/9306))
|
||||
|
||||
## 0.1.9
|
||||
|
||||
### Breaking changes
|
||||
|
||||
- Add site-packages to default exclusions ([#9188](https://github.com/astral-sh/ruff/pull/9188))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Fix: Avoid parenthesizing subscript targets and values ([#9209](https://github.com/astral-sh/ruff/pull/9209))
|
||||
- \[`pylint`\] Implement `too-many-locals` (`PLR0914`) ([#9163](https://github.com/astral-sh/ruff/pull/9163))
|
||||
- Implement `reimplemented_operator` (FURB118) ([#9171](https://github.com/astral-sh/ruff/pull/9171))
|
||||
- Add a rule to detect string members in runtime-evaluated unions ([#9143](https://github.com/astral-sh/ruff/pull/9143))
|
||||
- Implement `no_blank_line_before_class_docstring` preview style ([#9154](https://github.com/astral-sh/ruff/pull/9154))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- `CONSTANT_CASE` variables are improperly flagged for yoda violation (`SIM300`) ([#9164](https://github.com/astral-sh/ruff/pull/9164))
|
||||
- \[`flake8-pyi`\] Cover ParamSpecs and TypeVarTuples (`PYI018`) ([#9198](https://github.com/astral-sh/ruff/pull/9198))
|
||||
- \[`flake8-bugbear`\] Add fix for `zip-without-explicit-strict` (`B905`) ([#9176](https://github.com/astral-sh/ruff/pull/9176))
|
||||
- Add fix to automatically remove `print` and `pprint` statements (`T201`, `T203`) ([#9208](https://github.com/astral-sh/ruff/pull/9208))
|
||||
- Prefer `Never` to `NoReturn` in auto-typing in Python >= 3.11 (`ANN201`) ([#9213](https://github.com/astral-sh/ruff/pull/9213))
|
||||
|
||||
### Formatter
|
||||
|
||||
- `can_omit_optional_parentheses`: Exit early for unparenthesized expressions ([#9125](https://github.com/astral-sh/ruff/pull/9125))
|
||||
- Fix `dynamic` mode with doctests so that it doesn't exceed configured line width ([#9129](https://github.com/astral-sh/ruff/pull/9129))
|
||||
- Fix `can_omit_optional_parentheses` for expressions with a right most fstring ([#9124](https://github.com/astral-sh/ruff/pull/9124))
|
||||
- Add `target_version` to formatter options ([#9220](https://github.com/astral-sh/ruff/pull/9220))
|
||||
|
||||
### CLI
|
||||
|
||||
- Update `ruff format --check` to display message for already formatted files ([#9153](https://github.com/astral-sh/ruff/pull/9153))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Reverse order of arguments for `operator.contains` ([#9192](https://github.com/astral-sh/ruff/pull/9192))
|
||||
- Iterate over lambdas in deferred type annotations ([#9175](https://github.com/astral-sh/ruff/pull/9175))
|
||||
- Fix panic in `D208` with multibyte indent ([#9147](https://github.com/astral-sh/ruff/pull/9147))
|
||||
- Add support for `NoReturn` in auto-return-typing ([#9206](https://github.com/astral-sh/ruff/pull/9206))
|
||||
- Allow removal of `typing` from `exempt-modules` ([#9214](https://github.com/astral-sh/ruff/pull/9214))
|
||||
- Avoid `mutable-class-default` violations for Pydantic subclasses ([#9187](https://github.com/astral-sh/ruff/pull/9187))
|
||||
- Fix dropped union expressions for piped non-types in `PYI055` autofix ([#9161](https://github.com/astral-sh/ruff/pull/9161))
|
||||
- Enable annotation quoting for multi-line expressions ([#9142](https://github.com/astral-sh/ruff/pull/9142))
|
||||
- Deduplicate edits when quoting annotations ([#9140](https://github.com/astral-sh/ruff/pull/9140))
|
||||
- Prevent invalid utf8 indexing in cell magic detection ([#9146](https://github.com/astral-sh/ruff/pull/9146))
|
||||
- Avoid nested quotations in auto-quoting fix ([#9168](https://github.com/astral-sh/ruff/pull/9168))
|
||||
- Add base-class inheritance detection to flake8-django rules ([#9151](https://github.com/astral-sh/ruff/pull/9151))
|
||||
- Avoid `asyncio-dangling-task` violations on shadowed bindings ([#9215](https://github.com/astral-sh/ruff/pull/9215))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix blog post URL in changelog ([#9119](https://github.com/astral-sh/ruff/pull/9119))
|
||||
- Add error suppression hint for multi-line strings ([#9205](https://github.com/astral-sh/ruff/pull/9205))
|
||||
- Fix typo in SemanticModel.parent_expression docstring ([#9167](https://github.com/astral-sh/ruff/pull/9167))
|
||||
- Document link between import sorting and formatter ([#9117](https://github.com/astral-sh/ruff/pull/9117))
|
||||
|
||||
## 0.1.8
|
||||
|
||||
This release includes opt-in support for formatting Python snippets within
|
||||
docstrings via the `docstring-code-format` setting.
|
||||
[Check out the blog post](https://astral.sh/blog/ruff-v0.1.8) for more details!
|
||||
|
||||
### Preview features
|
||||
|
||||
- Add `"preserve"` quote-style to mimic Black's skip-string-normalization ([#8822](https://github.com/astral-sh/ruff/pull/8822))
|
||||
- Implement `prefer_splitting_right_hand_side_of_assignments` preview style ([#8943](https://github.com/astral-sh/ruff/pull/8943))
|
||||
- \[`pycodestyle`\] Add fix for `unexpected-spaces-around-keyword-parameter-equals` ([#9072](https://github.com/astral-sh/ruff/pull/9072))
|
||||
- \[`pycodestyle`\] Add fix for comment-related whitespace rules ([#9075](https://github.com/astral-sh/ruff/pull/9075))
|
||||
- \[`pycodestyle`\] Allow `sys.path` modifications between imports ([#9047](https://github.com/astral-sh/ruff/pull/9047))
|
||||
- \[`refurb`\] Implement `hashlib-digest-hex` (`FURB181`) ([#9077](https://github.com/astral-sh/ruff/pull/9077))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Allow `flake8-type-checking` rules to automatically quote runtime-evaluated references ([#6001](https://github.com/astral-sh/ruff/pull/6001))
|
||||
- Allow transparent cell magics in Jupyter Notebooks ([#8911](https://github.com/astral-sh/ruff/pull/8911))
|
||||
- \[`flake8-annotations`\] Avoid `ANN2xx` fixes for abstract methods with empty bodies ([#9034](https://github.com/astral-sh/ruff/pull/9034))
|
||||
- \[`flake8-self`\] Ignore underscore references in type annotations ([#9036](https://github.com/astral-sh/ruff/pull/9036))
|
||||
- \[`pep8-naming`\] Allow class names when `apps.get_model` is a non-string ([#9065](https://github.com/astral-sh/ruff/pull/9065))
|
||||
- \[`pycodestyle`\] Allow `matplotlib.use` calls to intersperse imports ([#9094](https://github.com/astral-sh/ruff/pull/9094))
|
||||
- \[`pyflakes`\] Support fixing unused assignments in tuples by renaming variables (`F841`) ([#9107](https://github.com/astral-sh/ruff/pull/9107))
|
||||
- \[`pylint`\] Add fix for `subprocess-run-without-check` (`PLW1510`) ([#6708](https://github.com/astral-sh/ruff/pull/6708))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Add `docstring-code-format` knob to enable docstring snippet formatting ([#8854](https://github.com/astral-sh/ruff/pull/8854))
|
||||
- Use double quotes for all docstrings, including single-quoted docstrings ([#9020](https://github.com/astral-sh/ruff/pull/9020))
|
||||
- Implement "dynamic" line width mode for docstring code formatting ([#9098](https://github.com/astral-sh/ruff/pull/9098))
|
||||
- Support reformatting Markdown code blocks ([#9030](https://github.com/astral-sh/ruff/pull/9030))
|
||||
- add support for formatting reStructuredText code snippets ([#9003](https://github.com/astral-sh/ruff/pull/9003))
|
||||
- Avoid trailing comma for single-argument with positional separator ([#9076](https://github.com/astral-sh/ruff/pull/9076))
|
||||
- Fix handling of trailing target comment ([#9051](https://github.com/astral-sh/ruff/pull/9051))
|
||||
|
||||
### CLI
|
||||
|
||||
- Hide unsafe fix suggestions when explicitly disabled ([#9095](https://github.com/astral-sh/ruff/pull/9095))
|
||||
- Add SARIF support to `--output-format` ([#9078](https://github.com/astral-sh/ruff/pull/9078))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Apply unnecessary index rule prior to enumerate rewrite ([#9012](https://github.com/astral-sh/ruff/pull/9012))
|
||||
- \[`flake8-err-msg`\] Allow `EM` fixes even if `msg` variable is defined ([#9059](https://github.com/astral-sh/ruff/pull/9059))
|
||||
- \[`flake8-pie`\] Prevent keyword arguments duplication ([#8450](https://github.com/astral-sh/ruff/pull/8450))
|
||||
- \[`flake8-pie`\] Respect trailing comma in `unnecessary-dict-kwargs` (`PIE804`) ([#9015](https://github.com/astral-sh/ruff/pull/9015))
|
||||
- \[`flake8-raise`\] Avoid removing parentheses on ctypes.WinError ([#9027](https://github.com/astral-sh/ruff/pull/9027))
|
||||
- \[`isort`\] Avoid invalid combination of `force-sort-within-types` and `lines-between-types` ([#9041](https://github.com/astral-sh/ruff/pull/9041))
|
||||
- \[`isort`\] Ensure that from-style imports are always ordered first in `__future__` ([#9039](https://github.com/astral-sh/ruff/pull/9039))
|
||||
- \[`pycodestyle`\] Allow tab indentation before keyword ([#9099](https://github.com/astral-sh/ruff/pull/9099))
|
||||
- \[`pylint`\] Ignore `@overrides` and `@overloads` for `too-many-positional` ([#9000](https://github.com/astral-sh/ruff/pull/9000))
|
||||
- \[`pyupgrade`\] Enable `printf-string-formatting` fix with comments on right-hand side ([#9037](https://github.com/astral-sh/ruff/pull/9037))
|
||||
- \[`refurb`\] Make `math-constant` (`FURB152`) rule more targeted ([#9054](https://github.com/astral-sh/ruff/pull/9054))
|
||||
- \[`refurb`\] Support floating-point base in `redundant-log-base` (`FURB163`) ([#9100](https://github.com/astral-sh/ruff/pull/9100))
|
||||
- \[`ruff`\] Detect `unused-asyncio-dangling-task` (`RUF006`) on unused assignments ([#9060](https://github.com/astral-sh/ruff/pull/9060))
|
||||
|
||||
## 0.1.7
|
||||
|
||||
### Preview features
|
||||
|
||||
- Implement multiline dictionary and list hugging for preview style ([#8293](https://github.com/astral-sh/ruff/pull/8293))
|
||||
- Implement the `fix_power_op_line_length` preview style ([#8947](https://github.com/astral-sh/ruff/pull/8947))
|
||||
- Use Python version to determine typing rewrite safety ([#8919](https://github.com/astral-sh/ruff/pull/8919))
|
||||
- \[`flake8-annotations`\] Enable auto-return-type involving `Optional` and `Union` annotations ([#8885](https://github.com/astral-sh/ruff/pull/8885))
|
||||
- \[`flake8-bandit`\] Implement `django-raw-sql` (`S611`) ([#8651](https://github.com/astral-sh/ruff/pull/8651))
|
||||
- \[`flake8-bandit`\] Implement `tarfile-unsafe-members` (`S202`) ([#8829](https://github.com/astral-sh/ruff/pull/8829))
|
||||
- \[`flake8-pyi`\] Implement fix for `unnecessary-literal-union` (`PYI030`) ([#7934](https://github.com/astral-sh/ruff/pull/7934))
|
||||
- \[`flake8-simplify`\] Extend `dict-get-with-none-default` (`SIM910`) to non-literals ([#8762](https://github.com/astral-sh/ruff/pull/8762))
|
||||
- \[`pylint`\] - add `unnecessary-list-index-lookup` (`PLR1736`) + autofix ([#7999](https://github.com/astral-sh/ruff/pull/7999))
|
||||
- \[`pylint`\] - implement R0202 and R0203 with autofixes ([#8335](https://github.com/astral-sh/ruff/pull/8335))
|
||||
- \[`pylint`\] Implement `repeated-keyword` (`PLe1132`) ([#8706](https://github.com/astral-sh/ruff/pull/8706))
|
||||
- \[`pylint`\] Implement `too-many-positional` (`PLR0917`) ([#8995](https://github.com/astral-sh/ruff/pull/8995))
|
||||
- \[`pylint`\] Implement `unnecessary-dict-index-lookup` (`PLR1733`) ([#8036](https://github.com/astral-sh/ruff/pull/8036))
|
||||
- \[`refurb`\] Implement `redundant-log-base` (`FURB163`) ([#8842](https://github.com/astral-sh/ruff/pull/8842))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-boolean-trap`\] Allow booleans in `@override` methods ([#8882](https://github.com/astral-sh/ruff/pull/8882))
|
||||
- \[`flake8-bugbear`\] Avoid `B015`,`B018` for last expression in a cell ([#8815](https://github.com/astral-sh/ruff/pull/8815))
|
||||
- \[`flake8-pie`\] Allow ellipses for enum values in stub files ([#8825](https://github.com/astral-sh/ruff/pull/8825))
|
||||
- \[`flake8-pyi`\] Check PEP 695 type aliases for `snake-case-type-alias` and `t-suffixed-type-alias` ([#8966](https://github.com/astral-sh/ruff/pull/8966))
|
||||
- \[`flake8-pyi`\] Check for kwarg and vararg `NoReturn` type annotations ([#8948](https://github.com/astral-sh/ruff/pull/8948))
|
||||
- \[`flake8-simplify`\] Omit select context managers from `SIM117` ([#8801](https://github.com/astral-sh/ruff/pull/8801))
|
||||
- \[`pep8-naming`\] Allow Django model loads in `non-lowercase-variable-in-function` (`N806`) ([#8917](https://github.com/astral-sh/ruff/pull/8917))
|
||||
- \[`pycodestyle`\] Avoid `E703` for last expression in a cell ([#8821](https://github.com/astral-sh/ruff/pull/8821))
|
||||
- \[`pycodestyle`\] Update `E402` to work at cell level for notebooks ([#8872](https://github.com/astral-sh/ruff/pull/8872))
|
||||
- \[`pydocstyle`\] Avoid `D100` for Jupyter Notebooks ([#8816](https://github.com/astral-sh/ruff/pull/8816))
|
||||
- \[`pylint`\] Implement fix for `unspecified-encoding` (`PLW1514`) ([#8928](https://github.com/astral-sh/ruff/pull/8928))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Avoid unstable formatting in ellipsis-only body with trailing comment ([#8984](https://github.com/astral-sh/ruff/pull/8984))
|
||||
- Inline trailing comments for type alias similar to assignments ([#8941](https://github.com/astral-sh/ruff/pull/8941))
|
||||
- Insert trailing comma when function breaks with single argument ([#8921](https://github.com/astral-sh/ruff/pull/8921))
|
||||
|
||||
### CLI
|
||||
|
||||
- Update `ruff check` and `ruff format` to default to the current directory ([#8791](https://github.com/astral-sh/ruff/pull/8791))
|
||||
- Stop at the first resolved parent configuration ([#8864](https://github.com/astral-sh/ruff/pull/8864))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`pylint`\] Default `max-positional-args` to `max-args` ([#8998](https://github.com/astral-sh/ruff/pull/8998))
|
||||
- \[`pylint`\] Add `allow-dunder-method-names` setting for `bad-dunder-method-name` (`PLW3201`) ([#8812](https://github.com/astral-sh/ruff/pull/8812))
|
||||
- \[`isort`\] Add support for `from-first` setting ([#8663](https://github.com/astral-sh/ruff/pull/8663))
|
||||
- \[`isort`\] Add support for `length-sort` settings ([#8841](https://github.com/astral-sh/ruff/pull/8841))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Add support for `@functools.singledispatch` ([#8934](https://github.com/astral-sh/ruff/pull/8934))
|
||||
- Avoid off-by-one error in stripping noqa following multi-byte char ([#8979](https://github.com/astral-sh/ruff/pull/8979))
|
||||
- Avoid off-by-one error in with-item named expressions ([#8915](https://github.com/astral-sh/ruff/pull/8915))
|
||||
- Avoid syntax error via invalid ur string prefix ([#8971](https://github.com/astral-sh/ruff/pull/8971))
|
||||
- Avoid underflow in `get_model` matching ([#8965](https://github.com/astral-sh/ruff/pull/8965))
|
||||
- Avoid unnecessary index diagnostics when value is modified ([#8970](https://github.com/astral-sh/ruff/pull/8970))
|
||||
- Convert over-indentation rule to use number of characters ([#8983](https://github.com/astral-sh/ruff/pull/8983))
|
||||
- Detect implicit returns in auto-return-types ([#8952](https://github.com/astral-sh/ruff/pull/8952))
|
||||
- Fix start >= end error in over-indentation ([#8982](https://github.com/astral-sh/ruff/pull/8982))
|
||||
- Ignore `@overload` and `@override` methods for too-many-arguments checks ([#8954](https://github.com/astral-sh/ruff/pull/8954))
|
||||
- Lexer start of line is false only for `Mode::Expression` ([#8880](https://github.com/astral-sh/ruff/pull/8880))
|
||||
- Mark `pydantic_settings.BaseSettings` as having default copy semantics ([#8793](https://github.com/astral-sh/ruff/pull/8793))
|
||||
- Respect dictionary unpacking in `NamedTuple` assignments ([#8810](https://github.com/astral-sh/ruff/pull/8810))
|
||||
- Respect local subclasses in `flake8-type-checking` ([#8768](https://github.com/astral-sh/ruff/pull/8768))
|
||||
- Support type alias statements in simple statement positions ([#8916](https://github.com/astral-sh/ruff/pull/8916))
|
||||
- \[`flake8-annotations`\] Avoid filtering out un-representable types in return annotation ([#8881](https://github.com/astral-sh/ruff/pull/8881))
|
||||
- \[`flake8-pie`\] Retain extra ellipses in protocols and abstract methods ([#8769](https://github.com/astral-sh/ruff/pull/8769))
|
||||
- \[`flake8-pyi`\] Respect local enum subclasses in `simple-defaults` (`PYI052`) ([#8767](https://github.com/astral-sh/ruff/pull/8767))
|
||||
- \[`flake8-trio`\] Use correct range for `TRIO115` fix ([#8933](https://github.com/astral-sh/ruff/pull/8933))
|
||||
- \[`flake8-trio`\] Use full arguments range for zero-sleep-call ([#8936](https://github.com/astral-sh/ruff/pull/8936))
|
||||
- \[`isort`\] fix: mark `__main__` as first-party import ([#8805](https://github.com/astral-sh/ruff/pull/8805))
|
||||
- \[`pep8-naming`\] Avoid `N806` errors for type alias statements ([#8785](https://github.com/astral-sh/ruff/pull/8785))
|
||||
- \[`perflint`\] Avoid `PERF101` if there's an append in loop body ([#8809](https://github.com/astral-sh/ruff/pull/8809))
|
||||
- \[`pycodestyle`\] Allow space-before-colon after end-of-slice ([#8838](https://github.com/astral-sh/ruff/pull/8838))
|
||||
- \[`pydocstyle`\] Avoid non-character breaks in `over-indentation` (`D208`) ([#8866](https://github.com/astral-sh/ruff/pull/8866))
|
||||
- \[`pydocstyle`\] Ignore underlines when determining docstring logical lines ([#8929](https://github.com/astral-sh/ruff/pull/8929))
|
||||
- \[`pylint`\] Extend `self-assigning-variable` to multi-target assignments ([#8839](https://github.com/astral-sh/ruff/pull/8839))
|
||||
- \[`tryceratops`\] Avoid repeated triggers in nested `tryceratops` diagnostics ([#8772](https://github.com/astral-sh/ruff/pull/8772))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add advice for fixing RUF008 when mutability is not desired ([#8853](https://github.com/astral-sh/ruff/pull/8853))
|
||||
- Added the command to run ruff using pkgx to the installation.md ([#8955](https://github.com/astral-sh/ruff/pull/8955))
|
||||
- Document fix safety for flake8-comprehensions and some pyupgrade rules ([#8918](https://github.com/astral-sh/ruff/pull/8918))
|
||||
- Fix doc formatting for zero-sleep-call ([#8937](https://github.com/astral-sh/ruff/pull/8937))
|
||||
- Remove duplicate imports from os-stat documentation ([#8930](https://github.com/astral-sh/ruff/pull/8930))
|
||||
- Replace generated reference to MkDocs ([#8806](https://github.com/astral-sh/ruff/pull/8806))
|
||||
- Update Arch Linux package URL in installation.md ([#8802](https://github.com/astral-sh/ruff/pull/8802))
|
||||
- \[`flake8-pyi`\] Fix error in `t-suffixed-type-alias` (`PYI043`) example ([#8963](https://github.com/astral-sh/ruff/pull/8963))
|
||||
- \[`flake8-pyi`\] Improve motivation for `custom-type-var-return-type` (`PYI019`) ([#8766](https://github.com/astral-sh/ruff/pull/8766))
|
||||
|
||||
## 0.1.6
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-boolean-trap`\] Extend `boolean-type-hint-positional-argument` (`FBT001`) to include booleans in unions ([#7501](https://github.com/astral-sh/ruff/pull/7501))
|
||||
- \[`flake8-pie`\] Extend `reimplemented-list-builtin` (`PIE807`) to `dict` reimplementations ([#8608](https://github.com/astral-sh/ruff/pull/8608))
|
||||
- \[`flake8-pie`\] Extend `unnecessary-pass` (`PIE790`) to include ellipses (`...`) ([#8641](https://github.com/astral-sh/ruff/pull/8641))
|
||||
- \[`flake8-pie`\] Implement fix for `unnecessary-spread` (`PIE800`) ([#8668](https://github.com/astral-sh/ruff/pull/8668))
|
||||
- \[`flake8-quotes`\] Implement `unnecessary-escaped-quote` (`Q004`) ([#8630](https://github.com/astral-sh/ruff/pull/8630))
|
||||
- \[`pycodestyle`\] Implement fix for `multiple-spaces-after-keyword` (`E271`) and `multiple-spaces-before-keyword` (`E272`) ([#8622](https://github.com/astral-sh/ruff/pull/8622))
|
||||
- \[`pycodestyle`\] Implement fix for `multiple-spaces-after-operator` (`E222`) and `multiple-spaces-before-operator` (`E221`) ([#8623](https://github.com/astral-sh/ruff/pull/8623))
|
||||
- \[`pyflakes`\] Extend `is-literal` (`F632`) to include comparisons against mutable initializers ([#8607](https://github.com/astral-sh/ruff/pull/8607))
|
||||
- \[`pylint`\] Implement `redefined-argument-from-local` (`PLR1704`) ([#8159](https://github.com/astral-sh/ruff/pull/8159))
|
||||
- \[`pylint`\] Implement fix for `unnecessary-lambda` (`PLW0108`) ([#8621](https://github.com/astral-sh/ruff/pull/8621))
|
||||
- \[`refurb`\] Implement `if-expr-min-max` (`FURB136`) ([#8664](https://github.com/astral-sh/ruff/pull/8664))
|
||||
- \[`refurb`\] Implement `math-constant` (`FURB152`) ([#8727](https://github.com/astral-sh/ruff/pull/8727))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-annotations`\] Add autotyping-like return type inference for annotation rules ([#8643](https://github.com/astral-sh/ruff/pull/8643))
|
||||
- \[`flake8-future-annotations`\] Implement fix for `future-required-type-annotation` (`FA102`) ([#8711](https://github.com/astral-sh/ruff/pull/8711))
|
||||
- \[`flake8-implicit-namespace-package`\] Avoid missing namespace violations in scripts with shebangs ([#8710](https://github.com/astral-sh/ruff/pull/8710))
|
||||
- \[`pydocstyle`\] Update `over-indentation` (`D208`) to preserve indentation offsets when fixing overindented lines ([#8699](https://github.com/astral-sh/ruff/pull/8699))
|
||||
- \[`pyupgrade`\] Refine `timeout-error-alias` (`UP041`) to remove false positives ([#8587](https://github.com/astral-sh/ruff/pull/8587))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Fix instability in `await` formatting with fluent style ([#8676](https://github.com/astral-sh/ruff/pull/8676))
|
||||
- Compare formatted and unformatted ASTs during formatter tests ([#8624](https://github.com/astral-sh/ruff/pull/8624))
|
||||
- Preserve trailing semicolon for Notebooks ([#8590](https://github.com/astral-sh/ruff/pull/8590))
|
||||
|
||||
### CLI
|
||||
|
||||
- Improve debug printing for resolving origin of config settings ([#8729](https://github.com/astral-sh/ruff/pull/8729))
|
||||
- Write unchanged, excluded files to stdout when read via stdin ([#8596](https://github.com/astral-sh/ruff/pull/8596))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`isort`\] Support disabling sections with `no-sections = true` ([#8657](https://github.com/astral-sh/ruff/pull/8657))
|
||||
- \[`pep8-naming`\] Support local and dynamic class- and static-method decorators ([#8592](https://github.com/astral-sh/ruff/pull/8592))
|
||||
- \[`pydocstyle`\] Allow overriding pydocstyle convention rules ([#8586](https://github.com/astral-sh/ruff/pull/8586))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid syntax error via importing `trio.lowlevel` ([#8730](https://github.com/astral-sh/ruff/pull/8730))
|
||||
- Omit unrolled augmented assignments in `PIE794` ([#8634](https://github.com/astral-sh/ruff/pull/8634))
|
||||
- Slice source code instead of generating it for `EM` fixes ([#7746](https://github.com/astral-sh/ruff/pull/7746))
|
||||
- Allow whitespace around colon in slices for `whitespace-before-punctuation` (`E203`) ([#8654](https://github.com/astral-sh/ruff/pull/8654))
|
||||
- Use function range for `no-self-use` ([#8637](https://github.com/astral-sh/ruff/pull/8637))
|
||||
- F-strings don't contain bytes literals for `PLW0129` ([#8675](https://github.com/astral-sh/ruff/pull/8675))
|
||||
- Improve detection of `TYPE_CHECKING` blocks imported from `typing_extensions` or `_typeshed` ([#8429](https://github.com/astral-sh/ruff/pull/8429))
|
||||
- Treat display as a builtin in IPython ([#8707](https://github.com/astral-sh/ruff/pull/8707))
|
||||
- Avoid `FURB113` autofix if comments are present ([#8494](https://github.com/astral-sh/ruff/pull/8494))
|
||||
- Consider the new f-string tokens for `flake8-commas` ([#8582](https://github.com/astral-sh/ruff/pull/8582))
|
||||
- Remove erroneous bad-dunder-name reference ([#8742](https://github.com/astral-sh/ruff/pull/8742))
|
||||
- Avoid recommending Self usages in metaclasses ([#8639](https://github.com/astral-sh/ruff/pull/8639))
|
||||
- Detect runtime-evaluated base classes defined in the current file ([#8572](https://github.com/astral-sh/ruff/pull/8572))
|
||||
- Avoid inserting trailing commas within f-strings ([#8574](https://github.com/astral-sh/ruff/pull/8574))
|
||||
- Remove incorrect deprecation label for stdout and stderr ([#8743](https://github.com/astral-sh/ruff/pull/8743))
|
||||
- Fix unnecessary parentheses in UP007 fix ([#8610](https://github.com/astral-sh/ruff/pull/8610))
|
||||
- Remove repeated and erroneous scoped settings headers in docs ([#8670](https://github.com/astral-sh/ruff/pull/8670))
|
||||
- Trim trailing empty strings when converting to f-strings ([#8712](https://github.com/astral-sh/ruff/pull/8712))
|
||||
- Fix ordering for `force-sort-within-sections` ([#8665](https://github.com/astral-sh/ruff/pull/8665))
|
||||
- Run unicode prefix rule over tokens ([#8709](https://github.com/astral-sh/ruff/pull/8709))
|
||||
- Update UP032 to unescape curly braces in literal parts of converted strings ([#8697](https://github.com/astral-sh/ruff/pull/8697))
|
||||
- List all ipython builtins ([#8719](https://github.com/astral-sh/ruff/pull/8719))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Document conventions in the FAQ ([#8638](https://github.com/astral-sh/ruff/pull/8638))
|
||||
- Redirect from rule codes to rule pages in docs ([#8636](https://github.com/astral-sh/ruff/pull/8636))
|
||||
- Fix permalink to convention setting ([#8575](https://github.com/astral-sh/ruff/pull/8575))
|
||||
|
||||
## 0.1.5
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bandit`\] Implement `mako-templates` (`S702`) ([#8533](https://github.com/astral-sh/ruff/pull/8533))
|
||||
- \[`flake8-trio`\] Implement `TRIO105` ([#8490](https://github.com/astral-sh/ruff/pull/8490))
|
||||
- \[`flake8-trio`\] Implement `TRIO109` ([#8534](https://github.com/astral-sh/ruff/pull/8534))
|
||||
- \[`flake8-trio`\] Implement `TRIO110` ([#8537](https://github.com/astral-sh/ruff/pull/8537))
|
||||
- \[`flake8-trio`\] Implement `TRIO115` ([#8486](https://github.com/astral-sh/ruff/pull/8486))
|
||||
- \[`refurb`\] Implement `type-none-comparison` (`FURB169`) ([#8487](https://github.com/astral-sh/ruff/pull/8487))
|
||||
- Flag all comparisons against builtin types in `E721` ([#8491](https://github.com/astral-sh/ruff/pull/8491))
|
||||
- Mark the `SIM118` fix as safe when the expression is a known dictionary ([#8525](https://github.com/astral-sh/ruff/pull/8525))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Fix multiline lambda expression statement formatting ([#8466](https://github.com/astral-sh/ruff/pull/8466))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add hidden `--extension` to override inference of source type from file extension ([#8373](https://github.com/astral-sh/ruff/pull/8373))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Account for selector specificity when merging `extend_unsafe_fixes` and `override extend_safe_fixes` ([#8444](https://github.com/astral-sh/ruff/pull/8444))
|
||||
- Add support for disabling cache with `RUFF_NO_CACHE` environment variable ([#8538](https://github.com/astral-sh/ruff/pull/8538))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`E721`\] Flag comparisons to `memoryview` ([#8485](https://github.com/astral-sh/ruff/pull/8485))
|
||||
- Allow collapsed-ellipsis bodies in other statements ([#8499](https://github.com/astral-sh/ruff/pull/8499))
|
||||
- Avoid `D301` autofix for `u` prefixed strings ([#8495](https://github.com/astral-sh/ruff/pull/8495))
|
||||
- Only flag `flake8-trio` rules when `trio` import is present ([#8550](https://github.com/astral-sh/ruff/pull/8550))
|
||||
- Reject more syntactically invalid Python programs ([#8524](https://github.com/astral-sh/ruff/pull/8524))
|
||||
- Avoid raising `TRIO115` violations for `trio.sleep(...)` calls with non-number values ([#8532](https://github.com/astral-sh/ruff/pull/8532))
|
||||
- Fix `F841` false negative on assignment to multiple variables ([#8489](https://github.com/astral-sh/ruff/pull/8489))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix link to isort `known-first-party` ([#8562](https://github.com/astral-sh/ruff/pull/8562))
|
||||
- Add notes on fix safety to a few rules ([#8500](https://github.com/astral-sh/ruff/pull/8500))
|
||||
- Add missing toml config tabs ([#8512](https://github.com/astral-sh/ruff/pull/8512))
|
||||
- Add instructions for configuration of Emacs ([#8488](https://github.com/astral-sh/ruff/pull/8488))
|
||||
- Improve detail link contrast in dark mode ([#8548](https://github.com/astral-sh/ruff/pull/8548))
|
||||
- Fix typo in example ([#8506](https://github.com/astral-sh/ruff/pull/8506))
|
||||
- Added tabs for configuration files in the documentation ([#8480](https://github.com/astral-sh/ruff/pull/8480))
|
||||
- Recommend `project.requires-python` over `target-version` ([#8513](https://github.com/astral-sh/ruff/pull/8513))
|
||||
- Add singleton escape hatch to `B008` documentation ([#8501](https://github.com/astral-sh/ruff/pull/8501))
|
||||
- Fix tab configuration docs ([#8502](https://github.com/astral-sh/ruff/pull/8502))
|
||||
|
||||
## 0.1.4
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-trio`\] Implement `timeout-without-await` (`TRIO001`) ([#8439](https://github.com/astral-sh/ruff/pull/8439))
|
||||
- \[`numpy`\] Implement NumPy 2.0 migration rule (`NPY200`) ([#7702](https://github.com/astral-sh/ruff/pull/7702))
|
||||
- \[`pylint`\] Implement `bad-open-mode` (`W1501`) ([#8294](https://github.com/astral-sh/ruff/pull/8294))
|
||||
- \[`pylint`\] Implement `import-outside-toplevel` (`C0415`) rule ([#5180](https://github.com/astral-sh/ruff/pull/5180))
|
||||
- \[`pylint`\] Implement `useless-with-lock` (`W2101`) ([#8321](https://github.com/astral-sh/ruff/pull/8321))
|
||||
- \[`pyupgrade`\] Implement `timeout-error-alias` (`UP041`) ([#8476](https://github.com/astral-sh/ruff/pull/8476))
|
||||
- \[`refurb`\] Implement `isinstance-type-none` (`FURB168`) ([#8308](https://github.com/astral-sh/ruff/pull/8308))
|
||||
- Detect confusable Unicode-to-Unicode units in `RUF001`, `RUF002`, and `RUF003` ([#4430](https://github.com/astral-sh/ruff/pull/4430))
|
||||
- Add newline after module docstrings in preview style ([#8283](https://github.com/astral-sh/ruff/pull/8283))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Add a note on line-too-long to the formatter docs ([#8314](https://github.com/astral-sh/ruff/pull/8314))
|
||||
- Preserve trailing statement semicolons when using `fmt: skip` ([#8273](https://github.com/astral-sh/ruff/pull/8273))
|
||||
- Preserve trailing semicolons when using `fmt: off` ([#8275](https://github.com/astral-sh/ruff/pull/8275))
|
||||
- Avoid duplicating linter-formatter compatibility warnings ([#8292](https://github.com/astral-sh/ruff/pull/8292))
|
||||
- Avoid inserting a newline after function docstrings ([#8375](https://github.com/astral-sh/ruff/pull/8375))
|
||||
- Insert newline between docstring and following own line comment ([#8216](https://github.com/astral-sh/ruff/pull/8216))
|
||||
- Split tuples in return positions by comma first ([#8280](https://github.com/astral-sh/ruff/pull/8280))
|
||||
- Avoid treating byte strings as docstrings ([#8350](https://github.com/astral-sh/ruff/pull/8350))
|
||||
- Add `--line-length` option to `format` command ([#8363](https://github.com/astral-sh/ruff/pull/8363))
|
||||
- Avoid parenthesizing unsplittable because of comments ([#8431](https://github.com/astral-sh/ruff/pull/8431))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add `--output-format` to `ruff rule` and `ruff linter` ([#8203](https://github.com/astral-sh/ruff/pull/8203))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Respect `--force-exclude` in `lint.exclude` and `format.exclude` ([#8393](https://github.com/astral-sh/ruff/pull/8393))
|
||||
- Respect `--extend-per-file-ignores` on the CLI ([#8329](https://github.com/astral-sh/ruff/pull/8329))
|
||||
- Extend `bad-dunder-method-name` to permit `__index__` ([#8300](https://github.com/astral-sh/ruff/pull/8300))
|
||||
- Fix panic with 8 in octal escape ([#8356](https://github.com/astral-sh/ruff/pull/8356))
|
||||
- Avoid raising `D300` when both triple quote styles are present ([#8462](https://github.com/astral-sh/ruff/pull/8462))
|
||||
- Consider unterminated f-strings in `FStringRanges` ([#8154](https://github.com/astral-sh/ruff/pull/8154))
|
||||
- Avoid including literal `shell=True` for truthy, non-`True` diagnostics ([#8359](https://github.com/astral-sh/ruff/pull/8359))
|
||||
- Avoid triggering single-element test for starred expressions ([#8433](https://github.com/astral-sh/ruff/pull/8433))
|
||||
- Detect and ignore Jupyter automagics ([#8398](https://github.com/astral-sh/ruff/pull/8398))
|
||||
- Fix invalid E231 error with f-strings ([#8369](https://github.com/astral-sh/ruff/pull/8369))
|
||||
- Avoid triggering `NamedTuple` rewrite with starred annotation ([#8434](https://github.com/astral-sh/ruff/pull/8434))
|
||||
- Avoid un-setting bracket flag in logical lines ([#8380](https://github.com/astral-sh/ruff/pull/8380))
|
||||
- Place 'r' prefix before 'f' for raw format strings ([#8464](https://github.com/astral-sh/ruff/pull/8464))
|
||||
- Remove trailing periods from NumPy 2.0 code actions ([#8475](https://github.com/astral-sh/ruff/pull/8475))
|
||||
- Fix bug where `PLE1307` was raised when formatting `%c` with characters ([#8407](https://github.com/astral-sh/ruff/pull/8407))
|
||||
- Remove unicode flag from comparable ([#8440](https://github.com/astral-sh/ruff/pull/8440))
|
||||
- Improve B015 message ([#8295](https://github.com/astral-sh/ruff/pull/8295))
|
||||
- Use `fixedOverflowWidgets` for playground popover ([#8458](https://github.com/astral-sh/ruff/pull/8458))
|
||||
- Mark `byte_bounds` as a non-backwards-compatible NumPy 2.0 change ([#8474](https://github.com/astral-sh/ruff/pull/8474))
|
||||
|
||||
### Internals
|
||||
|
||||
- Add a dedicated cache directory per Ruff version ([#8333](https://github.com/astral-sh/ruff/pull/8333))
|
||||
- Allow selective caching for `--fix` and `--diff` ([#8316](https://github.com/astral-sh/ruff/pull/8316))
|
||||
- Improve performance of comment parsing ([#8193](https://github.com/astral-sh/ruff/pull/8193))
|
||||
- Improve performance of string parsing ([#8227](https://github.com/astral-sh/ruff/pull/8227))
|
||||
- Use a dedicated sort key for isort import sorting ([#7963](https://github.com/astral-sh/ruff/pull/7963))
|
||||
|
||||
## 0.1.3
|
||||
|
||||
This release includes a variety of improvements to the Ruff formatter, removing several known and
|
||||
|
||||
@@ -72,7 +72,7 @@ representative at an online or offline event.
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at
|
||||
<charlie.r.marsh@gmail.com>.
|
||||
charlie.r.marsh@gmail.com.
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
|
||||
@@ -26,10 +26,6 @@ Welcome! We're happy to have you here. Thank you in advance for your contributio
|
||||
- [`cargo dev`](#cargo-dev)
|
||||
- [Subsystems](#subsystems)
|
||||
- [Compilation Pipeline](#compilation-pipeline)
|
||||
- [Import Categorization](#import-categorization)
|
||||
- [Project root](#project-root)
|
||||
- [Package root](#package-root)
|
||||
- [Import categorization](#import-categorization-1)
|
||||
|
||||
## The Basics
|
||||
|
||||
@@ -39,7 +35,7 @@ For small changes (e.g., bug fixes), feel free to submit a PR.
|
||||
|
||||
For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
|
||||
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
|
||||
You can also join us on [**Discord**](https://discord.com/invite/astral-sh) to discuss your idea with the
|
||||
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5) to discuss your idea with the
|
||||
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
|
||||
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
|
||||
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
|
||||
@@ -67,7 +63,7 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
|
||||
cargo install cargo-insta
|
||||
```
|
||||
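Once installed, new or changed snapshots from a test run can be reviewed interactively. The following invocation is a typical example rather than a required step:

```shell
# Review pending snapshot changes and accept or reject them one by one.
cargo insta review
```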
|
||||
And you'll need pre-commit to run some validation checks:
|
||||
and pre-commit to run some validation checks:
|
||||
|
||||
```shell
|
||||
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
|
||||
@@ -80,22 +76,12 @@ when making a commit:
|
||||
pre-commit install
|
||||
```
|
||||
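To exercise the hooks without making a commit (shown here only as an illustration, not a required step), you can run them against the whole repository:

```shell
# Run every configured pre-commit hook over all files in the repository.
pre-commit run --all-files
```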
|
||||
We recommend [nextest](https://nexte.st/) to run Ruff's test suite (via `cargo nextest run`),
|
||||
though it's not strictly necessary:
|
||||
|
||||
```shell
|
||||
cargo install cargo-nextest --locked
|
||||
```
|
||||
|
||||
Throughout this guide, any usages of `cargo test` can be replaced with `cargo nextest run`,
|
||||
if you choose to install `nextest`.
|
||||
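For example, a typical invocation might look like the following (the crate filter is just an illustration):

```shell
# Run the full test suite with nextest instead of `cargo test`.
cargo nextest run

# Or limit the run to a single crate, e.g. the linter.
cargo nextest run -p ruff_linter
```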
|
||||
### Development
|
||||
|
||||
After cloning the repository, run Ruff locally from the repository root with:
|
||||
|
||||
```shell
|
||||
cargo run -p ruff -- check /path/to/file.py --no-cache
|
||||
cargo run -p ruff_cli -- check /path/to/file.py --no-cache
|
||||
```
|
||||
|
||||
Prior to opening a pull request, ensure that your code has been auto-formatted,
|
||||
@@ -134,7 +120,7 @@ At the time of writing, the repository includes the following crates:
|
||||
If you're working on a rule, this is the crate for you.
|
||||
- `crates/ruff_benchmark`: binary crate for running micro-benchmarks.
|
||||
- `crates/ruff_cache`: library crate for caching lint results.
|
||||
- `crates/ruff`: binary crate containing Ruff's command-line interface.
|
||||
- `crates/ruff_cli`: binary crate containing Ruff's command-line interface.
|
||||
- `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g.,
|
||||
`cargo dev generate-all`), see the [`cargo dev`](#cargo-dev) section below.
|
||||
- `crates/ruff_diagnostics`: library crate for the rule-independent abstractions in the lint
|
||||
@@ -245,7 +231,7 @@ Once you've completed the code for the rule itself, you can define tests with th
|
||||
For example, if you're adding a new rule named `E402`, you would run:
|
||||
|
||||
```shell
|
||||
cargo run -p ruff -- check crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py --no-cache --preview --select E402
|
||||
cargo run -p ruff_cli -- check crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402
|
||||
```
|
||||
|
||||
**Note:** Only a subset of rules are enabled by default. When testing a new rule, ensure that
|
||||
@@ -266,7 +252,7 @@ Once you've completed the code for the rule itself, you can define tests with th
|
||||
|
||||
Ruff's user-facing settings live in a few different places.
|
||||
|
||||
First, the command-line options are defined via the `Args` struct in `crates/ruff/src/args.rs`.
|
||||
First, the command-line options are defined via the `Args` struct in `crates/ruff_cli/src/args.rs`.
|
||||
|
||||
Second, the `pyproject.toml` options are defined in `crates/ruff_workspace/src/options.rs` (via the
|
||||
`Options` struct), `crates/ruff_workspace/src/configuration.rs` (via the `Configuration` struct),
|
||||
@@ -309,14 +295,14 @@ To preview any changes to the documentation locally:
|
||||
|
||||
```shell
|
||||
# For contributors.
|
||||
mkdocs serve -f mkdocs.public.yml
|
||||
mkdocs serve -f mkdocs.generated.yml
|
||||
|
||||
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
|
||||
mkdocs serve -f mkdocs.insiders.yml
|
||||
```
|
||||
|
||||
The documentation should then be available locally at
|
||||
[http://127.0.0.1:8000/ruff/](http://127.0.0.1:8000/ruff/).
|
||||
[http://127.0.0.1:8000/docs/](http://127.0.0.1:8000/docs/).
|
||||
|
||||
## Release Process
|
||||
|
||||
@@ -329,41 +315,23 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
||||
|
||||
### Creating a new release
|
||||
|
||||
We use an experimental in-house tool for managing releases.
|
||||
|
||||
1. Install `rooster`: `pip install git+https://github.com/zanieb/rooster@main`
|
||||
1. Run `rooster release`; this command will:
|
||||
- Generate a changelog entry in `CHANGELOG.md`
|
||||
- Update versions in `pyproject.toml` and `Cargo.toml`
|
||||
- Update references to versions in the `README.md` and documentation
|
||||
1. The changelog should then be editorialized for consistency
|
||||
- Often labels will be missing from pull requests, so entries will need to be manually organized into the proper section
|
||||
- Changes should be edited to be user-facing descriptions, avoiding internal details
|
||||
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
|
||||
1. Run `cargo check`. This should update the lock file with new versions.
|
||||
1. Create a pull request with the changelog and version updates
|
||||
1. Update the version with `rg 0.0.269 --files-with-matches | xargs sed -i 's/0.0.269/0.0.270/g'`
|
||||
1. Update `BREAKING_CHANGES.md`
|
||||
1. Create a PR with the version and `BREAKING_CHANGES.md` updated
|
||||
1. Merge the PR
|
||||
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yaml) with:
|
||||
- The new version number (without starting `v`)
|
||||
- The commit hash of the merged release pull request on `main`
|
||||
1. Run the release workflow with the version number (without starting `v`) as input. Make sure
|
||||
main has your merged PR as last commit
|
||||
1. The release workflow will do the following:
|
||||
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
|
||||
uploaded anything, so you can restart after pushing a fix.
|
||||
1. Upload to PyPI.
|
||||
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
|
||||
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
|
||||
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/charliermarsh/ruff/issues/4468)).
|
||||
1. Attach artifacts to draft GitHub release
|
||||
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
|
||||
downstream jobs manually if needed.
|
||||
1. Publish the GitHub release
|
||||
1. Open the draft release in the GitHub release section
|
||||
1. Copy the changelog for the release into the GitHub release
|
||||
- See previous releases for formatting of section headers
|
||||
1. Generate the contributor list with `rooster contributors` and add to the release notes
|
||||
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
|
||||
1. An update is needed if `git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff (see the example after this list).
|
||||
1. Once run successfully, you should follow the link in the output to create a PR.
|
||||
1. Create release notes in GitHub UI and promote from draft.
|
||||
1. If needed, [update the schemastore](https://github.com/charliermarsh/ruff/blob/main/scripts/update_schemastore.py)
|
||||
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
|
||||
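As a concrete illustration of the schemastore check mentioned above (the tag names are placeholders; substitute the previous and new release tags):

```shell
# A non-empty diff here means the schemastore entry should be updated.
git diff v0.1.5 v0.1.6 -- ruff.schema.json
```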
|
||||
## Ecosystem CI
|
||||
@@ -384,14 +352,9 @@ See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/pyt
|
||||
We have several ways of benchmarking and profiling Ruff:
|
||||
|
||||
- Our main performance benchmark comparing Ruff with other tools on the CPython codebase
|
||||
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
|
||||
- Microbenchmarks which the linter or the formatter on individual files. There run on pull requests.
|
||||
- Profiling the linter on either the microbenchmarks or entire projects
|
||||
|
||||
> \[!NOTE\]
|
||||
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
|
||||
> applications, like web browsers). You may also want to switch your CPU to a "performance"
|
||||
> mode, if it exists, especially when benchmarking short-lived processes.
|
||||
|
||||
### CPython Benchmark
|
||||
|
||||
First, clone [CPython](https://github.com/python/cpython). It's a large and diverse Python codebase,
|
||||
@@ -537,10 +500,10 @@ if the benchmark improved/regressed compared to that baseline.
|
||||
|
||||
```shell
|
||||
# Run once on your "baseline" code
|
||||
cargo bench -p ruff_benchmark -- --save-baseline=main
|
||||
cargo benchmark --save-baseline=main
|
||||
|
||||
# Then iterate with
|
||||
cargo bench -p ruff_benchmark -- --baseline=main
|
||||
cargo benchmark --baseline=main
|
||||
```
|
||||
|
||||
#### PR Summary
|
||||
@@ -550,10 +513,10 @@ This is useful to illustrate the improvements of a PR.
|
||||
|
||||
```shell
|
||||
# On main
|
||||
cargo bench -p ruff_benchmark -- --save-baseline=main
|
||||
cargo benchmark --save-baseline=main
|
||||
|
||||
# After applying your changes
|
||||
cargo bench -p ruff_benchmark -- --save-baseline=pr
|
||||
cargo benchmark --save-baseline=pr
|
||||
|
||||
critcmp main pr
|
||||
```
|
||||
@@ -566,10 +529,10 @@ cargo install critcmp
|
||||
|
||||
#### Tips
|
||||
|
||||
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
|
||||
to only run the lexer benchmarks.
|
||||
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
|
||||
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
|
||||
- Use `cargo benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark linter/pydantic`
|
||||
to only run the pydantic tests.
|
||||
- Use `cargo benchmark --quiet` for a more cleaned up output (without statistical relevance)
|
||||
- Use `cargo benchmark --quick` to get faster results (more prone to noise)
|
||||
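Combining the tips above, a quick, filtered run might look like this (the `lexer` filter is only an example):

```shell
# Run just the lexer benchmarks with reduced output and a quicker (noisier) measurement.
cargo bench -p ruff_benchmark lexer -- --quiet --quick
```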
|
||||
### Profiling Projects
|
||||
|
||||
@@ -580,10 +543,10 @@ examples.
|
||||
|
||||
#### Linux
|
||||
|
||||
Install `perf` and build `ruff_benchmark` with the `profiling` profile and then run it with perf
|
||||
Install `perf` and build `ruff_benchmark` with the `release-debug` profile and then run it with perf
|
||||
|
||||
```shell
|
||||
cargo bench -p ruff_benchmark --no-run --profile=profiling && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=profiling -- --profile-time=1
|
||||
cargo bench -p ruff_benchmark --no-run --profile=release-debug && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=release-debug -- --profile-time=1
|
||||
```
|
||||
|
||||
You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to
|
||||
@@ -591,8 +554,8 @@ gather enough samples for a good flamegraph (change the 999, the sample rate, an
|
||||
of checks, to your liking)
|
||||
|
||||
```shell
|
||||
cargo build --bin ruff_dev --profile=profiling
|
||||
perf record -g -F 999 target/profiling/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
|
||||
cargo build --bin ruff_dev --profile=release-debug
|
||||
perf record -g -F 999 target/release-debug/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
|
||||
```
|
||||
|
||||
Then convert the recorded profile
|
||||
@@ -622,7 +585,7 @@ cargo install cargo-instruments
|
||||
Then run the profiler with
|
||||
|
||||
```shell
|
||||
cargo instruments -t time --bench linter --profile profiling -p ruff_benchmark -- --profile-time=1
|
||||
cargo instruments -t time --bench linter --profile release-debug -p ruff_benchmark -- --profile-time=1
|
||||
```
|
||||
|
||||
- `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc`
|
||||
@@ -637,7 +600,7 @@ Otherwise, follow the instructions from the linux section.
|
||||
utils with it:
|
||||
|
||||
- `cargo dev print-ast <file>`: Print the AST of a python file using the
|
||||
[RustPython parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser) that is
|
||||
[RustPython parser](https://github.com/astral-sh/RustPython-Parser/tree/main/parser) that is
|
||||
mainly used in Ruff. For `if True: pass # comment`, you can see the syntax tree, the byte offsets
|
||||
for start and stop of each node and also how the `:` token, the comment and whitespace are not
|
||||
represented anymore:
|
||||
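The AST output itself is not reproduced here, but it can be regenerated locally with an invocation along these lines (the file path is arbitrary):

```shell
# Write the example snippet to a scratch file and print its AST.
echo 'if True: pass  # comment' > /tmp/example.py
cargo dev print-ast /tmp/example.py
```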
|
||||
1218 Cargo.lock (generated)
File diff suppressed because it is too large
165 Cargo.toml
@@ -12,152 +12,51 @@ authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
||||
license = "MIT"
|
||||
|
||||
[workspace.dependencies]
|
||||
aho-corasick = { version = "1.1.2" }
|
||||
annotate-snippets = { version = "0.9.2", features = ["color"] }
|
||||
anyhow = { version = "1.0.80" }
|
||||
argfile = { version = "0.1.6" }
|
||||
assert_cmd = { version = "2.0.13" }
|
||||
bincode = { version = "1.3.3" }
|
||||
bitflags = { version = "2.4.1" }
|
||||
bstr = { version = "1.9.1" }
|
||||
cachedir = { version = "0.3.1" }
|
||||
chrono = { version = "0.4.34", default-features = false, features = ["clock"] }
|
||||
clap = { version = "4.5.1", features = ["derive"] }
|
||||
clap_complete_command = { version = "0.5.1" }
|
||||
clearscreen = { version = "2.0.0" }
|
||||
codspeed-criterion-compat = { version = "2.4.0", default-features = false }
|
||||
colored = { version = "2.1.0" }
|
||||
configparser = { version = "3.0.3" }
|
||||
console_error_panic_hook = { version = "0.1.7" }
|
||||
console_log = { version = "1.0.0" }
|
||||
countme = { version = "3.0.1" }
|
||||
criterion = { version = "0.5.1", default-features = false }
|
||||
dirs = { version = "5.0.0" }
|
||||
drop_bomb = { version = "0.1.5" }
|
||||
env_logger = { version = "0.10.1" }
|
||||
fern = { version = "0.6.1" }
|
||||
filetime = { version = "0.2.23" }
|
||||
fs-err = { version = "2.11.0" }
|
||||
anyhow = { version = "1.0.69" }
|
||||
bitflags = { version = "2.3.1" }
|
||||
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
|
||||
clap = { version = "4.4.7", features = ["derive"] }
|
||||
colored = { version = "2.0.0" }
|
||||
filetime = { version = "0.2.20" }
|
||||
glob = { version = "0.3.1" }
|
||||
globset = { version = "0.4.14" }
|
||||
hexf-parse = { version = "0.2.1" }
|
||||
ignore = { version = "0.4.22" }
|
||||
imara-diff = { version = "0.1.5" }
|
||||
imperative = { version = "1.0.4" }
|
||||
indicatif = { version = "0.17.8" }
|
||||
indoc = { version = "2.0.4" }
|
||||
insta = { version = "1.35.1", feature = ["filters", "glob"] }
|
||||
insta-cmd = { version = "0.4.0" }
|
||||
is-macro = { version = "0.3.5" }
|
||||
is-wsl = { version = "0.4.0" }
|
||||
itertools = { version = "0.12.1" }
|
||||
js-sys = { version = "0.3.67" }
|
||||
lalrpop-util = { version = "0.20.0", default-features = false }
|
||||
lexical-parse-float = { version = "0.8.0", features = ["format"] }
|
||||
globset = { version = "0.4.10" }
|
||||
ignore = { version = "0.4.20" }
|
||||
insta = { version = "1.34.0", feature = ["filters", "glob"] }
|
||||
is-macro = { version = "0.3.0" }
|
||||
itertools = { version = "0.11.0" }
|
||||
libcst = { version = "1.1.0", default-features = false }
|
||||
log = { version = "0.4.17" }
|
||||
memchr = { version = "2.7.1" }
|
||||
mimalloc = { version = "0.1.39" }
|
||||
natord = { version = "1.0.9" }
|
||||
notify = { version = "6.1.1" }
|
||||
once_cell = { version = "1.19.0" }
|
||||
memchr = { version = "2.6.4" }
|
||||
once_cell = { version = "1.17.1" }
|
||||
path-absolutize = { version = "3.1.1" }
|
||||
pathdiff = { version = "0.2.1" }
|
||||
pep440_rs = { version = "0.4.0", features = ["serde"] }
|
||||
pretty_assertions = "1.3.0"
|
||||
proc-macro2 = { version = "1.0.78" }
|
||||
pyproject-toml = { version = "0.9.0" }
|
||||
quick-junit = { version = "0.3.5" }
|
||||
proc-macro2 = { version = "1.0.69" }
|
||||
quote = { version = "1.0.23" }
|
||||
rand = { version = "0.8.5" }
|
||||
rayon = { version = "1.8.1" }
|
||||
regex = { version = "1.10.2" }
|
||||
result-like = { version = "0.5.0" }
|
||||
rustc-hash = { version = "1.1.0" }
|
||||
schemars = { version = "0.8.16" }
|
||||
seahash = { version = "4.1.0" }
|
||||
semver = { version = "1.0.22" }
|
||||
serde = { version = "1.0.197", features = ["derive"] }
|
||||
serde-wasm-bindgen = { version = "0.6.4" }
|
||||
serde_json = { version = "1.0.113" }
|
||||
serde_test = { version = "1.0.152" }
|
||||
serde_with = { version = "3.6.0", default-features = false, features = ["macros"] }
|
||||
schemars = { version = "0.8.15" }
|
||||
serde = { version = "1.0.190", features = ["derive"] }
|
||||
serde_json = { version = "1.0.107" }
|
||||
shellexpand = { version = "3.0.0" }
|
||||
shlex = { version = "1.3.0" }
|
||||
similar = { version = "2.4.0", features = ["inline"] }
|
||||
smallvec = { version = "1.13.1" }
|
||||
similar = { version = "2.3.0", features = ["inline"] }
|
||||
smallvec = { version = "1.11.1" }
|
||||
static_assertions = "1.1.0"
|
||||
strum = { version = "0.25.0", features = ["strum_macros"] }
|
||||
strum_macros = { version = "0.25.3" }
|
||||
syn = { version = "2.0.51" }
|
||||
tempfile = { version = "3.9.0" }
|
||||
test-case = { version = "3.3.1" }
|
||||
thiserror = { version = "1.0.57" }
|
||||
tikv-jemallocator = { version = "0.5.0" }
|
||||
toml = { version = "0.8.9" }
|
||||
syn = { version = "2.0.38" }
|
||||
test-case = { version = "3.2.1" }
|
||||
thiserror = { version = "1.0.50" }
|
||||
toml = { version = "0.7.8" }
|
||||
tracing = { version = "0.1.40" }
|
||||
tracing-indicatif = { version = "0.3.6" }
|
||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
||||
typed-arena = { version = "2.0.2" }
|
||||
unic-ucd-category = { version = "0.9" }
|
||||
tracing-indicatif = { version = "0.3.4" }
|
||||
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
|
||||
unicode-ident = { version = "1.0.12" }
|
||||
unicode_names2 = { version = "1.2.0" }
|
||||
unicode-width = { version = "0.1.11" }
|
||||
unicode_names2 = { version = "1.2.1" }
|
||||
ureq = { version = "2.9.6" }
|
||||
url = { version = "2.5.0" }
|
||||
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
|
||||
walkdir = { version = "2.3.2" }
|
||||
wasm-bindgen = { version = "0.2.84" }
|
||||
wasm-bindgen-test = { version = "0.3.40" }
|
||||
wild = { version = "2" }
|
||||
|
||||
[workspace.lints.rust]
|
||||
unsafe_code = "warn"
|
||||
unreachable_pub = "warn"
|
||||
|
||||
[workspace.lints.clippy]
|
||||
pedantic = { level = "warn", priority = -2 }
|
||||
# Allowed pedantic lints
|
||||
char_lit_as_u8 = "allow"
|
||||
collapsible_else_if = "allow"
|
||||
collapsible_if = "allow"
|
||||
implicit_hasher = "allow"
|
||||
match_same_arms = "allow"
|
||||
missing_errors_doc = "allow"
|
||||
missing_panics_doc = "allow"
|
||||
module_name_repetitions = "allow"
|
||||
must_use_candidate = "allow"
|
||||
similar_names = "allow"
|
||||
too_many_lines = "allow"
|
||||
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
|
||||
needless_raw_string_hashes = "allow"
|
||||
# Disallowed restriction lints
|
||||
print_stdout = "warn"
|
||||
print_stderr = "warn"
|
||||
dbg_macro = "warn"
|
||||
empty_drop = "warn"
|
||||
empty_structs_with_brackets = "warn"
|
||||
exit = "warn"
|
||||
get_unwrap = "warn"
|
||||
rc_buffer = "warn"
|
||||
rc_mutex = "warn"
|
||||
rest_pat_in_fully_bound_structs = "warn"
|
||||
uuid = { version = "1.5.0", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
|
||||
wsl = { version = "0.1.0" }
|
||||
|
||||
[profile.release]
|
||||
# Note that we set these explicitly, and these values
|
||||
# were chosen based on a trade-off between compile times
|
||||
# and runtime performance[1].
|
||||
#
|
||||
# [1]: https://github.com/astral-sh/ruff/pull/9031
|
||||
lto = "thin"
|
||||
codegen-units = 16
|
||||
|
||||
# Some crates don't change as much but benefit more from
|
||||
# more expensive optimization passes, so we selectively
|
||||
# decrease codegen-units in some cases.
|
||||
[profile.release.package.ruff_python_parser]
|
||||
codegen-units = 1
|
||||
[profile.release.package.ruff_python_ast]
|
||||
lto = "fat"
|
||||
codegen-units = 1
|
||||
|
||||
[profile.dev.package.insta]
|
||||
@@ -171,8 +70,8 @@ opt-level = 3
|
||||
[profile.dev.package.ruff_python_parser]
|
||||
opt-level = 1
|
||||
|
||||
# Use the `--profile profiling` flag to show symbols in release mode.
|
||||
# e.g. `cargo build --profile profiling`
|
||||
[profile.profiling]
|
||||
# Use the `--profile release-debug` flag to show symbols in release mode.
|
||||
# e.g. `cargo build --profile release-debug`
|
||||
[profile.release-debug]
|
||||
inherits = "release"
|
||||
debug = 1
|
||||
|
||||
38 Dockerfile
@@ -1,38 +0,0 @@
|
||||
FROM --platform=$BUILDPLATFORM ubuntu as build
|
||||
ENV HOME="/root"
|
||||
WORKDIR $HOME
|
||||
|
||||
RUN apt update && apt install -y build-essential curl python3-venv
|
||||
|
||||
# Setup zig as cross compiling linker
|
||||
RUN python3 -m venv $HOME/.venv
|
||||
RUN .venv/bin/pip install cargo-zigbuild
|
||||
ENV PATH="$HOME/.venv/bin:$PATH"
|
||||
|
||||
# Install rust
|
||||
ARG TARGETPLATFORM
|
||||
RUN case "$TARGETPLATFORM" in \
|
||||
"linux/arm64") echo "aarch64-unknown-linux-musl" > rust_target.txt ;; \
|
||||
"linux/amd64") echo "x86_64-unknown-linux-musl" > rust_target.txt ;; \
|
||||
*) exit 1 ;; \
|
||||
esac
|
||||
# Update rustup whenever we bump the rust version
|
||||
COPY rust-toolchain.toml rust-toolchain.toml
|
||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
|
||||
ENV PATH="$HOME/.cargo/bin:$PATH"
|
||||
# Installs the correct toolchain version from rust-toolchain.toml and then the musl target
|
||||
RUN rustup target add $(cat rust_target.txt)
|
||||
|
||||
# Build
|
||||
COPY crates crates
|
||||
COPY Cargo.toml Cargo.toml
|
||||
COPY Cargo.lock Cargo.lock
|
||||
RUN cargo zigbuild --bin ruff --target $(cat rust_target.txt) --release
|
||||
RUN cp target/$(cat rust_target.txt)/release/ruff /ruff
|
||||
# TODO: Optimize binary size, with a version that also works when cross compiling
|
||||
# RUN strip --strip-all /ruff
|
||||
|
||||
FROM scratch
|
||||
COPY --from=build /ruff /ruff
|
||||
WORKDIR /io
|
||||
ENTRYPOINT ["/ruff"]
|
||||
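For reference, an image built from this (now-removed) Dockerfile could be used along the following lines; the image tag and the mounted path are arbitrary choices, not part of the file above:

```shell
# Build the image, then lint the current directory, mounted at the
# container's working directory (/io) and passed to the /ruff entrypoint.
docker build -t ruff .
docker run --rm -v "$(pwd)":/io ruff check .
```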
50 README.md
@@ -8,7 +8,7 @@
|
||||
[](https://pypi.python.org/pypi/ruff)
|
||||
[](https://github.com/astral-sh/ruff/actions)
|
||||
|
||||
[**Discord**](https://discord.com/invite/astral-sh) | [**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
|
||||
[**Discord**](https://discord.gg/c9MhzV8aU5) | [**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
|
||||
|
||||
An extremely fast Python linter and code formatter, written in Rust.
|
||||
|
||||
@@ -54,7 +54,7 @@ Ruff is extremely actively developed and used in major open-source projects like
|
||||
- [Pandas](https://github.com/pandas-dev/pandas)
|
||||
- [SciPy](https://github.com/scipy/scipy)
|
||||
|
||||
...and [many more](#whos-using-ruff).
|
||||
...and many more.
|
||||
|
||||
Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
|
||||
or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
|
||||
@@ -148,14 +148,14 @@ ruff format @arguments.txt # Format using an input file, treating its
|
||||
Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff-pre-commit`](https://github.com/astral-sh/ruff-pre-commit):
|
||||
|
||||
```yaml
|
||||
# Run the Ruff linter.
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.3.0
|
||||
rev: v0.1.3
|
||||
hooks:
|
||||
# Run the linter.
|
||||
# Run the Ruff linter.
|
||||
- id: ruff
|
||||
args: [ --fix ]
|
||||
# Run the formatter.
|
||||
# Run the Ruff formatter.
|
||||
- id: ruff-format
|
||||
```
|
||||
|
||||
@@ -172,7 +172,7 @@ jobs:
|
||||
ruff:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
- uses: chartboost/ruff-action@v1
|
||||
```
|
||||
|
||||
@@ -194,25 +194,20 @@ exclude = [
|
||||
".git",
|
||||
".git-rewrite",
|
||||
".hg",
|
||||
".ipynb_checkpoints",
|
||||
".mypy_cache",
|
||||
".nox",
|
||||
".pants.d",
|
||||
".pyenv",
|
||||
".pytest_cache",
|
||||
".pytype",
|
||||
".ruff_cache",
|
||||
".svn",
|
||||
".tox",
|
||||
".venv",
|
||||
".vscode",
|
||||
"__pypackages__",
|
||||
"_build",
|
||||
"buck-out",
|
||||
"build",
|
||||
"dist",
|
||||
"node_modules",
|
||||
"site-packages",
|
||||
"venv",
|
||||
]
|
||||
|
||||
@@ -341,14 +336,14 @@ For a complete enumeration of the supported rules, see [_Rules_](https://docs.as
|
||||
Contributions are welcome and highly appreciated. To get started, check out the
|
||||
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
|
||||
|
||||
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
|
||||
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
|
||||
## Support
|
||||
|
||||
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
|
||||
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
|
||||
|
||||
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
|
||||
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
|
||||
## Acknowledgements
|
||||
|
||||
@@ -378,16 +373,14 @@ Ruff is released under the MIT license.
|
||||
|
||||
Ruff is used by a number of major open-source projects and companies, including:
|
||||
|
||||
- [Albumentations](https://github.com/albumentations-team/albumentations)
|
||||
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
|
||||
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
|
||||
- [Apache Airflow](https://github.com/apache/airflow)
|
||||
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
|
||||
- [Babel](https://github.com/python-babel/babel)
|
||||
- Benchling ([Refac](https://github.com/benchling/refac))
|
||||
- [Babel](https://github.com/python-babel/babel)
|
||||
- [Bokeh](https://github.com/bokeh/bokeh)
|
||||
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
||||
- CERN ([Indico](https://getindico.io/))
|
||||
- [DVC](https://github.com/iterative/dvc)
|
||||
- [Dagger](https://github.com/dagger/dagger)
|
||||
- [Dagster](https://github.com/dagster-io/dagster)
|
||||
@@ -396,17 +389,15 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- [Gradio](https://github.com/gradio-app/gradio)
|
||||
- [Great Expectations](https://github.com/great-expectations/great_expectations)
|
||||
- [HTTPX](https://github.com/encode/httpx)
|
||||
- [Hatch](https://github.com/pypa/hatch)
|
||||
- [Home Assistant](https://github.com/home-assistant/core)
|
||||
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
|
||||
[Datasets](https://github.com/huggingface/datasets),
|
||||
[Diffusers](https://github.com/huggingface/diffusers))
|
||||
- [Hatch](https://github.com/pypa/hatch)
|
||||
- [Home Assistant](https://github.com/home-assistant/core)
|
||||
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
||||
- [Ibis](https://github.com/ibis-project/ibis)
|
||||
- [ivy](https://github.com/unifyai/ivy)
|
||||
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||
- [LangChain](https://github.com/hwchase17/langchain)
|
||||
- [Litestar](https://litestar.dev/)
|
||||
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
|
||||
- Matrix ([Synapse](https://github.com/matrix-org/synapse))
|
||||
- [MegaLinter](https://github.com/oxsecurity/megalinter)
|
||||
@@ -420,36 +411,29 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
|
||||
- [Neon](https://github.com/neondatabase/neon)
|
||||
- [NoneBot](https://github.com/nonebot/nonebot2)
|
||||
- [NumPyro](https://github.com/pyro-ppl/numpyro)
|
||||
- [ONNX](https://github.com/onnx/onnx)
|
||||
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
||||
- [PDM](https://github.com/pdm-project/pdm)
|
||||
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
|
||||
- [Pandas](https://github.com/pandas-dev/pandas)
|
||||
- [Pillow](https://github.com/python-pillow/Pillow)
|
||||
- [Poetry](https://github.com/python-poetry/poetry)
|
||||
- [Polars](https://github.com/pola-rs/polars)
|
||||
- [PostHog](https://github.com/PostHog/posthog)
|
||||
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
|
||||
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
|
||||
- [PyMC](https://github.com/pymc-devs/pymc/)
|
||||
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
|
||||
- [pytest](https://github.com/pytest-dev/pytest)
|
||||
- [PyTorch](https://github.com/pytorch/pytorch)
|
||||
- [Pydantic](https://github.com/pydantic/pydantic)
|
||||
- [Pylint](https://github.com/PyCQA/pylint)
|
||||
- [PyVista](https://github.com/pyvista/pyvista)
|
||||
- [Reflex](https://github.com/reflex-dev/reflex)
|
||||
- [River](https://github.com/online-ml/river)
|
||||
- [Rippling](https://rippling.com)
|
||||
- [Robyn](https://github.com/sansyrox/robyn)
|
||||
- [Saleor](https://github.com/saleor/saleor)
|
||||
- Scale AI ([Launch SDK](https://github.com/scaleapi/launch-python-client))
|
||||
- [SciPy](https://github.com/scipy/scipy)
|
||||
- Snowflake ([SnowCLI](https://github.com/Snowflake-Labs/snowcli))
|
||||
- [Saleor](https://github.com/saleor/saleor)
|
||||
- [SciPy](https://github.com/scipy/scipy)
|
||||
- [Sphinx](https://github.com/sphinx-doc/sphinx)
|
||||
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
|
||||
- [Starlette](https://github.com/encode/starlette)
|
||||
- [Litestar](https://litestar.dev/)
|
||||
- [The Algorithms](https://github.com/TheAlgorithms/Python)
|
||||
- [Vega-Altair](https://github.com/altair-viz/altair)
|
||||
- WordPress ([Openverse](https://github.com/WordPress/openverse))
|
||||
@@ -465,7 +449,7 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
|
||||
### Show Your Support
|
||||
|
||||
If you're using Ruff, consider adding the Ruff badge to your project's `README.md`:
|
||||
If you're using Ruff, consider adding the Ruff badge to project's `README.md`:
|
||||
|
||||
```md
|
||||
[](https://github.com/astral-sh/ruff)
|
||||
@@ -491,6 +475,6 @@ MIT
|
||||
|
||||
<div align="center">
|
||||
<a target="_blank" href="https://astral.sh" style="background:none">
|
||||
<img src="https://raw.githubusercontent.com/astral-sh/ruff/main/assets/svg/Astral.svg" alt="Made by Astral">
|
||||
<img src="https://raw.githubusercontent.com/astral-sh/ruff/main/assets/svg/Astral.svg">
|
||||
</a>
|
||||
</div>
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
[files]
|
||||
# https://github.com/crate-ci/typos/issues/868
|
||||
extend-exclude = ["**/resources/**/*", "**/snapshots/**/*"]
|
||||
extend-exclude = ["resources", "snapshots"]
|
||||
|
||||
[default.extend-words]
|
||||
hel = "hel"
|
||||
|
||||
36 crates/flake8_to_ruff/Cargo.toml (normal file)
@@ -0,0 +1,36 @@
|
||||
[package]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.1.3"
|
||||
description = """
|
||||
Convert Flake8 configuration files to Ruff configuration files.
|
||||
"""
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
homepage = { workspace = true }
|
||||
documentation = { workspace = true }
|
||||
repository = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
ruff_linter = { path = "../ruff_linter", default-features = false }
|
||||
ruff_workspace = { path = "../ruff_workspace" }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
clap = { workspace = true }
|
||||
colored = { workspace = true }
|
||||
configparser = { version = "3.0.2" }
|
||||
itertools = { workspace = true }
|
||||
log = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
pep440_rs = { version = "0.3.12", features = ["serde"] }
|
||||
regex = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
strum = { workspace = true }
|
||||
strum_macros = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = "1.3.0"
|
||||
99 crates/flake8_to_ruff/README.md (normal file)
@@ -0,0 +1,99 @@
|
||||
# flake8-to-ruff
|
||||
|
||||
Convert existing Flake8 configuration files (`setup.cfg`, `tox.ini`, or `.flake8`) for use with
|
||||
[Ruff](https://github.com/astral-sh/ruff).
|
||||
|
||||
Generates a Ruff-compatible `pyproject.toml` section.
|
||||
|
||||
## Installation and Usage
|
||||
|
||||
### Installation
|
||||
|
||||
Available as [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) on PyPI:
|
||||
|
||||
```shell
|
||||
pip install flake8-to-ruff
|
||||
```
|
||||
|
||||
### Usage
|
||||
|
||||
To run `flake8-to-ruff`:
|
||||
|
||||
```shell
|
||||
flake8-to-ruff path/to/setup.cfg
|
||||
flake8-to-ruff path/to/tox.ini
|
||||
flake8-to-ruff path/to/.flake8
|
||||
```
|
||||
|
||||
`flake8-to-ruff` will print the relevant `pyproject.toml` sections to standard output, like so:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
exclude = [
|
||||
'.svn',
|
||||
'CVS',
|
||||
'.bzr',
|
||||
'.hg',
|
||||
'.git',
|
||||
'__pycache__',
|
||||
'.tox',
|
||||
'.idea',
|
||||
'.mypy_cache',
|
||||
'.venv',
|
||||
'node_modules',
|
||||
'_state_machine.py',
|
||||
'test_fstring.py',
|
||||
'bad_coding2.py',
|
||||
'badsyntax_*.py',
|
||||
]
|
||||
select = [
|
||||
'A',
|
||||
'E',
|
||||
'F',
|
||||
'Q',
|
||||
]
|
||||
ignore = []
|
||||
|
||||
[tool.ruff.flake8-quotes]
|
||||
inline-quotes = 'single'
|
||||
|
||||
[tool.ruff.pep8-naming]
|
||||
ignore-names = [
|
||||
'foo',
|
||||
'bar',
|
||||
]
|
||||
```
|
||||
|
||||
### Plugins
|
||||
|
||||
`flake8-to-ruff` will attempt to infer any activated plugins based on the settings provided in your
|
||||
configuration file.
|
||||
|
||||
For example, if your `.flake8` file includes a `docstring-convention` property, `flake8-to-ruff`
|
||||
will enable the appropriate [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
|
||||
checks.
|
||||
|
||||
Alternatively, you can manually specify plugins on the command-line:
|
||||
|
||||
```shell
|
||||
flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
|
||||
```
|
||||
|
||||
## Limitations
|
||||
|
||||
1. Ruff only supports a subset of the Flake8 configuration options. `flake8-to-ruff` will warn on and
|
||||
ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
|
||||
configuration options that don't exist in Flake8.)
|
||||
1. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
|
||||
codes from unsupported plugins. (See the
|
||||
[documentation](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8) for the complete
|
||||
list of supported plugins.)
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome and hugely appreciated. To get started, check out the
|
||||
[contributing guidelines](https://github.com/astral-sh/ruff/blob/main/CONTRIBUTING.md).
|
||||
65 crates/flake8_to_ruff/examples/cryptography/pyproject.toml (normal file)
@@ -0,0 +1,65 @@
|
||||
[build-system]
|
||||
requires = [
|
||||
# The minimum setuptools version is specific to the PEP 517 backend,
|
||||
# and may be stricter than the version required in `setup.cfg`
|
||||
"setuptools>=40.6.0,!=60.9.0",
|
||||
"wheel",
|
||||
# Must be kept in sync with the `install_requirements` in `setup.cfg`
|
||||
"cffi>=1.12; platform_python_implementation != 'PyPy'",
|
||||
"setuptools-rust>=0.11.4",
|
||||
]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[tool.black]
|
||||
line-length = 79
|
||||
target-version = ["py36"]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
addopts = "-r s --capture=no --strict-markers --benchmark-disable"
|
||||
markers = [
|
||||
"skip_fips: this test is not executed in FIPS mode",
|
||||
"supported: parametrized test requiring only_if and skip_message",
|
||||
]
|
||||
|
||||
[tool.mypy]
|
||||
show_error_codes = true
|
||||
check_untyped_defs = true
|
||||
no_implicit_reexport = true
|
||||
warn_redundant_casts = true
|
||||
warn_unused_ignores = true
|
||||
warn_unused_configs = true
|
||||
strict_equality = true
|
||||
|
||||
[[tool.mypy.overrides]]
|
||||
module = [
|
||||
"pretend"
|
||||
]
|
||||
ignore_missing_imports = true
|
||||
|
||||
[tool.coverage.run]
|
||||
branch = true
|
||||
relative_files = true
|
||||
source = [
|
||||
"cryptography",
|
||||
"tests/",
|
||||
]
|
||||
|
||||
[tool.coverage.paths]
|
||||
source = [
|
||||
"src/cryptography",
|
||||
"*.tox/*/lib*/python*/site-packages/cryptography",
|
||||
"*.tox\\*\\Lib\\site-packages\\cryptography",
|
||||
"*.tox/pypy/site-packages/cryptography",
|
||||
]
|
||||
tests =[
|
||||
"tests/",
|
||||
"*tests\\",
|
||||
]
|
||||
|
||||
[tool.coverage.report]
|
||||
exclude_lines = [
|
||||
"@abc.abstractmethod",
|
||||
"@abc.abstractproperty",
|
||||
"@typing.overload",
|
||||
"if typing.TYPE_CHECKING",
|
||||
]
|
||||
91 crates/flake8_to_ruff/examples/cryptography/setup.cfg (normal file)
@@ -0,0 +1,91 @@
|
||||
[metadata]
|
||||
name = cryptography
|
||||
version = attr: cryptography.__version__
|
||||
description = cryptography is a package which provides cryptographic recipes and primitives to Python developers.
|
||||
long_description = file: README.rst
|
||||
long_description_content_type = text/x-rst
|
||||
license = BSD-3-Clause OR Apache-2.0
|
||||
url = https://github.com/pyca/cryptography
|
||||
author = The Python Cryptographic Authority and individual contributors
|
||||
author_email = cryptography-dev@python.org
|
||||
project_urls =
|
||||
Documentation=https://cryptography.io/
|
||||
Source=https://github.com/pyca/cryptography/
|
||||
Issues=https://github.com/pyca/cryptography/issues
|
||||
Changelog=https://cryptography.io/en/latest/changelog/
|
||||
classifiers =
|
||||
Development Status :: 5 - Production/Stable
|
||||
Intended Audience :: Developers
|
||||
License :: OSI Approved :: Apache Software License
|
||||
License :: OSI Approved :: BSD License
|
||||
Natural Language :: English
|
||||
Operating System :: MacOS :: MacOS X
|
||||
Operating System :: POSIX
|
||||
Operating System :: POSIX :: BSD
|
||||
Operating System :: POSIX :: Linux
|
||||
Operating System :: Microsoft :: Windows
|
||||
Programming Language :: Python
|
||||
Programming Language :: Python :: 3
|
||||
Programming Language :: Python :: 3 :: Only
|
||||
Programming Language :: Python :: 3.6
|
||||
Programming Language :: Python :: 3.7
|
||||
Programming Language :: Python :: 3.8
|
||||
Programming Language :: Python :: 3.9
|
||||
Programming Language :: Python :: 3.10
|
||||
Programming Language :: Python :: 3.11
|
||||
Programming Language :: Python :: Implementation :: CPython
|
||||
Programming Language :: Python :: Implementation :: PyPy
|
||||
Topic :: Security :: Cryptography
|
||||
|
||||
[options]
|
||||
python_requires = >=3.6
|
||||
include_package_data = True
|
||||
zip_safe = False
|
||||
package_dir =
|
||||
=src
|
||||
packages = find:
|
||||
# `install_requires` must be kept in sync with `pyproject.toml`
|
||||
install_requires =
|
||||
cffi >=1.12
|
||||
|
||||
[options.packages.find]
|
||||
where = src
|
||||
exclude =
|
||||
_cffi_src
|
||||
_cffi_src.*
|
||||
|
||||
[options.extras_require]
|
||||
test =
|
||||
pytest>=6.2.0
|
||||
pytest-benchmark
|
||||
pytest-cov
|
||||
pytest-subtests
|
||||
pytest-xdist
|
||||
pretend
|
||||
iso8601
|
||||
pytz
|
||||
hypothesis>=1.11.4,!=3.79.2
|
||||
docs =
|
||||
sphinx >= 1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0
|
||||
sphinx_rtd_theme
|
||||
docstest =
|
||||
pyenchant >= 1.6.11
|
||||
twine >= 1.12.0
|
||||
sphinxcontrib-spelling >= 4.0.1
|
||||
sdist =
|
||||
setuptools_rust >= 0.11.4
|
||||
pep8test =
|
||||
black
|
||||
flake8
|
||||
flake8-import-order
|
||||
pep8-naming
|
||||
# This extra is for OpenSSH private keys that use bcrypt KDF
|
||||
# Versions: v3.1.3 - ignore_few_rounds, v3.1.5 - abi3
|
||||
ssh =
|
||||
bcrypt >= 3.1.5
|
||||
|
||||
[flake8]
|
||||
ignore = E203,E211,W503,W504,N818
|
||||
exclude = .tox,*.egg,.git,_build,.hypothesis
|
||||
select = E,W,F,N,I
|
||||
application-import-names = cryptography,cryptography_vectors,tests
|
||||
19  crates/flake8_to_ruff/examples/jupyterhub.ini  Normal file
@@ -0,0 +1,19 @@
[flake8]
# Ignore style and complexity
# E: style errors
# W: style warnings
# C: complexity
# D: docstring warnings (unused pydocstyle extension)
# F841: local variable assigned but never used
ignore = E, C, W, D, F841
builtins = c, get_config
exclude =
    .cache,
    .github,
    docs,
    jupyterhub/alembic*,
    onbuild,
    scripts,
    share,
    tools,
    setup.py
43  crates/flake8_to_ruff/examples/manim.ini  Normal file
@@ -0,0 +1,43 @@
[flake8]
# Exclude the grpc generated code
exclude = ./manim/grpc/gen/*
max-complexity = 15
max-line-length = 88
statistics = True
# Prevents some flake8-rst-docstrings errors
rst-roles = attr,class,func,meth,mod,obj,ref,doc,exc
rst-directives = manim, SEEALSO, seealso
docstring-convention=numpy

select = A,A00,B,B9,C4,C90,D,E,F,F,PT,RST,SIM,W

# General Compatibility
extend-ignore = E203, W503, D202, D212, D213, D404

    # Misc
    F401, F403, F405, F841, E501, E731, E402, F811, F821,

    # Plug-in: flake8-builtins
    A001, A002, A003,

    # Plug-in: flake8-bugbear
    B006, B007, B008, B009, B010, B903, B950,

    # Plug-in: flake8-simplify
    SIM105, SIM106, SIM119,

    # Plug-in: flake8-comprehensions
    C901

    # Plug-in: flake8-pytest-style
    PT001, PT004, PT006, PT011, PT018, PT022, PT023,

    # Plug-in: flake8-docstrings
    D100, D101, D102, D103, D104, D105, D106, D107,
    D200, D202, D204, D205, D209,
    D301,
    D400, D401, D402, D403, D405, D406, D407, D409, D411, D412, D414,

    # Plug-in: flake8-rst-docstrings
    RST201, RST203, RST210, RST212, RST213, RST215,
    RST301, RST303,
36  crates/flake8_to_ruff/examples/poetry.ini  Normal file
@@ -0,0 +1,36 @@
[flake8]
min_python_version = 3.7.0
max-line-length = 88
ban-relative-imports = true
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
format-greedy = 1
inline-quotes = double
enable-extensions = TC, TC1
type-checking-strict = true
eradicate-whitelist-extend = ^-.*;
extend-ignore =
    # E203: Whitespace before ':' (pycqa/pycodestyle#373)
    E203,
    # SIM106: Handle error-cases first
    SIM106,
    # ANN101: Missing type annotation for self in method
    ANN101,
    # ANN102: Missing type annotation for cls in classmethod
    ANN102,
    # PIE781: assign-and-return
    PIE781,
    # PIE798 no-unnecessary-class: Consider using a module for namespacing instead
    PIE798,
per-file-ignores =
    # TC002: Move third-party import '...' into a type-checking block
    __init__.py:TC002,
    # ANN201: Missing return type annotation for public function
    tests/test_*:ANN201
    tests/**/test_*:ANN201
extend-exclude =
    # Frozen and not subject to change in this repo:
    get-poetry.py,
    install-poetry.py,
    # External to the project's coding standards:
    tests/fixtures/*,
    tests/**/fixtures/*,
19  crates/flake8_to_ruff/examples/python-discord.ini  Normal file
@@ -0,0 +1,19 @@
[flake8]
max-line-length=120
docstring-convention=all
import-order-style=pycharm
application_import_names=bot,tests
exclude=.cache,.venv,.git,constants.py
extend-ignore=
    B311,W503,E226,S311,T000,E731
    # Missing Docstrings
    D100,D104,D105,D107,
    # Docstring Whitespace
    D203,D212,D214,D215,
    # Docstring Quotes
    D301,D302,
    # Docstring Content
    D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
    # Type Annotations
    ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
per-file-ignores=tests/*:D,ANN
6  crates/flake8_to_ruff/examples/requests.ini  Normal file
@@ -0,0 +1,6 @@
[flake8]
ignore = E203, E501, W503
per-file-ignores =
    requests/__init__.py:E402, F401
    requests/compat.py:E402, F401
    tests/compat.py:F401
34  crates/flake8_to_ruff/pyproject.toml  Normal file
@@ -0,0 +1,34 @@
[project]
name = "flake8-to-ruff"
keywords = ["automation", "flake8", "pycodestyle", "pyflakes", "pylint", "clippy"]
classifiers = [
    "Development Status :: 3 - Alpha",
    "Environment :: Console",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3 :: Only",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: Software Development :: Quality Assurance",
]
author = "Charlie Marsh"
author_email = "charlie.r.marsh@gmail.com"
description = "Convert existing Flake8 configuration to Ruff."
requires-python = ">=3.7"

[project.urls]
repository = "https://github.com/astral-sh/ruff#subdirectory=crates/flake8_to_ruff"

[build-system]
requires = ["maturin>=1.0,<2.0"]
build-backend = "maturin"

[tool.maturin]
bindings = "bin"
strip = true
13  crates/flake8_to_ruff/src/black.rs  Normal file
@@ -0,0 +1,13 @@
//! Extract Black configuration settings from a pyproject.toml.

use ruff_linter::line_width::LineLength;
use ruff_linter::settings::types::PythonVersion;
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Black {
    #[serde(alias = "line-length", alias = "line_length")]
    pub(crate) line_length: Option<LineLength>,
    #[serde(alias = "target-version", alias = "target_version")]
    pub(crate) target_version: Option<Vec<PythonVersion>>,
}
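The `serde` aliases on `Black` are what let `[tool.black]` keys be spelled with either dashes or underscores. A minimal, self-contained sketch of the same pattern, not part of this changeset: it assumes the `toml` and `serde` crates and uses plain `usize`/`String` fields in place of ruff's `LineLength`/`PythonVersion`.

```rust
// Sketch only: simplified stand-ins for ruff's `LineLength` / `PythonVersion`.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct BlackSketch {
    #[serde(alias = "line-length", alias = "line_length")]
    line_length: Option<usize>,
    #[serde(alias = "target-version", alias = "target_version")]
    target_version: Option<Vec<String>>,
}

fn main() {
    // Both spellings of the key deserialize into the same field.
    let dashed: BlackSketch = toml::from_str("line-length = 79").unwrap();
    let underscored: BlackSketch = toml::from_str("line_length = 79").unwrap();
    assert_eq!(dashed.line_length, Some(79));
    assert_eq!(underscored.line_length, Some(79));

    // Lists parse the same way, regardless of key spelling.
    let targets: BlackSketch = toml::from_str(r#"target-version = ["py36", "py37"]"#).unwrap();
    assert_eq!(targets.target_version.unwrap().len(), 2);
}
```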
687  crates/flake8_to_ruff/src/converter.rs  Normal file
@@ -0,0 +1,687 @@
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::str::FromStr;
|
||||
|
||||
use itertools::Itertools;
|
||||
|
||||
use ruff_linter::line_width::LineLength;
|
||||
use ruff_linter::registry::Linter;
|
||||
use ruff_linter::rule_selector::RuleSelector;
|
||||
use ruff_linter::rules::flake8_pytest_style::types::{
|
||||
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
|
||||
};
|
||||
use ruff_linter::rules::flake8_quotes::settings::Quote;
|
||||
use ruff_linter::rules::flake8_tidy_imports::settings::Strictness;
|
||||
use ruff_linter::rules::pydocstyle::settings::Convention;
|
||||
use ruff_linter::settings::types::PythonVersion;
|
||||
use ruff_linter::settings::DEFAULT_SELECTORS;
|
||||
use ruff_linter::warn_user;
|
||||
use ruff_workspace::options::{
|
||||
Flake8AnnotationsOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, Flake8ErrMsgOptions,
|
||||
Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, LintCommonOptions,
|
||||
LintOptions, McCabeOptions, Options, Pep8NamingOptions, PydocstyleOptions,
|
||||
};
|
||||
use ruff_workspace::pyproject::Pyproject;
|
||||
|
||||
use super::external_config::ExternalConfig;
|
||||
use super::plugin::Plugin;
|
||||
use super::{parser, plugin};
|
||||
|
||||
pub(crate) fn convert(
|
||||
config: &HashMap<String, HashMap<String, Option<String>>>,
|
||||
external_config: &ExternalConfig,
|
||||
plugins: Option<Vec<Plugin>>,
|
||||
) -> Pyproject {
|
||||
// Extract the Flake8 section.
|
||||
let flake8 = config
|
||||
.get("flake8")
|
||||
.expect("Unable to find flake8 section in INI file");
|
||||
|
||||
// Extract all referenced rule code prefixes, to power plugin inference.
|
||||
let mut referenced_codes: HashSet<RuleSelector> = HashSet::default();
|
||||
for (key, value) in flake8 {
|
||||
if let Some(value) = value {
|
||||
match key.as_str() {
|
||||
"select" | "ignore" | "extend-select" | "extend_select" | "extend-ignore"
|
||||
| "extend_ignore" => {
|
||||
referenced_codes.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||
}
|
||||
"per-file-ignores" | "per_file_ignores" => {
|
||||
if let Ok(per_file_ignores) =
|
||||
parser::parse_files_to_codes_mapping(value.as_ref())
|
||||
{
|
||||
for (_, codes) in parser::collect_per_file_ignores(per_file_ignores) {
|
||||
referenced_codes.extend(codes);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Infer plugins, if not provided.
|
||||
let plugins = plugins.unwrap_or_else(|| {
|
||||
let from_options = plugin::infer_plugins_from_options(flake8);
|
||||
if !from_options.is_empty() {
|
||||
#[allow(clippy::print_stderr)]
|
||||
{
|
||||
eprintln!("Inferred plugins from settings: {from_options:#?}");
|
||||
}
|
||||
}
|
||||
let from_codes = plugin::infer_plugins_from_codes(&referenced_codes);
|
||||
if !from_codes.is_empty() {
|
||||
#[allow(clippy::print_stderr)]
|
||||
{
|
||||
eprintln!("Inferred plugins from referenced codes: {from_codes:#?}");
|
||||
}
|
||||
}
|
||||
from_options.into_iter().chain(from_codes).collect()
|
||||
});
|
||||
|
||||
// Check if the user has specified a `select`. If not, we'll add our own
|
||||
// default `select`, and populate it based on user plugins.
|
||||
let mut select = flake8
|
||||
.get("select")
|
||||
.and_then(|value| {
|
||||
value
|
||||
.as_ref()
|
||||
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
|
||||
})
|
||||
.unwrap_or_else(|| resolve_select(&plugins));
|
||||
let mut ignore: HashSet<RuleSelector> = flake8
|
||||
.get("ignore")
|
||||
.and_then(|value| {
|
||||
value
|
||||
.as_ref()
|
||||
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
// Parse each supported option.
|
||||
let mut options = Options::default();
|
||||
let mut lint_options = LintCommonOptions::default();
|
||||
let mut flake8_annotations = Flake8AnnotationsOptions::default();
|
||||
let mut flake8_bugbear = Flake8BugbearOptions::default();
|
||||
let mut flake8_builtins = Flake8BuiltinsOptions::default();
|
||||
let mut flake8_errmsg = Flake8ErrMsgOptions::default();
|
||||
let mut flake8_pytest_style = Flake8PytestStyleOptions::default();
|
||||
let mut flake8_quotes = Flake8QuotesOptions::default();
|
||||
let mut flake8_tidy_imports = Flake8TidyImportsOptions::default();
|
||||
let mut mccabe = McCabeOptions::default();
|
||||
let mut pep8_naming = Pep8NamingOptions::default();
|
||||
let mut pydocstyle = PydocstyleOptions::default();
|
||||
for (key, value) in flake8 {
|
||||
if let Some(value) = value {
|
||||
match key.as_str() {
|
||||
// flake8
|
||||
"builtins" => {
|
||||
options.builtins = Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
"max-line-length" | "max_line_length" => match LineLength::from_str(value) {
|
||||
Ok(line_length) => options.line_length = Some(line_length),
|
||||
Err(e) => {
|
||||
warn_user!("Unable to parse '{key}' property: {e}");
|
||||
}
|
||||
},
|
||||
"select" => {
|
||||
// No-op (handled above).
|
||||
select.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||
}
|
||||
"ignore" => {
|
||||
// No-op (handled above).
|
||||
}
|
||||
"extend-select" | "extend_select" => {
|
||||
// Unlike Flake8, use a single explicit `select`.
|
||||
select.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||
}
|
||||
"extend-ignore" | "extend_ignore" => {
|
||||
// Unlike Flake8, use a single explicit `ignore`.
|
||||
ignore.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||
}
|
||||
"exclude" => {
|
||||
options.exclude = Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
"extend-exclude" | "extend_exclude" => {
|
||||
options.extend_exclude = Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
"per-file-ignores" | "per_file_ignores" => {
|
||||
match parser::parse_files_to_codes_mapping(value.as_ref()) {
|
||||
Ok(per_file_ignores) => {
|
||||
lint_options.per_file_ignores =
|
||||
Some(parser::collect_per_file_ignores(per_file_ignores));
|
||||
}
|
||||
Err(e) => {
|
||||
warn_user!("Unable to parse '{key}' property: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
// flake8-bugbear
|
||||
"extend-immutable-calls" | "extend_immutable_calls" => {
|
||||
flake8_bugbear.extend_immutable_calls =
|
||||
Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
// flake8-builtins
|
||||
"builtins-ignorelist" | "builtins_ignorelist" => {
|
||||
flake8_builtins.builtins_ignorelist =
|
||||
Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
// flake8-annotations
|
||||
"suppress-none-returning" | "suppress_none_returning" => {
|
||||
match parser::parse_bool(value.as_ref()) {
|
||||
Ok(bool) => flake8_annotations.suppress_none_returning = Some(bool),
|
||||
Err(e) => {
|
||||
warn_user!("Unable to parse '{key}' property: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
"suppress-dummy-args" | "suppress_dummy_args" => {
|
||||
match parser::parse_bool(value.as_ref()) {
|
||||
Ok(bool) => flake8_annotations.suppress_dummy_args = Some(bool),
|
||||
Err(e) => {
|
||||
warn_user!("Unable to parse '{key}' property: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
"mypy-init-return" | "mypy_init_return" => {
|
||||
match parser::parse_bool(value.as_ref()) {
|
||||
Ok(bool) => flake8_annotations.mypy_init_return = Some(bool),
|
||||
Err(e) => {
|
||||
warn_user!("Unable to parse '{key}' property: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
"allow-star-arg-any" | "allow_star_arg_any" => {
|
||||
match parser::parse_bool(value.as_ref()) {
|
||||
Ok(bool) => flake8_annotations.allow_star_arg_any = Some(bool),
|
||||
Err(e) => {
|
||||
warn_user!("Unable to parse '{key}' property: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
// flake8-quotes
|
||||
"quotes" | "inline-quotes" | "inline_quotes" => match value.trim() {
|
||||
"'" | "single" => flake8_quotes.inline_quotes = Some(Quote::Single),
|
||||
"\"" | "double" => flake8_quotes.inline_quotes = Some(Quote::Double),
|
||||
_ => {
|
||||
warn_user!("Unexpected '{key}' value: {value}");
|
||||
}
|
||||
},
|
||||
"multiline-quotes" | "multiline_quotes" => match value.trim() {
|
||||
"'" | "single" => flake8_quotes.multiline_quotes = Some(Quote::Single),
|
||||
"\"" | "double" => flake8_quotes.multiline_quotes = Some(Quote::Double),
|
||||
_ => {
|
||||
warn_user!("Unexpected '{key}' value: {value}");
|
||||
}
|
||||
},
|
||||
"docstring-quotes" | "docstring_quotes" => match value.trim() {
|
||||
"'" | "single" => flake8_quotes.docstring_quotes = Some(Quote::Single),
|
||||
"\"" | "double" => flake8_quotes.docstring_quotes = Some(Quote::Double),
|
||||
_ => {
|
||||
warn_user!("Unexpected '{key}' value: {value}");
|
||||
}
|
||||
},
|
||||
"avoid-escape" | "avoid_escape" => match parser::parse_bool(value.as_ref()) {
|
||||
Ok(bool) => flake8_quotes.avoid_escape = Some(bool),
|
||||
Err(e) => {
|
||||
warn_user!("Unable to parse '{key}' property: {e}");
|
||||
}
|
||||
},
|
||||
// pep8-naming
|
||||
"ignore-names" | "ignore_names" => {
|
||||
pep8_naming.ignore_names = Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
"classmethod-decorators" | "classmethod_decorators" => {
|
||||
pep8_naming.classmethod_decorators =
|
||||
Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
"staticmethod-decorators" | "staticmethod_decorators" => {
|
||||
pep8_naming.staticmethod_decorators =
|
||||
Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
// flake8-tidy-imports
|
||||
"ban-relative-imports" | "ban_relative_imports" => match value.trim() {
|
||||
"true" => flake8_tidy_imports.ban_relative_imports = Some(Strictness::All),
|
||||
"parents" => {
|
||||
flake8_tidy_imports.ban_relative_imports = Some(Strictness::Parents);
|
||||
}
|
||||
_ => {
|
||||
warn_user!("Unexpected '{key}' value: {value}");
|
||||
}
|
||||
},
|
||||
// flake8-docstrings
|
||||
"docstring-convention" => match value.trim() {
|
||||
"google" => pydocstyle.convention = Some(Convention::Google),
|
||||
"numpy" => pydocstyle.convention = Some(Convention::Numpy),
|
||||
"pep257" => pydocstyle.convention = Some(Convention::Pep257),
|
||||
"all" => pydocstyle.convention = None,
|
||||
_ => {
|
||||
warn_user!("Unexpected '{key}' value: {value}");
|
||||
}
|
||||
},
|
||||
// mccabe
|
||||
"max-complexity" | "max_complexity" => match value.parse::<usize>() {
|
||||
Ok(max_complexity) => mccabe.max_complexity = Some(max_complexity),
|
||||
Err(e) => {
|
||||
warn_user!("Unable to parse '{key}' property: {e}");
|
||||
}
|
||||
},
|
||||
// flake8-errmsg
|
||||
"errmsg-max-string-length" | "errmsg_max_string_length" => {
|
||||
match value.parse::<usize>() {
|
||||
Ok(max_string_length) => {
|
||||
flake8_errmsg.max_string_length = Some(max_string_length);
|
||||
}
|
||||
Err(e) => {
|
||||
warn_user!("Unable to parse '{key}' property: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
// flake8-pytest-style
|
||||
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
|
||||
match parser::parse_bool(value.as_ref()) {
|
||||
Ok(bool) => flake8_pytest_style.fixture_parentheses = Some(!bool),
|
||||
Err(e) => {
|
||||
warn_user!("Unable to parse '{key}' property: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
|
||||
match value.trim() {
|
||||
"csv" => {
|
||||
flake8_pytest_style.parametrize_names_type =
|
||||
Some(ParametrizeNameType::Csv);
|
||||
}
|
||||
"tuple" => {
|
||||
flake8_pytest_style.parametrize_names_type =
|
||||
Some(ParametrizeNameType::Tuple);
|
||||
}
|
||||
"list" => {
|
||||
flake8_pytest_style.parametrize_names_type =
|
||||
Some(ParametrizeNameType::List);
|
||||
}
|
||||
_ => {
|
||||
warn_user!("Unexpected '{key}' value: {value}");
|
||||
}
|
||||
}
|
||||
}
|
||||
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
|
||||
match value.trim() {
|
||||
"tuple" => {
|
||||
flake8_pytest_style.parametrize_values_type =
|
||||
Some(ParametrizeValuesType::Tuple);
|
||||
}
|
||||
"list" => {
|
||||
flake8_pytest_style.parametrize_values_type =
|
||||
Some(ParametrizeValuesType::List);
|
||||
}
|
||||
_ => {
|
||||
warn_user!("Unexpected '{key}' value: {value}");
|
||||
}
|
||||
}
|
||||
}
|
||||
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
|
||||
match value.trim() {
|
||||
"tuple" => {
|
||||
flake8_pytest_style.parametrize_values_row_type =
|
||||
Some(ParametrizeValuesRowType::Tuple);
|
||||
}
|
||||
"list" => {
|
||||
flake8_pytest_style.parametrize_values_row_type =
|
||||
Some(ParametrizeValuesRowType::List);
|
||||
}
|
||||
_ => {
|
||||
warn_user!("Unexpected '{key}' value: {value}");
|
||||
}
|
||||
}
|
||||
}
|
||||
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
|
||||
flake8_pytest_style.raises_require_match_for =
|
||||
Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
|
||||
match parser::parse_bool(value.as_ref()) {
|
||||
Ok(bool) => flake8_pytest_style.mark_parentheses = Some(!bool),
|
||||
Err(e) => {
|
||||
warn_user!("Unable to parse '{key}' property: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
// Unknown
|
||||
_ => {
|
||||
warn_user!("Skipping unsupported property: {}", key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Deduplicate and sort.
|
||||
lint_options.select = Some(
|
||||
select
|
||||
.into_iter()
|
||||
.sorted_by_key(RuleSelector::prefix_and_code)
|
||||
.collect(),
|
||||
);
|
||||
lint_options.ignore = Some(
|
||||
ignore
|
||||
.into_iter()
|
||||
.sorted_by_key(RuleSelector::prefix_and_code)
|
||||
.collect(),
|
||||
);
|
||||
if flake8_annotations != Flake8AnnotationsOptions::default() {
|
||||
lint_options.flake8_annotations = Some(flake8_annotations);
|
||||
}
|
||||
if flake8_bugbear != Flake8BugbearOptions::default() {
|
||||
lint_options.flake8_bugbear = Some(flake8_bugbear);
|
||||
}
|
||||
if flake8_builtins != Flake8BuiltinsOptions::default() {
|
||||
lint_options.flake8_builtins = Some(flake8_builtins);
|
||||
}
|
||||
if flake8_errmsg != Flake8ErrMsgOptions::default() {
|
||||
lint_options.flake8_errmsg = Some(flake8_errmsg);
|
||||
}
|
||||
if flake8_pytest_style != Flake8PytestStyleOptions::default() {
|
||||
lint_options.flake8_pytest_style = Some(flake8_pytest_style);
|
||||
}
|
||||
if flake8_quotes != Flake8QuotesOptions::default() {
|
||||
lint_options.flake8_quotes = Some(flake8_quotes);
|
||||
}
|
||||
if flake8_tidy_imports != Flake8TidyImportsOptions::default() {
|
||||
lint_options.flake8_tidy_imports = Some(flake8_tidy_imports);
|
||||
}
|
||||
if mccabe != McCabeOptions::default() {
|
||||
lint_options.mccabe = Some(mccabe);
|
||||
}
|
||||
if pep8_naming != Pep8NamingOptions::default() {
|
||||
lint_options.pep8_naming = Some(pep8_naming);
|
||||
}
|
||||
if pydocstyle != PydocstyleOptions::default() {
|
||||
lint_options.pydocstyle = Some(pydocstyle);
|
||||
}
|
||||
|
||||
// Extract any settings from the existing `pyproject.toml`.
|
||||
if let Some(black) = &external_config.black {
|
||||
if let Some(line_length) = &black.line_length {
|
||||
options.line_length = Some(*line_length);
|
||||
}
|
||||
|
||||
if let Some(target_version) = &black.target_version {
|
||||
if let Some(target_version) = target_version.iter().min() {
|
||||
options.target_version = Some(*target_version);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(isort) = &external_config.isort {
|
||||
if let Some(src_paths) = &isort.src_paths {
|
||||
match options.src.as_mut() {
|
||||
Some(src) => {
|
||||
src.extend_from_slice(src_paths);
|
||||
}
|
||||
None => {
|
||||
options.src = Some(src_paths.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(project) = &external_config.project {
|
||||
if let Some(requires_python) = &project.requires_python {
|
||||
if options.target_version.is_none() {
|
||||
options.target_version =
|
||||
PythonVersion::get_minimum_supported_version(requires_python);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if lint_options != LintCommonOptions::default() {
|
||||
options.lint = Some(LintOptions {
|
||||
common: lint_options,
|
||||
..LintOptions::default()
|
||||
});
|
||||
}
|
||||
|
||||
// Create the pyproject.toml.
|
||||
Pyproject::new(options)
|
||||
}
|
||||
|
||||
/// Resolve the set of enabled `RuleSelector` values for the given
|
||||
/// plugins.
|
||||
fn resolve_select(plugins: &[Plugin]) -> HashSet<RuleSelector> {
|
||||
let mut select: HashSet<_> = DEFAULT_SELECTORS.iter().cloned().collect();
|
||||
select.extend(plugins.iter().map(|p| Linter::from(p).into()));
|
||||
select
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::collections::HashMap;
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::Result;
|
||||
use itertools::Itertools;
|
||||
use pep440_rs::VersionSpecifiers;
|
||||
|
||||
use pretty_assertions::assert_eq;
|
||||
use ruff_linter::line_width::LineLength;
|
||||
use ruff_linter::registry::Linter;
|
||||
use ruff_linter::rule_selector::RuleSelector;
|
||||
use ruff_linter::rules::flake8_quotes;
|
||||
use ruff_linter::rules::pydocstyle::settings::Convention;
|
||||
use ruff_linter::settings::types::PythonVersion;
|
||||
use ruff_workspace::options::{
|
||||
Flake8QuotesOptions, LintCommonOptions, LintOptions, Options, PydocstyleOptions,
|
||||
};
|
||||
use ruff_workspace::pyproject::Pyproject;
|
||||
|
||||
use crate::converter::DEFAULT_SELECTORS;
|
||||
use crate::pep621::Project;
|
||||
use crate::ExternalConfig;
|
||||
|
||||
use super::super::plugin::Plugin;
|
||||
use super::convert;
|
||||
|
||||
fn lint_default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> LintCommonOptions {
|
||||
LintCommonOptions {
|
||||
ignore: Some(vec![]),
|
||||
select: Some(
|
||||
DEFAULT_SELECTORS
|
||||
.iter()
|
||||
.cloned()
|
||||
.chain(plugins)
|
||||
.sorted_by_key(RuleSelector::prefix_and_code)
|
||||
.collect(),
|
||||
),
|
||||
..LintCommonOptions::default()
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_converts_empty() {
|
||||
let actual = convert(
|
||||
&HashMap::from([("flake8".to_string(), HashMap::default())]),
|
||||
&ExternalConfig::default(),
|
||||
None,
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
lint: Some(LintOptions {
|
||||
common: lint_default_options([]),
|
||||
..LintOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_converts_dashes() {
|
||||
let actual = convert(
|
||||
&HashMap::from([(
|
||||
"flake8".to_string(),
|
||||
HashMap::from([("max-line-length".to_string(), Some("100".to_string()))]),
|
||||
)]),
|
||||
&ExternalConfig::default(),
|
||||
Some(vec![]),
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
line_length: Some(LineLength::try_from(100).unwrap()),
|
||||
lint: Some(LintOptions {
|
||||
common: lint_default_options([]),
|
||||
..LintOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_converts_underscores() {
|
||||
let actual = convert(
|
||||
&HashMap::from([(
|
||||
"flake8".to_string(),
|
||||
HashMap::from([("max_line_length".to_string(), Some("100".to_string()))]),
|
||||
)]),
|
||||
&ExternalConfig::default(),
|
||||
Some(vec![]),
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
line_length: Some(LineLength::try_from(100).unwrap()),
|
||||
lint: Some(LintOptions {
|
||||
common: lint_default_options([]),
|
||||
..LintOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_ignores_parse_errors() {
|
||||
let actual = convert(
|
||||
&HashMap::from([(
|
||||
"flake8".to_string(),
|
||||
HashMap::from([("max_line_length".to_string(), Some("abc".to_string()))]),
|
||||
)]),
|
||||
&ExternalConfig::default(),
|
||||
Some(vec![]),
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
lint: Some(LintOptions {
|
||||
common: lint_default_options([]),
|
||||
..LintOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_converts_plugin_options() {
|
||||
let actual = convert(
|
||||
&HashMap::from([(
|
||||
"flake8".to_string(),
|
||||
HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
|
||||
)]),
|
||||
&ExternalConfig::default(),
|
||||
Some(vec![]),
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
lint: Some(LintOptions {
|
||||
common: LintCommonOptions {
|
||||
flake8_quotes: Some(Flake8QuotesOptions {
|
||||
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
|
||||
multiline_quotes: None,
|
||||
docstring_quotes: None,
|
||||
avoid_escape: None,
|
||||
}),
|
||||
..lint_default_options([])
|
||||
},
|
||||
..LintOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_converts_docstring_conventions() {
|
||||
let actual = convert(
|
||||
&HashMap::from([(
|
||||
"flake8".to_string(),
|
||||
HashMap::from([(
|
||||
"docstring-convention".to_string(),
|
||||
Some("numpy".to_string()),
|
||||
)]),
|
||||
)]),
|
||||
&ExternalConfig::default(),
|
||||
Some(vec![Plugin::Flake8Docstrings]),
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
lint: Some(LintOptions {
|
||||
common: LintCommonOptions {
|
||||
pydocstyle: Some(PydocstyleOptions {
|
||||
convention: Some(Convention::Numpy),
|
||||
ignore_decorators: None,
|
||||
property_decorators: None,
|
||||
}),
|
||||
..lint_default_options([Linter::Pydocstyle.into()])
|
||||
},
|
||||
..LintOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_infers_plugins_if_omitted() {
|
||||
let actual = convert(
|
||||
&HashMap::from([(
|
||||
"flake8".to_string(),
|
||||
HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
|
||||
)]),
|
||||
&ExternalConfig::default(),
|
||||
None,
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
lint: Some(LintOptions {
|
||||
common: LintCommonOptions {
|
||||
flake8_quotes: Some(Flake8QuotesOptions {
|
||||
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
|
||||
multiline_quotes: None,
|
||||
docstring_quotes: None,
|
||||
avoid_escape: None,
|
||||
}),
|
||||
..lint_default_options([Linter::Flake8Quotes.into()])
|
||||
},
|
||||
..LintOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_converts_project_requires_python() -> Result<()> {
|
||||
let actual = convert(
|
||||
&HashMap::from([("flake8".to_string(), HashMap::default())]),
|
||||
&ExternalConfig {
|
||||
project: Some(&Project {
|
||||
requires_python: Some(VersionSpecifiers::from_str(">=3.8.16, <3.11")?),
|
||||
}),
|
||||
..ExternalConfig::default()
|
||||
},
|
||||
Some(vec![]),
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
target_version: Some(PythonVersion::Py38),
|
||||
lint: Some(LintOptions {
|
||||
common: lint_default_options([]),
|
||||
..LintOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
10  crates/flake8_to_ruff/src/external_config.rs  Normal file
@@ -0,0 +1,10 @@
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;

#[derive(Default)]
pub(crate) struct ExternalConfig<'a> {
    pub(crate) black: Option<&'a Black>,
    pub(crate) isort: Option<&'a Isort>,
    pub(crate) project: Option<&'a Project>,
}
10  crates/flake8_to_ruff/src/isort.rs  Normal file
@@ -0,0 +1,10 @@
//! Extract isort configuration settings from a pyproject.toml.

use serde::{Deserialize, Serialize};

/// The [isort configuration](https://pycqa.github.io/isort/docs/configuration/config_files.html).
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Isort {
    #[serde(alias = "src-paths", alias = "src_paths")]
    pub(crate) src_paths: Option<Vec<String>>,
}
80  crates/flake8_to_ruff/src/main.rs  Normal file
@@ -0,0 +1,80 @@
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.

mod black;
mod converter;
mod external_config;
mod isort;
mod parser;
mod pep621;
mod plugin;
mod pyproject;

use std::path::PathBuf;

use anyhow::Result;
use clap::Parser;
use configparser::ini::Ini;

use crate::converter::convert;
use crate::external_config::ExternalConfig;
use crate::plugin::Plugin;
use crate::pyproject::parse;
use ruff_linter::logging::{set_up_logging, LogLevel};

#[derive(Parser)]
#[command(
    about = "Convert existing Flake8 configuration to Ruff.",
    long_about = None
)]
struct Args {
    /// Path to the Flake8 configuration file (e.g., `setup.cfg`, `tox.ini`, or
    /// `.flake8`).
    #[arg(required = true)]
    file: PathBuf,
    /// Optional path to a `pyproject.toml` file, used to ensure compatibility
    /// with Black.
    #[arg(long)]
    pyproject: Option<PathBuf>,
    /// List of plugins to enable.
    #[arg(long, value_delimiter = ',')]
    plugin: Option<Vec<Plugin>>,
}

fn main() -> Result<()> {
    set_up_logging(&LogLevel::Default)?;

    let args = Args::parse();

    // Read the INI file.
    let mut ini = Ini::new_cs();
    ini.set_multiline(true);
    let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;

    // Read the pyproject.toml file.
    let pyproject = args.pyproject.map(parse).transpose()?;
    let external_config = pyproject
        .as_ref()
        .and_then(|pyproject| pyproject.tool.as_ref())
        .map(|tool| ExternalConfig {
            black: tool.black.as_ref(),
            isort: tool.isort.as_ref(),
            ..Default::default()
        })
        .unwrap_or_default();
    let external_config = ExternalConfig {
        project: pyproject
            .as_ref()
            .and_then(|pyproject| pyproject.project.as_ref()),
        ..external_config
    };

    // Create Ruff's pyproject.toml section.
    let pyproject = convert(&config, &external_config, args.plugin);

    #[allow(clippy::print_stdout)]
    {
        println!("{}", toml::to_string_pretty(&pyproject)?);
    }

    Ok(())
}
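The CLI above leans on `configparser` for the Flake8 INI parsing. A minimal sketch of that loading step in isolation, not part of this changeset: it assumes the `configparser` crate as used in `main.rs`, and the config content and temporary path are illustrative only.

```rust
use configparser::ini::Ini;

fn main() {
    // Write a tiny Flake8-style config to a temporary file (illustrative content only).
    let path = std::env::temp_dir().join("flake8_to_ruff_example.cfg");
    std::fs::write(
        &path,
        "[flake8]\nmax-line-length = 100\nextend-ignore =\n    E203,\n    SIM106\n",
    )
    .expect("failed to write example config");

    // Case-sensitive keys plus multi-line values, mirroring the setup in `main.rs`.
    let mut ini = Ini::new_cs();
    ini.set_multiline(true);
    let config = ini.load(&path).expect("failed to parse INI");

    // `load` yields the nested map shape that `convert` consumes:
    // section -> key -> optional value.
    let flake8 = &config["flake8"];
    println!("max-line-length = {:?}", flake8["max-line-length"]);
    println!("extend-ignore = {:?}", flake8["extend-ignore"]);
}
```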
391  crates/flake8_to_ruff/src/parser.rs  Normal file
@@ -0,0 +1,391 @@
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::{bail, Result};
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_linter::settings::types::PatternPrefixPair;
|
||||
use ruff_linter::{warn_user, RuleSelector};
|
||||
|
||||
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
|
||||
|
||||
/// Parse a comma-separated list of `RuleSelector` values (e.g.,
|
||||
/// "F401,E501").
|
||||
pub(crate) fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
|
||||
let mut codes: Vec<RuleSelector> = vec![];
|
||||
for code in COMMA_SEPARATED_LIST_RE.split(value) {
|
||||
let code = code.trim();
|
||||
if code.is_empty() {
|
||||
continue;
|
||||
}
|
||||
if let Ok(code) = RuleSelector::from_str(code) {
|
||||
codes.push(code);
|
||||
} else {
|
||||
warn_user!("Unsupported prefix code: {code}");
|
||||
}
|
||||
}
|
||||
codes
|
||||
}
|
||||
|
||||
/// Parse a comma-separated list of strings (e.g., "__init__.py,__main__.py").
|
||||
pub(crate) fn parse_strings(value: &str) -> Vec<String> {
|
||||
COMMA_SEPARATED_LIST_RE
|
||||
.split(value)
|
||||
.map(str::trim)
|
||||
.filter(|part| !part.is_empty())
|
||||
.map(String::from)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Parse a boolean.
|
||||
pub(crate) fn parse_bool(value: &str) -> Result<bool> {
|
||||
match value.trim() {
|
||||
"true" => Ok(true),
|
||||
"false" => Ok(false),
|
||||
_ => bail!("Unexpected boolean value: {value}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Token {
|
||||
token_name: TokenType,
|
||||
src: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
enum TokenType {
|
||||
Code,
|
||||
File,
|
||||
Colon,
|
||||
Comma,
|
||||
Ws,
|
||||
Eof,
|
||||
}
|
||||
|
||||
struct State {
|
||||
seen_sep: bool,
|
||||
seen_colon: bool,
|
||||
filenames: Vec<String>,
|
||||
codes: Vec<String>,
|
||||
}
|
||||
|
||||
impl State {
|
||||
const fn new() -> Self {
|
||||
Self {
|
||||
seen_sep: true,
|
||||
seen_colon: false,
|
||||
filenames: vec![],
|
||||
codes: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate the list of `StrRuleCodePair` pairs for the current
|
||||
/// state.
|
||||
fn parse(&self) -> Vec<PatternPrefixPair> {
|
||||
let mut codes: Vec<PatternPrefixPair> = vec![];
|
||||
for code in &self.codes {
|
||||
if let Ok(code) = RuleSelector::from_str(code) {
|
||||
for filename in &self.filenames {
|
||||
codes.push(PatternPrefixPair {
|
||||
pattern: filename.clone(),
|
||||
prefix: code.clone(),
|
||||
});
|
||||
}
|
||||
} else {
|
||||
warn_user!("Unsupported prefix code: {code}");
|
||||
}
|
||||
}
|
||||
codes
|
||||
}
|
||||
}
|
||||
|
||||
/// Tokenize the raw 'files-to-codes' mapping.
|
||||
fn tokenize_files_to_codes_mapping(value: &str) -> Vec<Token> {
|
||||
let mut tokens = vec![];
|
||||
let mut i = 0;
|
||||
while i < value.len() {
|
||||
for (token_re, token_name) in [
|
||||
(
|
||||
Regex::new(r"([A-Z]+[0-9]*)(?:$|\s|,)").unwrap(),
|
||||
TokenType::Code,
|
||||
),
|
||||
(Regex::new(r"([^\s:,]+)").unwrap(), TokenType::File),
|
||||
(Regex::new(r"(\s*:\s*)").unwrap(), TokenType::Colon),
|
||||
(Regex::new(r"(\s*,\s*)").unwrap(), TokenType::Comma),
|
||||
(Regex::new(r"(\s+)").unwrap(), TokenType::Ws),
|
||||
] {
|
||||
if let Some(cap) = token_re.captures(&value[i..]) {
|
||||
let mat = cap.get(1).unwrap();
|
||||
if mat.start() == 0 {
|
||||
tokens.push(Token {
|
||||
token_name,
|
||||
src: mat.as_str().trim().to_string(),
|
||||
});
|
||||
i += mat.end();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
tokens.push(Token {
|
||||
token_name: TokenType::Eof,
|
||||
src: String::new(),
|
||||
});
|
||||
tokens
|
||||
}
|
||||
|
||||
/// Parse a 'files-to-codes' mapping, mimicking Flake8's internal logic.
|
||||
/// See: <https://github.com/PyCQA/flake8/blob/7dfe99616fc2f07c0017df2ba5fa884158f3ea8a/src/flake8/utils.py#L45>
|
||||
pub(crate) fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
|
||||
if value.trim().is_empty() {
|
||||
return Ok(vec![]);
|
||||
}
|
||||
let mut codes: Vec<PatternPrefixPair> = vec![];
|
||||
let mut state = State::new();
|
||||
for token in tokenize_files_to_codes_mapping(value) {
|
||||
if matches!(token.token_name, TokenType::Comma | TokenType::Ws) {
|
||||
state.seen_sep = true;
|
||||
} else if !state.seen_colon {
|
||||
if matches!(token.token_name, TokenType::Colon) {
|
||||
state.seen_colon = true;
|
||||
state.seen_sep = true;
|
||||
} else if state.seen_sep && matches!(token.token_name, TokenType::File) {
|
||||
state.filenames.push(token.src);
|
||||
state.seen_sep = false;
|
||||
} else {
|
||||
bail!("Unexpected token: {:?}", token.token_name);
|
||||
}
|
||||
} else {
|
||||
if matches!(token.token_name, TokenType::Eof) {
|
||||
codes.extend(state.parse());
|
||||
state = State::new();
|
||||
} else if state.seen_sep && matches!(token.token_name, TokenType::Code) {
|
||||
state.codes.push(token.src);
|
||||
state.seen_sep = false;
|
||||
} else if state.seen_sep && matches!(token.token_name, TokenType::File) {
|
||||
codes.extend(state.parse());
|
||||
state = State::new();
|
||||
state.filenames.push(token.src);
|
||||
state.seen_sep = false;
|
||||
} else {
|
||||
bail!("Unexpected token: {:?}", token.token_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(codes)
|
||||
}
|
||||
|
||||
/// Collect a list of `PatternPrefixPair` structs as a `BTreeMap`.
|
||||
pub(crate) fn collect_per_file_ignores(
|
||||
pairs: Vec<PatternPrefixPair>,
|
||||
) -> FxHashMap<String, Vec<RuleSelector>> {
|
||||
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
|
||||
for pair in pairs {
|
||||
per_file_ignores
|
||||
.entry(pair.pattern)
|
||||
.or_default()
|
||||
.push(pair.prefix);
|
||||
}
|
||||
per_file_ignores
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
|
||||
use ruff_linter::codes;
|
||||
use ruff_linter::registry::Linter;
|
||||
use ruff_linter::settings::types::PatternPrefixPair;
|
||||
use ruff_linter::RuleSelector;
|
||||
|
||||
use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};
|
||||
|
||||
#[test]
|
||||
fn it_parses_prefix_codes() {
|
||||
let actual = parse_prefix_codes("");
|
||||
let expected: Vec<RuleSelector> = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_prefix_codes(" ");
|
||||
let expected: Vec<RuleSelector> = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_prefix_codes("F401");
|
||||
let expected = vec![codes::Pyflakes::_401.into()];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_prefix_codes("F401,");
|
||||
let expected = vec![codes::Pyflakes::_401.into()];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_prefix_codes("F401,E501");
|
||||
let expected = vec![
|
||||
codes::Pyflakes::_401.into(),
|
||||
codes::Pycodestyle::E501.into(),
|
||||
];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_prefix_codes("F401, E501");
|
||||
let expected = vec![
|
||||
codes::Pyflakes::_401.into(),
|
||||
codes::Pycodestyle::E501.into(),
|
||||
];
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_parses_strings() {
|
||||
let actual = parse_strings("");
|
||||
let expected: Vec<String> = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_strings(" ");
|
||||
let expected: Vec<String> = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_strings("__init__.py");
|
||||
let expected = vec!["__init__.py".to_string()];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_strings("__init__.py,");
|
||||
let expected = vec!["__init__.py".to_string()];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_strings("__init__.py,__main__.py");
|
||||
let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_strings("__init__.py, __main__.py");
|
||||
let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_parse_files_to_codes_mapping() -> Result<()> {
|
||||
let actual = parse_files_to_codes_mapping("")?;
|
||||
let expected: Vec<PatternPrefixPair> = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_files_to_codes_mapping(" ")?;
|
||||
let expected: Vec<PatternPrefixPair> = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
// Ex) locust
|
||||
let actual = parse_files_to_codes_mapping(
|
||||
"per-file-ignores =
|
||||
locust/test/*: F841
|
||||
examples/*: F841
|
||||
*.pyi: E302,E704"
|
||||
.strip_prefix("per-file-ignores =")
|
||||
.unwrap(),
|
||||
)?;
|
||||
let expected: Vec<PatternPrefixPair> = vec![
|
||||
PatternPrefixPair {
|
||||
pattern: "locust/test/*".to_string(),
|
||||
prefix: codes::Pyflakes::_841.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "examples/*".to_string(),
|
||||
prefix: codes::Pyflakes::_841.into(),
|
||||
},
|
||||
];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
// Ex) celery
|
||||
let actual = parse_files_to_codes_mapping(
|
||||
"per-file-ignores =
|
||||
t/*,setup.py,examples/*,docs/*,extra/*:
|
||||
D,"
|
||||
.strip_prefix("per-file-ignores =")
|
||||
.unwrap(),
|
||||
)?;
|
||||
let expected: Vec<PatternPrefixPair> = vec![
|
||||
PatternPrefixPair {
|
||||
pattern: "t/*".to_string(),
|
||||
prefix: Linter::Pydocstyle.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "setup.py".to_string(),
|
||||
prefix: Linter::Pydocstyle.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "examples/*".to_string(),
|
||||
prefix: Linter::Pydocstyle.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "docs/*".to_string(),
|
||||
prefix: Linter::Pydocstyle.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "extra/*".to_string(),
|
||||
prefix: Linter::Pydocstyle.into(),
|
||||
},
|
||||
];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
// Ex) scrapy
|
||||
let actual = parse_files_to_codes_mapping(
|
||||
"per-file-ignores =
|
||||
scrapy/__init__.py:E402
|
||||
scrapy/core/downloader/handlers/http.py:F401
|
||||
scrapy/http/__init__.py:F401
|
||||
scrapy/linkextractors/__init__.py:E402,F401
|
||||
scrapy/selector/__init__.py:F401
|
||||
scrapy/spiders/__init__.py:E402,F401
|
||||
scrapy/utils/url.py:F403,F405
|
||||
tests/test_loader.py:E741"
|
||||
.strip_prefix("per-file-ignores =")
|
||||
.unwrap(),
|
||||
)?;
|
||||
let expected: Vec<PatternPrefixPair> = vec![
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/__init__.py".to_string(),
|
||||
prefix: codes::Pycodestyle::E402.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
|
||||
prefix: codes::Pyflakes::_401.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/http/__init__.py".to_string(),
|
||||
prefix: codes::Pyflakes::_401.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/linkextractors/__init__.py".to_string(),
|
||||
prefix: codes::Pycodestyle::E402.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/linkextractors/__init__.py".to_string(),
|
||||
prefix: codes::Pyflakes::_401.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/selector/__init__.py".to_string(),
|
||||
prefix: codes::Pyflakes::_401.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/spiders/__init__.py".to_string(),
|
||||
prefix: codes::Pycodestyle::E402.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/spiders/__init__.py".to_string(),
|
||||
prefix: codes::Pyflakes::_401.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/utils/url.py".to_string(),
|
||||
prefix: codes::Pyflakes::_403.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/utils/url.py".to_string(),
|
||||
prefix: codes::Pyflakes::_405.into(),
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "tests/test_loader.py".to_string(),
|
||||
prefix: codes::Pycodestyle::E741.into(),
|
||||
},
|
||||
];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
10  crates/flake8_to_ruff/src/pep621.rs  Normal file
@@ -0,0 +1,10 @@
//! Extract PEP 621 configuration settings from a pyproject.toml.

use pep440_rs::VersionSpecifiers;
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Project {
    #[serde(alias = "requires-python", alias = "requires_python")]
    pub(crate) requires_python: Option<VersionSpecifiers>,
}
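`Project::requires_python` is what feeds the converter's `target-version` inference. A minimal sketch of that mapping, not part of this changeset: it reuses the specifier from the converter's tests, and the signature and derived traits of `PythonVersion::get_minimum_supported_version` are assumed from its use in `converter.rs`.

```rust
use std::str::FromStr;

use pep440_rs::VersionSpecifiers;
use ruff_linter::settings::types::PythonVersion;

fn main() {
    // The same specifier exercised in the converter's tests.
    let requires_python = VersionSpecifiers::from_str(">=3.8.16, <3.11").unwrap();

    // Mirrors the call in `converter.rs`, which assigns the result to
    // `options.target_version` (an `Option<PythonVersion>`); expected to be Py38 here.
    let target_version = PythonVersion::get_minimum_supported_version(&requires_python);
    println!("inferred target-version: {target_version:?}");
}
```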
368  crates/flake8_to_ruff/src/plugin.rs  Normal file
@@ -0,0 +1,368 @@
|
||||
use std::collections::{BTreeSet, HashMap, HashSet};
|
||||
use std::fmt;
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use ruff_linter::registry::Linter;
|
||||
use ruff_linter::rule_selector::PreviewOptions;
|
||||
use ruff_linter::RuleSelector;
|
||||
|
||||
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
|
||||
pub enum Plugin {
|
||||
Flake82020,
|
||||
Flake8Annotations,
|
||||
Flake8Bandit,
|
||||
Flake8BlindExcept,
|
||||
Flake8BooleanTrap,
|
||||
Flake8Bugbear,
|
||||
Flake8Builtins,
|
||||
Flake8Commas,
|
||||
Flake8Comprehensions,
|
||||
Flake8Datetimez,
|
||||
Flake8Debugger,
|
||||
Flake8Docstrings,
|
||||
Flake8Eradicate,
|
||||
Flake8ErrMsg,
|
||||
Flake8Executable,
|
||||
Flake8ImplicitStrConcat,
|
||||
Flake8ImportConventions,
|
||||
Flake8NoPep420,
|
||||
Flake8Pie,
|
||||
Flake8Print,
|
||||
Flake8PytestStyle,
|
||||
Flake8Quotes,
|
||||
Flake8Return,
|
||||
Flake8Simplify,
|
||||
Flake8TidyImports,
|
||||
Flake8TypeChecking,
|
||||
Flake8UnusedArguments,
|
||||
Flake8UsePathlib,
|
||||
McCabe,
|
||||
PEP8Naming,
|
||||
PandasVet,
|
||||
Pyupgrade,
|
||||
Tryceratops,
|
||||
}
|
||||
|
||||
impl FromStr for Plugin {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(string: &str) -> Result<Self, Self::Err> {
|
||||
match string {
|
||||
"flake8-2020" => Ok(Plugin::Flake82020),
|
||||
"flake8-annotations" => Ok(Plugin::Flake8Annotations),
|
||||
"flake8-bandit" => Ok(Plugin::Flake8Bandit),
|
||||
"flake8-blind-except" => Ok(Plugin::Flake8BlindExcept),
|
||||
"flake8-boolean-trap" => Ok(Plugin::Flake8BooleanTrap),
|
||||
"flake8-bugbear" => Ok(Plugin::Flake8Bugbear),
|
||||
"flake8-builtins" => Ok(Plugin::Flake8Builtins),
|
||||
"flake8-commas" => Ok(Plugin::Flake8Commas),
|
||||
"flake8-comprehensions" => Ok(Plugin::Flake8Comprehensions),
|
||||
"flake8-datetimez" => Ok(Plugin::Flake8Datetimez),
|
||||
"flake8-debugger" => Ok(Plugin::Flake8Debugger),
|
||||
"flake8-docstrings" => Ok(Plugin::Flake8Docstrings),
|
||||
"flake8-eradicate" => Ok(Plugin::Flake8Eradicate),
|
||||
"flake8-errmsg" => Ok(Plugin::Flake8ErrMsg),
|
||||
"flake8-executable" => Ok(Plugin::Flake8Executable),
|
||||
"flake8-implicit-str-concat" => Ok(Plugin::Flake8ImplicitStrConcat),
|
||||
"flake8-import-conventions" => Ok(Plugin::Flake8ImportConventions),
|
||||
"flake8-no-pep420" => Ok(Plugin::Flake8NoPep420),
|
||||
"flake8-pie" => Ok(Plugin::Flake8Pie),
|
||||
"flake8-print" => Ok(Plugin::Flake8Print),
|
||||
"flake8-pytest-style" => Ok(Plugin::Flake8PytestStyle),
|
||||
"flake8-quotes" => Ok(Plugin::Flake8Quotes),
|
||||
"flake8-return" => Ok(Plugin::Flake8Return),
|
||||
"flake8-simplify" => Ok(Plugin::Flake8Simplify),
|
||||
"flake8-tidy-imports" => Ok(Plugin::Flake8TidyImports),
|
||||
"flake8-type-checking" => Ok(Plugin::Flake8TypeChecking),
|
||||
"flake8-unused-arguments" => Ok(Plugin::Flake8UnusedArguments),
|
||||
"flake8-use-pathlib" => Ok(Plugin::Flake8UsePathlib),
|
||||
"mccabe" => Ok(Plugin::McCabe),
|
||||
"pep8-naming" => Ok(Plugin::PEP8Naming),
|
||||
"pandas-vet" => Ok(Plugin::PandasVet),
|
||||
"pyupgrade" => Ok(Plugin::Pyupgrade),
|
||||
"tryceratops" => Ok(Plugin::Tryceratops),
|
||||
_ => Err(anyhow!("Unknown plugin: {string}")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Plugin {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
match self {
|
||||
Plugin::Flake82020 => "flake8-2020",
|
||||
Plugin::Flake8Annotations => "flake8-annotations",
|
||||
Plugin::Flake8Bandit => "flake8-bandit",
|
||||
Plugin::Flake8BlindExcept => "flake8-blind-except",
|
||||
Plugin::Flake8BooleanTrap => "flake8-boolean-trap",
|
||||
Plugin::Flake8Bugbear => "flake8-bugbear",
|
||||
Plugin::Flake8Builtins => "flake8-builtins",
|
||||
Plugin::Flake8Commas => "flake8-commas",
|
||||
Plugin::Flake8Comprehensions => "flake8-comprehensions",
|
||||
Plugin::Flake8Datetimez => "flake8-datetimez",
|
||||
Plugin::Flake8Debugger => "flake8-debugger",
|
||||
Plugin::Flake8Docstrings => "flake8-docstrings",
|
||||
Plugin::Flake8Eradicate => "flake8-eradicate",
|
||||
Plugin::Flake8ErrMsg => "flake8-errmsg",
|
||||
Plugin::Flake8Executable => "flake8-executable",
|
||||
Plugin::Flake8ImplicitStrConcat => "flake8-implicit-str-concat",
|
||||
Plugin::Flake8ImportConventions => "flake8-import-conventions",
|
||||
Plugin::Flake8NoPep420 => "flake8-no-pep420",
|
||||
Plugin::Flake8Pie => "flake8-pie",
|
||||
Plugin::Flake8Print => "flake8-print",
|
||||
Plugin::Flake8PytestStyle => "flake8-pytest-style",
|
||||
Plugin::Flake8Quotes => "flake8-quotes",
|
||||
Plugin::Flake8Return => "flake8-return",
|
||||
Plugin::Flake8Simplify => "flake8-simplify",
|
||||
Plugin::Flake8TidyImports => "flake8-tidy-imports",
|
||||
Plugin::Flake8TypeChecking => "flake8-type-checking",
|
||||
Plugin::Flake8UnusedArguments => "flake8-unused-arguments",
|
||||
Plugin::Flake8UsePathlib => "flake8-use-pathlib",
|
||||
Plugin::McCabe => "mccabe",
|
||||
Plugin::PEP8Naming => "pep8-naming",
|
||||
Plugin::PandasVet => "pandas-vet",
|
||||
Plugin::Pyupgrade => "pyupgrade",
|
||||
Plugin::Tryceratops => "tryceratops",
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Plugin> for Linter {
|
||||
fn from(plugin: &Plugin) -> Self {
|
||||
match plugin {
|
||||
Plugin::Flake82020 => Linter::Flake82020,
|
||||
Plugin::Flake8Annotations => Linter::Flake8Annotations,
|
||||
Plugin::Flake8Bandit => Linter::Flake8Bandit,
|
||||
Plugin::Flake8BlindExcept => Linter::Flake8BlindExcept,
|
||||
Plugin::Flake8BooleanTrap => Linter::Flake8BooleanTrap,
|
||||
Plugin::Flake8Bugbear => Linter::Flake8Bugbear,
|
||||
Plugin::Flake8Builtins => Linter::Flake8Builtins,
|
||||
Plugin::Flake8Commas => Linter::Flake8Commas,
|
||||
Plugin::Flake8Comprehensions => Linter::Flake8Comprehensions,
|
||||
Plugin::Flake8Datetimez => Linter::Flake8Datetimez,
|
||||
Plugin::Flake8Debugger => Linter::Flake8Debugger,
|
||||
Plugin::Flake8Docstrings => Linter::Pydocstyle,
|
||||
Plugin::Flake8Eradicate => Linter::Eradicate,
|
||||
Plugin::Flake8ErrMsg => Linter::Flake8ErrMsg,
|
||||
Plugin::Flake8Executable => Linter::Flake8Executable,
|
||||
Plugin::Flake8ImplicitStrConcat => Linter::Flake8ImplicitStrConcat,
|
||||
Plugin::Flake8ImportConventions => Linter::Flake8ImportConventions,
|
||||
Plugin::Flake8NoPep420 => Linter::Flake8NoPep420,
|
||||
Plugin::Flake8Pie => Linter::Flake8Pie,
|
||||
Plugin::Flake8Print => Linter::Flake8Print,
|
||||
Plugin::Flake8PytestStyle => Linter::Flake8PytestStyle,
|
||||
Plugin::Flake8Quotes => Linter::Flake8Quotes,
|
||||
Plugin::Flake8Return => Linter::Flake8Return,
|
||||
Plugin::Flake8Simplify => Linter::Flake8Simplify,
|
||||
Plugin::Flake8TidyImports => Linter::Flake8TidyImports,
|
||||
Plugin::Flake8TypeChecking => Linter::Flake8TypeChecking,
|
||||
Plugin::Flake8UnusedArguments => Linter::Flake8UnusedArguments,
|
||||
Plugin::Flake8UsePathlib => Linter::Flake8UsePathlib,
|
||||
Plugin::McCabe => Linter::McCabe,
|
||||
Plugin::PEP8Naming => Linter::PEP8Naming,
|
||||
Plugin::PandasVet => Linter::PandasVet,
|
||||
Plugin::Pyupgrade => Linter::Pyupgrade,
|
||||
Plugin::Tryceratops => Linter::Tryceratops,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Infer the enabled plugins based on user-provided options.
///
/// For example, if the user specified a `mypy-init-return` setting, we should
/// infer that `flake8-annotations` is active.
pub(crate) fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> Vec<Plugin> {
    let mut plugins = BTreeSet::new();
    for key in flake8.keys() {
        match key.as_str() {
            // flake8-annotations
            "suppress-none-returning" | "suppress_none_returning" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "suppress-dummy-args" | "suppress_dummy_args" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "allow-untyped-defs" | "allow_untyped_defs" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "allow-untyped-nested" | "allow_untyped_nested" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "mypy-init-return" | "mypy_init_return" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "dispatch-decorators" | "dispatch_decorators" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "overload-decorators" | "overload_decorators" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "allow-star-arg-any" | "allow_star_arg_any" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            // flake8-bugbear
            "extend-immutable-calls" | "extend_immutable_calls" => {
                plugins.insert(Plugin::Flake8Bugbear);
            }
            // flake8-builtins
            "builtins-ignorelist" | "builtins_ignorelist" => {
                plugins.insert(Plugin::Flake8Builtins);
            }
            // flake8-docstrings
            "docstring-convention" | "docstring_convention" => {
                plugins.insert(Plugin::Flake8Docstrings);
            }
            // flake8-eradicate
            "eradicate-aggressive" | "eradicate_aggressive" => {
                plugins.insert(Plugin::Flake8Eradicate);
            }
            "eradicate-whitelist" | "eradicate_whitelist" => {
                plugins.insert(Plugin::Flake8Eradicate);
            }
            "eradicate-whitelist-extend" | "eradicate_whitelist_extend" => {
                plugins.insert(Plugin::Flake8Eradicate);
            }
            // flake8-pytest-style
            "pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            // flake8-quotes
            "quotes" | "inline-quotes" | "inline_quotes" => {
                plugins.insert(Plugin::Flake8Quotes);
            }
            "multiline-quotes" | "multiline_quotes" => {
                plugins.insert(Plugin::Flake8Quotes);
            }
            "docstring-quotes" | "docstring_quotes" => {
                plugins.insert(Plugin::Flake8Quotes);
            }
            "avoid-escape" | "avoid_escape" => {
                plugins.insert(Plugin::Flake8Quotes);
            }
            // flake8-tidy-imports
            "ban-relative-imports" | "ban_relative_imports" => {
                plugins.insert(Plugin::Flake8TidyImports);
            }
            "banned-modules" | "banned_modules" => {
                plugins.insert(Plugin::Flake8TidyImports);
            }
            // mccabe
            "max-complexity" | "max_complexity" => {
                plugins.insert(Plugin::McCabe);
            }
            // pep8-naming
            "ignore-names" | "ignore_names" => {
                plugins.insert(Plugin::PEP8Naming);
            }
            "classmethod-decorators" | "classmethod_decorators" => {
                plugins.insert(Plugin::PEP8Naming);
            }
            "staticmethod-decorators" | "staticmethod_decorators" => {
                plugins.insert(Plugin::PEP8Naming);
            }
            // flake8-errmsg
            "max-string-length" | "max_string_length" => {
                plugins.insert(Plugin::Flake8ErrMsg);
            }
            _ => {}
        }
    }
    Vec::from_iter(plugins)
}

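The `flake8` argument above is simply the key/value pairs of the user's `[flake8]` table (from `setup.cfg`, `tox.ini`, or `.flake8`), parsed elsewhere in the converter. A minimal sketch of a call; the option names below are illustrative, not taken from this change:

use std::collections::HashMap;

// Illustrative `[flake8]` options; only the option *names* drive inference,
// the values are irrelevant here.
let flake8: HashMap<String, Option<String>> = HashMap::from([
    ("docstring-convention".to_string(), Some("google".to_string())),
    ("max-complexity".to_string(), Some("10".to_string())),
]);
let plugins = infer_plugins_from_options(&flake8);
// Contains `Plugin::Flake8Docstrings` and `Plugin::McCabe`, in the stable
// order produced by the intermediate `BTreeSet`.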
/// Infer the enabled plugins based on the referenced prefixes.
///
/// For example, if the user ignores `ANN101`, we should infer that
/// `flake8-annotations` is active.
pub(crate) fn infer_plugins_from_codes(selectors: &HashSet<RuleSelector>) -> Vec<Plugin> {
    // Ignore cases in which we've knowingly changed rule prefixes.
    [
        Plugin::Flake82020,
        Plugin::Flake8Annotations,
        Plugin::Flake8Bandit,
        // Plugin::Flake8BlindExcept,
        Plugin::Flake8BooleanTrap,
        Plugin::Flake8Bugbear,
        Plugin::Flake8Builtins,
        // Plugin::Flake8Commas,
        Plugin::Flake8Comprehensions,
        Plugin::Flake8Datetimez,
        Plugin::Flake8Debugger,
        Plugin::Flake8Docstrings,
        // Plugin::Flake8Eradicate,
        Plugin::Flake8ErrMsg,
        Plugin::Flake8Executable,
        Plugin::Flake8ImplicitStrConcat,
        // Plugin::Flake8ImportConventions,
        Plugin::Flake8NoPep420,
        Plugin::Flake8Pie,
        Plugin::Flake8Print,
        Plugin::Flake8PytestStyle,
        Plugin::Flake8Quotes,
        Plugin::Flake8Return,
        Plugin::Flake8Simplify,
        // Plugin::Flake8TidyImports,
        // Plugin::Flake8TypeChecking,
        Plugin::Flake8UnusedArguments,
        // Plugin::Flake8UsePathlib,
        Plugin::McCabe,
        Plugin::PEP8Naming,
        Plugin::PandasVet,
        Plugin::Tryceratops,
    ]
    .into_iter()
    .filter(|plugin| {
        for selector in selectors {
            if selector
                .rules(&PreviewOptions::default())
                .any(|rule| Linter::from(plugin).rules().any(|r| r == rule))
            {
                return true;
            }
        }
        false
    })
    .collect()
}

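For the code-based inference above, a hedged sketch of a call. It assumes `RuleSelector` parses from prefix strings via `FromStr`, as it does elsewhere in the workspace; the selectors shown are illustrative:

use std::collections::HashSet;

// Selectors the user referenced in their flake8 config (e.g. in `ignore`),
// re-parsed as Ruff selectors. "ANN" covers flake8-annotations, "C90" mccabe.
let selectors: HashSet<RuleSelector> = ["ANN", "C90"]
    .into_iter()
    .map(|code| code.parse().expect("valid rule selector"))
    .collect();
let plugins = infer_plugins_from_codes(&selectors);
// Expected to contain `Plugin::Flake8Annotations` and `Plugin::McCabe`.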
#[cfg(test)]
mod tests {
    use std::collections::HashMap;

    use super::{infer_plugins_from_options, Plugin};

    #[test]
    fn it_infers_plugins() {
        let actual = infer_plugins_from_options(&HashMap::from([(
            "inline-quotes".to_string(),
            Some("single".to_string()),
        )]));
        let expected = vec![Plugin::Flake8Quotes];
        assert_eq!(actual, expected);

        let actual = infer_plugins_from_options(&HashMap::from([(
            "staticmethod-decorators".to_string(),
            Some("[]".to_string()),
        )]));
        let expected = vec![Plugin::PEP8Naming];
        assert_eq!(actual, expected);
    }
}
26 crates/flake8_to_ruff/src/pyproject.rs (new file)
@@ -0,0 +1,26 @@
use std::path::Path;

use anyhow::Result;
use serde::{Deserialize, Serialize};

use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;

#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct Tools {
    pub(crate) black: Option<Black>,
    pub(crate) isort: Option<Isort>,
}

#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct Pyproject {
    pub(crate) tool: Option<Tools>,
    pub(crate) project: Option<Project>,
}

pub(crate) fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
    let contents = std::fs::read_to_string(path)?;
    let pyproject = toml::from_str::<Pyproject>(&contents)?;
    Ok(pyproject)
}
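This parser only deserializes the pieces of `pyproject.toml` that matter for conversion: the `[tool.black]` and `[tool.isort]` tables plus the PEP 621 `[project]` table; everything else is ignored. A minimal usage sketch (the helper and path below are hypothetical, not part of this change):

// Hypothetical helper: load a user's pyproject.toml and report which of the
// recognised tool tables are present.
fn describe(path: &std::path::Path) -> anyhow::Result<()> {
    let pyproject = parse(path)?;
    if let Some(tools) = &pyproject.tool {
        println!(
            "black: {}, isort: {}",
            tools.black.is_some(),
            tools.isort.is_some()
        );
    }
    Ok(())
}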
@@ -1,2 +0,0 @@
[tool.ruff]
select = []
@@ -1,2 +0,0 @@
[tool.ruff]
include = ["a.py", "subdirectory/c.py"]
@@ -1,413 +0,0 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"id": "4f8ce941-1492-4d4e-8ab5-70d733fe891a",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%config ZMQInteractiveShell.ast_node_interactivity=\"last_expr_or_assign\""
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"id": "721ec705-0c65-4bfb-9809-7ed8bc534186",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"1"
|
||||
]
|
||||
},
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# Assignment statement without a semicolon\n",
|
||||
"x = 1"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"id": "de50e495-17e5-41cc-94bd-565757555d7e",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Assignment statement with a semicolon\n",
|
||||
"x = 1;\n",
|
||||
"x = 1;"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"id": "39e31201-23da-44eb-8684-41bba3663991",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"2"
|
||||
]
|
||||
},
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# Augmented assignment without a semicolon\n",
|
||||
"x += 1"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"id": "6b73d3dd-c73a-4697-9e97-e109a6c1fbab",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Augmented assignment without a semicolon\n",
|
||||
"x += 1;\n",
|
||||
"x += 1; # comment\n",
|
||||
"# comment"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"id": "2a3e5b86-aa5b-46ba-b9c6-0386d876f58c",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Multiple assignment without a semicolon\n",
|
||||
"x = y = 1"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"id": "07f89e51-9357-4cfb-8fc5-76fb75e35949",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Multiple assignment with a semicolon\n",
|
||||
"x = y = 1;\n",
|
||||
"x = y = 1;"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"id": "c22b539d-473e-48f8-a236-625e58c47a00",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Tuple unpacking without a semicolon\n",
|
||||
"x, y = 1, 2"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"id": "12c87940-a0d5-403b-a81c-7507eb06dc7e",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Tuple unpacking with a semicolon (irrelevant)\n",
|
||||
"x, y = 1, 2;\n",
|
||||
"x, y = 1, 2; # comment\n",
|
||||
"# comment"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"id": "5a768c76-6bc4-470c-b37e-8cc14bc6caf4",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"1"
|
||||
]
|
||||
},
|
||||
"execution_count": 10,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# Annotated assignment statement without a semicolon\n",
|
||||
"x: int = 1"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 11,
|
||||
"id": "21bfda82-1a9a-4ba1-9078-74ac480804b5",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Annotated assignment statement without a semicolon\n",
|
||||
"x: int = 1;\n",
|
||||
"x: int = 1; # comment\n",
|
||||
"# comment"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 12,
|
||||
"id": "09929999-ff29-4d10-ad2b-e665af15812d",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"1"
|
||||
]
|
||||
},
|
||||
"execution_count": 12,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# Assignment expression without a semicolon\n",
|
||||
"(x := 1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 13,
|
||||
"id": "32a83217-1bad-4f61-855e-ffcdb119c763",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Assignment expression with a semicolon\n",
|
||||
"(x := 1);\n",
|
||||
"(x := 1); # comment\n",
|
||||
"# comment"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 14,
|
||||
"id": "61b81865-277e-4964-b03e-eb78f1f318eb",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"1"
|
||||
]
|
||||
},
|
||||
"execution_count": 14,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"x = 1\n",
|
||||
"# Expression without a semicolon\n",
|
||||
"x"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 15,
|
||||
"id": "974c29be-67e1-4000-95fa-6ca118a63bad",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"x = 1\n",
|
||||
"# Expression with a semicolon\n",
|
||||
"x;"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 16,
|
||||
"id": "cfeb1757-46d6-4f13-969f-a283b6d0304f",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"class Point:\n",
|
||||
" def __init__(self, x, y):\n",
|
||||
" self.x = x\n",
|
||||
" self.y = y\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"p = Point(0, 0);"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 17,
|
||||
"id": "2ee7f1a5-ccfe-4004-bfa4-ef834a58da97",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Assignment statement where the left is an attribute access doesn't\n",
|
||||
"# print the value.\n",
|
||||
"p.x = 1;"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 18,
|
||||
"id": "3e49370a-048b-474d-aa0a-3d1d4a73ad37",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"data = {}\n",
|
||||
"\n",
|
||||
"# Neither does the subscript node\n",
|
||||
"data[\"foo\"] = 1;"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 19,
|
||||
"id": "d594bdd3-eaa9-41ef-8cda-cf01bc273b2d",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"if (x := 1):\n",
|
||||
" # It should be the top level statement\n",
|
||||
" x"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 20,
|
||||
"id": "e532f0cf-80c7-42b7-8226-6002fcf74fb6",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"1"
|
||||
]
|
||||
},
|
||||
"execution_count": 20,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# Parentheses with comments\n",
|
||||
"(\n",
|
||||
" x := 1 # comment\n",
|
||||
") # comment"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 21,
|
||||
"id": "473c5d62-871b-46ed-8a34-27095243f462",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Parentheses with comments\n",
|
||||
"(\n",
|
||||
" x := 1 # comment\n",
|
||||
"); # comment"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 22,
|
||||
"id": "8c3c2361-f49f-45fe-bbe3-7e27410a8a86",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"'Hello world!'"
|
||||
]
|
||||
},
|
||||
"execution_count": 22,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"\"\"\"Hello world!\"\"\""
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 23,
|
||||
"id": "23dbe9b5-3f68-4890-ab2d-ab0dbfd0712a",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\"\"\"Hello world!\"\"\"; # comment\n",
|
||||
"# comment"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 24,
|
||||
"id": "3ce33108-d95d-4c70-83d1-0d4fd36a2951",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"'x = 1'"
|
||||
]
|
||||
},
|
||||
"execution_count": 24,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"x = 1\n",
|
||||
"f\"x = {x}\""
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 25,
|
||||
"id": "654a4a67-de43-4684-824a-9451c67db48f",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"x = 1\n",
|
||||
"f\"x = {x}\";\n",
|
||||
"f\"x = {x}\"; # comment\n",
|
||||
"# comment"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python (ruff-playground)",
|
||||
"language": "python",
|
||||
"name": "ruff-playground"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.11.3"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -1,5 +0,0 @@
---
source: crates/ruff/src/version.rs
expression: version
---
0.0.0
@@ -1,17 +0,0 @@
use std::io;
use std::io::{Read, Write};

/// Read a string from `stdin`.
pub(crate) fn read_from_stdin() -> Result<String, io::Error> {
    let mut buffer = String::new();
    io::stdin().lock().read_to_string(&mut buffer)?;
    Ok(buffer)
}

/// Read bytes from `stdin` and write them to `stdout`.
pub(crate) fn parrot_stdin() -> Result<(), io::Error> {
    let mut buffer = String::new();
    io::stdin().lock().read_to_string(&mut buffer)?;
    io::stdout().write_all(buffer.as_bytes())?;
    Ok(())
}
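These helpers backed the `-` path argument (reading the source to check from stdin). A hypothetical call site, shown only to illustrate the intended use; the function and variable names are not taken from the repository:

// Hypothetical: when the user passes `-` as the path, read from stdin rather
// than from disk.
fn load_source(path: &std::path::Path) -> Result<String, std::io::Error> {
    if path == std::path::Path::new("-") {
        read_from_stdin()
    } else {
        std::fs::read_to_string(path)
    }
}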
@@ -1,150 +0,0 @@
//! A test suite that ensures deprecated command line options have appropriate warnings / behaviors

use ruff_linter::settings::types::SerializationFormat;
use std::process::Command;

use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};

const BIN_NAME: &str = "ruff";

const STDIN: &str = "l = 1";

fn ruff_check(show_source: Option<bool>, output_format: Option<String>) -> Command {
    let mut cmd = Command::new(get_cargo_bin(BIN_NAME));
    let output_format = output_format.unwrap_or(format!("{}", SerializationFormat::default(false)));
    cmd.arg("check")
        .arg("--output-format")
        .arg(output_format)
        .arg("--no-cache");
    match show_source {
        Some(true) => {
            cmd.arg("--show-source");
        }
        Some(false) => {
            cmd.arg("--no-show-source");
        }
        None => {}
    }
    cmd.arg("-");

    cmd
}

#[test]
fn ensure_show_source_is_deprecated() {
    assert_cmd_snapshot!(ruff_check(Some(true), None).pass_stdin(STDIN), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    -:1:1: E741 Ambiguous variable name: `l`
    Found 1 error.

    ----- stderr -----
    warning: The `--show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
    "###);
}

#[test]
fn ensure_no_show_source_is_deprecated() {
    assert_cmd_snapshot!(ruff_check(Some(false), None).pass_stdin(STDIN), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    -:1:1: E741 Ambiguous variable name: `l`
    Found 1 error.

    ----- stderr -----
    warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
    "###);
}

#[test]
fn ensure_output_format_is_deprecated() {
    assert_cmd_snapshot!(ruff_check(None, Some("text".into())).pass_stdin(STDIN), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    -:1:1: E741 Ambiguous variable name: `l`
    Found 1 error.

    ----- stderr -----
    warning: `--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.
    "###);
}

#[test]
fn ensure_output_format_overrides_show_source() {
    assert_cmd_snapshot!(ruff_check(Some(true), Some("concise".into())).pass_stdin(STDIN), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    -:1:1: E741 Ambiguous variable name: `l`
    Found 1 error.

    ----- stderr -----
    warning: The `--show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
    "###);
}

#[test]
fn ensure_full_output_format_overrides_no_show_source() {
    assert_cmd_snapshot!(ruff_check(Some(false), Some("full".into())).pass_stdin(STDIN), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    -:1:1: E741 Ambiguous variable name: `l`
      |
    1 | l = 1
      | ^ E741
      |

    Found 1 error.

    ----- stderr -----
    warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=full`.
    "###);
}

#[test]
fn ensure_output_format_uses_concise_over_no_show_source() {
    assert_cmd_snapshot!(ruff_check(Some(false), Some("concise".into())).pass_stdin(STDIN), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    -:1:1: E741 Ambiguous variable name: `l`
    Found 1 error.

    ----- stderr -----
    warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
    "###);
}

#[test]
fn ensure_deprecated_output_format_overrides_show_source() {
    assert_cmd_snapshot!(ruff_check(Some(true), Some("text".into())).pass_stdin(STDIN), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    -:1:1: E741 Ambiguous variable name: `l`
    Found 1 error.

    ----- stderr -----
    warning: The `--show-source` argument is deprecated and has been ignored in favor of `--output-format=text`.
    warning: `--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.
    "###);
}

#[test]
fn ensure_deprecated_output_format_overrides_no_show_source() {
    assert_cmd_snapshot!(ruff_check(Some(false), Some("text".into())).pass_stdin(STDIN), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    -:1:1: E741 Ambiguous variable name: `l`
    Found 1 error.

    ----- stderr -----
    warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=text`.
    warning: `--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.
    "###);
}
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,935 +0,0 @@
|
||||
//! Tests the interaction of the `lint` configuration section
|
||||
|
||||
#![cfg(not(target_family = "wasm"))]
|
||||
|
||||
use regex::escape;
|
||||
use std::fs;
|
||||
use std::process::Command;
|
||||
use std::str;
|
||||
|
||||
use anyhow::Result;
|
||||
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
|
||||
use tempfile::TempDir;
|
||||
|
||||
const BIN_NAME: &str = "ruff";
|
||||
const STDIN_BASE_OPTIONS: &[&str] = &["check", "--no-cache", "--output-format", "concise"];
|
||||
|
||||
fn tempdir_filter(tempdir: &TempDir) -> String {
|
||||
format!(r"{}\\?/?", escape(tempdir.path().to_str().unwrap()))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn top_level_options() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-select = ["B", "Q"]
|
||||
|
||||
[flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
.args(["--stdin-filename", "test.py"])
|
||||
.arg("-")
|
||||
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
test.py:1:5: Q000 [*] Double quotes found but single quotes preferred
|
||||
test.py:1:5: B005 Using `.strip()` with multi-character strings is misleading
|
||||
test.py:1:19: Q000 [*] Double quotes found but single quotes preferred
|
||||
Found 3 errors.
|
||||
[*] 2 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `[TMP]/ruff.toml`:
|
||||
- 'extend-select' -> 'lint.extend-select'
|
||||
- 'flake8-quotes' -> 'lint.flake8-quotes'
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn lint_options() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
[lint]
|
||||
extend-select = ["B", "Q"]
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
.arg("-")
|
||||
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:5: Q000 [*] Double quotes found but single quotes preferred
|
||||
-:1:5: B005 Using `.strip()` with multi-character strings is misleading
|
||||
-:1:19: Q000 [*] Double quotes found but single quotes preferred
|
||||
Found 3 errors.
|
||||
[*] 2 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Tests that configurations from the top-level and `lint` section are merged together.
|
||||
#[test]
|
||||
fn mixed_levels() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-select = ["B", "Q"]
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
.arg("-")
|
||||
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:5: Q000 [*] Double quotes found but single quotes preferred
|
||||
-:1:5: B005 Using `.strip()` with multi-character strings is misleading
|
||||
-:1:19: Q000 [*] Double quotes found but single quotes preferred
|
||||
Found 3 errors.
|
||||
[*] 2 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `[TMP]/ruff.toml`:
|
||||
- 'extend-select' -> 'lint.extend-select'
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Tests that options in the `lint` section have higher precedence than top-level options (because they are more specific).
|
||||
#[test]
|
||||
fn precedence() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
[lint]
|
||||
extend-select = ["B", "Q"]
|
||||
|
||||
[flake8-quotes]
|
||||
inline-quotes = "double"
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
.arg("-")
|
||||
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:5: Q000 [*] Double quotes found but single quotes preferred
|
||||
-:1:5: B005 Using `.strip()` with multi-character strings is misleading
|
||||
-:1:19: Q000 [*] Double quotes found but single quotes preferred
|
||||
Found 3 errors.
|
||||
[*] 2 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `[TMP]/ruff.toml`:
|
||||
- 'flake8-quotes' -> 'lint.flake8-quotes'
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn exclude() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-select = ["B", "Q"]
|
||||
extend-exclude = ["out"]
|
||||
|
||||
[lint]
|
||||
exclude = ["test.py", "generated.py"]
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
fs::write(
|
||||
tempdir.path().join("main.py"),
|
||||
r#"
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == "__main__":
|
||||
say_hy("dear Ruff contributor")
|
||||
"#,
|
||||
)?;
|
||||
|
||||
// Excluded file but passed to the CLI directly, should be linted
|
||||
let test_path = tempdir.path().join("test.py");
|
||||
fs::write(
|
||||
&test_path,
|
||||
r#"
|
||||
def say_hy(name: str):
|
||||
print(f"Hy {name}")"#,
|
||||
)?;
|
||||
|
||||
fs::write(
|
||||
tempdir.path().join("generated.py"),
|
||||
r#"NUMBERS = [
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
|
||||
10, 11, 12, 13, 14, 15, 16, 17, 18, 19
|
||||
]
|
||||
OTHER = "OTHER"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let out_dir = tempdir.path().join("out");
|
||||
fs::create_dir(&out_dir)?;
|
||||
|
||||
fs::write(out_dir.join("a.py"), r#"a = "a""#)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
// Explicitly pass test.py, should be linted regardless of it being excluded by lint.exclude
|
||||
.arg(test_path.file_name().unwrap())
|
||||
// Lint all other files in the directory, should respect the `exclude` and `lint.exclude` options
|
||||
.arg("."), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
main.py:4:16: Q000 [*] Double quotes found but single quotes preferred
|
||||
main.py:5:12: Q000 [*] Double quotes found but single quotes preferred
|
||||
test.py:3:15: Q000 [*] Double quotes found but single quotes preferred
|
||||
Found 3 errors.
|
||||
[*] 3 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `ruff.toml`:
|
||||
- 'extend-select' -> 'lint.extend-select'
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn exclude_stdin() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-select = ["B", "Q"]
|
||||
|
||||
[lint]
|
||||
exclude = ["generated.py"]
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
.args(["--stdin-filename", "generated.py"])
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == "__main__":
|
||||
say_hy("dear Ruff contributor")
|
||||
"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
generated.py:4:16: Q000 [*] Double quotes found but single quotes preferred
|
||||
generated.py:5:12: Q000 [*] Double quotes found but single quotes preferred
|
||||
Found 2 errors.
|
||||
[*] 2 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `ruff.toml`:
|
||||
- 'extend-select' -> 'lint.extend-select'
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn line_too_long_width_override() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
line-length = 80
|
||||
select = ["E501"]
|
||||
|
||||
[pycodestyle]
|
||||
max-line-length = 100
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
.args(["--stdin-filename", "test.py"])
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
# longer than 80, but less than 100
|
||||
_ = "---------------------------------------------------------------------------亜亜亜亜亜亜"
|
||||
# longer than 100
|
||||
_ = "---------------------------------------------------------------------------亜亜亜亜亜亜亜亜亜亜亜亜亜亜"
|
||||
"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
test.py:5:91: E501 Line too long (109 > 100)
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `[TMP]/ruff.toml`:
|
||||
- 'select' -> 'lint.select'
|
||||
- 'pycodestyle' -> 'lint.pycodestyle'
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn per_file_ignores_stdin() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-select = ["B", "Q"]
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
.args(["--stdin-filename", "generated.py"])
|
||||
.args(["--per-file-ignores", "generated.py:Q"])
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
import os
|
||||
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == "__main__":
|
||||
say_hy("dear Ruff contributor")
|
||||
"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
generated.py:2:8: F401 [*] `os` imported but unused
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `ruff.toml`:
|
||||
- 'extend-select' -> 'lint.extend-select'
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extend_per_file_ignores_stdin() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-select = ["B", "Q"]
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
.args(["--stdin-filename", "generated.py"])
|
||||
.args(["--extend-per-file-ignores", "generated.py:Q"])
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
import os
|
||||
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == "__main__":
|
||||
say_hy("dear Ruff contributor")
|
||||
"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
generated.py:2:8: F401 [*] `os` imported but unused
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `ruff.toml`:
|
||||
- 'extend-select' -> 'lint.extend-select'
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Regression test for [#8858](https://github.com/astral-sh/ruff/issues/8858)
|
||||
#[test]
|
||||
fn parent_configuration_override() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let root_ruff = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
root_ruff,
|
||||
r#"
|
||||
[lint]
|
||||
select = ["ALL"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let sub_dir = tempdir.path().join("subdirectory");
|
||||
fs::create_dir(&sub_dir)?;
|
||||
|
||||
let subdirectory_ruff = sub_dir.join("ruff.toml");
|
||||
fs::write(
|
||||
subdirectory_ruff,
|
||||
r#"
|
||||
[lint]
|
||||
ignore = ["D203", "D212"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(sub_dir)
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
, @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
warning: No Python files found under the given path(s)
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nonexistent_config_file() {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", "foo.toml", "."]), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: invalid value 'foo.toml' for '--config <CONFIG_OPTION>'
|
||||
|
||||
tip: A `--config` flag must either be a path to a `.toml` configuration file
|
||||
or a TOML `<KEY> = <VALUE>` pair overriding a specific configuration
|
||||
option
|
||||
|
||||
It looks like you were trying to pass a path to a configuration file.
|
||||
The path `foo.toml` does not exist
|
||||
|
||||
For more information, try '--help'.
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn config_override_rejected_if_invalid_toml() {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", "foo = bar", "."]), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: invalid value 'foo = bar' for '--config <CONFIG_OPTION>'
|
||||
|
||||
tip: A `--config` flag must either be a path to a `.toml` configuration file
|
||||
or a TOML `<KEY> = <VALUE>` pair overriding a specific configuration
|
||||
option
|
||||
|
||||
The supplied argument is not valid TOML:
|
||||
|
||||
TOML parse error at line 1, column 7
|
||||
|
|
||||
1 | foo = bar
|
||||
| ^
|
||||
invalid string
|
||||
expected `"`, `'`
|
||||
|
||||
For more information, try '--help'.
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn too_many_config_files() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_dot_toml = tempdir.path().join("ruff.toml");
|
||||
let ruff2_dot_toml = tempdir.path().join("ruff2.toml");
|
||||
fs::File::create(&ruff_dot_toml)?;
|
||||
fs::File::create(&ruff2_dot_toml)?;
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_dot_toml)
|
||||
.arg("--config")
|
||||
.arg(&ruff2_dot_toml)
|
||||
.arg("."), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
ruff failed
|
||||
Cause: You cannot specify more than one configuration file on the command line.
|
||||
|
||||
tip: remove either `--config=[TMP]/ruff.toml` or `--config=[TMP]/ruff2.toml`.
|
||||
For more information, try `--help`.
|
||||
|
||||
"###);
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extend_passed_via_config_argument() {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", "extend = 'foo.toml'", "."]), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: invalid value 'extend = 'foo.toml'' for '--config <CONFIG_OPTION>'
|
||||
|
||||
tip: Cannot include `extend` in a --config flag value
|
||||
|
||||
For more information, try '--help'.
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn config_file_and_isolated() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_dot_toml = tempdir.path().join("ruff.toml");
|
||||
fs::File::create(&ruff_dot_toml)?;
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_dot_toml)
|
||||
.arg("--isolated")
|
||||
.arg("."), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
ruff failed
|
||||
Cause: The argument `--config=[TMP]/ruff.toml` cannot be used with `--isolated`
|
||||
|
||||
tip: You cannot specify a configuration file and also specify `--isolated`,
|
||||
as `--isolated` causes ruff to ignore all configuration files.
|
||||
For more information, try `--help`.
|
||||
|
||||
"###);
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn config_override_via_cli() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
line-length = 100
|
||||
|
||||
[lint]
|
||||
select = ["I"]
|
||||
|
||||
[lint.isort]
|
||||
combine-as-imports = true
|
||||
"#,
|
||||
)?;
|
||||
let fixture = r#"
|
||||
from foo import (
|
||||
aaaaaaaaaaaaaaaaaaa,
|
||||
bbbbbbbbbbb as bbbbbbbbbbbbbbbb,
|
||||
cccccccccccccccc,
|
||||
ddddddddddd as ddddddddddddd,
|
||||
eeeeeeeeeeeeeee,
|
||||
ffffffffffff as ffffffffffffff,
|
||||
ggggggggggggg,
|
||||
hhhhhhh as hhhhhhhhhhh,
|
||||
iiiiiiiiiiiiii,
|
||||
jjjjjjjjjjjjj as jjjjjj,
|
||||
)
|
||||
|
||||
x = "longer_than_90_charactersssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss"
|
||||
"#;
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
.args(["--config", "line-length=90"])
|
||||
.args(["--config", "lint.extend-select=['E501', 'F841']"])
|
||||
.args(["--config", "lint.isort.combine-as-imports = false"])
|
||||
.arg("-")
|
||||
.pass_stdin(fixture), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:2:1: I001 [*] Import block is un-sorted or un-formatted
|
||||
-:15:91: E501 Line too long (97 > 90)
|
||||
Found 2 errors.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn valid_toml_but_nonexistent_option_provided_via_config_argument() {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args([".", "--config", "extend-select=['F481']"]), // No such code as F481!
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: invalid value 'extend-select=['F481']' for '--config <CONFIG_OPTION>'
|
||||
|
||||
tip: A `--config` flag must either be a path to a `.toml` configuration file
|
||||
or a TOML `<KEY> = <VALUE>` pair overriding a specific configuration
|
||||
option
|
||||
|
||||
Could not parse the supplied argument as a `ruff.toml` configuration option:
|
||||
|
||||
Unknown rule selector: `F481`
|
||||
|
||||
For more information, try '--help'.
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn each_toml_option_requires_a_new_flag_1() {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
// commas can't be used to delimit different config overrides;
|
||||
// you need a new --config flag for each override
|
||||
.args([".", "--config", "extend-select=['F841'], line-length=90"]),
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: invalid value 'extend-select=['F841'], line-length=90' for '--config <CONFIG_OPTION>'
|
||||
|
||||
tip: A `--config` flag must either be a path to a `.toml` configuration file
|
||||
or a TOML `<KEY> = <VALUE>` pair overriding a specific configuration
|
||||
option
|
||||
|
||||
The supplied argument is not valid TOML:
|
||||
|
||||
TOML parse error at line 1, column 23
|
||||
|
|
||||
1 | extend-select=['F841'], line-length=90
|
||||
| ^
|
||||
expected newline, `#`
|
||||
|
||||
For more information, try '--help'.
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn each_toml_option_requires_a_new_flag_2() {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
// spaces *also* can't be used to delimit different config overrides;
|
||||
// you need a new --config flag for each override
|
||||
.args([".", "--config", "extend-select=['F841'] line-length=90"]),
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: invalid value 'extend-select=['F841'] line-length=90' for '--config <CONFIG_OPTION>'
|
||||
|
||||
tip: A `--config` flag must either be a path to a `.toml` configuration file
|
||||
or a TOML `<KEY> = <VALUE>` pair overriding a specific configuration
|
||||
option
|
||||
|
||||
The supplied argument is not valid TOML:
|
||||
|
||||
TOML parse error at line 1, column 24
|
||||
|
|
||||
1 | extend-select=['F841'] line-length=90
|
||||
| ^
|
||||
expected newline, `#`
|
||||
|
||||
For more information, try '--help'.
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn config_doubly_overridden_via_cli() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
line-length = 100
|
||||
|
||||
[lint]
|
||||
select=["E501"]
|
||||
"#,
|
||||
)?;
|
||||
let fixture = "x = 'longer_than_90_charactersssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss'";
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
// The --line-length flag takes priority over both the config file
|
||||
// and the `--config="line-length=110"` flag,
|
||||
// despite them both being specified after this flag on the command line:
|
||||
.args(["--line-length", "90"])
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
.args(["--config", "line-length=110"])
|
||||
.arg("-")
|
||||
.pass_stdin(fixture), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:91: E501 Line too long (97 > 90)
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn complex_config_setting_overridden_via_cli() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(&ruff_toml, "lint.select = ['N801']")?;
|
||||
let fixture = "class violates_n801: pass";
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
.args(["--config", "lint.per-file-ignores = {'generated.py' = ['N801']}"])
|
||||
.args(["--stdin-filename", "generated.py"])
|
||||
.arg("-")
|
||||
.pass_stdin(fixture), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deprecated_config_option_overridden_via_cli() {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", "select=['N801']", "-"])
|
||||
.pass_stdin("class lowercase: ..."),
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:7: N801 Class name `lowercase` should use CapWords convention
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in your `--config` CLI arguments:
|
||||
- 'select' -> 'lint.select'
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extension() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
include = ["*.ipy"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
fs::write(
|
||||
tempdir.path().join("main.ipy"),
|
||||
r#"
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "ad6f36d9-4b7d-4562-8d00-f15a0f1fbb6d",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import os"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.12.0"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
.args(["--extension", "ipy:ipynb"])
|
||||
.arg("."), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
main.ipy:cell 1:1:8: F401 [*] `os` imported but unused
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,103 +0,0 @@
#![cfg(not(target_family = "wasm"))]

use std::path::Path;
use std::process::Command;
use std::str;

use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
const BIN_NAME: &str = "ruff";

#[cfg(not(target_os = "windows"))]
const TEST_FILTERS: &[(&str, &str)] = &[(".*/resources/test/fixtures/", "[BASEPATH]/")];
#[cfg(target_os = "windows")]
const TEST_FILTERS: &[(&str, &str)] = &[
    (r".*\\resources\\test\\fixtures\\", "[BASEPATH]\\"),
    (r"\\", "/"),
];

#[test]
fn check_project_include_defaults() {
    // Defaults to checking the current working directory
    //
    // The test directory includes:
    // - A pyproject.toml which specifies an include
    // - A nested pyproject.toml which has a Ruff section
    //
    // The nested project should all be checked instead of respecting the parent includes

    insta::with_settings!({
        filters => TEST_FILTERS.to_vec()
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        [BASEPATH]/include-test/a.py
        [BASEPATH]/include-test/nested-project/e.py
        [BASEPATH]/include-test/nested-project/pyproject.toml
        [BASEPATH]/include-test/subdirectory/c.py

        ----- stderr -----
        warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `nested-project/pyproject.toml`:
          - 'select' -> 'lint.select'
        "###);
    });
}

#[test]
fn check_project_respects_direct_paths() {
    // Given a direct path not included in the project `includes`, it should be checked

    insta::with_settings!({
        filters => TEST_FILTERS.to_vec()
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(["check", "--show-files", "b.py"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        [BASEPATH]/include-test/b.py

        ----- stderr -----
        "###);
    });
}

#[test]
fn check_project_respects_subdirectory_includes() {
    // Given a direct path to a subdirectory, the include should be respected

    insta::with_settings!({
        filters => TEST_FILTERS.to_vec()
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(["check", "--show-files", "subdirectory"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        [BASEPATH]/include-test/subdirectory/c.py

        ----- stderr -----
        "###);
    });
}

#[test]
fn check_project_from_project_subdirectory_respects_includes() {
    // Run from a project subdirectory, the include specified in the parent directory should be respected

    insta::with_settings!({
        filters => TEST_FILTERS.to_vec()
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test/subdirectory")), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        [BASEPATH]/include-test/subdirectory/c.py

        ----- stderr -----
        "###);
    });
}
@@ -1,33 +0,0 @@
|
||||
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
const BIN_NAME: &str = "ruff";
|
||||
|
||||
#[test]
|
||||
fn display_default_settings() {
|
||||
// Navigate from the crate directory to the workspace root.
|
||||
let base_path = Path::new(env!("CARGO_MANIFEST_DIR"))
|
||||
.parent()
|
||||
.unwrap()
|
||||
.parent()
|
||||
.unwrap();
|
||||
let base_path = base_path.to_string_lossy();
|
||||
|
||||
// Escape the backslashes for the regex.
|
||||
let base_path = regex::escape(&base_path);
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
let test_filters = &[(base_path.as_ref(), "[BASEPATH]")];
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
let test_filters = &[
|
||||
(base_path.as_ref(), "[BASEPATH]"),
|
||||
(r#"\\+(\w\w|\s|\.|")"#, "/$1"),
|
||||
];
|
||||
|
||||
insta::with_settings!({ filters => test_filters.to_vec() }, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["check", "--show-settings", "unformatted.py"]).current_dir(Path::new("./resources/test/fixtures")));
|
||||
});
|
||||
}
|
||||
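The `display_default_settings` test above escapes the absolute workspace path and registers it as an `insta` filter so the snapshot stays machine-independent. A rough standalone sketch of that substitution step is below; the `normalize_paths` helper and the example path are hypothetical, the `regex` crate is assumed, and the real tests hand these patterns to `insta::with_settings!` rather than applying them by hand.

```rust
use regex::Regex;

/// Rewrite the absolute workspace path to a stable placeholder so snapshot output
/// does not depend on where the repository is checked out.
fn normalize_paths(output: &str, base_path: &str) -> String {
    // `regex::escape` keeps `.`, `\`, and other path characters from being read as regex syntax.
    let pattern = Regex::new(&regex::escape(base_path)).expect("escaped path is a valid regex");
    pattern.replace_all(output, "[BASEPATH]").into_owned()
}

fn main() {
    let raw = r#"Settings path: "/home/user/ruff/pyproject.toml""#;
    assert_eq!(
        normalize_paths(raw, "/home/user/ruff"),
        r#"Settings path: "[BASEPATH]/pyproject.toml""#
    );
}
```

The Windows-only second filter in the test plays the same role for path separators, rewriting escaped backslashes to forward slashes so a single snapshot serves both platforms.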
@@ -1,367 +0,0 @@
|
||||
---
|
||||
source: crates/ruff/tests/show_settings.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--show-settings"
|
||||
- unformatted.py
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
Resolved settings for: "[BASEPATH]/crates/ruff/resources/test/fixtures/unformatted.py"
|
||||
Settings path: "[BASEPATH]/pyproject.toml"
|
||||
|
||||
# General Settings
|
||||
cache_dir = "[BASEPATH]/.ruff_cache"
|
||||
fix = false
|
||||
fix_only = false
|
||||
output_format = concise
|
||||
show_fixes = false
|
||||
unsafe_fixes = hint
|
||||
|
||||
# File Resolver Settings
|
||||
file_resolver.exclude = [
|
||||
".bzr",
|
||||
".direnv",
|
||||
".eggs",
|
||||
".git",
|
||||
".git-rewrite",
|
||||
".hg",
|
||||
".ipynb_checkpoints",
|
||||
".mypy_cache",
|
||||
".nox",
|
||||
".pants.d",
|
||||
".pyenv",
|
||||
".pytest_cache",
|
||||
".pytype",
|
||||
".ruff_cache",
|
||||
".svn",
|
||||
".tox",
|
||||
".venv",
|
||||
".vscode",
|
||||
"__pypackages__",
|
||||
"_build",
|
||||
"buck-out",
|
||||
"dist",
|
||||
"node_modules",
|
||||
"site-packages",
|
||||
"venv",
|
||||
]
|
||||
file_resolver.extend_exclude = [
|
||||
"crates/ruff_linter/resources/",
|
||||
"crates/ruff_python_formatter/resources/",
|
||||
]
|
||||
file_resolver.force_exclude = false
|
||||
file_resolver.include = [
|
||||
"*.py",
|
||||
"*.pyi",
|
||||
"**/pyproject.toml",
|
||||
]
|
||||
file_resolver.extend_include = []
|
||||
file_resolver.respect_gitignore = true
|
||||
file_resolver.project_root = "[BASEPATH]"
|
||||
|
||||
# Linter Settings
|
||||
linter.exclude = []
|
||||
linter.project_root = "[BASEPATH]"
|
||||
linter.rules.enabled = [
|
||||
MultipleImportsOnOneLine,
|
||||
ModuleImportNotAtTopOfFile,
|
||||
MultipleStatementsOnOneLineColon,
|
||||
MultipleStatementsOnOneLineSemicolon,
|
||||
UselessSemicolon,
|
||||
NoneComparison,
|
||||
TrueFalseComparison,
|
||||
NotInTest,
|
||||
NotIsTest,
|
||||
TypeComparison,
|
||||
BareExcept,
|
||||
LambdaAssignment,
|
||||
AmbiguousVariableName,
|
||||
AmbiguousClassName,
|
||||
AmbiguousFunctionName,
|
||||
IOError,
|
||||
SyntaxError,
|
||||
UnusedImport,
|
||||
ImportShadowedByLoopVar,
|
||||
UndefinedLocalWithImportStar,
|
||||
LateFutureImport,
|
||||
UndefinedLocalWithImportStarUsage,
|
||||
UndefinedLocalWithNestedImportStarUsage,
|
||||
FutureFeatureNotDefined,
|
||||
PercentFormatInvalidFormat,
|
||||
PercentFormatExpectedMapping,
|
||||
PercentFormatExpectedSequence,
|
||||
PercentFormatExtraNamedArguments,
|
||||
PercentFormatMissingArgument,
|
||||
PercentFormatMixedPositionalAndNamed,
|
||||
PercentFormatPositionalCountMismatch,
|
||||
PercentFormatStarRequiresSequence,
|
||||
PercentFormatUnsupportedFormatCharacter,
|
||||
StringDotFormatInvalidFormat,
|
||||
StringDotFormatExtraNamedArguments,
|
||||
StringDotFormatExtraPositionalArguments,
|
||||
StringDotFormatMissingArguments,
|
||||
StringDotFormatMixingAutomatic,
|
||||
FStringMissingPlaceholders,
|
||||
MultiValueRepeatedKeyLiteral,
|
||||
MultiValueRepeatedKeyVariable,
|
||||
ExpressionsInStarAssignment,
|
||||
MultipleStarredExpressions,
|
||||
AssertTuple,
|
||||
IsLiteral,
|
||||
InvalidPrintSyntax,
|
||||
IfTuple,
|
||||
BreakOutsideLoop,
|
||||
ContinueOutsideLoop,
|
||||
YieldOutsideFunction,
|
||||
ReturnOutsideFunction,
|
||||
DefaultExceptNotLast,
|
||||
ForwardAnnotationSyntaxError,
|
||||
RedefinedWhileUnused,
|
||||
UndefinedName,
|
||||
UndefinedExport,
|
||||
UndefinedLocal,
|
||||
UnusedVariable,
|
||||
UnusedAnnotation,
|
||||
RaiseNotImplemented,
|
||||
]
|
||||
linter.rules.should_fix = [
|
||||
MultipleImportsOnOneLine,
|
||||
ModuleImportNotAtTopOfFile,
|
||||
MultipleStatementsOnOneLineColon,
|
||||
MultipleStatementsOnOneLineSemicolon,
|
||||
UselessSemicolon,
|
||||
NoneComparison,
|
||||
TrueFalseComparison,
|
||||
NotInTest,
|
||||
NotIsTest,
|
||||
TypeComparison,
|
||||
BareExcept,
|
||||
LambdaAssignment,
|
||||
AmbiguousVariableName,
|
||||
AmbiguousClassName,
|
||||
AmbiguousFunctionName,
|
||||
IOError,
|
||||
SyntaxError,
|
||||
UnusedImport,
|
||||
ImportShadowedByLoopVar,
|
||||
UndefinedLocalWithImportStar,
|
||||
LateFutureImport,
|
||||
UndefinedLocalWithImportStarUsage,
|
||||
UndefinedLocalWithNestedImportStarUsage,
|
||||
FutureFeatureNotDefined,
|
||||
PercentFormatInvalidFormat,
|
||||
PercentFormatExpectedMapping,
|
||||
PercentFormatExpectedSequence,
|
||||
PercentFormatExtraNamedArguments,
|
||||
PercentFormatMissingArgument,
|
||||
PercentFormatMixedPositionalAndNamed,
|
||||
PercentFormatPositionalCountMismatch,
|
||||
PercentFormatStarRequiresSequence,
|
||||
PercentFormatUnsupportedFormatCharacter,
|
||||
StringDotFormatInvalidFormat,
|
||||
StringDotFormatExtraNamedArguments,
|
||||
StringDotFormatExtraPositionalArguments,
|
||||
StringDotFormatMissingArguments,
|
||||
StringDotFormatMixingAutomatic,
|
||||
FStringMissingPlaceholders,
|
||||
MultiValueRepeatedKeyLiteral,
|
||||
MultiValueRepeatedKeyVariable,
|
||||
ExpressionsInStarAssignment,
|
||||
MultipleStarredExpressions,
|
||||
AssertTuple,
|
||||
IsLiteral,
|
||||
InvalidPrintSyntax,
|
||||
IfTuple,
|
||||
BreakOutsideLoop,
|
||||
ContinueOutsideLoop,
|
||||
YieldOutsideFunction,
|
||||
ReturnOutsideFunction,
|
||||
DefaultExceptNotLast,
|
||||
ForwardAnnotationSyntaxError,
|
||||
RedefinedWhileUnused,
|
||||
UndefinedName,
|
||||
UndefinedExport,
|
||||
UndefinedLocal,
|
||||
UnusedVariable,
|
||||
UnusedAnnotation,
|
||||
RaiseNotImplemented,
|
||||
]
|
||||
linter.per_file_ignores = {}
|
||||
linter.safety_table.forced_safe = []
|
||||
linter.safety_table.forced_unsafe = []
|
||||
linter.target_version = Py37
|
||||
linter.preview = disabled
|
||||
linter.explicit_preview_rules = false
|
||||
linter.extension.mapping = {}
|
||||
linter.allowed_confusables = []
|
||||
linter.builtins = []
|
||||
linter.dummy_variable_rgx = ^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$
|
||||
linter.external = []
|
||||
linter.ignore_init_module_imports = false
|
||||
linter.logger_objects = []
|
||||
linter.namespace_packages = []
|
||||
linter.src = [
|
||||
"[BASEPATH]",
|
||||
]
|
||||
linter.tab_size = 4
|
||||
linter.line_length = 88
|
||||
linter.task_tags = [
|
||||
TODO,
|
||||
FIXME,
|
||||
XXX,
|
||||
]
|
||||
linter.typing_modules = []
|
||||
|
||||
# Linter Plugins
|
||||
linter.flake8_annotations.mypy_init_return = false
|
||||
linter.flake8_annotations.suppress_dummy_args = false
|
||||
linter.flake8_annotations.suppress_none_returning = false
|
||||
linter.flake8_annotations.allow_star_arg_any = false
|
||||
linter.flake8_annotations.ignore_fully_untyped = false
|
||||
linter.flake8_bandit.hardcoded_tmp_directory = [
|
||||
/tmp,
|
||||
/var/tmp,
|
||||
/dev/shm,
|
||||
]
|
||||
linter.flake8_bandit.check_typed_exception = false
|
||||
linter.flake8_bugbear.extend_immutable_calls = []
|
||||
linter.flake8_builtins.builtins_ignorelist = []
|
||||
linter.flake8_comprehensions.allow_dict_calls_with_keyword_arguments = false
|
||||
linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}(-\d{4})*
|
||||
linter.flake8_copyright.author = none
|
||||
linter.flake8_copyright.min_file_size = 0
|
||||
linter.flake8_errmsg.max_string_length = 0
|
||||
linter.flake8_gettext.functions_names = [
|
||||
_,
|
||||
gettext,
|
||||
ngettext,
|
||||
]
|
||||
linter.flake8_implicit_str_concat.allow_multiline = true
|
||||
linter.flake8_import_conventions.aliases = {"matplotlib": "mpl", "matplotlib.pyplot": "plt", "pandas": "pd", "seaborn": "sns", "tensorflow": "tf", "networkx": "nx", "plotly.express": "px", "polars": "pl", "numpy": "np", "panel": "pn", "pyarrow": "pa", "altair": "alt", "tkinter": "tk", "holoviews": "hv"}
|
||||
linter.flake8_import_conventions.banned_aliases = {}
|
||||
linter.flake8_import_conventions.banned_from = []
|
||||
linter.flake8_pytest_style.fixture_parentheses = true
|
||||
linter.flake8_pytest_style.parametrize_names_type = tuple
|
||||
linter.flake8_pytest_style.parametrize_values_type = list
|
||||
linter.flake8_pytest_style.parametrize_values_row_type = tuple
|
||||
linter.flake8_pytest_style.raises_require_match_for = [
|
||||
BaseException,
|
||||
Exception,
|
||||
ValueError,
|
||||
OSError,
|
||||
IOError,
|
||||
EnvironmentError,
|
||||
socket.error,
|
||||
]
|
||||
linter.flake8_pytest_style.raises_extend_require_match_for = []
|
||||
linter.flake8_pytest_style.mark_parentheses = true
|
||||
linter.flake8_quotes.inline_quotes = double
|
||||
linter.flake8_quotes.multiline_quotes = double
|
||||
linter.flake8_quotes.docstring_quotes = double
|
||||
linter.flake8_quotes.avoid_escape = true
|
||||
linter.flake8_self.ignore_names = [
|
||||
_make,
|
||||
_asdict,
|
||||
_replace,
|
||||
_fields,
|
||||
_field_defaults,
|
||||
_name_,
|
||||
_value_,
|
||||
]
|
||||
linter.flake8_tidy_imports.ban_relative_imports = "parents"
|
||||
linter.flake8_tidy_imports.banned_api = {}
|
||||
linter.flake8_tidy_imports.banned_module_level_imports = []
|
||||
linter.flake8_type_checking.strict = false
|
||||
linter.flake8_type_checking.exempt_modules = [
|
||||
typing,
|
||||
typing_extensions,
|
||||
]
|
||||
linter.flake8_type_checking.runtime_required_base_classes = []
|
||||
linter.flake8_type_checking.runtime_required_decorators = []
|
||||
linter.flake8_type_checking.quote_annotations = false
|
||||
linter.flake8_unused_arguments.ignore_variadic_names = false
|
||||
linter.isort.required_imports = []
|
||||
linter.isort.combine_as_imports = false
|
||||
linter.isort.force_single_line = false
|
||||
linter.isort.force_sort_within_sections = false
|
||||
linter.isort.detect_same_package = true
|
||||
linter.isort.case_sensitive = false
|
||||
linter.isort.force_wrap_aliases = false
|
||||
linter.isort.force_to_top = []
|
||||
linter.isort.known_modules = {}
|
||||
linter.isort.order_by_type = true
|
||||
linter.isort.relative_imports_order = furthest_to_closest
|
||||
linter.isort.single_line_exclusions = []
|
||||
linter.isort.split_on_trailing_comma = true
|
||||
linter.isort.classes = []
|
||||
linter.isort.constants = []
|
||||
linter.isort.variables = []
|
||||
linter.isort.no_lines_before = []
|
||||
linter.isort.lines_after_imports = -1
|
||||
linter.isort.lines_between_types = 0
|
||||
linter.isort.forced_separate = []
|
||||
linter.isort.section_order = [
|
||||
known { type = future },
|
||||
known { type = standard_library },
|
||||
known { type = third_party },
|
||||
known { type = first_party },
|
||||
known { type = local_folder },
|
||||
]
|
||||
linter.isort.no_sections = false
|
||||
linter.isort.from_first = false
|
||||
linter.isort.length_sort = false
|
||||
linter.isort.length_sort_straight = false
|
||||
linter.mccabe.max_complexity = 10
|
||||
linter.pep8_naming.ignore_names = [
|
||||
setUp,
|
||||
tearDown,
|
||||
setUpClass,
|
||||
tearDownClass,
|
||||
setUpModule,
|
||||
tearDownModule,
|
||||
asyncSetUp,
|
||||
asyncTearDown,
|
||||
setUpTestData,
|
||||
failureException,
|
||||
longMessage,
|
||||
maxDiff,
|
||||
]
|
||||
linter.pep8_naming.classmethod_decorators = []
|
||||
linter.pep8_naming.staticmethod_decorators = []
|
||||
linter.pycodestyle.max_line_length = 88
|
||||
linter.pycodestyle.max_doc_length = none
|
||||
linter.pycodestyle.ignore_overlong_task_comments = false
|
||||
linter.pyflakes.extend_generics = []
|
||||
linter.pylint.allow_magic_value_types = [
|
||||
str,
|
||||
bytes,
|
||||
]
|
||||
linter.pylint.allow_dunder_method_names = []
|
||||
linter.pylint.max_args = 5
|
||||
linter.pylint.max_positional_args = 5
|
||||
linter.pylint.max_returns = 6
|
||||
linter.pylint.max_bool_expr = 5
|
||||
linter.pylint.max_branches = 12
|
||||
linter.pylint.max_statements = 50
|
||||
linter.pylint.max_public_methods = 20
|
||||
linter.pylint.max_locals = 15
|
||||
linter.pyupgrade.keep_runtime_typing = false
|
||||
|
||||
# Formatter Settings
|
||||
formatter.exclude = []
|
||||
formatter.target_version = Py37
|
||||
formatter.preview = disabled
|
||||
formatter.line_width = 88
|
||||
formatter.line_ending = auto
|
||||
formatter.indent_style = space
|
||||
formatter.indent_width = 4
|
||||
formatter.quote_style = double
|
||||
formatter.magic_trailing_comma = respect
|
||||
formatter.docstring_code_format = disabled
|
||||
formatter.docstring_code_line_width = dynamic
|
||||
|
||||
----- stderr -----
|
||||
@@ -13,7 +13,6 @@ license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
bench = false
|
||||
doctest = false
|
||||
|
||||
[[bench]]
|
||||
name = "linter"
|
||||
@@ -32,29 +31,26 @@ name = "formatter"
|
||||
harness = false
|
||||
|
||||
[dependencies]
|
||||
once_cell = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
url = { workspace = true }
|
||||
ureq = { workspace = true }
|
||||
criterion = { workspace = true, default-features = false }
|
||||
codspeed-criterion-compat = { workspace = true, default-features = false, optional = true}
|
||||
once_cell.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
url = "2.3.1"
|
||||
ureq = "2.8.0"
|
||||
criterion = { version = "0.5.1", default-features = false }
|
||||
codspeed-criterion-compat = { version="2.3.0", default-features = false, optional = true}
|
||||
|
||||
[dev-dependencies]
|
||||
ruff_linter = { path = "../ruff_linter" }
|
||||
ruff_python_ast = { path = "../ruff_python_ast" }
|
||||
ruff_linter.path = "../ruff_linter"
|
||||
ruff_python_ast.path = "../ruff_python_ast"
|
||||
ruff_python_formatter = { path = "../ruff_python_formatter" }
|
||||
ruff_python_index = { path = "../ruff_python_index" }
|
||||
ruff_python_parser = { path = "../ruff_python_parser" }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[features]
|
||||
codspeed = ["codspeed-criterion-compat"]
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dev-dependencies]
|
||||
mimalloc = { workspace = true }
|
||||
mimalloc = "0.1.39"
|
||||
|
||||
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dev-dependencies]
|
||||
tikv-jemallocator = { workspace = true }
|
||||
tikv-jemallocator = "0.5.0"
|
||||
|
||||
@@ -1,16 +1,5 @@
# Ruff Benchmarks

The `ruff_benchmark` crate benchmarks the linter and the formatter on individual files:

```shell
# Run once on the "baseline".
cargo bench -p ruff_benchmark -- --save-baseline=main

# Compare against the "baseline".
cargo bench -p ruff_benchmark -- --baseline=main

# Run the lexer benchmarks.
cargo bench -p ruff_benchmark lexer -- --baseline=main
```
The `ruff_benchmark` crate benchmarks the linter and the formatter on individual files.

See [CONTRIBUTING.md](../../CONTRIBUTING.md) on how to use these benchmarks.

@@ -4,10 +4,10 @@ use ruff_benchmark::criterion::{
|
||||
criterion_group, criterion_main, BenchmarkId, Criterion, Throughput,
|
||||
};
|
||||
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
|
||||
use ruff_python_formatter::{format_module_ast, PreviewMode, PyFormatOptions};
|
||||
use ruff_python_formatter::{format_module_ast, PyFormatOptions};
|
||||
use ruff_python_index::CommentRangesBuilder;
|
||||
use ruff_python_parser::lexer::lex;
|
||||
use ruff_python_parser::{allocate_tokens_vec, parse_tokens, Mode};
|
||||
use ruff_python_parser::{parse_tokens, Mode};
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
#[global_allocator]
|
||||
@@ -52,7 +52,7 @@ fn benchmark_formatter(criterion: &mut Criterion) {
|
||||
BenchmarkId::from_parameter(case.name()),
|
||||
&case,
|
||||
|b, case| {
|
||||
let mut tokens = allocate_tokens_vec(case.code());
|
||||
let mut tokens = Vec::new();
|
||||
let mut comment_ranges = CommentRangesBuilder::default();
|
||||
|
||||
for result in lex(case.code(), Mode::Module) {
|
||||
@@ -65,12 +65,11 @@ fn benchmark_formatter(criterion: &mut Criterion) {
|
||||
let comment_ranges = comment_ranges.finish();
|
||||
|
||||
// Parse the AST.
|
||||
let module = parse_tokens(tokens, case.code(), Mode::Module)
|
||||
let module = parse_tokens(tokens, case.code(), Mode::Module, "<filename>")
|
||||
.expect("Input to be a valid python program");
|
||||
|
||||
b.iter(|| {
|
||||
let options = PyFormatOptions::from_extension(Path::new(case.name()))
|
||||
.with_preview(PreviewMode::Enabled);
|
||||
let options = PyFormatOptions::from_extension(Path::new(case.name()));
|
||||
let formatted =
|
||||
format_module_ast(&module, &comment_ranges, case.code(), options)
|
||||
.expect("Formatting to succeed");
|
||||
|
||||
@@ -2,15 +2,12 @@ use ruff_benchmark::criterion::{
|
||||
criterion_group, criterion_main, BenchmarkGroup, BenchmarkId, Criterion, Throughput,
|
||||
};
|
||||
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
|
||||
use ruff_linter::linter::{lint_only, ParseSource};
|
||||
use ruff_linter::rule_selector::PreviewOptions;
|
||||
use ruff_linter::linter::lint_only;
|
||||
use ruff_linter::settings::rule_table::RuleTable;
|
||||
use ruff_linter::settings::types::PreviewMode;
|
||||
use ruff_linter::settings::{flags, LinterSettings};
|
||||
use ruff_linter::source_kind::SourceKind;
|
||||
use ruff_linter::{registry::Rule, RuleSelector};
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_python_parser::{lexer, parse_program_tokens, Mode};
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
#[global_allocator]
|
||||
@@ -54,12 +51,7 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
|
||||
BenchmarkId::from_parameter(case.name()),
|
||||
&case,
|
||||
|b, case| {
|
||||
// Tokenize the source.
|
||||
let tokens: Vec<_> = lexer::lex(case.code(), Mode::Module).collect();
|
||||
|
||||
// Parse the source.
|
||||
let ast = parse_program_tokens(tokens.clone(), case.code(), false).unwrap();
|
||||
|
||||
let kind = SourceKind::Python(case.code().to_string());
|
||||
b.iter(|| {
|
||||
let path = case.path();
|
||||
let result = lint_only(
|
||||
@@ -67,12 +59,8 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
|
||||
None,
|
||||
settings,
|
||||
flags::Noqa::Enabled,
|
||||
&SourceKind::Python(case.code().to_string()),
|
||||
&kind,
|
||||
PySourceType::from(path.as_path()),
|
||||
ParseSource::Precomputed {
|
||||
tokens: &tokens,
|
||||
ast: &ast,
|
||||
},
|
||||
);
|
||||
|
||||
// Assert that file contains no parse errors
|
||||
@@ -90,21 +78,12 @@ fn benchmark_default_rules(criterion: &mut Criterion) {
|
||||
benchmark_linter(group, &LinterSettings::default());
|
||||
}
|
||||
|
||||
/// Disables IO based rules because they are a source of flakiness
|
||||
fn disable_io_rules(rules: &mut RuleTable) {
|
||||
fn benchmark_all_rules(criterion: &mut Criterion) {
|
||||
let mut rules: RuleTable = RuleSelector::All.all_rules().collect();
|
||||
|
||||
// Disable IO based rules because it is a source of flakiness
|
||||
rules.disable(Rule::ShebangMissingExecutableFile);
|
||||
rules.disable(Rule::ShebangNotExecutable);
|
||||
}
|
||||
|
||||
fn benchmark_all_rules(criterion: &mut Criterion) {
|
||||
let mut rules: RuleTable = RuleSelector::All
|
||||
.rules(&PreviewOptions {
|
||||
mode: PreviewMode::Disabled,
|
||||
require_explicit: false,
|
||||
})
|
||||
.collect();
|
||||
|
||||
disable_io_rules(&mut rules);
|
||||
|
||||
let settings = LinterSettings {
|
||||
rules,
|
||||
@@ -115,22 +94,6 @@ fn benchmark_all_rules(criterion: &mut Criterion) {
|
||||
benchmark_linter(group, &settings);
|
||||
}
|
||||
|
||||
fn benchmark_preview_rules(criterion: &mut Criterion) {
|
||||
let mut rules: RuleTable = RuleSelector::All.all_rules().collect();
|
||||
|
||||
disable_io_rules(&mut rules);
|
||||
|
||||
let settings = LinterSettings {
|
||||
rules,
|
||||
preview: PreviewMode::Enabled,
|
||||
..LinterSettings::default()
|
||||
};
|
||||
|
||||
let group = criterion.benchmark_group("linter/all-with-preview-rules");
|
||||
benchmark_linter(group, &settings);
|
||||
}
|
||||
|
||||
criterion_group!(default_rules, benchmark_default_rules);
|
||||
criterion_group!(all_rules, benchmark_all_rules);
|
||||
criterion_group!(preview_rules, benchmark_preview_rules);
|
||||
criterion_main!(default_rules, all_rules, preview_rules);
|
||||
criterion_main!(default_rules, all_rules);
|
||||
|
||||
@@ -60,7 +60,7 @@ fn benchmark_parser(criterion: &mut Criterion<WallTime>) {
|
||||
&case,
|
||||
|b, case| {
|
||||
b.iter(|| {
|
||||
let parsed = parse_suite(case.code()).unwrap();
|
||||
let parsed = parse_suite(case.code(), case.name()).unwrap();
|
||||
|
||||
let mut visitor = CountVisitor { count: 0 };
|
||||
visitor.visit_body(&parsed);
|
||||
|
||||
@@ -16,10 +16,7 @@ glob = { workspace = true }
|
||||
globset = { workspace = true }
|
||||
regex = { workspace = true }
|
||||
filetime = { workspace = true }
|
||||
seahash = { workspace = true }
|
||||
seahash = "4.1.0"
|
||||
|
||||
[dev-dependencies]
|
||||
ruff_macros = { path = "../ruff_macros" }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff"
|
||||
version = "0.3.0"
|
||||
name = "ruff_cli"
|
||||
version = "0.1.3"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
@@ -10,67 +10,69 @@ documentation = { workspace = true }
|
||||
repository = { workspace = true }
|
||||
license = { workspace = true }
|
||||
readme = "../../README.md"
|
||||
default-run = "ruff"
|
||||
|
||||
[[bin]]
|
||||
name = "ruff"
|
||||
|
||||
[dependencies]
|
||||
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
|
||||
ruff_cache = { path = "../ruff_cache" }
|
||||
ruff_diagnostics = { path = "../ruff_diagnostics" }
|
||||
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
|
||||
ruff_macros = { path = "../ruff_macros" }
|
||||
ruff_formatter = { path = "../ruff_formatter" }
|
||||
ruff_notebook = { path = "../ruff_notebook" }
|
||||
ruff_macros = { path = "../ruff_macros" }
|
||||
ruff_python_ast = { path = "../ruff_python_ast" }
|
||||
ruff_python_formatter = { path = "../ruff_python_formatter" }
|
||||
ruff_source_file = { path = "../ruff_source_file" }
|
||||
ruff_text_size = { path = "../ruff_text_size" }
|
||||
ruff_python_trivia = { path = "../ruff_python_trivia" }
|
||||
ruff_workspace = { path = "../ruff_workspace" }
|
||||
ruff_text_size = { path = "../ruff_text_size" }
|
||||
|
||||
annotate-snippets = { version = "0.9.1", features = ["color"] }
|
||||
anyhow = { workspace = true }
|
||||
argfile = { workspace = true }
|
||||
bincode = { workspace = true }
|
||||
argfile = { version = "0.1.6" }
|
||||
bincode = { version = "1.3.3" }
|
||||
bitflags = { workspace = true }
|
||||
cachedir = { workspace = true }
|
||||
cachedir = { version = "0.3.0" }
|
||||
chrono = { workspace = true }
|
||||
clap = { workspace = true, features = ["derive", "env", "wrap_help"] }
|
||||
clap_complete_command = { workspace = true }
|
||||
clearscreen = { workspace = true }
|
||||
clap = { workspace = true, features = ["derive", "env"] }
|
||||
clap_complete_command = { version = "0.5.1" }
|
||||
clearscreen = { version = "2.0.0" }
|
||||
colored = { workspace = true }
|
||||
filetime = { workspace = true }
|
||||
glob = { workspace = true }
|
||||
ignore = { workspace = true }
|
||||
is-macro = { workspace = true }
|
||||
itertools = { workspace = true }
|
||||
log = { workspace = true }
|
||||
notify = { workspace = true }
|
||||
notify = { version = "6.1.1" }
|
||||
path-absolutize = { workspace = true, features = ["once_cell_cache"] }
|
||||
rayon = { workspace = true }
|
||||
rayon = { version = "1.8.0" }
|
||||
regex = { workspace = true }
|
||||
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
|
||||
rustc-hash = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
shellexpand = { workspace = true }
|
||||
similar = { workspace = true }
|
||||
strum = { workspace = true, features = [] }
|
||||
tempfile = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
tracing = { workspace = true, features = ["log"] }
|
||||
walkdir = { workspace = true }
|
||||
wild = { workspace = true }
|
||||
walkdir = { version = "2.3.2" }
|
||||
wild = { version = "2" }
|
||||
|
||||
[dev-dependencies]
|
||||
# Enable test rules during development
|
||||
ruff_linter = { path = "../ruff_linter", features = ["clap", "test-rules"] }
|
||||
assert_cmd = { workspace = true }
|
||||
assert_cmd = { version = "2.0.8" }
|
||||
# Avoid writing colored snapshots when running tests from the terminal
|
||||
colored = { workspace = true, features = ["no-color"]}
|
||||
insta = { workspace = true, features = ["filters", "json"] }
|
||||
insta-cmd = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
insta-cmd = { version = "0.4.0" }
|
||||
tempfile = "3.8.1"
|
||||
test-case = { workspace = true }
|
||||
ureq = { version = "2.8.0", features = [] }
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
mimalloc = { workspace = true }
|
||||
mimalloc = "0.1.39"
|
||||
|
||||
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
|
||||
tikv-jemallocator = { workspace = true }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
tikv-jemallocator = "0.5.0"
|
||||
752
crates/ruff_cli/src/args.rs
Normal file
@@ -0,0 +1,752 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use clap::{command, Parser};
|
||||
use regex::Regex;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_linter::line_width::LineLength;
|
||||
use ruff_linter::logging::LogLevel;
|
||||
use ruff_linter::registry::Rule;
|
||||
use ruff_linter::settings::types::{
|
||||
FilePattern, PatternPrefixPair, PerFileIgnore, PreviewMode, PythonVersion, SerializationFormat,
|
||||
UnsafeFixes,
|
||||
};
|
||||
use ruff_linter::{RuleParser, RuleSelector, RuleSelectorParser};
|
||||
use ruff_workspace::configuration::{Configuration, RuleSelection};
|
||||
use ruff_workspace::options::PycodestyleOptions;
|
||||
use ruff_workspace::resolver::ConfigurationTransformer;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[command(
|
||||
author,
|
||||
name = "ruff",
|
||||
about = "Ruff: An extremely fast Python linter.",
|
||||
after_help = "For help with a specific command, see: `ruff help <command>`."
|
||||
)]
|
||||
#[command(version)]
|
||||
pub struct Args {
|
||||
#[command(subcommand)]
|
||||
pub command: Command,
|
||||
#[clap(flatten)]
|
||||
pub log_level_args: LogLevelArgs,
|
||||
}
|
||||
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
pub enum Command {
|
||||
/// Run Ruff on the given files or directories (default).
|
||||
Check(CheckCommand),
|
||||
/// Explain a rule (or all rules).
|
||||
#[clap(alias = "--explain")]
|
||||
#[command(group = clap::ArgGroup::new("selector").multiple(false).required(true))]
|
||||
Rule {
|
||||
/// Rule to explain
|
||||
#[arg(value_parser=RuleParser, group = "selector", hide_possible_values = true)]
|
||||
rule: Option<Rule>,
|
||||
|
||||
/// Explain all rules
|
||||
#[arg(long, conflicts_with = "rule", group = "selector")]
|
||||
all: bool,
|
||||
|
||||
/// Output format
|
||||
#[arg(long, value_enum, default_value = "text")]
|
||||
output_format: HelpFormat,
|
||||
|
||||
/// Output format (Deprecated: Use `--output-format` instead).
|
||||
#[arg(long, value_enum, conflicts_with = "output_format", hide = true)]
|
||||
format: Option<HelpFormat>,
|
||||
},
|
||||
/// List or describe the available configuration options.
|
||||
Config { option: Option<String> },
|
||||
/// List all supported upstream linters.
|
||||
Linter {
|
||||
/// Output format
|
||||
#[arg(long, value_enum, default_value = "text")]
|
||||
output_format: HelpFormat,
|
||||
|
||||
/// Output format (Deprecated: Use `--output-format` instead).
|
||||
#[arg(long, value_enum, conflicts_with = "output_format", hide = true)]
|
||||
format: Option<HelpFormat>,
|
||||
},
|
||||
/// Clear any caches in the current directory and any subdirectories.
|
||||
#[clap(alias = "--clean")]
|
||||
Clean,
|
||||
/// Generate shell completion.
|
||||
#[clap(alias = "--generate-shell-completion", hide = true)]
|
||||
GenerateShellCompletion { shell: clap_complete_command::Shell },
|
||||
/// Run the Ruff formatter on the given files or directories.
|
||||
Format(FormatCommand),
|
||||
/// Display Ruff's version
|
||||
Version {
|
||||
#[arg(long, value_enum, default_value = "text")]
|
||||
output_format: HelpFormat,
|
||||
},
|
||||
}
|
||||
|
||||
// The `Parser` derive is for ruff_dev, for ruff_cli `Args` would be sufficient
|
||||
#[derive(Clone, Debug, clap::Parser)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct CheckCommand {
|
||||
/// List of files or directories to check.
|
||||
pub files: Vec<PathBuf>,
|
||||
/// Apply fixes to resolve lint violations.
|
||||
/// Use `--no-fix` to disable or `--unsafe-fixes` to include unsafe fixes.
|
||||
#[arg(long, overrides_with("no_fix"))]
|
||||
fix: bool,
|
||||
#[clap(long, overrides_with("fix"), hide = true)]
|
||||
no_fix: bool,
|
||||
/// Include fixes that may not retain the original intent of the code.
|
||||
/// Use `--no-unsafe-fixes` to disable.
|
||||
#[arg(long, overrides_with("no_unsafe_fixes"))]
|
||||
unsafe_fixes: bool,
|
||||
#[arg(long, overrides_with("unsafe_fixes"), hide = true)]
|
||||
no_unsafe_fixes: bool,
|
||||
/// Show violations with source code.
|
||||
/// Use `--no-show-source` to disable.
|
||||
#[arg(long, overrides_with("no_show_source"))]
|
||||
show_source: bool,
|
||||
#[clap(long, overrides_with("show_source"), hide = true)]
|
||||
no_show_source: bool,
|
||||
/// Show an enumeration of all fixed lint violations.
|
||||
/// Use `--no-show-fixes` to disable.
|
||||
#[arg(long, overrides_with("no_show_fixes"))]
|
||||
show_fixes: bool,
|
||||
#[clap(long, overrides_with("show_fixes"), hide = true)]
|
||||
no_show_fixes: bool,
|
||||
/// Avoid writing any fixed files back; instead, output a diff for each changed file to stdout. Implies `--fix-only`.
|
||||
#[arg(long, conflicts_with = "show_fixes")]
|
||||
pub diff: bool,
|
||||
/// Run in watch mode by re-running whenever files change.
|
||||
#[arg(short, long)]
|
||||
pub watch: bool,
|
||||
/// Apply fixes to resolve lint violations, but don't report on leftover violations. Implies `--fix`.
|
||||
/// Use `--no-fix-only` to disable or `--unsafe-fixes` to include unsafe fixes.
|
||||
#[arg(long, overrides_with("no_fix_only"))]
|
||||
fix_only: bool,
|
||||
#[clap(long, overrides_with("fix_only"), hide = true)]
|
||||
no_fix_only: bool,
|
||||
/// Ignore any `# noqa` comments.
|
||||
#[arg(long)]
|
||||
ignore_noqa: bool,
|
||||
|
||||
/// Output serialization format for violations.
|
||||
#[arg(long, value_enum, env = "RUFF_OUTPUT_FORMAT")]
|
||||
pub output_format: Option<SerializationFormat>,
|
||||
|
||||
/// Specify file to write the linter output to (default: stdout).
|
||||
#[arg(short, long)]
|
||||
pub output_file: Option<PathBuf>,
|
||||
/// The minimum Python version that should be supported.
|
||||
#[arg(long, value_enum)]
|
||||
pub target_version: Option<PythonVersion>,
|
||||
/// Enable preview mode; checks will include unstable rules and fixes.
|
||||
/// Use `--no-preview` to disable.
|
||||
#[arg(long, overrides_with("no_preview"))]
|
||||
preview: bool,
|
||||
#[clap(long, overrides_with("preview"), hide = true)]
|
||||
no_preview: bool,
|
||||
/// Path to the `pyproject.toml` or `ruff.toml` file to use for
|
||||
/// configuration.
|
||||
#[arg(long, conflicts_with = "isolated")]
|
||||
pub config: Option<PathBuf>,
|
||||
/// Comma-separated list of rule codes to enable (or ALL, to enable all rules).
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
value_name = "RULE_CODE",
|
||||
value_parser = RuleSelectorParser,
|
||||
help_heading = "Rule selection",
|
||||
hide_possible_values = true
|
||||
)]
|
||||
pub select: Option<Vec<RuleSelector>>,
|
||||
/// Comma-separated list of rule codes to disable.
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
value_name = "RULE_CODE",
|
||||
value_parser = RuleSelectorParser,
|
||||
help_heading = "Rule selection",
|
||||
hide_possible_values = true
|
||||
)]
|
||||
pub ignore: Option<Vec<RuleSelector>>,
|
||||
/// Like --select, but adds additional rule codes on top of those already specified.
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
value_name = "RULE_CODE",
|
||||
value_parser = RuleSelectorParser,
|
||||
help_heading = "Rule selection",
|
||||
hide_possible_values = true
|
||||
)]
|
||||
pub extend_select: Option<Vec<RuleSelector>>,
|
||||
/// Like --ignore. (Deprecated: You can just use --ignore instead.)
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
value_name = "RULE_CODE",
|
||||
value_parser = RuleSelectorParser,
|
||||
help_heading = "Rule selection",
|
||||
hide = true
|
||||
)]
|
||||
pub extend_ignore: Option<Vec<RuleSelector>>,
|
||||
/// List of mappings from file pattern to code to exclude.
|
||||
#[arg(long, value_delimiter = ',', help_heading = "Rule selection")]
|
||||
pub per_file_ignores: Option<Vec<PatternPrefixPair>>,
|
||||
/// Like `--per-file-ignores`, but adds additional ignores on top of those already specified.
|
||||
#[arg(long, value_delimiter = ',', help_heading = "Rule selection")]
|
||||
pub extend_per_file_ignores: Option<Vec<PatternPrefixPair>>,
|
||||
/// List of paths, used to omit files and/or directories from analysis.
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
value_name = "FILE_PATTERN",
|
||||
help_heading = "File selection"
|
||||
)]
|
||||
pub exclude: Option<Vec<FilePattern>>,
|
||||
/// Like --exclude, but adds additional files and directories on top of those already excluded.
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
value_name = "FILE_PATTERN",
|
||||
help_heading = "File selection"
|
||||
)]
|
||||
pub extend_exclude: Option<Vec<FilePattern>>,
|
||||
/// List of rule codes to treat as eligible for fix. Only applicable when fix itself is enabled (e.g., via `--fix`).
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
value_name = "RULE_CODE",
|
||||
value_parser = RuleSelectorParser,
|
||||
help_heading = "Rule selection",
|
||||
hide_possible_values = true
|
||||
)]
|
||||
pub fixable: Option<Vec<RuleSelector>>,
|
||||
/// List of rule codes to treat as ineligible for fix. Only applicable when fix itself is enabled (e.g., via `--fix`).
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
value_name = "RULE_CODE",
|
||||
value_parser = RuleSelectorParser,
|
||||
help_heading = "Rule selection",
|
||||
hide_possible_values = true
|
||||
)]
|
||||
pub unfixable: Option<Vec<RuleSelector>>,
|
||||
/// Like --fixable, but adds additional rule codes on top of those already specified.
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
value_name = "RULE_CODE",
|
||||
value_parser = RuleSelectorParser,
|
||||
help_heading = "Rule selection",
|
||||
hide_possible_values = true
|
||||
)]
|
||||
pub extend_fixable: Option<Vec<RuleSelector>>,
|
||||
/// Like --unfixable. (Deprecated: You can just use --unfixable instead.)
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
value_name = "RULE_CODE",
|
||||
value_parser = RuleSelectorParser,
|
||||
help_heading = "Rule selection",
|
||||
hide = true
|
||||
)]
|
||||
pub extend_unfixable: Option<Vec<RuleSelector>>,
|
||||
/// Respect file exclusions via `.gitignore` and other standard ignore files.
|
||||
/// Use `--no-respect-gitignore` to disable.
|
||||
#[arg(
|
||||
long,
|
||||
overrides_with("no_respect_gitignore"),
|
||||
help_heading = "File selection"
|
||||
)]
|
||||
respect_gitignore: bool,
|
||||
#[clap(long, overrides_with("respect_gitignore"), hide = true)]
|
||||
no_respect_gitignore: bool,
|
||||
/// Enforce exclusions, even for paths passed to Ruff directly on the command-line.
|
||||
/// Use `--no-force-exclude` to disable.
|
||||
#[arg(
|
||||
long,
|
||||
overrides_with("no_force_exclude"),
|
||||
help_heading = "File selection"
|
||||
)]
|
||||
force_exclude: bool,
|
||||
#[clap(long, overrides_with("force_exclude"), hide = true)]
|
||||
no_force_exclude: bool,
|
||||
/// Set the line-length for length-associated rules and automatic formatting.
|
||||
#[arg(long, help_heading = "Rule configuration", hide = true)]
|
||||
pub line_length: Option<LineLength>,
|
||||
/// Regular expression matching the name of dummy variables.
|
||||
#[arg(long, help_heading = "Rule configuration", hide = true)]
|
||||
pub dummy_variable_rgx: Option<Regex>,
|
||||
/// Disable cache reads.
|
||||
#[arg(short, long, help_heading = "Miscellaneous")]
|
||||
pub no_cache: bool,
|
||||
/// Ignore all configuration files.
|
||||
#[arg(long, conflicts_with = "config", help_heading = "Miscellaneous")]
|
||||
pub isolated: bool,
|
||||
/// Path to the cache directory.
|
||||
#[arg(long, env = "RUFF_CACHE_DIR", help_heading = "Miscellaneous")]
|
||||
pub cache_dir: Option<PathBuf>,
|
||||
/// The name of the file when passing it through stdin.
|
||||
#[arg(long, help_heading = "Miscellaneous")]
|
||||
pub stdin_filename: Option<PathBuf>,
|
||||
/// Exit with status code "0", even upon detecting lint violations.
|
||||
#[arg(
|
||||
short,
|
||||
long,
|
||||
help_heading = "Miscellaneous",
|
||||
conflicts_with = "exit_non_zero_on_fix"
|
||||
)]
|
||||
pub exit_zero: bool,
|
||||
/// Exit with a non-zero status code if any files were modified via fix, even if no lint violations remain.
|
||||
#[arg(long, help_heading = "Miscellaneous", conflicts_with = "exit_zero")]
|
||||
pub exit_non_zero_on_fix: bool,
|
||||
/// Show counts for every rule with at least one violation.
|
||||
#[arg(
|
||||
long,
|
||||
// Unsupported default-command arguments.
|
||||
conflicts_with = "diff",
|
||||
conflicts_with = "show_source",
|
||||
conflicts_with = "watch",
|
||||
)]
|
||||
pub statistics: bool,
|
||||
/// Enable automatic additions of `noqa` directives to failing lines.
|
||||
#[arg(
|
||||
long,
|
||||
// conflicts_with = "add_noqa",
|
||||
conflicts_with = "show_files",
|
||||
conflicts_with = "show_settings",
|
||||
// Unsupported default-command arguments.
|
||||
conflicts_with = "ignore_noqa",
|
||||
conflicts_with = "statistics",
|
||||
conflicts_with = "stdin_filename",
|
||||
conflicts_with = "watch",
|
||||
conflicts_with = "fix",
|
||||
)]
|
||||
pub add_noqa: bool,
|
||||
/// See the files Ruff will be run against with the current settings.
|
||||
#[arg(
|
||||
long,
|
||||
// Fake subcommands.
|
||||
conflicts_with = "add_noqa",
|
||||
// conflicts_with = "show_files",
|
||||
conflicts_with = "show_settings",
|
||||
// Unsupported default-command arguments.
|
||||
conflicts_with = "ignore_noqa",
|
||||
conflicts_with = "statistics",
|
||||
conflicts_with = "stdin_filename",
|
||||
conflicts_with = "watch",
|
||||
)]
|
||||
pub show_files: bool,
|
||||
/// See the settings Ruff will use to lint a given Python file.
|
||||
#[arg(
|
||||
long,
|
||||
// Fake subcommands.
|
||||
conflicts_with = "add_noqa",
|
||||
conflicts_with = "show_files",
|
||||
// conflicts_with = "show_settings",
|
||||
// Unsupported default-command arguments.
|
||||
conflicts_with = "ignore_noqa",
|
||||
conflicts_with = "statistics",
|
||||
conflicts_with = "stdin_filename",
|
||||
conflicts_with = "watch",
|
||||
)]
|
||||
pub show_settings: bool,
|
||||
/// Dev-only argument to show fixes
|
||||
#[arg(long, hide = true)]
|
||||
pub ecosystem_ci: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, clap::Parser)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct FormatCommand {
|
||||
/// List of files or directories to format.
|
||||
pub files: Vec<PathBuf>,
|
||||
/// Avoid writing any formatted files back; instead, exit with a non-zero status code if any
|
||||
/// files would have been modified, and zero otherwise.
|
||||
#[arg(long)]
|
||||
pub check: bool,
|
||||
/// Avoid writing any formatted files back; instead, exit with a non-zero status code and the
|
||||
/// difference between the current file and what the formatted file would look like.
|
||||
#[arg(long)]
|
||||
pub diff: bool,
|
||||
/// Path to the `pyproject.toml` or `ruff.toml` file to use for configuration.
|
||||
#[arg(long, conflicts_with = "isolated")]
|
||||
pub config: Option<PathBuf>,
|
||||
|
||||
/// Disable cache reads.
|
||||
#[arg(short, long, help_heading = "Miscellaneous")]
|
||||
pub no_cache: bool,
|
||||
/// Path to the cache directory.
|
||||
#[arg(long, env = "RUFF_CACHE_DIR", help_heading = "Miscellaneous")]
|
||||
pub cache_dir: Option<PathBuf>,
|
||||
|
||||
/// Respect file exclusions via `.gitignore` and other standard ignore files.
|
||||
/// Use `--no-respect-gitignore` to disable.
|
||||
#[arg(
|
||||
long,
|
||||
overrides_with("no_respect_gitignore"),
|
||||
help_heading = "File selection"
|
||||
)]
|
||||
respect_gitignore: bool,
|
||||
#[clap(long, overrides_with("respect_gitignore"), hide = true)]
|
||||
no_respect_gitignore: bool,
|
||||
/// List of paths, used to omit files and/or directories from analysis.
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
value_name = "FILE_PATTERN",
|
||||
help_heading = "File selection"
|
||||
)]
|
||||
pub exclude: Option<Vec<FilePattern>>,
|
||||
|
||||
/// Enforce exclusions, even for paths passed to Ruff directly on the command-line.
|
||||
/// Use `--no-force-exclude` to disable.
|
||||
#[arg(
|
||||
long,
|
||||
overrides_with("no_force_exclude"),
|
||||
help_heading = "File selection"
|
||||
)]
|
||||
force_exclude: bool,
|
||||
#[clap(long, overrides_with("force_exclude"), hide = true)]
|
||||
no_force_exclude: bool,
|
||||
/// Set the line-length.
|
||||
#[arg(long, help_heading = "Format configuration")]
|
||||
pub line_length: Option<LineLength>,
|
||||
/// Ignore all configuration files.
|
||||
#[arg(long, conflicts_with = "config", help_heading = "Miscellaneous")]
|
||||
pub isolated: bool,
|
||||
/// The name of the file when passing it through stdin.
|
||||
#[arg(long, help_heading = "Miscellaneous")]
|
||||
pub stdin_filename: Option<PathBuf>,
|
||||
/// The minimum Python version that should be supported.
|
||||
#[arg(long, value_enum)]
|
||||
pub target_version: Option<PythonVersion>,
|
||||
/// Enable preview mode; enables unstable formatting.
|
||||
/// Use `--no-preview` to disable.
|
||||
#[arg(long, overrides_with("no_preview"))]
|
||||
preview: bool,
|
||||
#[clap(long, overrides_with("preview"), hide = true)]
|
||||
no_preview: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, clap::ValueEnum)]
|
||||
pub enum HelpFormat {
|
||||
Text,
|
||||
Json,
|
||||
}
|
||||
|
||||
#[allow(clippy::module_name_repetitions)]
|
||||
#[derive(Debug, clap::Args)]
|
||||
pub struct LogLevelArgs {
|
||||
/// Enable verbose logging.
|
||||
#[arg(
|
||||
short,
|
||||
long,
|
||||
global = true,
|
||||
group = "verbosity",
|
||||
help_heading = "Log levels"
|
||||
)]
|
||||
pub verbose: bool,
|
||||
/// Print diagnostics, but nothing else.
|
||||
#[arg(
|
||||
short,
|
||||
long,
|
||||
global = true,
|
||||
group = "verbosity",
|
||||
help_heading = "Log levels"
|
||||
)]
|
||||
pub quiet: bool,
|
||||
/// Disable all logging (but still exit with status code "1" upon detecting diagnostics).
|
||||
#[arg(
|
||||
short,
|
||||
long,
|
||||
global = true,
|
||||
group = "verbosity",
|
||||
help_heading = "Log levels"
|
||||
)]
|
||||
pub silent: bool,
|
||||
}
|
||||
|
||||
impl From<&LogLevelArgs> for LogLevel {
|
||||
fn from(args: &LogLevelArgs) -> Self {
|
||||
if args.silent {
|
||||
Self::Silent
|
||||
} else if args.quiet {
|
||||
Self::Quiet
|
||||
} else if args.verbose {
|
||||
Self::Verbose
|
||||
} else {
|
||||
Self::Default
|
||||
}
|
||||
}
|
||||
}
|
||||
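The `From<&LogLevelArgs>` impl above encodes a precedence among the global verbosity flags. A simplified, standalone model of that ordering follows; the local `Level` enum and `resolve` function are illustrative stand-ins, not ruff's types.

```rust
/// Simplified model of the precedence above: `--silent` wins over `--quiet`,
/// which wins over `--verbose`; with no flag set, the default level applies.
#[derive(Debug, PartialEq)]
enum Level {
    Silent,
    Quiet,
    Verbose,
    Default,
}

fn resolve(verbose: bool, quiet: bool, silent: bool) -> Level {
    if silent {
        Level::Silent
    } else if quiet {
        Level::Quiet
    } else if verbose {
        Level::Verbose
    } else {
        Level::Default
    }
}

fn main() {
    // Even if every flag were somehow set, the most restrictive level wins.
    assert_eq!(resolve(true, true, true), Level::Silent);
    assert_eq!(resolve(false, false, false), Level::Default);
}
```

Since the real flags all share the `verbosity` group, clap should already reject passing more than one at a time; the chain fixes the ordering regardless.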
|
||||
impl CheckCommand {
|
||||
/// Partition the CLI into command-line arguments and configuration
|
||||
/// overrides.
|
||||
pub fn partition(self) -> (CheckArguments, CliOverrides) {
|
||||
(
|
||||
CheckArguments {
|
||||
add_noqa: self.add_noqa,
|
||||
config: self.config,
|
||||
diff: self.diff,
|
||||
ecosystem_ci: self.ecosystem_ci,
|
||||
exit_non_zero_on_fix: self.exit_non_zero_on_fix,
|
||||
exit_zero: self.exit_zero,
|
||||
files: self.files,
|
||||
ignore_noqa: self.ignore_noqa,
|
||||
isolated: self.isolated,
|
||||
no_cache: self.no_cache,
|
||||
output_file: self.output_file,
|
||||
show_files: self.show_files,
|
||||
show_settings: self.show_settings,
|
||||
statistics: self.statistics,
|
||||
stdin_filename: self.stdin_filename,
|
||||
watch: self.watch,
|
||||
},
|
||||
CliOverrides {
|
||||
dummy_variable_rgx: self.dummy_variable_rgx,
|
||||
exclude: self.exclude,
|
||||
extend_exclude: self.extend_exclude,
|
||||
extend_fixable: self.extend_fixable,
|
||||
extend_ignore: self.extend_ignore,
|
||||
extend_per_file_ignores: self.extend_per_file_ignores,
|
||||
extend_select: self.extend_select,
|
||||
extend_unfixable: self.extend_unfixable,
|
||||
fixable: self.fixable,
|
||||
ignore: self.ignore,
|
||||
line_length: self.line_length,
|
||||
per_file_ignores: self.per_file_ignores,
|
||||
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
|
||||
respect_gitignore: resolve_bool_arg(
|
||||
self.respect_gitignore,
|
||||
self.no_respect_gitignore,
|
||||
),
|
||||
select: self.select,
|
||||
show_source: resolve_bool_arg(self.show_source, self.no_show_source),
|
||||
target_version: self.target_version,
|
||||
unfixable: self.unfixable,
|
||||
// TODO(charlie): Included in `pyproject.toml`, but not inherited.
|
||||
cache_dir: self.cache_dir,
|
||||
fix: resolve_bool_arg(self.fix, self.no_fix),
|
||||
fix_only: resolve_bool_arg(self.fix_only, self.no_fix_only),
|
||||
unsafe_fixes: resolve_bool_arg(self.unsafe_fixes, self.no_unsafe_fixes)
|
||||
.map(UnsafeFixes::from),
|
||||
force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
|
||||
output_format: self.output_format,
|
||||
show_fixes: resolve_bool_arg(self.show_fixes, self.no_show_fixes),
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl FormatCommand {
|
||||
/// Partition the CLI into command-line arguments and configuration
|
||||
/// overrides.
|
||||
pub fn partition(self) -> (FormatArguments, CliOverrides) {
|
||||
(
|
||||
FormatArguments {
|
||||
check: self.check,
|
||||
diff: self.diff,
|
||||
config: self.config,
|
||||
files: self.files,
|
||||
isolated: self.isolated,
|
||||
no_cache: self.no_cache,
|
||||
stdin_filename: self.stdin_filename,
|
||||
},
|
||||
CliOverrides {
|
||||
line_length: self.line_length,
|
||||
respect_gitignore: resolve_bool_arg(
|
||||
self.respect_gitignore,
|
||||
self.no_respect_gitignore,
|
||||
),
|
||||
exclude: self.exclude,
|
||||
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
|
||||
force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
|
||||
target_version: self.target_version,
|
||||
cache_dir: self.cache_dir,
|
||||
|
||||
// Unsupported on the formatter CLI, but required on `Overrides`.
|
||||
..CliOverrides::default()
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
|
||||
match (yes, no) {
|
||||
(true, false) => Some(true),
|
||||
(false, true) => Some(false),
|
||||
(false, false) => None,
|
||||
(..) => unreachable!("Clap should make this impossible"),
|
||||
}
|
||||
}
|
||||
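`resolve_bool_arg` above collapses each paired `--flag`/`--no-flag` switch into a tri-state `Option<bool>`, where `None` means the CLI expressed no preference, and clap's `overrides_with` keeps the `(true, true)` arm unreachable. A small self-contained sketch of that mapping, with an illustrative `main`:

```rust
/// Tri-state resolution for a paired `--flag` / `--no-flag` switch, mirroring
/// `resolve_bool_arg` above.
fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
    match (yes, no) {
        (true, false) => Some(true),   // `--flag` was passed
        (false, true) => Some(false),  // `--no-flag` was passed
        (false, false) => None,        // neither was passed: defer to the configuration file
        (true, true) => unreachable!("clap's `overrides_with` keeps only the last flag"),
    }
}

fn main() {
    assert_eq!(resolve_bool_arg(true, false), Some(true));
    // `None` means the CLI stays silent and the `pyproject.toml` / `ruff.toml` value wins.
    assert_eq!(resolve_bool_arg(false, false), None);
}
```

In `CliOverrides::transform` further down, a `Some` value then overwrites the corresponding configuration field, while `None` leaves the file-based setting untouched.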
|
||||
/// CLI settings that are distinct from configuration (commands, lists of files,
|
||||
/// etc.).
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct CheckArguments {
|
||||
pub add_noqa: bool,
|
||||
pub config: Option<PathBuf>,
|
||||
pub diff: bool,
|
||||
pub ecosystem_ci: bool,
|
||||
pub exit_non_zero_on_fix: bool,
|
||||
pub exit_zero: bool,
|
||||
pub files: Vec<PathBuf>,
|
||||
pub ignore_noqa: bool,
|
||||
pub isolated: bool,
|
||||
pub no_cache: bool,
|
||||
pub output_file: Option<PathBuf>,
|
||||
pub show_files: bool,
|
||||
pub show_settings: bool,
|
||||
pub statistics: bool,
|
||||
pub stdin_filename: Option<PathBuf>,
|
||||
pub watch: bool,
|
||||
}
|
||||
|
||||
/// CLI settings that are distinct from configuration (commands, lists of files,
|
||||
/// etc.).
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct FormatArguments {
|
||||
pub check: bool,
|
||||
pub no_cache: bool,
|
||||
pub diff: bool,
|
||||
pub config: Option<PathBuf>,
|
||||
pub files: Vec<PathBuf>,
|
||||
pub isolated: bool,
|
||||
pub stdin_filename: Option<PathBuf>,
|
||||
}
|
||||
|
||||
/// CLI settings that function as configuration overrides.
|
||||
#[derive(Clone, Default)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct CliOverrides {
|
||||
pub dummy_variable_rgx: Option<Regex>,
|
||||
pub exclude: Option<Vec<FilePattern>>,
|
||||
pub extend_exclude: Option<Vec<FilePattern>>,
|
||||
pub extend_fixable: Option<Vec<RuleSelector>>,
|
||||
pub extend_ignore: Option<Vec<RuleSelector>>,
|
||||
pub extend_select: Option<Vec<RuleSelector>>,
|
||||
pub extend_unfixable: Option<Vec<RuleSelector>>,
|
||||
pub fixable: Option<Vec<RuleSelector>>,
|
||||
pub ignore: Option<Vec<RuleSelector>>,
|
||||
pub line_length: Option<LineLength>,
|
||||
pub per_file_ignores: Option<Vec<PatternPrefixPair>>,
|
||||
pub extend_per_file_ignores: Option<Vec<PatternPrefixPair>>,
|
||||
pub preview: Option<PreviewMode>,
|
||||
pub respect_gitignore: Option<bool>,
|
||||
pub select: Option<Vec<RuleSelector>>,
|
||||
pub show_source: Option<bool>,
|
||||
pub target_version: Option<PythonVersion>,
|
||||
pub unfixable: Option<Vec<RuleSelector>>,
|
||||
// TODO(charlie): Captured in pyproject.toml as a default, but not part of `Settings`.
|
||||
pub cache_dir: Option<PathBuf>,
|
||||
pub fix: Option<bool>,
|
||||
pub fix_only: Option<bool>,
|
||||
pub unsafe_fixes: Option<UnsafeFixes>,
|
||||
pub force_exclude: Option<bool>,
|
||||
pub output_format: Option<SerializationFormat>,
|
||||
pub show_fixes: Option<bool>,
|
||||
}
|
||||
|
||||
impl ConfigurationTransformer for CliOverrides {
|
||||
fn transform(&self, mut config: Configuration) -> Configuration {
|
||||
if let Some(cache_dir) = &self.cache_dir {
|
||||
config.cache_dir = Some(cache_dir.clone());
|
||||
}
|
||||
if let Some(dummy_variable_rgx) = &self.dummy_variable_rgx {
|
||||
config.lint.dummy_variable_rgx = Some(dummy_variable_rgx.clone());
|
||||
}
|
||||
if let Some(exclude) = &self.exclude {
|
||||
config.exclude = Some(exclude.clone());
|
||||
}
|
||||
if let Some(extend_exclude) = &self.extend_exclude {
|
||||
config.extend_exclude.extend(extend_exclude.clone());
|
||||
}
|
||||
if let Some(extend_per_file_ignores) = &self.extend_per_file_ignores {
|
||||
config
|
||||
.lint
|
||||
.extend_per_file_ignores
|
||||
.extend(collect_per_file_ignores(extend_per_file_ignores.clone()));
|
||||
}
|
||||
if let Some(fix) = &self.fix {
|
||||
config.fix = Some(*fix);
|
||||
}
|
||||
if let Some(fix_only) = &self.fix_only {
|
||||
config.fix_only = Some(*fix_only);
|
||||
}
|
||||
if self.unsafe_fixes.is_some() {
|
||||
config.unsafe_fixes = self.unsafe_fixes;
|
||||
}
|
||||
config.lint.rule_selections.push(RuleSelection {
|
||||
select: self.select.clone(),
|
||||
ignore: self
|
||||
.ignore
|
||||
.iter()
|
||||
.cloned()
|
||||
.chain(self.extend_ignore.iter().cloned())
|
||||
.flatten()
|
||||
.collect(),
|
||||
extend_select: self.extend_select.clone().unwrap_or_default(),
|
||||
fixable: self.fixable.clone(),
|
||||
unfixable: self
|
||||
.unfixable
|
||||
.iter()
|
||||
.cloned()
|
||||
.chain(self.extend_unfixable.iter().cloned())
|
||||
.flatten()
|
||||
.collect(),
|
||||
extend_fixable: self.extend_fixable.clone().unwrap_or_default(),
|
||||
});
|
||||
if let Some(output_format) = &self.output_format {
|
||||
config.output_format = Some(*output_format);
|
||||
}
|
||||
if let Some(force_exclude) = &self.force_exclude {
|
||||
config.force_exclude = Some(*force_exclude);
|
||||
}
|
||||
if let Some(line_length) = self.line_length {
|
||||
config.line_length = Some(line_length);
|
||||
config.lint.pycodestyle = Some(PycodestyleOptions {
|
||||
max_line_length: Some(line_length),
|
||||
..config.lint.pycodestyle.unwrap_or_default()
|
||||
});
|
||||
}
|
||||
if let Some(preview) = &self.preview {
|
||||
config.preview = Some(*preview);
|
||||
config.lint.preview = Some(*preview);
|
||||
config.format.preview = Some(*preview);
|
||||
}
|
||||
if let Some(per_file_ignores) = &self.per_file_ignores {
|
||||
config.lint.per_file_ignores = Some(collect_per_file_ignores(per_file_ignores.clone()));
|
||||
}
|
||||
if let Some(respect_gitignore) = &self.respect_gitignore {
|
||||
config.respect_gitignore = Some(*respect_gitignore);
|
||||
}
|
||||
if let Some(show_source) = &self.show_source {
|
||||
config.show_source = Some(*show_source);
|
||||
}
|
||||
if let Some(show_fixes) = &self.show_fixes {
|
||||
config.show_fixes = Some(*show_fixes);
|
||||
}
|
||||
if let Some(target_version) = &self.target_version {
|
||||
config.target_version = Some(*target_version);
|
||||
}
|
||||
|
||||
config
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a list of `PatternPrefixPair` structs to `PerFileIgnore`.
|
||||
pub fn collect_per_file_ignores(pairs: Vec<PatternPrefixPair>) -> Vec<PerFileIgnore> {
|
||||
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
|
||||
for pair in pairs {
|
||||
per_file_ignores
|
||||
.entry(pair.pattern)
|
||||
.or_default()
|
||||
.push(pair.prefix);
|
||||
}
|
||||
per_file_ignores
|
||||
.into_iter()
|
||||
.map(|(pattern, prefixes)| PerFileIgnore::new(pattern, &prefixes, None))
|
||||
.collect()
|
||||
}
|
||||
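`collect_per_file_ignores` above groups repeated `pattern:code` pairs so every code supplied for the same glob ends up under a single entry. Below is a simplified standalone model of that grouping, using plain strings in place of `PatternPrefixPair`/`RuleSelector`; the CLI invocation in the comment is illustrative only.

```rust
use std::collections::HashMap;

/// Group repeated `pattern:code` pairs so that every code supplied for the same glob
/// accumulates under a single entry, as `collect_per_file_ignores` does above.
fn group_ignores(pairs: Vec<(String, String)>) -> HashMap<String, Vec<String>> {
    let mut grouped: HashMap<String, Vec<String>> = HashMap::new();
    for (pattern, code) in pairs {
        grouped.entry(pattern).or_default().push(code);
    }
    grouped
}

fn main() {
    // Roughly what `--per-file-ignores "tests/*:S101,tests/*:D103"` would produce.
    let grouped = group_ignores(vec![
        ("tests/*".to_string(), "S101".to_string()),
        ("tests/*".to_string(), "D103".to_string()),
    ]);
    assert_eq!(grouped["tests/*"], vec!["S101", "D103"]);
}
```

Each grouped entry is then converted into a `PerFileIgnore` carrying the full list of selectors for that pattern.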
@@ -3,8 +3,8 @@ use std::process::ExitCode;

use clap::{Parser, Subcommand};
use colored::Colorize;

use ruff::args::{Args, Command};
use ruff::{run, ExitStatus};
use ruff_cli::args::{Args, Command};
use ruff_cli::{run, ExitStatus};

#[cfg(target_os = "windows")]
#[global_allocator]
@@ -27,42 +27,26 @@ pub fn main() -> ExitCode {
    let mut args =
        argfile::expand_args_from(args, argfile::parse_fromfile, argfile::PREFIX).unwrap();

    // We can't use `warn_user` here because logging isn't set up at this point
    // and we also don't know if the user runs ruff with quiet.
    // Keep the message and pass it to `run` that is responsible for emitting the warning.
    let deprecated_alias_warning = match args.get(1).and_then(|arg| arg.to_str()) {
        // Deprecated aliases that are handled by clap
        Some("--explain") => {
            Some("`ruff --explain <RULE>` is deprecated. Use `ruff rule <RULE>` instead.")
        }
        Some("--clean") => {
            Some("`ruff --clean` is deprecated. Use `ruff clean` instead.")
        }
        Some("--generate-shell-completion") => {
            Some("`ruff --generate-shell-completion <SHELL>` is deprecated. Use `ruff generate-shell-completion <SHELL>` instead.")
        }
        // Deprecated `ruff` alias to `ruff check`
        // Clap doesn't support default subcommands but we want to run `check` by
        // default for convenience and backwards-compatibility, so we just
        // preprocess the arguments accordingly before passing them to Clap.
        Some(arg) if !Command::has_subcommand(arg)
    // Clap doesn't support default subcommands but we want to run `check` by
    // default for convenience and backwards-compatibility, so we just
    // preprocess the arguments accordingly before passing them to Clap.
    if let Some(arg) = args.get(1) {
        if arg
            .to_str()
            .is_some_and(|arg| !Command::has_subcommand(rewrite_legacy_subcommand(arg)))
            && arg != "-h"
            && arg != "--help"
            && arg != "-V"
            && arg != "--version"
            && arg != "help" => {

        {
            args.insert(1, "check".into());
            Some("`ruff <path>` is deprecated. Use `ruff check <path>` instead.")
        }
        },
        _ => None
    };
            && arg != "help"
        {
            args.insert(1, "check".into());
        }
    }

    let args = Args::parse_from(args);

    match run(args, deprecated_alias_warning) {
    match run(args) {
        Ok(code) => code.into(),
        Err(err) => {
            #[allow(clippy::print_stderr)]
@@ -81,3 +65,12 @@ pub fn main() -> ExitCode {
        }
    }
}

fn rewrite_legacy_subcommand(cmd: &str) -> &str {
    match cmd {
        "--explain" => "rule",
        "--clean" => "clean",
        "--generate-shell-completion" => "generate-shell-completion",
        cmd => cmd,
    }
}
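The hunk above preprocesses the argument vector so that `ruff <path>` behaves like `ruff check <path>`, since clap has no notion of a default subcommand. A small, standalone sketch of that preprocessing idea follows; it is not part of the diff, uses only the standard library, and the hardcoded subcommand list is illustrative. The real change asks clap's derived `Command::has_subcommand` after mapping legacy flags with `rewrite_legacy_subcommand`.

use std::env;

// If the first CLI argument is neither a known subcommand nor a help/version
// flag, insert "check" so `prog <path>` behaves like `prog check <path>`.
fn main() {
    const SUBCOMMANDS: &[&str] = &[
        "check", "rule", "config", "linter", "clean", "format", "version", "help",
    ];

    let mut args: Vec<String> = env::args().collect();
    if let Some(first) = args.get(1) {
        let is_escape_hatch = matches!(first.as_str(), "-h" | "--help" | "-V" | "--version");
        if !is_escape_hatch && !SUBCOMMANDS.contains(&first.as_str()) {
            // Treat `prog <path>` as `prog check <path>`.
            args.insert(1, "check".to_string());
        }
    }

    println!("effective arguments: {args:?}");
}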
@@ -1,7 +1,7 @@
use std::fmt::Debug;
use std::fs::{self, File};
use std::hash::Hasher;
use std::io::{self, BufReader, Write};
use std::io::{self, BufReader, BufWriter, Write};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Mutex;
@@ -15,7 +15,6 @@ use rayon::iter::ParallelIterator;
use rayon::iter::{IntoParallelIterator, ParallelBridge};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use tempfile::NamedTempFile;

use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_diagnostics::{DiagnosticKind, Fix};
@@ -26,9 +25,10 @@ use ruff_notebook::NotebookIndex;
use ruff_python_ast::imports::ImportMap;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::resolver::Resolver;
use ruff_workspace::resolver::{PyprojectConfig, PyprojectDiscoveryStrategy, Resolver};
use ruff_workspace::Settings;

use crate::cache;
use crate::diagnostics::Diagnostics;

/// [`Path`] that is relative to the package root in [`PackageCache`].
@@ -86,7 +86,6 @@ pub(crate) struct Cache {
    changes: Mutex<Vec<Change>>,
    /// The "current" timestamp used as cache for the updates of
    /// [`FileCache::last_seen`]
    #[allow(clippy::struct_field_names)]
    last_seen_cache: u64,
}

@@ -166,29 +165,15 @@ impl Cache {
            return Ok(());
        }

        // Write the cache to a temporary file first and then rename it for an "atomic" write.
        // Protects against data loss if the process is killed during the write and races between different ruff
        // processes, resulting in a corrupted cache file. https://github.com/astral-sh/ruff/issues/8147#issuecomment-1943345964
        let mut temp_file =
            NamedTempFile::new_in(self.path.parent().expect("Write path must have a parent"))
                .context("Failed to create temporary file")?;

        // Serialize to in-memory buffer because hyperfine benchmark showed that it's faster than
        // using a `BufWriter` and our cache files are small enough that streaming isn't necessary.
        let serialized =
            bincode::serialize(&self.package).context("Failed to serialize cache data")?;
        temp_file
            .write_all(&serialized)
            .context("Failed to write serialized cache to temporary file.")?;

        temp_file.persist(&self.path).with_context(|| {
        let file = File::create(&self.path)
            .with_context(|| format!("Failed to create cache file '{}'", self.path.display()))?;
        let writer = BufWriter::new(file);
        bincode::serialize_into(writer, &self.package).with_context(|| {
            format!(
                "Failed to rename temporary cache file to {}",
                "Failed to serialise cache to file '{}'",
                self.path.display()
            )
        })?;

        Ok(())
        })
    }

    /// Applies the pending changes without storing the cache to disk.
@@ -457,7 +442,7 @@ pub(super) struct CacheMessage {
pub(crate) trait PackageCaches {
    fn get(&self, package_root: &Path) -> Option<&Cache>;

    fn persist(self) -> Result<()>;
    fn persist(self) -> anyhow::Result<()>;
}

impl<T> PackageCaches for Option<T>
@@ -483,17 +468,27 @@ pub(crate) struct PackageCacheMap<'a>(FxHashMap<&'a Path, Cache>);

impl<'a> PackageCacheMap<'a> {
    pub(crate) fn init(
        pyproject_config: &PyprojectConfig,
        package_roots: &FxHashMap<&'a Path, Option<&'a Path>>,
        resolver: &Resolver,
    ) -> Self {
        fn init_cache(path: &Path) {
            if let Err(e) = init(path) {
            if let Err(e) = cache::init(path) {
                error!("Failed to initialize cache at {}: {e:?}", path.display());
            }
        }

        for settings in resolver.settings() {
            init_cache(&settings.cache_dir);
        match pyproject_config.strategy {
            PyprojectDiscoveryStrategy::Fixed => {
                init_cache(&pyproject_config.settings.cache_dir);
            }
            PyprojectDiscoveryStrategy::Hierarchical => {
                for settings in
                    std::iter::once(&pyproject_config.settings).chain(resolver.settings())
                {
                    init_cache(&settings.cache_dir);
                }
            }
        }

        Self(
@@ -503,7 +498,7 @@ impl<'a> PackageCacheMap<'a> {
                .unique()
                .par_bridge()
                .map(|cache_root| {
                    let settings = resolver.resolve(cache_root);
                    let settings = resolver.resolve(cache_root, pyproject_config);
                    let cache = Cache::open(cache_root.to_path_buf(), settings);
                    (cache_root, cache)
                })
@@ -1065,7 +1060,6 @@ mod tests {
            &self.settings.formatter,
            PySourceType::Python,
            FormatMode::Write,
            None,
            Some(cache),
        )
    }
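The `Cache::persist` hunk above switches to a write-then-rename scheme: serialize into a temporary file in the cache's own directory, then rename it over the destination so a killed process or a racing ruff instance can never leave a half-written cache behind. A minimal, standalone sketch of that pattern follows; it is not part of the diff, it relies on the `tempfile` and `anyhow` crates already used there, and the function name is mine.

use std::io::Write;
use std::path::Path;

use anyhow::Context;
use tempfile::NamedTempFile;

// Write bytes to a temporary file next to `path`, then atomically rename it
// into place so readers never observe a partially written file.
fn write_atomically(path: &Path, bytes: &[u8]) -> anyhow::Result<()> {
    let dir = path
        .parent()
        .context("destination path must have a parent directory")?;

    // Created in the destination directory so the final rename stays on one filesystem.
    let mut temp_file = NamedTempFile::new_in(dir).context("failed to create temporary file")?;
    temp_file
        .write_all(bytes)
        .context("failed to write to temporary file")?;

    // `persist` renames the temporary file onto `path`, replacing any existing file.
    temp_file
        .persist(path)
        .with_context(|| format!("failed to rename temporary file to {}", path.display()))?;
    Ok(())
}

fn main() -> anyhow::Result<()> {
    write_atomically(Path::new("target-cache.bin"), b"cache contents")?;
    Ok(())
}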
@@ -12,17 +12,17 @@ use ruff_linter::warn_user_once;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};

use crate::args::ConfigArguments;
use crate::args::CliOverrides;

/// Add `noqa` directives to a collection of files.
pub(crate) fn add_noqa(
    files: &[PathBuf],
    pyproject_config: &PyprojectConfig,
    config_arguments: &ConfigArguments,
    overrides: &CliOverrides,
) -> Result<usize> {
    // Collect all the files to check.
    let start = Instant::now();
    let (paths, resolver) = python_files_in_path(files, pyproject_config, config_arguments)?;
    let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
    let duration = start.elapsed();
    debug!("Identified files to lint in: {:?}", duration);

@@ -38,6 +38,7 @@ pub(crate) fn add_noqa(
            .flatten()
            .map(ResolvedFile::path)
            .collect::<Vec<_>>(),
        pyproject_config,
    );

    let start = Instant::now();
@@ -56,7 +57,7 @@ pub(crate) fn add_noqa(
                .parent()
                .and_then(|parent| package_roots.get(parent))
                .and_then(|package| *package);
            let settings = resolver.resolve(path);
            let settings = resolver.resolve(path, pyproject_config);
            let source_kind = match SourceKind::from_path(path, source_type) {
                Ok(Some(source_kind)) => source_kind,
                Ok(None) => return None,
@@ -24,17 +24,16 @@ use ruff_workspace::resolver::{
    match_exclusion, python_files_in_path, PyprojectConfig, ResolvedFile,
};

use crate::args::ConfigArguments;
use crate::args::CliOverrides;
use crate::cache::{Cache, PackageCacheMap, PackageCaches};
use crate::diagnostics::Diagnostics;
use crate::panic::catch_unwind;

/// Run the linter over a collection of files.
#[allow(clippy::too_many_arguments)]
pub(crate) fn check(
    files: &[PathBuf],
    pyproject_config: &PyprojectConfig,
    config_arguments: &ConfigArguments,
    overrides: &CliOverrides,
    cache: flags::Cache,
    noqa: flags::Noqa,
    fix_mode: flags::FixMode,
@@ -42,7 +41,7 @@ pub(crate) fn check(
) -> Result<Diagnostics> {
    // Collect all the Python files to check.
    let start = Instant::now();
    let (paths, resolver) = python_files_in_path(files, pyproject_config, config_arguments)?;
    let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
    debug!("Identified files to lint in: {:?}", start.elapsed());

    if paths.is_empty() {
@@ -57,11 +56,16 @@ pub(crate) fn check(
            .flatten()
            .map(ResolvedFile::path)
            .collect::<Vec<_>>(),
        pyproject_config,
    );

    // Load the caches.
    let caches = if bool::from(cache) {
        Some(PackageCacheMap::init(&package_roots, &resolver))
        Some(PackageCacheMap::init(
            pyproject_config,
            &package_roots,
            &resolver,
        ))
    } else {
        None
    };
@@ -76,7 +80,7 @@ pub(crate) fn check(
                .and_then(|parent| package_roots.get(parent))
                .and_then(|package| *package);

            let settings = resolver.resolve(path);
            let settings = resolver.resolve(path, pyproject_config);

            if (settings.file_resolver.force_exclude || !resolved_file.is_root())
                && match_exclusion(
@@ -123,7 +127,7 @@ pub(crate) fn check(

            Some(result.unwrap_or_else(|(path, message)| {
                if let Some(path) = &path {
                    let settings = resolver.resolve(path);
                    let settings = resolver.resolve(path, pyproject_config);
                    if settings.linter.rules.enabled(Rule::IOError) {
                        let dummy =
                            SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();
@@ -180,7 +184,6 @@ pub(crate) fn check(

/// Wraps [`lint_path`](crate::diagnostics::lint_path) in a [`catch_unwind`](std::panic::catch_unwind) and emits
/// a diagnostic if the linting the file panics.
#[allow(clippy::too_many_arguments)]
fn lint_path(
    path: &Path,
    package: Option<&Path>,
@@ -197,12 +200,12 @@ fn lint_path(
    match result {
        Ok(inner) => inner,
        Err(error) => {
            let message = r"This indicates a bug in Ruff. If you could open an issue at:
            let message = r#"This indicates a bug in Ruff. If you could open an issue at:

https://github.com/astral-sh/ruff/issues/new?title=%5BLinter%20panic%5D

...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
";
"#;

            error!(
                "{}{}{} {message}\n{error}",
@@ -233,7 +236,7 @@ mod test {
    use ruff_workspace::resolver::{PyprojectConfig, PyprojectDiscoveryStrategy};
    use ruff_workspace::Settings;

    use crate::args::ConfigArguments;
    use crate::args::CliOverrides;

    use super::check;

@@ -272,7 +275,7 @@ mod test {
            // Notebooks are not included by default
            &[tempdir.path().to_path_buf(), notebook],
            &pyproject_config,
            &ConfigArguments::default(),
            &CliOverrides::default(),
            flags::Cache::Disabled,
            flags::Noqa::Disabled,
            flags::FixMode::Generate,
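The wrapper documented above runs each file's lint inside `catch_unwind` so that a panic while linting one file becomes a per-file error instead of aborting the whole run. A minimal, standalone sketch of that pattern follows; it is not part of the diff, and `check_file` is a stand-in for the real linting call.

// Run a per-file task inside `catch_unwind` so a panic in one file is
// reported for that file and the remaining files are still processed.
fn check_file(path: &str) -> usize {
    if path.ends_with("bad.py") {
        panic!("internal invariant violated while linting {path}");
    }
    path.len() // stand-in "diagnostic count"
}

fn main() {
    let files = ["a.py", "bad.py", "b.py"];
    for file in files {
        match std::panic::catch_unwind(|| check_file(file)) {
            Ok(count) => println!("{file}: {count} diagnostics"),
            Err(_) => eprintln!("{file}: linter panicked; this indicates a bug"),
        }
    }
}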
@@ -4,50 +4,44 @@ use anyhow::Result;

use ruff_linter::packaging;
use ruff_linter::settings::flags;
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, PyprojectConfig, Resolver};
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, PyprojectConfig};

use crate::args::ConfigArguments;
use crate::args::CliOverrides;
use crate::diagnostics::{lint_stdin, Diagnostics};
use crate::stdin::{parrot_stdin, read_from_stdin};
use crate::stdin::read_from_stdin;

/// Run the linter over a single file, read from `stdin`.
pub(crate) fn check_stdin(
    filename: Option<&Path>,
    pyproject_config: &PyprojectConfig,
    overrides: &ConfigArguments,
    overrides: &CliOverrides,
    noqa: flags::Noqa,
    fix_mode: flags::FixMode,
) -> Result<Diagnostics> {
    let mut resolver = Resolver::new(pyproject_config);

    if resolver.force_exclude() {
    if pyproject_config.settings.file_resolver.force_exclude {
        if let Some(filename) = filename {
            if !python_file_at_path(filename, &mut resolver, overrides)? {
                if fix_mode.is_apply() {
                    parrot_stdin()?;
                }
            if !python_file_at_path(filename, pyproject_config, overrides)? {
                return Ok(Diagnostics::default());
            }

            if filename.file_name().is_some_and(|name| {
                match_exclusion(filename, name, &resolver.base_settings().linter.exclude)
            }) {
                if fix_mode.is_apply() {
                    parrot_stdin()?;
                }
            let lint_settings = &pyproject_config.settings.linter;
            if filename
                .file_name()
                .is_some_and(|name| match_exclusion(filename, name, &lint_settings.exclude))
            {
                return Ok(Diagnostics::default());
            }
        }
    }
    let stdin = read_from_stdin()?;
    let package_root = filename.and_then(Path::parent).and_then(|path| {
        packaging::detect_package_root(path, &resolver.base_settings().linter.namespace_packages)
        packaging::detect_package_root(path, &pyproject_config.settings.linter.namespace_packages)
    });
    let stdin = read_from_stdin()?;
    let mut diagnostics = lint_stdin(
        filename,
        package_root,
        stdin,
        resolver.base_settings(),
        &pyproject_config.settings,
        noqa,
        fix_mode,
    )?;
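The hunk above calls `parrot_stdin` whenever a `--fix` run over stdin skips the file (for example because it is excluded), so the caller's editor buffer or pipeline is not replaced with empty output. The real helper lives in `crate::stdin` and is not shown in this diff; the following standalone sketch is an assumed equivalent of that idea.

use std::io::{self, Read, Write};

// Echo stdin back to stdout unchanged, so a skipped stdin-based fix run still
// hands the original contents back to whatever invoked the tool.
fn parrot_stdin() -> io::Result<()> {
    let mut buffer = String::new();
    io::stdin().read_to_string(&mut buffer)?;
    io::stdout().write_all(buffer.as_bytes())?;
    Ok(())
}

fn main() -> io::Result<()> {
    parrot_stdin()
}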
@@ -17,23 +17,24 @@ use tracing::debug;
|
||||
|
||||
use ruff_diagnostics::SourceMap;
|
||||
use ruff_linter::fs;
|
||||
use ruff_linter::logging::{DisplayParseError, LogLevel};
|
||||
use ruff_linter::logging::LogLevel;
|
||||
use ruff_linter::registry::Rule;
|
||||
use ruff_linter::rules::flake8_quotes::settings::Quote;
|
||||
use ruff_linter::source_kind::{SourceError, SourceKind};
|
||||
use ruff_linter::warn_user_once;
|
||||
use ruff_python_ast::{PySourceType, SourceType};
|
||||
use ruff_python_formatter::{format_module_source, format_range, FormatModuleError, QuoteStyle};
|
||||
use ruff_source_file::LineIndex;
|
||||
use ruff_python_formatter::{format_module_source, FormatModuleError, QuoteStyle};
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
use ruff_workspace::resolver::{match_exclusion, python_files_in_path, ResolvedFile, Resolver};
|
||||
use ruff_workspace::resolver::{
|
||||
match_exclusion, python_files_in_path, PyprojectConfig, ResolvedFile, Resolver,
|
||||
};
|
||||
use ruff_workspace::FormatterSettings;
|
||||
|
||||
use crate::args::{ConfigArguments, FormatArguments, FormatRange};
|
||||
use crate::args::{CliOverrides, FormatArguments};
|
||||
use crate::cache::{Cache, FileCacheKey, PackageCacheMap, PackageCaches};
|
||||
use crate::panic::{catch_unwind, PanicError};
|
||||
use crate::resolve::resolve;
|
||||
use crate::{resolve_default_files, ExitStatus};
|
||||
use crate::ExitStatus;
|
||||
|
||||
#[derive(Debug, Copy, Clone, is_macro::Is)]
|
||||
pub(crate) enum FormatMode {
|
||||
@@ -59,32 +60,25 @@ impl FormatMode {
|
||||
|
||||
/// Format a set of files, and return the exit status.
|
||||
pub(crate) fn format(
|
||||
cli: FormatArguments,
|
||||
config_arguments: &ConfigArguments,
|
||||
cli: &FormatArguments,
|
||||
overrides: &CliOverrides,
|
||||
log_level: LogLevel,
|
||||
) -> Result<ExitStatus> {
|
||||
let pyproject_config = resolve(
|
||||
cli.isolated,
|
||||
config_arguments,
|
||||
cli.config.as_deref(),
|
||||
overrides,
|
||||
cli.stdin_filename.as_deref(),
|
||||
)?;
|
||||
let mode = FormatMode::from_cli(&cli);
|
||||
let files = resolve_default_files(cli.files, false);
|
||||
let (paths, resolver) = python_files_in_path(&files, &pyproject_config, config_arguments)?;
|
||||
let mode = FormatMode::from_cli(cli);
|
||||
let (paths, resolver) = python_files_in_path(&cli.files, &pyproject_config, overrides)?;
|
||||
|
||||
if paths.is_empty() {
|
||||
warn_user_once!("No Python files found under the given path(s)");
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
|
||||
if cli.range.is_some() && paths.len() > 1 {
|
||||
return Err(anyhow::anyhow!(
|
||||
"The `--range` option is only supported when formatting a single file but the specified paths resolve to {} files.",
|
||||
paths.len()
|
||||
));
|
||||
}
|
||||
|
||||
warn_incompatible_formatter_settings(&resolver);
|
||||
warn_incompatible_formatter_settings(&pyproject_config, Some(&resolver));
|
||||
|
||||
// Discover the package root for each Python file.
|
||||
let package_roots = resolver.package_roots(
|
||||
@@ -93,6 +87,7 @@ pub(crate) fn format(
|
||||
.flatten()
|
||||
.map(ResolvedFile::path)
|
||||
.collect::<Vec<_>>(),
|
||||
&pyproject_config,
|
||||
);
|
||||
|
||||
let caches = if cli.no_cache {
|
||||
@@ -103,7 +98,11 @@ pub(crate) fn format(
|
||||
#[cfg(debug_assertions)]
|
||||
crate::warn_user!("Detected debug build without --no-cache.");
|
||||
|
||||
Some(PackageCacheMap::init(&package_roots, &resolver))
|
||||
Some(PackageCacheMap::init(
|
||||
&pyproject_config,
|
||||
&package_roots,
|
||||
&resolver,
|
||||
))
|
||||
};
|
||||
|
||||
let start = Instant::now();
|
||||
@@ -113,19 +112,13 @@ pub(crate) fn format(
|
||||
match entry {
|
||||
Ok(resolved_file) => {
|
||||
let path = resolved_file.path();
|
||||
let settings = resolver.resolve(path);
|
||||
|
||||
let source_type = match settings.formatter.extension.get(path) {
|
||||
None => match SourceType::from(path) {
|
||||
SourceType::Python(source_type) => source_type,
|
||||
SourceType::Toml(_) => {
|
||||
// Ignore any non-Python files.
|
||||
return None;
|
||||
}
|
||||
},
|
||||
Some(language) => PySourceType::from(language),
|
||||
let SourceType::Python(source_type) = SourceType::from(&path) else {
|
||||
// Ignore any non-Python files.
|
||||
return None;
|
||||
};
|
||||
|
||||
let settings = resolver.resolve(path, &pyproject_config);
|
||||
|
||||
// Ignore files that are excluded from formatting
|
||||
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
|
||||
&& match_exclusion(
|
||||
@@ -146,14 +139,7 @@ pub(crate) fn format(
|
||||
|
||||
Some(
|
||||
match catch_unwind(|| {
|
||||
format_path(
|
||||
path,
|
||||
&settings.formatter,
|
||||
source_type,
|
||||
mode,
|
||||
cli.range,
|
||||
cache,
|
||||
)
|
||||
format_path(path, &settings.formatter, source_type, mode, cache)
|
||||
}) {
|
||||
Ok(inner) => inner.map(|result| FormatPathResult {
|
||||
path: resolved_file.path().to_path_buf(),
|
||||
@@ -240,7 +226,6 @@ pub(crate) fn format_path(
|
||||
settings: &FormatterSettings,
|
||||
source_type: PySourceType,
|
||||
mode: FormatMode,
|
||||
range: Option<FormatRange>,
|
||||
cache: Option<&Cache>,
|
||||
) -> Result<FormatResult, FormatCommandError> {
|
||||
if let Some(cache) = cache {
|
||||
@@ -258,19 +243,15 @@ pub(crate) fn format_path(
|
||||
// Extract the sources from the file.
|
||||
let unformatted = match SourceKind::from_path(path, source_type) {
|
||||
Ok(Some(source_kind)) => source_kind,
|
||||
// Non-Python Jupyter notebook.
|
||||
// Non Python Jupyter notebook
|
||||
Ok(None) => return Ok(FormatResult::Skipped),
|
||||
Err(err) => {
|
||||
return Err(FormatCommandError::Read(Some(path.to_path_buf()), err));
|
||||
}
|
||||
};
|
||||
|
||||
// Don't write back to the cache if formatting a range.
|
||||
let cache = cache.filter(|_| range.is_none());
|
||||
|
||||
// Format the source.
|
||||
let format_result = match format_source(&unformatted, source_type, Some(path), settings, range)?
|
||||
{
|
||||
let format_result = match format_source(&unformatted, source_type, Some(path), settings)? {
|
||||
FormattedSource::Formatted(formatted) => match mode {
|
||||
FormatMode::Write => {
|
||||
let mut writer = File::create(path).map_err(|err| {
|
||||
@@ -338,43 +319,15 @@ pub(crate) fn format_source(
|
||||
source_type: PySourceType,
|
||||
path: Option<&Path>,
|
||||
settings: &FormatterSettings,
|
||||
range: Option<FormatRange>,
|
||||
) -> Result<FormattedSource, FormatCommandError> {
|
||||
match &source_kind {
|
||||
match source_kind {
|
||||
SourceKind::Python(unformatted) => {
|
||||
let options = settings.to_format_options(source_type, unformatted);
|
||||
|
||||
let formatted = if let Some(range) = range {
|
||||
let line_index = LineIndex::from_source_text(unformatted);
|
||||
let byte_range = range.to_text_range(unformatted, &line_index);
|
||||
format_range(unformatted, byte_range, options).map(|formatted_range| {
|
||||
let mut formatted = unformatted.to_string();
|
||||
formatted.replace_range(
|
||||
std::ops::Range::<usize>::from(formatted_range.source_range()),
|
||||
formatted_range.as_code(),
|
||||
);
|
||||
|
||||
formatted
|
||||
})
|
||||
} else {
|
||||
// Using `Printed::into_code` requires adding `ruff_formatter` as a direct dependency, and I suspect that Rust can optimize the closure away regardless.
|
||||
#[allow(clippy::redundant_closure_for_method_calls)]
|
||||
format_module_source(unformatted, options).map(|formatted| formatted.into_code())
|
||||
};
|
||||
|
||||
let formatted = formatted.map_err(|err| {
|
||||
if let FormatModuleError::ParseError(err) = err {
|
||||
DisplayParseError::from_source_kind(
|
||||
err,
|
||||
path.map(Path::to_path_buf),
|
||||
source_kind,
|
||||
)
|
||||
.into()
|
||||
} else {
|
||||
FormatCommandError::Format(path.map(Path::to_path_buf), err)
|
||||
}
|
||||
})?;
|
||||
let formatted = format_module_source(unformatted, options)
|
||||
.map_err(|err| FormatCommandError::Format(path.map(Path::to_path_buf), err))?;
|
||||
|
||||
let formatted = formatted.into_code();
|
||||
if formatted.len() == unformatted.len() && formatted == *unformatted {
|
||||
Ok(FormattedSource::Unchanged)
|
||||
} else {
|
||||
@@ -386,12 +339,6 @@ pub(crate) fn format_source(
|
||||
return Ok(FormattedSource::Unchanged);
|
||||
}
|
||||
|
||||
if range.is_some() {
|
||||
return Err(FormatCommandError::RangeFormatNotebook(
|
||||
path.map(Path::to_path_buf),
|
||||
));
|
||||
}
|
||||
|
||||
let options = settings.to_format_options(source_type, notebook.source_code());
|
||||
|
||||
let mut output: Option<String> = None;
|
||||
@@ -404,19 +351,8 @@ pub(crate) fn format_source(
|
||||
let unformatted = ¬ebook.source_code()[range];
|
||||
|
||||
// Format the cell.
|
||||
let formatted =
|
||||
format_module_source(unformatted, options.clone()).map_err(|err| {
|
||||
if let FormatModuleError::ParseError(err) = err {
|
||||
DisplayParseError::from_source_kind(
|
||||
err,
|
||||
path.map(Path::to_path_buf),
|
||||
source_kind,
|
||||
)
|
||||
.into()
|
||||
} else {
|
||||
FormatCommandError::Format(path.map(Path::to_path_buf), err)
|
||||
}
|
||||
})?;
|
||||
let formatted = format_module_source(unformatted, options.clone())
|
||||
.map_err(|err| FormatCommandError::Format(path.map(Path::to_path_buf), err))?;
|
||||
|
||||
// If the cell is unchanged, skip it.
|
||||
let formatted = formatted.as_code();
|
||||
@@ -471,13 +407,11 @@ pub(crate) fn format_source(
|
||||
pub(crate) enum FormatResult {
|
||||
/// The file was formatted.
|
||||
Formatted,
|
||||
|
||||
/// The file was formatted, [`SourceKind`] contains the formatted code
|
||||
Diff {
|
||||
unformatted: SourceKind,
|
||||
formatted: SourceKind,
|
||||
},
|
||||
|
||||
/// The file was unchanged, as the formatted contents matched the existing contents.
|
||||
Unchanged,
|
||||
|
||||
@@ -580,7 +514,7 @@ impl<'a> FormatResults<'a> {
|
||||
if changed > 0 && unchanged > 0 {
|
||||
writeln!(
|
||||
f,
|
||||
"{} file{} {}, {} file{} {}",
|
||||
"{} file{} {}, {} file{} left unchanged",
|
||||
changed,
|
||||
if changed == 1 { "" } else { "s" },
|
||||
match self.mode {
|
||||
@@ -589,10 +523,6 @@ impl<'a> FormatResults<'a> {
|
||||
},
|
||||
unchanged,
|
||||
if unchanged == 1 { "" } else { "s" },
|
||||
match self.mode {
|
||||
FormatMode::Write => "left unchanged",
|
||||
FormatMode::Check | FormatMode::Diff => "already formatted",
|
||||
},
|
||||
)
|
||||
} else if changed > 0 {
|
||||
writeln!(
|
||||
@@ -608,13 +538,9 @@ impl<'a> FormatResults<'a> {
|
||||
} else if unchanged > 0 {
|
||||
writeln!(
|
||||
f,
|
||||
"{} file{} {}",
|
||||
"{} file{} left unchanged",
|
||||
unchanged,
|
||||
if unchanged == 1 { "" } else { "s" },
|
||||
match self.mode {
|
||||
FormatMode::Write => "left unchanged",
|
||||
FormatMode::Check | FormatMode::Diff => "already formatted",
|
||||
},
|
||||
)
|
||||
} else {
|
||||
Ok(())
|
||||
@@ -626,13 +552,11 @@ impl<'a> FormatResults<'a> {
|
||||
#[derive(Error, Debug)]
|
||||
pub(crate) enum FormatCommandError {
|
||||
Ignore(#[from] ignore::Error),
|
||||
Parse(#[from] DisplayParseError),
|
||||
Panic(Option<PathBuf>, PanicError),
|
||||
Read(Option<PathBuf>, SourceError),
|
||||
Format(Option<PathBuf>, FormatModuleError),
|
||||
Write(Option<PathBuf>, SourceError),
|
||||
Diff(Option<PathBuf>, io::Error),
|
||||
RangeFormatNotebook(Option<PathBuf>),
|
||||
}
|
||||
|
||||
impl FormatCommandError {
|
||||
@@ -645,13 +569,11 @@ impl FormatCommandError {
|
||||
None
|
||||
}
|
||||
}
|
||||
Self::Parse(err) => err.path(),
|
||||
Self::Panic(path, _)
|
||||
| Self::Read(path, _)
|
||||
| Self::Format(path, _)
|
||||
| Self::Write(path, _)
|
||||
| Self::Diff(path, _)
|
||||
| Self::RangeFormatNotebook(path) => path.as_deref(),
|
||||
| Self::Diff(path, _) => path.as_deref(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -673,17 +595,13 @@ impl Display for FormatCommandError {
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"{header} {error}",
|
||||
header = "Encountered error:".bold(),
|
||||
error = err
|
||||
.io_error()
|
||||
"{} {}",
|
||||
"Encountered error:".bold(),
|
||||
err.io_error()
|
||||
.map_or_else(|| err.to_string(), std::string::ToString::to_string)
|
||||
)
|
||||
}
|
||||
}
|
||||
Self::Parse(err) => {
|
||||
write!(f, "{err}")
|
||||
}
|
||||
Self::Read(path, err) => {
|
||||
if let Some(path) = path {
|
||||
write!(
|
||||
@@ -694,7 +612,7 @@ impl Display for FormatCommandError {
|
||||
":".bold()
|
||||
)
|
||||
} else {
|
||||
write!(f, "{header} {err}", header = "Failed to read:".bold())
|
||||
write!(f, "{}{} {err}", "Failed to read".bold(), ":".bold())
|
||||
}
|
||||
}
|
||||
Self::Write(path, err) => {
|
||||
@@ -707,7 +625,7 @@ impl Display for FormatCommandError {
|
||||
":".bold()
|
||||
)
|
||||
} else {
|
||||
write!(f, "{header} {err}", header = "Failed to write:".bold())
|
||||
write!(f, "{}{} {err}", "Failed to write".bold(), ":".bold())
|
||||
}
|
||||
}
|
||||
Self::Format(path, err) => {
|
||||
@@ -720,7 +638,7 @@ impl Display for FormatCommandError {
|
||||
":".bold()
|
||||
)
|
||||
} else {
|
||||
write!(f, "{header} {err}", header = "Failed to format:".bold())
|
||||
write!(f, "{}{} {err}", "Failed to format".bold(), ":".bold())
|
||||
}
|
||||
}
|
||||
Self::Diff(path, err) => {
|
||||
@@ -735,35 +653,19 @@ impl Display for FormatCommandError {
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"{header} {err}",
|
||||
header = "Failed to generate diff:".bold(),
|
||||
)
|
||||
}
|
||||
}
|
||||
Self::RangeFormatNotebook(path) => {
|
||||
if let Some(path) = path {
|
||||
write!(
|
||||
f,
|
||||
"{header}{path}{colon} Range formatting isn't supported for notebooks.",
|
||||
header = "Failed to format ".bold(),
|
||||
path = fs::relativize_path(path).bold(),
|
||||
colon = ":".bold()
|
||||
)
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"{header} Range formatting isn't supported for notebooks",
|
||||
header = "Failed to format:".bold()
|
||||
"{}{} {err}",
|
||||
"Failed to generate diff".bold(),
|
||||
":".bold()
|
||||
)
|
||||
}
|
||||
}
|
||||
Self::Panic(path, err) => {
|
||||
let message = r"This indicates a bug in Ruff. If you could open an issue at:
|
||||
let message = r#"This indicates a bug in Ruff. If you could open an issue at:
|
||||
|
||||
https://github.com/astral-sh/ruff/issues/new?title=%5BFormatter%20panic%5D
|
||||
|
||||
...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
|
||||
";
|
||||
"#;
|
||||
if let Some(path) = path {
|
||||
write!(
|
||||
f,
|
||||
@@ -784,10 +686,15 @@ impl Display for FormatCommandError {
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
|
||||
pub(super) fn warn_incompatible_formatter_settings(
|
||||
pyproject_config: &PyprojectConfig,
|
||||
resolver: Option<&Resolver>,
|
||||
) {
|
||||
// First, collect all rules that are incompatible regardless of the linter-specific settings.
|
||||
let mut incompatible_rules = FxHashSet::default();
|
||||
for setting in resolver.settings() {
|
||||
for setting in std::iter::once(&pyproject_config.settings)
|
||||
.chain(resolver.iter().flat_map(|resolver| resolver.settings()))
|
||||
{
|
||||
for rule in [
|
||||
// The formatter might collapse implicit string concatenation on a single line.
|
||||
Rule::SingleLineImplicitStringConcatenation,
|
||||
@@ -816,7 +723,9 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
|
||||
}
|
||||
|
||||
// Next, validate settings-specific incompatibilities.
|
||||
for setting in resolver.settings() {
|
||||
for setting in std::iter::once(&pyproject_config.settings)
|
||||
.chain(resolver.iter().flat_map(|resolver| resolver.settings()))
|
||||
{
|
||||
// Validate all rules that rely on tab styles.
|
||||
if setting.linter.rules.enabled(Rule::TabIndentation)
|
||||
&& setting.formatter.indent_style.is_tab()
|
||||
@@ -6,72 +6,60 @@ use log::error;
|
||||
|
||||
use ruff_linter::source_kind::SourceKind;
|
||||
use ruff_python_ast::{PySourceType, SourceType};
|
||||
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, Resolver};
|
||||
use ruff_workspace::resolver::{match_exclusion, python_file_at_path};
|
||||
use ruff_workspace::FormatterSettings;
|
||||
|
||||
use crate::args::{ConfigArguments, FormatArguments, FormatRange};
|
||||
use crate::args::{CliOverrides, FormatArguments};
|
||||
use crate::commands::format::{
|
||||
format_source, warn_incompatible_formatter_settings, FormatCommandError, FormatMode,
|
||||
FormatResult, FormattedSource,
|
||||
};
|
||||
use crate::resolve::resolve;
|
||||
use crate::stdin::{parrot_stdin, read_from_stdin};
|
||||
use crate::stdin::read_from_stdin;
|
||||
use crate::ExitStatus;
|
||||
|
||||
/// Run the formatter over a single file, read from `stdin`.
|
||||
pub(crate) fn format_stdin(
|
||||
cli: &FormatArguments,
|
||||
config_arguments: &ConfigArguments,
|
||||
) -> Result<ExitStatus> {
|
||||
pub(crate) fn format_stdin(cli: &FormatArguments, overrides: &CliOverrides) -> Result<ExitStatus> {
|
||||
let pyproject_config = resolve(
|
||||
cli.isolated,
|
||||
config_arguments,
|
||||
cli.config.as_deref(),
|
||||
overrides,
|
||||
cli.stdin_filename.as_deref(),
|
||||
)?;
|
||||
|
||||
let mut resolver = Resolver::new(&pyproject_config);
|
||||
warn_incompatible_formatter_settings(&resolver);
|
||||
warn_incompatible_formatter_settings(&pyproject_config, None);
|
||||
|
||||
let mode = FormatMode::from_cli(cli);
|
||||
|
||||
if resolver.force_exclude() {
|
||||
if pyproject_config.settings.file_resolver.force_exclude {
|
||||
if let Some(filename) = cli.stdin_filename.as_deref() {
|
||||
if !python_file_at_path(filename, &mut resolver, config_arguments)? {
|
||||
if mode.is_write() {
|
||||
parrot_stdin()?;
|
||||
}
|
||||
if !python_file_at_path(filename, &pyproject_config, overrides)? {
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
|
||||
if filename.file_name().is_some_and(|name| {
|
||||
match_exclusion(filename, name, &resolver.base_settings().formatter.exclude)
|
||||
}) {
|
||||
if mode.is_write() {
|
||||
parrot_stdin()?;
|
||||
}
|
||||
let format_settings = &pyproject_config.settings.formatter;
|
||||
if filename
|
||||
.file_name()
|
||||
.is_some_and(|name| match_exclusion(filename, name, &format_settings.exclude))
|
||||
{
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let path = cli.stdin_filename.as_deref();
|
||||
let settings = &resolver.base_settings().formatter;
|
||||
|
||||
let source_type = match path.and_then(|path| settings.extension.get(path)) {
|
||||
None => match path.map(SourceType::from).unwrap_or_default() {
|
||||
SourceType::Python(source_type) => source_type,
|
||||
SourceType::Toml(_) => {
|
||||
if mode.is_write() {
|
||||
parrot_stdin()?;
|
||||
}
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
},
|
||||
Some(language) => PySourceType::from(language),
|
||||
let SourceType::Python(source_type) = path.map(SourceType::from).unwrap_or_default() else {
|
||||
return Ok(ExitStatus::Success);
|
||||
};
|
||||
|
||||
// Format the file.
|
||||
match format_source_code(path, cli.range, settings, source_type, mode) {
|
||||
match format_source_code(
|
||||
path,
|
||||
&pyproject_config.settings.formatter,
|
||||
source_type,
|
||||
mode,
|
||||
) {
|
||||
Ok(result) => match mode {
|
||||
FormatMode::Write => Ok(ExitStatus::Success),
|
||||
FormatMode::Check | FormatMode::Diff => {
|
||||
@@ -92,7 +80,6 @@ pub(crate) fn format_stdin(
|
||||
/// Format source code read from `stdin`.
|
||||
fn format_source_code(
|
||||
path: Option<&Path>,
|
||||
range: Option<FormatRange>,
|
||||
settings: &FormatterSettings,
|
||||
source_type: PySourceType,
|
||||
mode: FormatMode,
|
||||
@@ -110,7 +97,7 @@ fn format_source_code(
|
||||
};
|
||||
|
||||
// Format the source.
|
||||
let formatted = format_source(&source_kind, source_type, path, settings, range)?;
|
||||
let formatted = format_source(&source_kind, source_type, path, settings)?;
|
||||
|
||||
match &formatted {
|
||||
FormattedSource::Formatted(formatted) => match mode {
|
||||
@@ -18,7 +18,6 @@ struct Explanation<'a> {
|
||||
summary: &'a str,
|
||||
message_formats: &'a [&'a str],
|
||||
fix: String,
|
||||
#[allow(clippy::struct_field_names)]
|
||||
explanation: Option<&'a str>,
|
||||
preview: bool,
|
||||
}
|
||||
@@ -64,7 +63,7 @@ fn format_rule_text(rule: Rule) -> String {
|
||||
|
||||
if rule.is_preview() || rule.is_nursery() {
|
||||
output.push_str(
|
||||
r"This rule is in preview and is not stable. The `--preview` flag is required for use.",
|
||||
r#"This rule is in preview and is not stable. The `--preview` flag is required for use."#,
|
||||
);
|
||||
output.push('\n');
|
||||
output.push('\n');
|
||||
@@ -7,17 +7,17 @@ use itertools::Itertools;
|
||||
use ruff_linter::warn_user_once;
|
||||
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};
|
||||
|
||||
use crate::args::ConfigArguments;
|
||||
use crate::args::CliOverrides;
|
||||
|
||||
/// Show the list of files to be checked based on current settings.
|
||||
pub(crate) fn show_files(
|
||||
files: &[PathBuf],
|
||||
pyproject_config: &PyprojectConfig,
|
||||
config_arguments: &ConfigArguments,
|
||||
overrides: &CliOverrides,
|
||||
writer: &mut impl Write,
|
||||
) -> Result<()> {
|
||||
// Collect all files in the hierarchy.
|
||||
let (paths, _resolver) = python_files_in_path(files, pyproject_config, config_arguments)?;
|
||||
let (paths, _resolver) = python_files_in_path(files, pyproject_config, overrides)?;
|
||||
|
||||
if paths.is_empty() {
|
||||
warn_user_once!("No Python files found under the given path(s)");
|
||||
@@ -6,17 +6,17 @@ use itertools::Itertools;
|
||||
|
||||
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};
|
||||
|
||||
use crate::args::ConfigArguments;
|
||||
use crate::args::CliOverrides;
|
||||
|
||||
/// Print the user-facing configuration settings.
|
||||
pub(crate) fn show_settings(
|
||||
files: &[PathBuf],
|
||||
pyproject_config: &PyprojectConfig,
|
||||
config_arguments: &ConfigArguments,
|
||||
overrides: &CliOverrides,
|
||||
writer: &mut impl Write,
|
||||
) -> Result<()> {
|
||||
// Collect all files in the hierarchy.
|
||||
let (paths, resolver) = python_files_in_path(files, pyproject_config, config_arguments)?;
|
||||
let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
|
||||
|
||||
// Print the list of files.
|
||||
let Some(path) = paths
|
||||
@@ -29,13 +29,13 @@ pub(crate) fn show_settings(
|
||||
bail!("No files found under the given path");
|
||||
};
|
||||
|
||||
let settings = resolver.resolve(&path);
|
||||
let settings = resolver.resolve(&path, pyproject_config);
|
||||
|
||||
writeln!(writer, "Resolved settings for: \"{}\"", path.display())?;
|
||||
writeln!(writer, "Resolved settings for: {path:?}")?;
|
||||
if let Some(settings_path) = pyproject_config.path.as_ref() {
|
||||
writeln!(writer, "Settings path: \"{}\"", settings_path.display())?;
|
||||
writeln!(writer, "Settings path: {settings_path:?}")?;
|
||||
}
|
||||
write!(writer, "{settings}")?;
|
||||
writeln!(writer, "{settings:#?}")?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/src/commands/check.rs
|
||||
source: crates/ruff_cli/src/commands/check.rs
|
||||
---
|
||||
/home/ferris/project/code.py:1:1: E902 Permission denied (os error 13)
|
||||
/home/ferris/project/notebook.ipynb:1:1: E902 Permission denied (os error 13)
|
||||
@@ -1,6 +1,5 @@
|
||||
#![cfg_attr(target_family = "wasm", allow(dead_code))]
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::fs::File;
|
||||
use std::io;
|
||||
use std::ops::{Add, AddAssign};
|
||||
@@ -11,8 +10,9 @@ use colored::Colorize;
|
||||
use log::{debug, error, warn};
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::cache::{Cache, FileCacheKey, LintCacheData};
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult, ParseSource};
|
||||
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult};
|
||||
use ruff_linter::logging::DisplayParseError;
|
||||
use ruff_linter::message::Message;
|
||||
use ruff_linter::pyproject_toml::lint_pyproject_toml;
|
||||
@@ -23,13 +23,11 @@ use ruff_linter::source_kind::{SourceError, SourceKind};
|
||||
use ruff_linter::{fs, IOError, SyntaxError};
|
||||
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
|
||||
use ruff_python_ast::imports::ImportMap;
|
||||
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
|
||||
use ruff_source_file::SourceFileBuilder;
|
||||
use ruff_python_ast::{SourceType, TomlSourceType};
|
||||
use ruff_source_file::{LineIndex, SourceCode, SourceFileBuilder};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
use ruff_workspace::Settings;
|
||||
|
||||
use crate::cache::{Cache, FileCacheKey, LintCacheData};
|
||||
|
||||
#[derive(Debug, Default, PartialEq)]
|
||||
pub(crate) struct Diagnostics {
|
||||
pub(crate) messages: Vec<Message>,
|
||||
@@ -223,35 +221,31 @@ pub(crate) fn lint_path(
|
||||
|
||||
debug!("Checking: {}", path.display());
|
||||
|
||||
let source_type = match settings.extension.get(path).map(PySourceType::from) {
|
||||
Some(source_type) => source_type,
|
||||
None => match SourceType::from(path) {
|
||||
SourceType::Toml(TomlSourceType::Pyproject) => {
|
||||
let messages = if settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| rule_code.lint_source().is_pyproject_toml())
|
||||
{
|
||||
let contents = match std::fs::read_to_string(path).map_err(SourceError::from) {
|
||||
Ok(contents) => contents,
|
||||
Err(err) => {
|
||||
return Ok(Diagnostics::from_source_error(&err, Some(path), settings));
|
||||
}
|
||||
};
|
||||
let source_file =
|
||||
SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
|
||||
lint_pyproject_toml(source_file, settings)
|
||||
} else {
|
||||
vec![]
|
||||
let source_type = match SourceType::from(path) {
|
||||
SourceType::Toml(TomlSourceType::Pyproject) => {
|
||||
let messages = if settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| rule_code.lint_source().is_pyproject_toml())
|
||||
{
|
||||
let contents = match std::fs::read_to_string(path).map_err(SourceError::from) {
|
||||
Ok(contents) => contents,
|
||||
Err(err) => {
|
||||
return Ok(Diagnostics::from_source_error(&err, Some(path), settings));
|
||||
}
|
||||
};
|
||||
return Ok(Diagnostics {
|
||||
messages,
|
||||
..Diagnostics::default()
|
||||
});
|
||||
}
|
||||
SourceType::Toml(_) => return Ok(Diagnostics::default()),
|
||||
SourceType::Python(source_type) => source_type,
|
||||
},
|
||||
let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
|
||||
lint_pyproject_toml(source_file, settings)
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
return Ok(Diagnostics {
|
||||
messages,
|
||||
..Diagnostics::default()
|
||||
});
|
||||
}
|
||||
SourceType::Toml(_) => return Ok(Diagnostics::default()),
|
||||
SourceType::Python(source_type) => source_type,
|
||||
};
|
||||
|
||||
// Extract the sources from the file.
|
||||
@@ -269,7 +263,6 @@ pub(crate) fn lint_path(
|
||||
data: (messages, imports),
|
||||
error: parse_error,
|
||||
},
|
||||
transformed,
|
||||
fixed,
|
||||
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
|
||||
if let Ok(FixerResult {
|
||||
@@ -298,40 +291,17 @@ pub(crate) fn lint_path(
|
||||
flags::FixMode::Generate => {}
|
||||
}
|
||||
}
|
||||
let transformed = if let Cow::Owned(transformed) = transformed {
|
||||
transformed
|
||||
} else {
|
||||
source_kind
|
||||
};
|
||||
(result, transformed, fixed)
|
||||
(result, fixed)
|
||||
} else {
|
||||
// If we fail to fix, lint the original source code.
|
||||
let result = lint_only(
|
||||
path,
|
||||
package,
|
||||
settings,
|
||||
noqa,
|
||||
&source_kind,
|
||||
source_type,
|
||||
ParseSource::None,
|
||||
);
|
||||
let transformed = source_kind;
|
||||
let result = lint_only(path, package, settings, noqa, &source_kind, source_type);
|
||||
let fixed = FxHashMap::default();
|
||||
(result, transformed, fixed)
|
||||
(result, fixed)
|
||||
}
|
||||
} else {
|
||||
let result = lint_only(
|
||||
path,
|
||||
package,
|
||||
settings,
|
||||
noqa,
|
||||
&source_kind,
|
||||
source_type,
|
||||
ParseSource::None,
|
||||
);
|
||||
let transformed = source_kind;
|
||||
let result = lint_only(path, package, settings, noqa, &source_kind, source_type);
|
||||
let fixed = FxHashMap::default();
|
||||
(result, transformed, fixed)
|
||||
(result, fixed)
|
||||
};
|
||||
|
||||
let imports = imports.unwrap_or_default();
|
||||
@@ -339,7 +309,7 @@ pub(crate) fn lint_path(
|
||||
if let Some((cache, relative_path, key)) = caching {
|
||||
// We don't cache parsing errors.
|
||||
if parse_error.is_none() {
|
||||
// `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
|
||||
// `FixMode::Generate` and `FixMode::Diff` rely on side-effects (writing to disk,
|
||||
// and writing the diff to stdout, respectively). If a file has diagnostics, we
|
||||
// need to avoid reading from and writing to the cache in these modes.
|
||||
if match fix_mode {
|
||||
@@ -354,21 +324,28 @@ pub(crate) fn lint_path(
|
||||
LintCacheData::from_messages(
|
||||
&messages,
|
||||
imports.clone(),
|
||||
transformed.as_ipy_notebook().map(Notebook::index).cloned(),
|
||||
source_kind.as_ipy_notebook().map(Notebook::index).cloned(),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(error) = parse_error {
|
||||
if let Some(err) = parse_error {
|
||||
error!(
|
||||
"{}",
|
||||
DisplayParseError::from_source_kind(error, Some(path.to_path_buf()), &transformed)
|
||||
DisplayParseError::new(
|
||||
err,
|
||||
SourceCode::new(
|
||||
source_kind.source_code(),
|
||||
&LineIndex::from_source_text(source_kind.source_code())
|
||||
),
|
||||
&source_kind,
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
|
||||
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = source_kind {
|
||||
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook.into_index())])
|
||||
} else {
|
||||
FxHashMap::default()
|
||||
@@ -393,14 +370,8 @@ pub(crate) fn lint_stdin(
|
||||
fix_mode: flags::FixMode,
|
||||
) -> Result<Diagnostics> {
|
||||
// TODO(charlie): Support `pyproject.toml`.
|
||||
let source_type = match path.and_then(|path| settings.linter.extension.get(path)) {
|
||||
None => match path.map(SourceType::from).unwrap_or_default() {
|
||||
SourceType::Python(source_type) => source_type,
|
||||
SourceType::Toml(_) => {
|
||||
return Ok(Diagnostics::default());
|
||||
}
|
||||
},
|
||||
Some(language) => PySourceType::from(language),
|
||||
let SourceType::Python(source_type) = path.map(SourceType::from).unwrap_or_default() else {
|
||||
return Ok(Diagnostics::default());
|
||||
};
|
||||
|
||||
// Extract the sources from the file.
|
||||
@@ -418,7 +389,6 @@ pub(crate) fn lint_stdin(
|
||||
data: (messages, imports),
|
||||
error: parse_error,
|
||||
},
|
||||
transformed,
|
||||
fixed,
|
||||
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
|
||||
if let Ok(FixerResult {
|
||||
@@ -447,12 +417,8 @@ pub(crate) fn lint_stdin(
|
||||
}
|
||||
flags::FixMode::Generate => {}
|
||||
}
|
||||
let transformed = if let Cow::Owned(transformed) = transformed {
|
||||
transformed
|
||||
} else {
|
||||
source_kind
|
||||
};
|
||||
(result, transformed, fixed)
|
||||
|
||||
(result, fixed)
|
||||
} else {
|
||||
// If we fail to fix, lint the original source code.
|
||||
let result = lint_only(
|
||||
@@ -462,17 +428,15 @@ pub(crate) fn lint_stdin(
|
||||
noqa,
|
||||
&source_kind,
|
||||
source_type,
|
||||
ParseSource::None,
|
||||
);
|
||||
let fixed = FxHashMap::default();
|
||||
|
||||
// Write the contents to stdout anyway.
|
||||
if fix_mode.is_apply() {
|
||||
source_kind.write(&mut io::stdout().lock())?;
|
||||
}
|
||||
|
||||
let transformed = source_kind;
|
||||
let fixed = FxHashMap::default();
|
||||
(result, transformed, fixed)
|
||||
(result, fixed)
|
||||
}
|
||||
} else {
|
||||
let result = lint_only(
|
||||
@@ -482,23 +446,21 @@ pub(crate) fn lint_stdin(
|
||||
noqa,
|
||||
&source_kind,
|
||||
source_type,
|
||||
ParseSource::None,
|
||||
);
|
||||
let transformed = source_kind;
|
||||
let fixed = FxHashMap::default();
|
||||
(result, transformed, fixed)
|
||||
(result, fixed)
|
||||
};
|
||||
|
||||
let imports = imports.unwrap_or_default();
|
||||
|
||||
if let Some(error) = parse_error {
|
||||
if let Some(err) = parse_error {
|
||||
error!(
|
||||
"{}",
|
||||
DisplayParseError::from_source_kind(error, path.map(Path::to_path_buf), &transformed)
|
||||
"Failed to parse {}: {err}",
|
||||
path.map_or_else(|| "-".into(), fs::relativize_path).bold()
|
||||
);
|
||||
}
|
||||
|
||||
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
|
||||
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = source_kind {
|
||||
FxHashMap::from_iter([(
|
||||
path.map_or_else(|| "-".into(), |path| path.to_string_lossy().to_string()),
|
||||
notebook.into_index(),
|
||||
@@ -18,7 +18,7 @@ use ruff_linter::settings::types::SerializationFormat;
|
||||
use ruff_linter::{fs, warn_user, warn_user_once};
|
||||
use ruff_workspace::Settings;
|
||||
|
||||
use crate::args::{Args, CheckCommand, Command, FormatCommand};
|
||||
use crate::args::{Args, CheckCommand, Command, FormatCommand, HelpFormat};
|
||||
use crate::printer::{Flags as PrinterFlags, Printer};
|
||||
|
||||
pub mod args;
|
||||
@@ -101,17 +101,13 @@ fn is_stdin(files: &[PathBuf], stdin_filename: Option<&Path>) -> bool {
|
||||
file == Path::new("-")
|
||||
}
|
||||
|
||||
/// Returns the default set of files if none are provided, otherwise returns `None`.
|
||||
fn resolve_default_files(files: Vec<PathBuf>, is_stdin: bool) -> Vec<PathBuf> {
|
||||
if files.is_empty() {
|
||||
if is_stdin {
|
||||
vec![Path::new("-").to_path_buf()]
|
||||
} else {
|
||||
vec![Path::new(".").to_path_buf()]
|
||||
}
|
||||
} else {
|
||||
files
|
||||
/// Get the actual value of the `format` desired from either `output_format`
|
||||
/// or `format`, and warn the user if they're using the deprecated form.
|
||||
fn resolve_help_output_format(output_format: HelpFormat, format: Option<HelpFormat>) -> HelpFormat {
|
||||
if format.is_some() {
|
||||
warn_user!("The `--format` argument is deprecated. Use `--output-format` instead.");
|
||||
}
|
||||
format.unwrap_or(output_format)
|
||||
}
|
||||
|
||||
pub fn run(
|
||||
@@ -119,7 +115,6 @@ pub fn run(
|
||||
command,
|
||||
log_level_args,
|
||||
}: Args,
|
||||
deprecated_alias_warning: Option<&'static str>,
|
||||
) -> Result<ExitStatus> {
|
||||
{
|
||||
let default_panic_hook = std::panic::take_hook();
|
||||
@@ -150,10 +145,6 @@ pub fn run(
|
||||
let log_level = LogLevel::from(&log_level_args);
|
||||
set_up_logging(&log_level)?;
|
||||
|
||||
if let Some(deprecated_alias_warning) = deprecated_alias_warning {
|
||||
warn_user!("{}", deprecated_alias_warning);
|
||||
}
|
||||
|
||||
match command {
|
||||
Command::Version { output_format } => {
|
||||
commands::version::version(output_format)?;
|
||||
@@ -162,8 +153,10 @@ pub fn run(
|
||||
Command::Rule {
|
||||
rule,
|
||||
all,
|
||||
output_format,
|
||||
format,
|
||||
mut output_format,
|
||||
} => {
|
||||
output_format = resolve_help_output_format(output_format, format);
|
||||
if all {
|
||||
commands::rule::rules(output_format)?;
|
||||
}
|
||||
@@ -176,7 +169,11 @@ pub fn run(
|
||||
commands::config::config(option.as_deref())?;
|
||||
Ok(ExitStatus::Success)
|
||||
}
|
||||
Command::Linter { output_format } => {
|
||||
Command::Linter {
|
||||
format,
|
||||
mut output_format,
|
||||
} => {
|
||||
output_format = resolve_help_output_format(output_format, format);
|
||||
commands::linter::linter(output_format)?;
|
||||
Ok(ExitStatus::Success)
|
||||
}
|
||||
@@ -194,23 +191,24 @@ pub fn run(
|
||||
}
|
||||
|
||||
fn format(args: FormatCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
let (cli, config_arguments) = args.partition()?;
|
||||
let (cli, overrides) = args.partition();
|
||||
|
||||
if is_stdin(&cli.files, cli.stdin_filename.as_deref()) {
|
||||
commands::format_stdin::format_stdin(&cli, &config_arguments)
|
||||
commands::format_stdin::format_stdin(&cli, &overrides)
|
||||
} else {
|
||||
commands::format::format(cli, &config_arguments, log_level)
|
||||
commands::format::format(&cli, &overrides, log_level)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
let (cli, config_arguments) = args.partition()?;
|
||||
let (cli, overrides) = args.partition();
|
||||
|
||||
// Construct the "default" settings. These are used when no `pyproject.toml`
|
||||
// files are present, or files are injected from outside of the hierarchy.
|
||||
let pyproject_config = resolve::resolve(
|
||||
cli.isolated,
|
||||
&config_arguments,
|
||||
cli.config.as_deref(),
|
||||
&overrides,
|
||||
cli.stdin_filename.as_deref(),
|
||||
)?;
|
||||
|
||||
@@ -224,25 +222,17 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
};
|
||||
let stderr_writer = Box::new(BufWriter::new(io::stderr()));
|
||||
|
||||
let is_stdin = is_stdin(&cli.files, cli.stdin_filename.as_deref());
|
||||
let files = resolve_default_files(cli.files, is_stdin);
|
||||
|
||||
if cli.show_settings {
|
||||
commands::show_settings::show_settings(
|
||||
&files,
|
||||
&cli.files,
|
||||
&pyproject_config,
|
||||
&config_arguments,
|
||||
&overrides,
|
||||
&mut writer,
|
||||
)?;
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
if cli.show_files {
|
||||
commands::show_files::show_files(
|
||||
&files,
|
||||
&pyproject_config,
|
||||
&config_arguments,
|
||||
&mut writer,
|
||||
)?;
|
||||
commands::show_files::show_files(&cli.files, &pyproject_config, &overrides, &mut writer)?;
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
|
||||
@@ -254,6 +244,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
unsafe_fixes,
|
||||
output_format,
|
||||
show_fixes,
|
||||
show_source,
|
||||
..
|
||||
} = pyproject_config.settings;
|
||||
|
||||
@@ -282,6 +273,9 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
if show_fixes {
|
||||
printer_flags |= PrinterFlags::SHOW_FIX_SUMMARY;
|
||||
}
|
||||
if show_source {
|
||||
printer_flags |= PrinterFlags::SHOW_SOURCE;
|
||||
}
|
||||
if cli.ecosystem_ci {
|
||||
warn_user!(
|
||||
"The formatting of fixes emitted by this option is a work-in-progress, subject to \
|
||||
@@ -302,7 +296,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
warn_user!("--fix is incompatible with --add-noqa.");
|
||||
}
|
||||
let modifications =
|
||||
commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?;
|
||||
commands::add_noqa::add_noqa(&cli.files, &pyproject_config, &overrides)?;
|
||||
if modifications > 0 && log_level >= LogLevel::Default {
|
||||
let s = if modifications == 1 { "" } else { "s" };
|
||||
#[allow(clippy::print_stderr)]
|
||||
@@ -321,24 +315,15 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
printer_flags,
|
||||
);
|
||||
|
||||
// the settings should already be combined with the CLI overrides at this point
|
||||
// TODO(jane): let's make this `PreviewMode`
|
||||
// TODO: this should reference the global preview mode once https://github.com/astral-sh/ruff/issues/8232
|
||||
// is resolved.
|
||||
let preview = pyproject_config.settings.linter.preview.is_enabled();
|
||||
|
||||
if cli.watch {
|
||||
if output_format != SerializationFormat::default(preview) {
|
||||
warn_user!(
|
||||
"`--output-format {}` is always used in watch mode.",
|
||||
SerializationFormat::default(preview)
|
||||
);
|
||||
if output_format != SerializationFormat::Text {
|
||||
warn_user!("`--output-format text` is always used in watch mode.");
|
||||
}
|
||||
|
||||
// Configure the file watcher.
|
||||
let (tx, rx) = channel();
|
||||
let mut watcher = recommended_watcher(tx)?;
|
||||
for file in &files {
|
||||
for file in &cli.files {
|
||||
watcher.watch(file, RecursiveMode::Recursive)?;
|
||||
}
|
||||
if let Some(file) = pyproject_config.path.as_ref() {
|
||||
@@ -350,15 +335,15 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
printer.write_to_user("Starting linter in watch mode...\n");
|
||||
|
||||
let messages = commands::check::check(
|
||||
&files,
|
||||
&cli.files,
|
||||
&pyproject_config,
|
||||
&config_arguments,
|
||||
&overrides,
|
||||
cache.into(),
|
||||
noqa.into(),
|
||||
fix_mode,
|
||||
unsafe_fixes,
|
||||
)?;
|
||||
printer.write_continuously(&mut writer, &messages, preview)?;
|
||||
printer.write_continuously(&mut writer, &messages)?;
|
||||
|
||||
// In watch mode, we may need to re-resolve the configuration.
|
||||
// TODO(charlie): Re-compute other derivative values, like the `printer`.
|
||||
@@ -374,7 +359,8 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
if matches!(change_kind, ChangeKind::Configuration) {
|
||||
pyproject_config = resolve::resolve(
|
||||
cli.isolated,
|
||||
&config_arguments,
|
||||
cli.config.as_deref(),
|
||||
&overrides,
|
||||
cli.stdin_filename.as_deref(),
|
||||
)?;
|
||||
}
|
||||
@@ -382,34 +368,36 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
printer.write_to_user("File change detected...\n");
|
||||
|
||||
let messages = commands::check::check(
|
||||
&files,
|
||||
&cli.files,
|
||||
&pyproject_config,
|
||||
&config_arguments,
|
||||
&overrides,
|
||||
cache.into(),
|
||||
noqa.into(),
|
||||
fix_mode,
|
||||
unsafe_fixes,
|
||||
)?;
|
||||
printer.write_continuously(&mut writer, &messages, preview)?;
|
||||
printer.write_continuously(&mut writer, &messages)?;
|
||||
}
|
||||
Err(err) => return Err(err.into()),
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let is_stdin = is_stdin(&cli.files, cli.stdin_filename.as_deref());
|
||||
|
||||
// Generate lint violations.
|
||||
let diagnostics = if is_stdin {
|
||||
commands::check_stdin::check_stdin(
|
||||
cli.stdin_filename.map(fs::normalize_path).as_deref(),
|
||||
&pyproject_config,
|
||||
&config_arguments,
|
||||
&overrides,
|
||||
noqa.into(),
|
||||
fix_mode,
|
||||
)?
|
||||
} else {
|
||||
commands::check::check(
|
||||
&files,
|
||||
&cli.files,
|
||||
&pyproject_config,
|
||||
&config_arguments,
|
||||
&overrides,
|
||||
cache.into(),
|
||||
noqa.into(),
|
||||
fix_mode,
|
||||
@@ -13,7 +13,7 @@ use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
    AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
    JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, SarifEmitter, TextEmitter,
    JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::registry::{AsRule, Rule};
@@ -27,6 +27,8 @@ bitflags! {
pub(crate) struct Flags: u8 {
    /// Whether to show violations when emitting diagnostics.
    const SHOW_VIOLATIONS = 0b0000_0001;
    /// Whether to show the source code when emitting diagnostics.
    const SHOW_SOURCE = 0b000_0010;
    /// Whether to show a summary of the fixed violations when emitting diagnostics.
    const SHOW_FIX_SUMMARY = 0b0000_0100;
    /// Whether to show a diff of each fixed violation when emitting diagnostics.
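This hunk adds a `SHOW_SOURCE` bit alongside the other printer flags defined through the `bitflags` crate, and later hunks gate behavior on `flags.intersects(...)`. A small stand-alone sketch of that pattern; the names mirror the diff, while the derive list is an assumption about a bitflags 2.x setup:

```rust
use bitflags::bitflags;

bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct Flags: u8 {
        const SHOW_VIOLATIONS  = 0b0000_0001;
        const SHOW_SOURCE      = 0b0000_0010;
        const SHOW_FIX_SUMMARY = 0b0000_0100;
    }
}

fn main() {
    let flags = Flags::SHOW_VIOLATIONS | Flags::SHOW_SOURCE;
    // `intersects` is true if any of the given bits are set.
    assert!(flags.intersects(Flags::SHOW_SOURCE));
    assert!(!flags.intersects(Flags::SHOW_FIX_SUMMARY));
}
```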
@@ -123,7 +125,15 @@ impl Printer {
if let Some(fixables) = fixables {
    let fix_prefix = format!("[{}]", "*".cyan());

    if self.unsafe_fixes.is_hint() {
    if self.unsafe_fixes.is_enabled() {
        if fixables.applicable > 0 {
            writeln!(
                writer,
                "{fix_prefix} {} fixable with the --fix option.",
                fixables.applicable
            )?;
        }
    } else {
        if fixables.applicable > 0 && fixables.unapplicable_unsafe > 0 {
            let es = if fixables.unapplicable_unsafe == 1 {
                ""
@@ -153,14 +163,6 @@ impl Printer {
                fixables.unapplicable_unsafe
            )?;
        }
    } else {
        if fixables.applicable > 0 {
            writeln!(
                writer,
                "{fix_prefix} {} fixable with the --fix option.",
                fixables.applicable
            )?;
        }
    }
}
} else {
@@ -216,10 +218,7 @@ impl Printer {
if !self.flags.intersects(Flags::SHOW_VIOLATIONS) {
    if matches!(
        self.format,
        SerializationFormat::Text
            | SerializationFormat::Full
            | SerializationFormat::Concise
            | SerializationFormat::Grouped
        SerializationFormat::Text | SerializationFormat::Grouped
    ) {
        if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
            if !diagnostics.fixed.is_empty() {
@@ -246,12 +245,11 @@ impl Printer {
SerializationFormat::Junit => {
    JunitEmitter.emit(writer, &diagnostics.messages, &context)?;
}
SerializationFormat::Concise
| SerializationFormat::Full => {
SerializationFormat::Text => {
    TextEmitter::default()
        .with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
        .with_show_fix_diff(self.flags.intersects(Flags::SHOW_FIX_DIFF))
        .with_show_source(self.format == SerializationFormat::Full)
        .with_show_source(self.flags.intersects(Flags::SHOW_SOURCE))
        .with_unsafe_fixes(self.unsafe_fixes)
        .emit(writer, &diagnostics.messages, &context)?;

@@ -267,6 +265,7 @@ impl Printer {
}
SerializationFormat::Grouped => {
    GroupedEmitter::default()
        .with_show_source(self.flags.intersects(Flags::SHOW_SOURCE))
        .with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
        .with_unsafe_fixes(self.unsafe_fixes)
        .emit(writer, &diagnostics.messages, &context)?;
@@ -292,10 +291,6 @@ impl Printer {
SerializationFormat::Azure => {
    AzureEmitter.emit(writer, &diagnostics.messages, &context)?;
}
SerializationFormat::Sarif => {
    SarifEmitter.emit(writer, &diagnostics.messages, &context)?;
}
SerializationFormat::Text => unreachable!("Text is deprecated and should have been automatically converted to the default serialization format")
}

writer.flush()?;
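These hunks switch between the `Full`/`Concise`/SARIF arms and the plain `Text` arm, but in every variant the collected messages end up handed to an emitter chosen by output format. A generic sketch of that dispatch shape, using simplified stand-in types rather than Ruff's real `Emitter` trait signature:

```rust
use std::io::Write;

// Simplified stand-ins for the real message and context types.
struct Message {
    text: String,
}

trait Emitter {
    fn emit(&mut self, writer: &mut dyn Write, messages: &[Message]) -> std::io::Result<()>;
}

enum OutputFormat {
    Text,
    Json,
}

struct TextEmitter;
impl Emitter for TextEmitter {
    fn emit(&mut self, writer: &mut dyn Write, messages: &[Message]) -> std::io::Result<()> {
        // One human-readable line per diagnostic.
        for message in messages {
            writeln!(writer, "{}", message.text)?;
        }
        Ok(())
    }
}

struct JsonEmitter;
impl Emitter for JsonEmitter {
    fn emit(&mut self, writer: &mut dyn Write, messages: &[Message]) -> std::io::Result<()> {
        // A real implementation would serialize each message properly.
        writeln!(writer, "[{} messages]", messages.len())
    }
}

fn write_messages(
    format: OutputFormat,
    writer: &mut dyn Write,
    messages: &[Message],
) -> std::io::Result<()> {
    // Pick an emitter based on the requested output format, as the match above does.
    match format {
        OutputFormat::Text => TextEmitter.emit(writer, messages),
        OutputFormat::Json => JsonEmitter.emit(writer, messages),
    }
}
```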
@@ -344,9 +339,7 @@ impl Printer {
}

match self.format {
    SerializationFormat::Text
    | SerializationFormat::Full
    | SerializationFormat::Concise => {
    SerializationFormat::Text => {
        // Compute the maximum number of digits in the count and code, for all messages,
        // to enable pretty-printing.
        let count_width = num_digits(
@@ -407,7 +400,6 @@ impl Printer {
    &self,
    writer: &mut dyn Write,
    diagnostics: &Diagnostics,
    preview: bool,
) -> Result<()> {
    if matches!(self.log_level, LogLevel::Silent) {
        return Ok(());
@@ -435,7 +427,7 @@ impl Printer {
let context = EmitterContext::new(&diagnostics.notebook_indexes);
TextEmitter::default()
    .with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
    .with_show_source(preview)
    .with_show_source(self.flags.intersects(Flags::SHOW_SOURCE))
    .with_unsafe_fixes(self.unsafe_fixes)
    .emit(writer, &diagnostics.messages, &context)?;
}
@@ -11,18 +11,19 @@ use ruff_workspace::resolver::{
    Relativity,
};

use crate::args::ConfigArguments;
use crate::args::CliOverrides;

/// Resolve the relevant settings strategy and defaults for the current
/// invocation.
pub fn resolve(
    isolated: bool,
    config_arguments: &ConfigArguments,
    config: Option<&Path>,
    overrides: &CliOverrides,
    stdin_filename: Option<&Path>,
) -> Result<PyprojectConfig> {
    // First priority: if we're running in isolated mode, use the default settings.
    if isolated {
        let config = config_arguments.transform(Configuration::default());
        let config = overrides.transform(Configuration::default());
        let settings = config.into_settings(&path_dedot::CWD)?;
        debug!("Isolated mode, not reading any pyproject.toml");
        return Ok(PyprojectConfig::new(
@@ -35,15 +36,14 @@ pub fn resolve(
// Second priority: the user specified a `pyproject.toml` file. Use that
// `pyproject.toml` for _all_ configuration, and resolve paths relative to the
// current working directory. (This matches ESLint's behavior.)
if let Some(pyproject) = config_arguments
    .config_file()
if let Some(pyproject) = config
    .map(|config| config.display().to_string())
    .map(|config| shellexpand::full(&config).map(|config| PathBuf::from(config.as_ref())))
    .transpose()?
{
    let settings = resolve_root_settings(&pyproject, Relativity::Cwd, config_arguments)?;
    let settings = resolve_root_settings(&pyproject, Relativity::Cwd, overrides)?;
    debug!(
        "Using user-specified configuration file at: {}",
        "Using user specified pyproject.toml at {}",
        pyproject.display()
    );
    return Ok(PyprojectConfig::new(
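One side of this hunk runs the `--config` argument through `shellexpand::full`, so `~` and environment variables in the path are expanded before it becomes a `PathBuf`. A minimal sketch of that expansion step; the example path is hypothetical:

```rust
use std::path::PathBuf;

fn expand_config_path(raw: &str) -> Result<PathBuf, String> {
    // `shellexpand::full` expands both `~` and `$VARS` in the argument.
    shellexpand::full(raw)
        .map(|expanded| PathBuf::from(expanded.as_ref()))
        .map_err(|err| err.to_string())
}

fn main() {
    // Hypothetical path, for illustration only.
    match expand_config_path("~/project/pyproject.toml") {
        Ok(path) => println!("resolved config: {}", path.display()),
        Err(err) => eprintln!("failed to expand path: {err}"),
    }
}
```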
@@ -63,11 +63,8 @@ pub fn resolve(
        .as_ref()
        .unwrap_or(&path_dedot::CWD.as_path()),
)? {
    debug!(
        "Using configuration file (via parent) at: {}",
        pyproject.display()
    );
    let settings = resolve_root_settings(&pyproject, Relativity::Parent, config_arguments)?;
    debug!("Using pyproject.toml (parent) at {}", pyproject.display());
    let settings = resolve_root_settings(&pyproject, Relativity::Parent, overrides)?;
    return Ok(PyprojectConfig::new(
        PyprojectDiscoveryStrategy::Hierarchical,
        settings,
@@ -80,11 +77,8 @@ pub fn resolve(
// end up the "closest" `pyproject.toml` file for every Python file later on, so
// these act as the "default" settings.)
if let Some(pyproject) = pyproject::find_user_settings_toml() {
    debug!(
        "Using configuration file (via cwd) at: {}",
        pyproject.display()
    );
    let settings = resolve_root_settings(&pyproject, Relativity::Cwd, config_arguments)?;
    debug!("Using pyproject.toml (cwd) at {}", pyproject.display());
    let settings = resolve_root_settings(&pyproject, Relativity::Cwd, overrides)?;
    return Ok(PyprojectConfig::new(
        PyprojectDiscoveryStrategy::Hierarchical,
        settings,
@@ -97,7 +91,7 @@ pub fn resolve(
// "closest" `pyproject.toml` file for every Python file later on, so these act
// as the "default" settings.)
debug!("Using Ruff default settings");
let config = config_arguments.transform(Configuration::default());
let config = overrides.transform(Configuration::default());
let settings = config.into_settings(&path_dedot::CWD)?;
Ok(PyprojectConfig::new(
    PyprojectDiscoveryStrategy::Hierarchical,
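Taken together, the hunks in this file keep the same resolution order while switching between `ConfigArguments` and `CliOverrides`: isolated mode first, then an explicitly passed configuration file, then a `pyproject.toml` discovered near the inputs (or the current directory), and finally the built-in defaults. A compressed sketch of that cascade with simplified stand-in types, not the real signatures:

```rust
use std::path::PathBuf;

// Simplified stand-in for the resolved settings type.
struct Settings;

enum ResolvedConfig {
    /// `--isolated`: built-in defaults, no configuration file is read.
    Isolated(Settings),
    /// `--config <path>`: one file governs everything.
    Fixed(Settings, PathBuf),
    /// A discovered `pyproject.toml`, or the defaults, resolved hierarchically.
    Hierarchical(Settings, Option<PathBuf>),
}

fn resolve(
    isolated: bool,
    explicit_config: Option<PathBuf>,
    discovered_pyproject: Option<PathBuf>,
) -> ResolvedConfig {
    // First priority: isolated mode ignores all configuration files.
    if isolated {
        return ResolvedConfig::Isolated(Settings);
    }
    // Second priority: an explicitly passed configuration file wins outright.
    if let Some(path) = explicit_config {
        return ResolvedConfig::Fixed(Settings, path);
    }
    // Third priority: a pyproject.toml found near the inputs (or the cwd).
    if let Some(path) = discovered_pyproject {
        return ResolvedConfig::Hierarchical(Settings, Some(path));
    }
    // Fallback: the default settings.
    ResolvedConfig::Hierarchical(Settings, None)
}
```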
@@ -0,0 +1,5 @@
---
source: crates/ruff_cli/src/version.rs
expression: version
---
0.0.0
@@ -1,5 +1,5 @@
---
source: crates/ruff/src/version.rs
source: crates/ruff_cli/src/version.rs
expression: version
---
0.0.0 (53b0f5d92 2023-10-19)
@@ -1,5 +1,5 @@
---
source: crates/ruff/src/version.rs
source: crates/ruff_cli/src/version.rs
expression: version
---
0.0.0+24 (53b0f5d92 2023-10-19)
@@ -1,5 +1,5 @@
---
source: crates/ruff/src/version.rs
source: crates/ruff_cli/src/version.rs
expression: version
---
{
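The `.snap` files above follow the `insta` snapshot format: a `source:`/`expression:` header followed by the captured value, so the only change in most of these hunks is the `source:` path differing between `crates/ruff` and `crates/ruff_cli`. A minimal sketch of the kind of test that produces such a file; the formatted version string is a placeholder, not the real `VersionInfo` output:

```rust
#[cfg(test)]
mod tests {
    #[test]
    fn version_snapshot() {
        // Placeholder value; the real test formats version info from build metadata.
        let version = "0.0.0 (53b0f5d92 2023-10-19)";
        // On first run insta writes a `.snap` file with a `source:`/`expression:` header;
        // later runs compare against the stored snapshot.
        insta::assert_snapshot!(version);
    }
}
```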
9
crates/ruff_cli/src/stdin.rs
Normal file
@@ -0,0 +1,9 @@
use std::io;
use std::io::Read;

/// Read a string from `stdin`.
pub(crate) fn read_from_stdin() -> Result<String, io::Error> {
    let mut buffer = String::new();
    io::stdin().lock().read_to_string(&mut buffer)?;
    Ok(buffer)
}
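The new `stdin.rs` helper locks stdin once and reads it into a `String` in one shot. A short usage sketch; the surrounding `main` is illustrative and not part of the diff:

```rust
use std::io;
use std::io::Read;

fn read_from_stdin() -> Result<String, io::Error> {
    let mut buffer = String::new();
    // Locking once avoids re-acquiring the stdin lock for every read call.
    io::stdin().lock().read_to_string(&mut buffer)?;
    Ok(buffer)
}

fn main() -> Result<(), io::Error> {
    let contents = read_from_stdin()?;
    println!("read {} bytes from stdin", contents.len());
    Ok(())
}
```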
Some files were not shown because too many files have changed in this diff.