Compare commits


1 Commit

Author: Zanie
SHA1: 17e77fe515
Message: Change the Cargo.toml file
Date: 2023-12-06 22:13:44 -06:00
1929 changed files with 63198 additions and 136939 deletions


@@ -1,8 +0,0 @@
[profile.ci]
# Print out output for failing tests as soon as they fail, and also at the end
# of the run (for easy scrollability).
failure-output = "immediate-final"
# Do not cancel the test run on the first failure.
fail-fast = false
status-level = "skip"

.gitattributes vendored (2 changed lines)

@@ -2,8 +2,6 @@
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf
crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf

.github/CODEOWNERS vendored (6 changed lines)

@@ -7,9 +7,3 @@
# Jupyter
/crates/ruff_linter/src/jupyter/ @dhruvmanila
/crates/ruff_formatter/ @MichaReiser
/crates/ruff_python_formatter/ @MichaReiser
/crates/ruff_python_parser/ @MichaReiser
# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood


@@ -5,14 +5,6 @@ updates:
schedule:
interval: "weekly"
labels: ["internal"]
groups:
actions:
patterns:
- "*"
ignore:
# The latest versions of these are not compatible with our release workflow
- dependency-name: "actions/upload-artifact"
- dependency-name: "actions/download-artifact"
- package-ecosystem: "cargo"
directory: "/"


@@ -35,7 +35,7 @@ jobs:
with:
fetch-depth: 0
- uses: tj-actions/changed-files@v42
- uses: tj-actions/changed-files@v40
id: changed
with:
files_yaml: |
@@ -76,7 +76,6 @@ jobs:
cargo-fmt:
name: "cargo fmt"
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -88,7 +87,6 @@ jobs:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -97,26 +95,45 @@ jobs:
rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: "Clippy"
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
run: cargo clippy --workspace --all-targets --all-features -- -D warnings
- name: "Clippy (wasm)"
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings
cargo-test-linux:
name: "cargo test (linux)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
name: "cargo test (linux)"
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
- name: "Install cargo insta"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
run: cargo insta test --all --all-features --unreferenced reject
# Check for broken links in the documentation.
- run: cargo doc --all --no-deps
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"
- uses: actions/upload-artifact@v3
with:
name: ruff
path: target/debug/ruff
cargo-test-windows:
runs-on: windows-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo test (windows)"
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo insta"
uses: taiki-e/install-action@v2
with:
@@ -124,47 +141,14 @@ jobs:
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
run: cargo insta test --all-features --unreferenced reject --test-runner nextest
# Check for broken links in the documentation.
- run: cargo doc --all --no-deps
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"
- uses: actions/upload-artifact@v4
with:
name: ruff
path: target/debug/ruff
cargo-test-windows:
name: "cargo test (windows)"
runs-on: windows-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
run: |
cargo nextest run --all-features --profile ci
cargo test --all-features --doc
# We can't reject unreferenced snapshots on windows because flake8_executable can't run on windows
run: cargo insta test --all --all-features
cargo-test-wasm:
name: "cargo test (wasm)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
name: "cargo test (wasm)"
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -182,11 +166,10 @@ jobs:
wasm-pack test --node
cargo-fuzz:
name: "cargo fuzz"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
name: "cargo fuzz"
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -197,7 +180,7 @@ jobs:
- name: "Install cargo-fuzz"
uses: taiki-e/install-action@v2
with:
tool: cargo-fuzz@0.11.2
tool: cargo-fuzz@0.11
- run: cargo fuzz build -s none
scripts:
@@ -205,7 +188,6 @@ jobs:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 5
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -231,26 +213,24 @@ jobs:
if: github.event_name == 'pull_request' && ${{
needs.determine_changes.outputs.code == 'true'
}}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
name: Download comparison Ruff binary
id: ruff-target
with:
name: ruff
path: target/debug
- uses: dawidd6/action-download-artifact@v3
- uses: dawidd6/action-download-artifact@v2
name: Download baseline Ruff binary
with:
name: ruff
branch: ${{ github.event.pull_request.base.ref }}
workflow: "ci.yaml"
check_artifacts: true
- name: Install ruff-ecosystem
@@ -325,13 +305,13 @@ jobs:
run: |
echo ${{ github.event.number }} > pr-number
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
name: Upload PR Number
with:
name: pr-number
path: pr-number
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
name: Upload Results
with:
name: ecosystem-result
@@ -342,7 +322,6 @@ jobs:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- name: "Install nightly Rust toolchain"
@@ -357,10 +336,9 @@ jobs:
python-package:
name: "python package"
runs-on: ubuntu-latest
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -382,10 +360,9 @@ jobs:
pre-commit:
name: "pre-commit"
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Install Rust toolchain"
@@ -394,7 +371,7 @@ jobs:
- name: "Install pre-commit"
run: pip install pre-commit
- name: "Cache pre-commit"
uses: actions/cache@v4
uses: actions/cache@v3
with:
path: ~/.cache/pre-commit
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
@@ -411,15 +388,14 @@ jobs:
docs:
name: "mkdocs"
runs-on: ubuntu-latest
timeout-minutes: 10
env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
uses: webfactory/ssh-agent@v0.8.0
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
@@ -449,7 +425,6 @@ jobs:
runs-on: ubuntu-latest
needs: determine_changes
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -466,13 +441,12 @@ jobs:
check-ruff-lsp:
name: "test ruff-lsp"
runs-on: ubuntu-latest
timeout-minutes: 5
needs:
- cargo-test-linux
- determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: extractions/setup-just@v2
- uses: extractions/setup-just@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -481,11 +455,11 @@ jobs:
with:
repository: "astral-sh/ruff-lsp"
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
name: Download development ruff binary
id: ruff-target
with:
@@ -510,7 +484,6 @@ jobs:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- name: "Checkout Branch"
uses: actions/checkout@v4


@@ -20,10 +20,10 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
uses: webfactory/ssh-agent@v0.8.0
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
@@ -47,7 +47,7 @@ jobs:
run: mkdocs build --strict -f mkdocs.public.yml
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.4.1
uses: cloudflare/wrangler-action@v3.3.2
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}

.github/workflows/flake8-to-ruff.yaml vendored (new file, 247 added lines)

@@ -0,0 +1,247 @@
name: "[flake8-to-ruff] Release"
on: workflow_dispatch
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
PACKAGE_NAME: flake8-to-ruff
CRATE_NAME: flake8_to_ruff
PYTHON_VERSION: "3.11"
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
macos-x86_64:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --out dist --sdist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel - x86_64"
run: |
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
macos-universal:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels - universal2"
uses: PyO3/maturin-action@v1
with:
args: --release --target universal2-apple-darwin --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel - universal2"
run: |
pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
windows:
runs-on: windows-latest
strategy:
matrix:
target: [x64, x86]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.target }}
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
shell: bash
run: |
python -m pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
linux:
runs-on: ubuntu-latest
strategy:
matrix:
target: [x86_64, i686]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
if: matrix.target == 'x86_64'
run: |
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
linux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
target: [aarch64, armv7, s390x, ppc64le, ppc64]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- uses: uraimo/run-on-arch-action@v2
if: matrix.target != 'ppc64'
name: Install built wheel
with:
arch: ${{ matrix.target }}
distro: ubuntu20.04
githubToken: ${{ github.token }}
install: |
apt-get update
apt-get install -y --no-install-recommends python3 python3-pip
pip3 install -U pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
musllinux:
runs-on: ubuntu-latest
strategy:
matrix:
target:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
with:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add py3-pip
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
musllinux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
platform:
- target: aarch64-unknown-linux-musl
arch: aarch64
- target: armv7-unknown-linux-musleabihf
arch: armv7
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- uses: uraimo/run-on-arch-action@v2
name: Install built wheel
with:
arch: ${{ matrix.platform.arch }}
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add py3-pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
release:
name: Release
runs-on: ubuntu-latest
needs:
- macos-universal
- macos-x86_64
- windows
- linux
- linux-cross
- musllinux
- musllinux-cross
steps:
- uses: actions/download-artifact@v3
with:
name: wheels
- uses: actions/setup-python@v4
- name: "Publish to PyPi"
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.FLAKE8_TO_RUFF_TOKEN }}
run: |
pip install --upgrade twine
twine upload --skip-existing *


@@ -40,7 +40,7 @@ jobs:
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.4.1
uses: cloudflare/wrangler-action@v3.3.2
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}


@@ -17,7 +17,7 @@ jobs:
comment:
runs-on: ubuntu-latest
steps:
- uses: dawidd6/action-download-artifact@v3
- uses: dawidd6/action-download-artifact@v2
name: Download pull request number
with:
name: pr-number
@@ -32,7 +32,7 @@ jobs:
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
fi
- uses: dawidd6/action-download-artifact@v3
- uses: dawidd6/action-download-artifact@v2
name: "Download ecosystem results"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
@@ -61,7 +61,7 @@ jobs:
echo 'EOF' >> $GITHUB_OUTPUT
- name: Find existing comment
uses: peter-evans/find-comment@v3
uses: peter-evans/find-comment@v2
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
@@ -71,7 +71,7 @@ jobs:
- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@v4
uses: peter-evans/create-or-update-comment@v3
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}


@@ -36,7 +36,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -52,9 +52,9 @@ jobs:
ruff --help
python -m ruff --help
- name: "Upload sdist"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-sdist
name: wheels
path: dist
macos-x86_64:
@@ -63,7 +63,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -73,16 +73,16 @@ jobs:
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel - x86_64"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-macos-x86_64
name: wheels
path: dist
- name: "Archive binary"
run: |
@@ -90,9 +90,9 @@ jobs:
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-macos-x86_64
name: binaries
path: |
*.tar.gz
*.sha256
@@ -103,7 +103,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -112,16 +112,16 @@ jobs:
- name: "Build wheels - universal2"
uses: PyO3/maturin-action@v1
with:
args: --release --locked --target universal2-apple-darwin --out dist
args: --release --target universal2-apple-darwin --out dist
- name: "Test wheel - universal2"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-aarch64-apple-darwin
name: wheels
path: dist
- name: "Archive binary"
run: |
@@ -129,9 +129,9 @@ jobs:
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-aarch64-apple-darwin
name: binaries
path: |
*.tar.gz
*.sha256
@@ -151,7 +151,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.platform.arch }}
@@ -161,7 +161,7 @@ jobs:
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel"
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
shell: bash
@@ -170,9 +170,9 @@ jobs:
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.platform.target }}
name: wheels
path: dist
- name: "Archive binary"
shell: bash
@@ -181,9 +181,9 @@ jobs:
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.platform.target }}
name: binaries
path: |
*.zip
*.sha256
@@ -199,7 +199,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -210,7 +210,7 @@ jobs:
with:
target: ${{ matrix.target }}
manylinux: auto
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel"
if: ${{ startsWith(matrix.target, 'x86_64') }}
run: |
@@ -218,9 +218,9 @@ jobs:
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.target }}
name: wheels
path: dist
- name: "Archive binary"
run: |
@@ -228,9 +228,9 @@ jobs:
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.target }}
name: binaries
path: |
*.tar.gz
*.sha256
@@ -251,18 +251,14 @@ jobs:
arch: s390x
- target: powerpc64le-unknown-linux-gnu
arch: ppc64le
# see https://github.com/astral-sh/ruff/issues/10073
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: powerpc64-unknown-linux-gnu
arch: ppc64
# see https://github.com/astral-sh/ruff/issues/10073
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -273,7 +269,7 @@ jobs:
target: ${{ matrix.platform.target }}
manylinux: auto
docker-options: ${{ matrix.platform.maturin_docker_options }}
args: --release --locked --out dist
args: --release --out dist
- uses: uraimo/run-on-arch-action@v2
if: matrix.platform.arch != 'ppc64'
name: Test wheel
@@ -289,9 +285,9 @@ jobs:
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.platform.target }}
name: wheels
path: dist
- name: "Archive binary"
run: |
@@ -299,9 +295,9 @@ jobs:
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.platform.target }}
name: binaries
path: |
*.tar.gz
*.sha256
@@ -317,7 +313,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -328,7 +324,7 @@ jobs:
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
@@ -336,14 +332,14 @@ jobs:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add python3
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/ruff check --help
apk add py3-pip
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.target }}
name: wheels
path: dist
- name: "Archive binary"
run: |
@@ -351,9 +347,9 @@ jobs:
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.target }}
name: binaries
path: |
*.tar.gz
*.sha256
@@ -373,7 +369,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -383,7 +379,7 @@ jobs:
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
args: --release --out dist
docker-options: ${{ matrix.platform.maturin_docker_options }}
- uses: uraimo/run-on-arch-action@v2
name: Test wheel
@@ -392,15 +388,14 @@ jobs:
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add python3
apk add py3-pip
run: |
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/ruff check --help
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff check --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.platform.target }}
name: wheels
path: dist
- name: "Archive binary"
run: |
@@ -408,9 +403,9 @@ jobs:
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.platform.target }}
name: binaries
path: |
*.tar.gz
*.sha256
@@ -467,11 +462,10 @@ jobs:
# For pypi trusted publishing
id-token: write
steps:
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
pattern: wheels-*
name: wheels
path: wheels
merge-multiple: true
- name: Publish to PyPi
uses: pypa/gh-action-pypi-publish@release/v1
with:
@@ -511,13 +505,12 @@ jobs:
# For GitHub release publishing
contents: write
steps:
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
pattern: binaries-*
name: binaries
path: binaries
merge-multiple: true
- name: "Publish to GitHub"
uses: softprops/action-gh-release@v2
uses: softprops/action-gh-release@v1
with:
draft: true
files: binaries/*


@@ -4,7 +4,7 @@ exclude: |
(?x)^(
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
crates/ruff/resources/.*|
crates/ruff_cli/resources/.*|
crates/ruff_python_formatter/resources/.*|
crates/ruff_python_formatter/tests/snapshots/.*|
crates/ruff_python_resolver/resources/.*|


@@ -1,93 +1,5 @@
# Breaking Changes
## 0.3.0
### Ruff 2024.2 style
The formatter now formats code according to the Ruff 2024.2 style guide. Read the [changelog](./CHANGELOG.md#030) for a detailed list of stabilized style changes.
### `isort`: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))
Previously, Ruff used one or two blank lines (or the number configured by `isort.lines-after-imports`) after imports in typing stub files (`.pyi` files).
The [typing style guide for stubs](https://typing.readthedocs.io/en/latest/source/stubs.html#style-guide) recommends using at most 1 blank line for grouping.
As of this release, `isort` now always uses one blank line after imports in stub files, the same as the formatter.
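For illustration only, a hypothetical `pyproject.toml` sketch: the `isort.lines-after-imports` setting mentioned above still governs regular `.py` files, while `.pyi` stub files now always use a single blank line (the value shown is an assumption, not taken from the release notes).
```toml
# Hypothetical pyproject.toml excerpt. This setting continues to apply to .py files;
# .pyi stub files now always get one blank line after imports, matching the formatter.
[tool.ruff.lint.isort]
lines-after-imports = 2
```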
### `build` is no longer excluded by default ([#10093](https://github.com/astral-sh/ruff/pull/10093))
Ruff maintains a list of directories and files that are excluded by default. This list now consists of the following patterns:
- `.bzr`
- `.direnv`
- `.eggs`
- `.git`
- `.git-rewrite`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `dist`
- `node_modules`
- `site-packages`
- `venv`
Previously, the `build` directory was included in this list. However, `build` is a fairly common directory
name, and excluding it by default caused confusion. Ruff no longer excludes `build` unless it is excluded by a `.gitignore` file
or listed in `extend-exclude`.
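As a minimal, hypothetical `pyproject.toml` sketch (not taken from the release itself), projects that relied on the old behavior can opt back in explicitly:
```toml
# Hypothetical pyproject.toml excerpt: restore the pre-0.3.0 behavior
# by excluding `build` explicitly via `extend-exclude`.
[tool.ruff]
extend-exclude = ["build"]
```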
### `--format` is no longer a valid `rule` or `linter` command option
Previously, `ruff rule` and `ruff linter` accepted the `--format <FORMAT>` option as an alias for `--output-format`. Ruff no longer
supports this alias. Please use `ruff rule --output-format <FORMAT>` and `ruff linter --output-format <FORMAT>` instead.
## 0.1.9
### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))
Ruff maintains a list of default exclusions, which now consists of the following patterns:
- `.bzr`
- `.direnv`
- `.eggs`
- `.git-rewrite`
- `.git`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `build`
- `dist`
- `node_modules`
- `site-packages`
- `venv`
Previously, the `site-packages` directory was not excluded by default. While `site-packages` tends
to be excluded anyway by virtue of the `.venv` exclusion, this may not be the case when using Ruff
from VS Code outside a virtual environment.
## 0.1.0
### The deprecated `format` setting has been removed


@@ -1,730 +1,5 @@
# Changelog
## 0.3.2
### Preview features
- Improve single-`with` item formatting for Python 3.8 or older ([#10276](https://github.com/astral-sh/ruff/pull/10276))
### Rule changes
- \[`pyupgrade`\] Allow fixes for f-string rule regardless of line length (`UP032`) ([#10263](https://github.com/astral-sh/ruff/pull/10263))
- \[`pycodestyle`\] Include actual conditions in E712 diagnostics ([#10254](https://github.com/astral-sh/ruff/pull/10254))
### Bug fixes
- Fix trailing kwargs end of line comment after slash ([#10297](https://github.com/astral-sh/ruff/pull/10297))
- Fix unstable `with` items formatting ([#10274](https://github.com/astral-sh/ruff/pull/10274))
- Avoid repeating function calls in f-string conversions ([#10265](https://github.com/astral-sh/ruff/pull/10265))
- Fix E203 false positive for slices in format strings ([#10280](https://github.com/astral-sh/ruff/pull/10280))
- Fix incorrect `Parameter` range for `*args` and `**kwargs` ([#10283](https://github.com/astral-sh/ruff/pull/10283))
- Treat `typing.Annotated` subscripts as type definitions ([#10285](https://github.com/astral-sh/ruff/pull/10285))
## 0.3.1
### Preview features
- \[`pycodestyle`\] Fix E301 not triggering on decorated methods. ([#10117](https://github.com/astral-sh/ruff/pull/10117))
- \[`pycodestyle`\] Respect `isort` settings in blank line rules (`E3*`) ([#10096](https://github.com/astral-sh/ruff/pull/10096))
- \[`pycodestyle`\] Make blank lines in typing stub files optional (`E3*`) ([#10098](https://github.com/astral-sh/ruff/pull/10098))
- \[`pylint`\] Implement `singledispatch-method` (`E1519`) ([#10140](https://github.com/astral-sh/ruff/pull/10140))
- \[`pylint`\] Implement `useless-exception-statement` (`W0133`) ([#10176](https://github.com/astral-sh/ruff/pull/10176))
### Rule changes
- \[`flake8-debugger`\] Check for use of `debugpy` and `ptvsd` debug modules (#10177) ([#10194](https://github.com/astral-sh/ruff/pull/10194))
- \[`pyupgrade`\] Generate diagnostic for all valid f-string conversions regardless of line length (`UP032`) ([#10238](https://github.com/astral-sh/ruff/pull/10238))
- \[`pep8_naming`\] Add fixes for `N804` and `N805` ([#10215](https://github.com/astral-sh/ruff/pull/10215))
### CLI
- Colorize the output of `ruff format --diff` ([#10110](https://github.com/astral-sh/ruff/pull/10110))
- Make `--config` and `--isolated` global flags ([#10150](https://github.com/astral-sh/ruff/pull/10150))
- Correctly expand tildes and environment variables in paths passed to `--config` ([#10219](https://github.com/astral-sh/ruff/pull/10219))
### Configuration
- Accept a PEP 440 version specifier for `required-version` ([#10216](https://github.com/astral-sh/ruff/pull/10216))
- Implement isort's `default-section` setting ([#10149](https://github.com/astral-sh/ruff/pull/10149))
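A hypothetical `pyproject.toml` sketch of the two configuration additions above; the setting names come from the entries, but the values are illustrative assumptions.
```toml
# Hypothetical examples of the new configuration options.
[tool.ruff]
# `required-version` now accepts a PEP 440 specifier rather than only an exact version.
required-version = ">=0.3.1"

[tool.ruff.lint.isort]
# isort's `default-section` setting, used for imports that match no known section.
default-section = "third-party"
```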
### Bug fixes
- Remove trailing space from `CapWords` message ([#10220](https://github.com/astral-sh/ruff/pull/10220))
- Respect external codes in file-level exemptions ([#10203](https://github.com/astral-sh/ruff/pull/10203))
- \[`flake8-raise`\] Avoid false-positives for parens-on-raise with `future.exception()` (`RSE102`) ([#10206](https://github.com/astral-sh/ruff/pull/10206))
- \[`pylint`\] Add fix for unary expressions in `PLC2801` ([#9587](https://github.com/astral-sh/ruff/pull/9587))
- \[`ruff`\] Fix RUF028 not allowing `# fmt: skip` on match cases ([#10178](https://github.com/astral-sh/ruff/pull/10178))
## 0.3.0
This release introduces the new Ruff formatter 2024.2 style and adds a new lint rule to
detect invalid formatter suppression comments.
### Preview features
- \[`flake8-bandit`\] Remove suspicious-lxml-import (`S410`) ([#10154](https://github.com/astral-sh/ruff/pull/10154))
- \[`pycodestyle`\] Allow `os.environ` modifications between imports (`E402`) ([#10066](https://github.com/astral-sh/ruff/pull/10066))
- \[`pycodestyle`\] Don't warn about a single whitespace character before a comma in a tuple (`E203`) ([#10094](https://github.com/astral-sh/ruff/pull/10094))
### Rule changes
- \[`eradicate`\] Detect commented out `case` statements (`ERA001`) ([#10055](https://github.com/astral-sh/ruff/pull/10055))
- \[`eradicate`\] Detect single-line code for `try:`, `except:`, etc. (`ERA001`) ([#10057](https://github.com/astral-sh/ruff/pull/10057))
- \[`flake8-boolean-trap`\] Allow boolean positionals in `__post_init__` ([#10027](https://github.com/astral-sh/ruff/pull/10027))
- \[`flake8-copyright`\] Allow © in copyright notices ([#10065](https://github.com/astral-sh/ruff/pull/10065))
- \[`isort`\]: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))
- \[`pylint`\] New Rule `dict-iter-missing-items` (`PLE1141`) ([#9845](https://github.com/astral-sh/ruff/pull/9845))
- \[`pylint`\] Ignore `sys.version` and `sys.platform` (`PLR1714`) ([#10054](https://github.com/astral-sh/ruff/pull/10054))
- \[`pyupgrade`\] Detect literals with unary operators (`UP018`) ([#10060](https://github.com/astral-sh/ruff/pull/10060))
- \[`ruff`\] Expand rule for `list(iterable).pop(0)` idiom (`RUF015`) ([#10148](https://github.com/astral-sh/ruff/pull/10148))
### Formatter
This release introduces the Ruff 2024.2 style, stabilizing the following changes:
- Prefer splitting the assignment's value over the target or type annotation ([#8943](https://github.com/astral-sh/ruff/pull/8943))
- Remove blank lines before class docstrings ([#9154](https://github.com/astral-sh/ruff/pull/9154))
- Wrap multiple context managers in `with` parentheses when targeting Python 3.9 or newer ([#9222](https://github.com/astral-sh/ruff/pull/9222))
- Add a blank line after nested classes with a dummy body (`...`) in typing stub files ([#9155](https://github.com/astral-sh/ruff/pull/9155))
- Reduce vertical spacing for classes and functions with a dummy (`...`) body ([#7440](https://github.com/astral-sh/ruff/issues/7440), [#9240](https://github.com/astral-sh/ruff/pull/9240))
- Add a blank line after the module docstring ([#8283](https://github.com/astral-sh/ruff/pull/8283))
- Parenthesize long type hints in assignments ([#9210](https://github.com/astral-sh/ruff/pull/9210))
- Preserve indent for single multiline-string call-expressions ([#9673](https://github.com/astral-sh/ruff/pull/9637))
- Normalize hex escape and unicode escape sequences ([#9280](https://github.com/astral-sh/ruff/pull/9280))
- Format module docstrings ([#9725](https://github.com/astral-sh/ruff/pull/9725))
### CLI
- Explicitly disallow `extend` as part of a `--config` flag ([#10135](https://github.com/astral-sh/ruff/pull/10135))
- Remove `build` from the default exclusion list ([#10093](https://github.com/astral-sh/ruff/pull/10093))
- Deprecate `ruff <path>`, `ruff --explain`, `ruff --clean`, and `ruff --generate-shell-completion` in favor of `ruff check <path>`, `ruff rule`, `ruff clean`, and `ruff generate-shell-completion` ([#10169](https://github.com/astral-sh/ruff/pull/10169))
- Remove the deprecated CLI option `--format` from `ruff rule` and `ruff linter` ([#10170](https://github.com/astral-sh/ruff/pull/10170))
### Bug fixes
- \[`flake8-bugbear`\] Avoid adding default initializers to stubs (`B006`) ([#10152](https://github.com/astral-sh/ruff/pull/10152))
- \[`flake8-type-checking`\] Respect runtime-required decorators for function signatures ([#10091](https://github.com/astral-sh/ruff/pull/10091))
- \[`pycodestyle`\] Mark fixes overlapping with a multiline string as unsafe (`W293`) ([#10049](https://github.com/astral-sh/ruff/pull/10049))
- \[`pydocstyle`\] Trim whitespace when removing blank lines after section (`D413`) ([#10162](https://github.com/astral-sh/ruff/pull/10162))
- \[`pylint`\] Delete entire statement, including semicolons (`PLR0203`) ([#10074](https://github.com/astral-sh/ruff/pull/10074))
- \[`ruff`\] Avoid f-string false positives in `gettext` calls (`RUF027`) ([#10118](https://github.com/astral-sh/ruff/pull/10118))
- Fix `ruff` crashing on PowerPC systems because of too small page size ([#10080](https://github.com/astral-sh/ruff/pull/10080))
### Performance
- Add cold attribute to less likely printer queue branches in the formatter ([#10121](https://github.com/astral-sh/ruff/pull/10121))
- Skip unnecessary string normalization in the formatter ([#10116](https://github.com/astral-sh/ruff/pull/10116))
### Documentation
- Remove "Beta" Label from formatter documentation ([#10144](https://github.com/astral-sh/ruff/pull/10144))
- `line-length` option: fix link to `pycodestyle.max-line-length` ([#10136](https://github.com/astral-sh/ruff/pull/10136))
## 0.2.2
Highlights include:
- Initial support for formatting f-strings (in `--preview`).
- Support for overriding arbitrary configuration options via the CLI through an expanded `--config`
argument (e.g., `--config "lint.isort.combine-as-imports=false"`).
- Significant performance improvements in Ruff's lexer, parser, and lint rules.
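For reference, a hypothetical `pyproject.toml` equivalent of the one-off CLI override quoted above; the `--config` form applies the same setting for a single invocation.
```toml
# Hypothetical persistent equivalent of `--config "lint.isort.combine-as-imports=false"`.
[tool.ruff.lint.isort]
combine-as-imports = false
```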
### Preview features
- Implement minimal f-string formatting ([#9642](https://github.com/astral-sh/ruff/pull/9642))
- \[`pycodestyle`\] Add blank line(s) rules (`E301`, `E302`, `E303`, `E304`, `E305`, `E306`) ([#9266](https://github.com/astral-sh/ruff/pull/9266))
- \[`refurb`\] Implement `readlines_in_for` (`FURB129`) ([#9880](https://github.com/astral-sh/ruff/pull/9880))
### Rule changes
- \[`ruff`\] Ensure closing parentheses for multiline sequences are always on their own line (`RUF022`, `RUF023`) ([#9793](https://github.com/astral-sh/ruff/pull/9793))
- \[`numpy`\] Add missing deprecation violations (`NPY002`) ([#9862](https://github.com/astral-sh/ruff/pull/9862))
- \[`flake8-bandit`\] Detect `mark_safe` usages in decorators ([#9887](https://github.com/astral-sh/ruff/pull/9887))
- \[`ruff`\] Expand `asyncio-dangling-task` (`RUF006`) to include `new_event_loop` ([#9976](https://github.com/astral-sh/ruff/pull/9976))
- \[`flake8-pyi`\] Ignore 'unused' private type dicts in class scopes ([#9952](https://github.com/astral-sh/ruff/pull/9952))
### Formatter
- Docstring formatting: Preserve tab indentation when using `indent-style=tabs` ([#9915](https://github.com/astral-sh/ruff/pull/9915))
- Disable top-level docstring formatting for notebooks ([#9957](https://github.com/astral-sh/ruff/pull/9957))
- Stabilize quote-style's `preserve` mode ([#9922](https://github.com/astral-sh/ruff/pull/9922))
### CLI
- Allow arbitrary configuration options to be overridden via the CLI ([#9599](https://github.com/astral-sh/ruff/pull/9599))
### Bug fixes
- Make `show-settings` filters directory-agnostic ([#9866](https://github.com/astral-sh/ruff/pull/9866))
- Respect duplicates when rewriting type aliases ([#9905](https://github.com/astral-sh/ruff/pull/9905))
- Respect tuple assignments in typing analyzer ([#9969](https://github.com/astral-sh/ruff/pull/9969))
- Use atomic write when persisting cache ([#9981](https://github.com/astral-sh/ruff/pull/9981))
- Use non-parenthesized range for `DebugText` ([#9953](https://github.com/astral-sh/ruff/pull/9953))
- \[`flake8-simplify`\] Avoid false positive with `async` for loops (`SIM113`) ([#9996](https://github.com/astral-sh/ruff/pull/9996))
- \[`flake8-trio`\] Respect `async with` in `timeout-without-await` ([#9859](https://github.com/astral-sh/ruff/pull/9859))
- \[`perflint`\] Catch a wider range of mutations in `PERF101` ([#9955](https://github.com/astral-sh/ruff/pull/9955))
- \[`pycodestyle`\] Fix `E30X` panics on blank lines with trailing white spaces ([#9907](https://github.com/astral-sh/ruff/pull/9907))
- \[`pydocstyle`\] Allow using `parameters` as a subsection header (`D405`) ([#9894](https://github.com/astral-sh/ruff/pull/9894))
- \[`pydocstyle`\] Fix blank-line docstring rules for module-level docstrings ([#9878](https://github.com/astral-sh/ruff/pull/9878))
- \[`pylint`\] Accept 0.0 and 1.0 as common magic values (`PLR2004`) ([#9964](https://github.com/astral-sh/ruff/pull/9964))
- \[`pylint`\] Avoid suggesting set rewrites for non-hashable types ([#9956](https://github.com/astral-sh/ruff/pull/9956))
- \[`ruff`\] Avoid false negatives with string literals inside of method calls (`RUF027`) ([#9865](https://github.com/astral-sh/ruff/pull/9865))
- \[`ruff`\] Fix panic in f-string detection (`RUF027`) ([#9990](https://github.com/astral-sh/ruff/pull/9990))
- \[`ruff`\] Ignore builtins when detecting missing f-strings ([#9849](https://github.com/astral-sh/ruff/pull/9849))
### Performance
- Use `memchr` for string lexing ([#9888](https://github.com/astral-sh/ruff/pull/9888))
- Use `memchr` for tab-indentation detection ([#9853](https://github.com/astral-sh/ruff/pull/9853))
- Reduce `Result<Tok, LexicalError>` size by using `Box<str>` instead of `String` ([#9885](https://github.com/astral-sh/ruff/pull/9885))
- Reduce size of `Expr` from 80 to 64 bytes ([#9900](https://github.com/astral-sh/ruff/pull/9900))
- Improve trailing comma rule performance ([#9867](https://github.com/astral-sh/ruff/pull/9867))
- Remove unnecessary string cloning from the parser ([#9884](https://github.com/astral-sh/ruff/pull/9884))
## 0.2.1
This release includes support for range formatting (i.e., the ability to format specific lines
within a source file).
### Preview features
- \[`refurb`\] Implement `missing-f-string-syntax` (`RUF027`) ([#9728](https://github.com/astral-sh/ruff/pull/9728))
- Format module-level docstrings ([#9725](https://github.com/astral-sh/ruff/pull/9725))
### Formatter
- Add `--range` option to `ruff format` ([#9733](https://github.com/astral-sh/ruff/pull/9733))
- Don't trim last empty line in docstrings ([#9813](https://github.com/astral-sh/ruff/pull/9813))
### Bug fixes
- Skip empty lines when determining base indentation ([#9795](https://github.com/astral-sh/ruff/pull/9795))
- Drop `__get__` and `__set__` from `unnecessary-dunder-call` ([#9791](https://github.com/astral-sh/ruff/pull/9791))
- Respect generic `Protocol` in ellipsis removal ([#9841](https://github.com/astral-sh/ruff/pull/9841))
- Revert "Use publicly available Apple Silicon runners (#9726)" ([#9834](https://github.com/astral-sh/ruff/pull/9834))
### Performance
- Skip LibCST parsing for standard dedent adjustments ([#9769](https://github.com/astral-sh/ruff/pull/9769))
- Remove CST-based fixer for `C408` ([#9822](https://github.com/astral-sh/ruff/pull/9822))
- Add our own ignored-names abstractions ([#9802](https://github.com/astral-sh/ruff/pull/9802))
- Remove CST-based fixers for `C400`, `C401`, `C410`, and `C418` ([#9819](https://github.com/astral-sh/ruff/pull/9819))
- Use `AhoCorasick` to speed up quote match ([#9773](https://github.com/astral-sh/ruff/pull/9773))
- Remove CST-based fixers for `C405` and `C409` ([#9821](https://github.com/astral-sh/ruff/pull/9821))
- Add fast-path for comment detection ([#9808](https://github.com/astral-sh/ruff/pull/9808))
- Invert order of checks in `zero-sleep-call` ([#9766](https://github.com/astral-sh/ruff/pull/9766))
- Short-circuit typing matches based on imports ([#9800](https://github.com/astral-sh/ruff/pull/9800))
- Run dunder method rule on methods directly ([#9815](https://github.com/astral-sh/ruff/pull/9815))
- Track top-level module imports in the semantic model ([#9775](https://github.com/astral-sh/ruff/pull/9775))
- Slight speed-up for lowercase and uppercase identifier checks ([#9798](https://github.com/astral-sh/ruff/pull/9798))
- Remove LibCST-based fixer for `C403` ([#9818](https://github.com/astral-sh/ruff/pull/9818))
### Documentation
- Update `max-pos-args` example to `max-positional-args` ([#9797](https://github.com/astral-sh/ruff/pull/9797))
- Fixed example code in `weak_cryptographic_key.rs` ([#9774](https://github.com/astral-sh/ruff/pull/9774))
- Fix references to deprecated `ANN` rules in changelog ([#9771](https://github.com/astral-sh/ruff/pull/9771))
- Fix default for `max-positional-args` ([#9838](https://github.com/astral-sh/ruff/pull/9838))
## 0.2.0
### Breaking changes
- The `NURSERY` selector cannot be used anymore
- Legacy selection of nursery rules by exact codes is no longer allowed without preview enabled
See also the "Remapped rules" section, which may result in disabled rules.
### Deprecations
The following rules are now deprecated:
- [`missing-type-self`](https://docs.astral.sh/ruff/rules/missing-type-self/) (`ANN101`)
- [`missing-type-cls`](https://docs.astral.sh/ruff/rules/missing-type-cls/) (`ANN102`)
The following command line options are now deprecated:
- `--show-source`; use `--output-format full` instead
- `--no-show-source`; use `--output-format concise` instead
- `--output-format text`; use `full` or `concise` instead
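A hedged configuration sketch for projects that relied on the deprecated source-display options, assuming the top-level `output-format` setting that mirrors the `--output-format` flag:
```toml
# Hypothetical pyproject.toml excerpt: the file-based counterpart of
# `--output-format full` (which replaces the deprecated `--show-source`).
[tool.ruff]
output-format = "full"
```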
The following settings have moved, and the previous names are deprecated (a before/after sketch follows the list):
- `ruff.allowed-confusables` → [`ruff.lint.allowed-confusables`](https://docs.astral.sh//ruff/settings/#lint_allowed-confusables)
- `ruff.dummy-variable-rgx` → [`ruff.lint.dummy-variable-rgx`](https://docs.astral.sh//ruff/settings/#lint_dummy-variable-rgx)
- `ruff.explicit-preview-rules` → [`ruff.lint.explicit-preview-rules`](https://docs.astral.sh//ruff/settings/#lint_explicit-preview-rules)
- `ruff.extend-fixable` → [`ruff.lint.extend-fixable`](https://docs.astral.sh//ruff/settings/#lint_extend-fixable)
- `ruff.extend-ignore` → [`ruff.lint.extend-ignore`](https://docs.astral.sh//ruff/settings/#lint_extend-ignore)
- `ruff.extend-per-file-ignores` → [`ruff.lint.extend-per-file-ignores`](https://docs.astral.sh//ruff/settings/#lint_extend-per-file-ignores)
- `ruff.extend-safe-fixes` → [`ruff.lint.extend-safe-fixes`](https://docs.astral.sh//ruff/settings/#lint_extend-safe-fixes)
- `ruff.extend-select` → [`ruff.lint.extend-select`](https://docs.astral.sh//ruff/settings/#lint_extend-select)
- `ruff.extend-unfixable` → [`ruff.lint.extend-unfixable`](https://docs.astral.sh//ruff/settings/#lint_extend-unfixable)
- `ruff.extend-unsafe-fixes` → [`ruff.lint.extend-unsafe-fixes`](https://docs.astral.sh//ruff/settings/#lint_extend-unsafe-fixes)
- `ruff.external` → [`ruff.lint.external`](https://docs.astral.sh//ruff/settings/#lint_external)
- `ruff.fixable` → [`ruff.lint.fixable`](https://docs.astral.sh//ruff/settings/#lint_fixable)
- `ruff.flake8-annotations` → [`ruff.lint.flake8-annotations`](https://docs.astral.sh//ruff/settings/#lint_flake8-annotations)
- `ruff.flake8-bandit` → [`ruff.lint.flake8-bandit`](https://docs.astral.sh//ruff/settings/#lint_flake8-bandit)
- `ruff.flake8-bugbear` → [`ruff.lint.flake8-bugbear`](https://docs.astral.sh//ruff/settings/#lint_flake8-bugbear)
- `ruff.flake8-builtins` → [`ruff.lint.flake8-builtins`](https://docs.astral.sh//ruff/settings/#lint_flake8-builtins)
- `ruff.flake8-comprehensions` → [`ruff.lint.flake8-comprehensions`](https://docs.astral.sh//ruff/settings/#lint_flake8-comprehensions)
- `ruff.flake8-copyright` → [`ruff.lint.flake8-copyright`](https://docs.astral.sh//ruff/settings/#lint_flake8-copyright)
- `ruff.flake8-errmsg` → [`ruff.lint.flake8-errmsg`](https://docs.astral.sh//ruff/settings/#lint_flake8-errmsg)
- `ruff.flake8-gettext` → [`ruff.lint.flake8-gettext`](https://docs.astral.sh//ruff/settings/#lint_flake8-gettext)
- `ruff.flake8-implicit-str-concat` → [`ruff.lint.flake8-implicit-str-concat`](https://docs.astral.sh//ruff/settings/#lint_flake8-implicit-str-concat)
- `ruff.flake8-import-conventions` → [`ruff.lint.flake8-import-conventions`](https://docs.astral.sh//ruff/settings/#lint_flake8-import-conventions)
- `ruff.flake8-pytest-style` → [`ruff.lint.flake8-pytest-style`](https://docs.astral.sh//ruff/settings/#lint_flake8-pytest-style)
- `ruff.flake8-quotes` → [`ruff.lint.flake8-quotes`](https://docs.astral.sh//ruff/settings/#lint_flake8-quotes)
- `ruff.flake8-self` → [`ruff.lint.flake8-self`](https://docs.astral.sh//ruff/settings/#lint_flake8-self)
- `ruff.flake8-tidy-imports` → [`ruff.lint.flake8-tidy-imports`](https://docs.astral.sh//ruff/settings/#lint_flake8-tidy-imports)
- `ruff.flake8-type-checking` → [`ruff.lint.flake8-type-checking`](https://docs.astral.sh//ruff/settings/#lint_flake8-type-checking)
- `ruff.flake8-unused-arguments` → [`ruff.lint.flake8-unused-arguments`](https://docs.astral.sh//ruff/settings/#lint_flake8-unused-arguments)
- `ruff.ignore` → [`ruff.lint.ignore`](https://docs.astral.sh//ruff/settings/#lint_ignore)
- `ruff.ignore-init-module-imports` → [`ruff.lint.ignore-init-module-imports`](https://docs.astral.sh//ruff/settings/#lint_ignore-init-module-imports)
- `ruff.isort` → [`ruff.lint.isort`](https://docs.astral.sh//ruff/settings/#lint_isort)
- `ruff.logger-objects` → [`ruff.lint.logger-objects`](https://docs.astral.sh//ruff/settings/#lint_logger-objects)
- `ruff.mccabe` → [`ruff.lint.mccabe`](https://docs.astral.sh//ruff/settings/#lint_mccabe)
- `ruff.pep8-naming` → [`ruff.lint.pep8-naming`](https://docs.astral.sh//ruff/settings/#lint_pep8-naming)
- `ruff.per-file-ignores` → [`ruff.lint.per-file-ignores`](https://docs.astral.sh//ruff/settings/#lint_per-file-ignores)
- `ruff.pycodestyle` → [`ruff.lint.pycodestyle`](https://docs.astral.sh//ruff/settings/#lint_pycodestyle)
- `ruff.pydocstyle` → [`ruff.lint.pydocstyle`](https://docs.astral.sh//ruff/settings/#lint_pydocstyle)
- `ruff.pyflakes` → [`ruff.lint.pyflakes`](https://docs.astral.sh//ruff/settings/#lint_pyflakes)
- `ruff.pylint` → [`ruff.lint.pylint`](https://docs.astral.sh//ruff/settings/#lint_pylint)
- `ruff.pyupgrade` → [`ruff.lint.pyupgrade`](https://docs.astral.sh//ruff/settings/#lint_pyupgrade)
- `ruff.select` → [`ruff.lint.select`](https://docs.astral.sh//ruff/settings/#lint_select)
- `ruff.task-tags` → [`ruff.lint.task-tags`](https://docs.astral.sh//ruff/settings/#lint_task-tags)
- `ruff.typing-modules` → [`ruff.lint.typing-modules`](https://docs.astral.sh//ruff/settings/#lint_typing-modules)
- `ruff.unfixable` → [`ruff.lint.unfixable`](https://docs.astral.sh//ruff/settings/#lint_unfixable)
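A minimal before/after migration sketch for the renamed settings above, using `select` and `ignore` as illustrative examples (any of the listed keys move the same way; the values are assumptions):
```toml
# Before: deprecated top-level keys.
[tool.ruff]
select = ["E", "F"]
ignore = ["E501"]

# After: the same keys under the `lint` namespace.
[tool.ruff.lint]
select = ["E", "F"]
ignore = ["E501"]
```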
### Remapped rules
The following rules have been remapped to new codes:
- [`raise-without-from-inside-except`](https://docs.astral.sh/ruff/rules/raise-without-from-inside-except/): `TRY200` to `B904`
- [`suspicious-eval-usage`](https://docs.astral.sh/ruff/rules/suspicious-eval-usage/): `PGH001` to `S307`
- [`logging-warn`](https://docs.astral.sh/ruff/rules/logging-warn/): `PGH002` to `G010`
- [`static-key-dict-comprehension`](https://docs.astral.sh/ruff/rules/static-key-dict-comprehension): `RUF011` to `B035`
- [`runtime-string-union`](https://docs.astral.sh/ruff/rules/runtime-string-union): `TCH006` to `TCH010`
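If a configuration pins any of the old codes, they need updating to the new ones; a hypothetical sketch:
```toml
# Hypothetical pyproject.toml excerpt: reference the new codes
# (e.g. B904 instead of TRY200, S307 instead of PGH001).
[tool.ruff.lint]
extend-select = ["B904", "S307"]
```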
### Stabilizations
The following rules have been stabilized and are no longer in preview:
- [`trio-timeout-without-await`](https://docs.astral.sh/ruff/rules/trio-timeout-without-await) (`TRIO100`)
- [`trio-sync-call`](https://docs.astral.sh/ruff/rules/trio-sync-call) (`TRIO105`)
- [`trio-async-function-with-timeout`](https://docs.astral.sh/ruff/rules/trio-async-function-with-timeout) (`TRIO109`)
- [`trio-unneeded-sleep`](https://docs.astral.sh/ruff/rules/trio-unneeded-sleep) (`TRIO110`)
- [`trio-zero-sleep-call`](https://docs.astral.sh/ruff/rules/trio-zero-sleep-call) (`TRIO115`)
- [`unnecessary-escaped-quote`](https://docs.astral.sh/ruff/rules/unnecessary-escaped-quote) (`Q004`)
- [`enumerate-for-loop`](https://docs.astral.sh/ruff/rules/enumerate-for-loop) (`SIM113`)
- [`zip-dict-keys-and-values`](https://docs.astral.sh/ruff/rules/zip-dict-keys-and-values) (`SIM911`)
- [`timeout-error-alias`](https://docs.astral.sh/ruff/rules/timeout-error-alias) (`UP041`)
- [`flask-debug-true`](https://docs.astral.sh/ruff/rules/flask-debug-true) (`S201`)
- [`tarfile-unsafe-members`](https://docs.astral.sh/ruff/rules/tarfile-unsafe-members) (`S202`)
- [`ssl-insecure-version`](https://docs.astral.sh/ruff/rules/ssl-insecure-version) (`S502`)
- [`ssl-with-bad-defaults`](https://docs.astral.sh/ruff/rules/ssl-with-bad-defaults) (`S503`)
- [`ssl-with-no-version`](https://docs.astral.sh/ruff/rules/ssl-with-no-version) (`S504`)
- [`weak-cryptographic-key`](https://docs.astral.sh/ruff/rules/weak-cryptographic-key) (`S505`)
- [`ssh-no-host-key-verification`](https://docs.astral.sh/ruff/rules/ssh-no-host-key-verification) (`S507`)
- [`django-raw-sql`](https://docs.astral.sh/ruff/rules/django-raw-sql) (`S611`)
- [`mako-templates`](https://docs.astral.sh/ruff/rules/mako-templates) (`S702`)
- [`generator-return-from-iter-method`](https://docs.astral.sh/ruff/rules/generator-return-from-iter-method) (`PYI058`)
- [`runtime-string-union`](https://docs.astral.sh/ruff/rules/runtime-string-union) (`TCH006`)
- [`numpy2-deprecation`](https://docs.astral.sh/ruff/rules/numpy2-deprecation) (`NPY201`)
- [`quadratic-list-summation`](https://docs.astral.sh/ruff/rules/quadratic-list-summation) (`RUF017`)
- [`assignment-in-assert`](https://docs.astral.sh/ruff/rules/assignment-in-assert) (`RUF018`)
- [`unnecessary-key-check`](https://docs.astral.sh/ruff/rules/unnecessary-key-check) (`RUF019`)
- [`never-union`](https://docs.astral.sh/ruff/rules/never-union) (`RUF020`)
- [`direct-logger-instantiation`](https://docs.astral.sh/ruff/rules/direct-logger-instantiation) (`LOG001`)
- [`invalid-get-logger-argument`](https://docs.astral.sh/ruff/rules/invalid-get-logger-argument) (`LOG002`)
- [`exception-without-exc-info`](https://docs.astral.sh/ruff/rules/exception-without-exc-info) (`LOG007`)
- [`undocumented-warn`](https://docs.astral.sh/ruff/rules/undocumented-warn) (`LOG009`)
Fixes for the following rules have been stabilized and are now available without preview:
- [`triple-single-quotes`](https://docs.astral.sh/ruff/rules/triple-single-quotes) (`D300`)
- [`non-pep604-annotation`](https://docs.astral.sh/ruff/rules/non-pep604-annotation) (`UP007`)
- [`dict-get-with-none-default`](https://docs.astral.sh/ruff/rules/dict-get-with-none-default) (`SIM910`)
- [`in-dict-keys`](https://docs.astral.sh/ruff/rules/in-dict-keys) (`SIM118`)
- [`collapsible-else-if`](https://docs.astral.sh/ruff/rules/collapsible-else-if) (`PLR5501`)
- [`if-with-same-arms`](https://docs.astral.sh/ruff/rules/if-with-same-arms) (`SIM114`)
- [`useless-else-on-loop`](https://docs.astral.sh/ruff/rules/useless-else-on-loop) (`PLW0120`)
- [`unnecessary-literal-union`](https://docs.astral.sh/ruff/rules/unnecessary-literal-union) (`PYI030`)
- [`unnecessary-spread`](https://docs.astral.sh/ruff/rules/unnecessary-spread) (`PIE800`)
- [`error-instead-of-exception`](https://docs.astral.sh/ruff/rules/error-instead-of-exception) (`TRY400`)
- [`redefined-while-unused`](https://docs.astral.sh/ruff/rules/redefined-while-unused) (`F811`)
- [`duplicate-value`](https://docs.astral.sh/ruff/rules/duplicate-value) (`B033`)
- [`multiple-imports-on-one-line`](https://docs.astral.sh/ruff/rules/multiple-imports-on-one-line) (`E401`)
- [`non-pep585-annotation`](https://docs.astral.sh/ruff/rules/non-pep585-annotation) (`UP006`)
Fixes for the following rules have been promoted from unsafe to safe:
- [`unaliased-collections-abc-set-import`](https://docs.astral.sh/ruff/rules/unaliased-collections-abc-set-import) (`PYI025`)
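For context, a hand-written sketch of what the now-safe `PYI025` fix does (illustrative only, not taken from the rule's documentation):

```python
# Before (flagged by PYI025):
#
#     from collections.abc import Set
#
# After the fix, the import is aliased so it cannot be confused with the
# builtin `set`:
from collections.abc import Set as AbstractSet


def is_subset(a: AbstractSet[int], b: AbstractSet[int]) -> bool:
    return a <= b
```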
The following behaviors have been stabilized:
- [`module-import-not-at-top-of-file`](https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file/) (`E402`) allows `sys.path` modifications between imports
- [`reimplemented-container-builtin`](https://docs.astral.sh/ruff/rules/reimplemented-container-builtin/) (`PIE807`) includes lambdas that can be replaced with `dict`
- [`unnecessary-placeholder`](https://docs.astral.sh/ruff/rules/unnecessary-placeholder/) (`PIE790`) applies to unnecessary ellipses (`...`)
- [`if-else-block-instead-of-dict-get`](https://docs.astral.sh/ruff/rules/if-else-block-instead-of-dict-get/) (`SIM401`) applies to `if-else` expressions
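A minimal, hand-written sketch of the last stabilized behavior above: `SIM401` now also covers the ternary-expression form of the dict-lookup pattern, not just the `if`-`else` block form:

```python
config = {"timeout": 30}
default = 10

# Now flagged by SIM401 in its expression form:
timeout = config["timeout"] if "timeout" in config else default

# Suggested replacement:
timeout = config.get("timeout", default)
```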
### Preview features
- \[`refurb`\] Implement `metaclass_abcmeta` (`FURB180`) ([#9658](https://github.com/astral-sh/ruff/pull/9658))
- Implement `blank_line_after_nested_stub_class` preview style ([#9155](https://github.com/astral-sh/ruff/pull/9155))
- The preview rule [`and-or-ternary`](https://docs.astral.sh/ruff/rules/and-or-ternary) (`PLR1706`) was removed
### Bug fixes
- \[`flake8-async`\] Take `pathlib.Path` into account when analyzing async functions ([#9703](https://github.com/astral-sh/ruff/pull/9703))
- \[`flake8-return`\] Fix indentation syntax error (`RET505`) ([#9705](https://github.com/astral-sh/ruff/pull/9705))
- Detect multi-statement lines in else removal ([#9748](https://github.com/astral-sh/ruff/pull/9748))
- `RUF022`, `RUF023`: never add two trailing commas to the end of a sequence ([#9698](https://github.com/astral-sh/ruff/pull/9698))
- `RUF023`: Don't sort `__match_args__`, only `__slots__` ([#9724](https://github.com/astral-sh/ruff/pull/9724))
- \[`flake8-simplify`\] Fix syntax error in autofix (`SIM114`) ([#9704](https://github.com/astral-sh/ruff/pull/9704))
- \[`pylint`\] Show verbatim constant in `magic-value-comparison` (`PLR2004`) ([#9694](https://github.com/astral-sh/ruff/pull/9694))
- Removing trailing whitespace inside multiline strings is unsafe ([#9744](https://github.com/astral-sh/ruff/pull/9744))
- Support `IfExp` with dual string arms in `invalid-envvar-default` ([#9734](https://github.com/astral-sh/ruff/pull/9734))
- \[`pylint`\] Add `__mro_entries__` to known dunder methods (`PLW3201`) ([#9706](https://github.com/astral-sh/ruff/pull/9706))
### Documentation
- Removed rules are now retained in the documentation ([#9691](https://github.com/astral-sh/ruff/pull/9691))
- Deprecated rules are now indicated in the documentation ([#9689](https://github.com/astral-sh/ruff/pull/9689))
## 0.1.15
### Preview features
- Error when `NURSERY` selector is used with `--preview` ([#9682](https://github.com/astral-sh/ruff/pull/9682))
- Preserve indentation around multiline strings in formatter ([#9637](https://github.com/astral-sh/ruff/pull/9637))
- \[`flake8-return`\] Add fixes for all rules (`RET505`, `RET506`, `RET507`, `RET508`) ([#9595](https://github.com/astral-sh/ruff/pull/9595))
- \[`flake8-simplify`\] Add fix for `if-with-same-arms` (`SIM114`) ([#9591](https://github.com/astral-sh/ruff/pull/9591))
- \[`pycodestyle`\] Add fix for `multiple-imports-on-one-line` (`E401`) ([#9518](https://github.com/astral-sh/ruff/pull/9518))
- \[`pylint`\] Add fix for `collapsible-else-if` (`PLR5501`) ([#9594](https://github.com/astral-sh/ruff/pull/9594))
- \[`pylint`\] Add fix for `useless-else-on-loop` (`PLW0120`) ([#9590](https://github.com/astral-sh/ruff/pull/9590))
- \[`pylint`\] Implement `assigning-non-slot` (`E0237`) ([#9623](https://github.com/astral-sh/ruff/pull/9623))
- \[`pylint`\] Implement `potential-index-error` (`PLE0643`) ([#9545](https://github.com/astral-sh/ruff/pull/9545))
- \[`pylint`\] Implement `too-many-nested-blocks` (`PLR1702`) ([#9172](https://github.com/astral-sh/ruff/pull/9172))
- \[`ruff`\] Add rule to sort `__slots__` and `__match_args__` ([#9564](https://github.com/astral-sh/ruff/pull/9564))
- \[`ruff`\] Detect unnecessary `dict` comprehensions for iterables (`RUF025`) ([#9613](https://github.com/astral-sh/ruff/pull/9613))
- \[`ruff`\] Guard against use of `default_factory` as a keyword argument (`RUF026`) ([#9651](https://github.com/astral-sh/ruff/pull/9651))
- \[`ruff`\] Implement `mutable-fromkeys-value` (`RUF024`) ([#9597](https://github.com/astral-sh/ruff/pull/9597))
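To make the last preview rule above concrete, here is a small hand-written example of the footgun `mutable-fromkeys-value` (`RUF024`) targets: passing a mutable value to `dict.fromkeys` shares one object across every key:

```python
# Flagged by RUF024: all keys share the *same* list object.
groups = dict.fromkeys(["a", "b"], [])
groups["a"].append(1)
print(groups["b"])  # [1] -- usually not what was intended

# A comprehension gives each key its own value instead.
groups = {key: [] for key in ["a", "b"]}
```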
### CLI
- Enable auto-wrapping of `--help` output ([#9633](https://github.com/astral-sh/ruff/pull/9633))
### Bug fixes
- Avoid rendering display-only rules as fixable ([#9649](https://github.com/astral-sh/ruff/pull/9649))
- Detect automagic-like assignments in notebooks ([#9653](https://github.com/astral-sh/ruff/pull/9653))
- Generate custom JSON schema for dynamic setting ([#9632](https://github.com/astral-sh/ruff/pull/9632))
- \[`flake8-no-pep420`\] Include global `--config` when determining namespace packages ([#9603](https://github.com/astral-sh/ruff/pull/9603))
- \[`flake8-pie`\] Omit bound tuples passed to `.startswith` or `.endswith` ([#9661](https://github.com/astral-sh/ruff/pull/9661))
- \[`flake8-return`\] Avoid panic when fixing inlined else blocks ([#9657](https://github.com/astral-sh/ruff/pull/9657))
- \[`flake8-return`\] Consider exception suppression in unnecessary assignment ([#9673](https://github.com/astral-sh/ruff/pull/9673))
- \[`flake8-return`\] Take `NoReturn` annotation into account when analyzing implicit returns ([#9636](https://github.com/astral-sh/ruff/pull/9636))
- \[`flake8-simplify`\] Support inverted returns in `needless-bool` (`SIM103`) ([#9619](https://github.com/astral-sh/ruff/pull/9619))
- \[`flake8-type-checking`\] Add Pydantic's `BaseConfig` to default-copy list ([#9650](https://github.com/astral-sh/ruff/pull/9650))
- \[`flake8-type-checking`\] Avoid marking `InitVar` as a typing-only annotation ([#9688](https://github.com/astral-sh/ruff/pull/9688))
- \[`pycodestyle`\] Allow `dtype` comparisons in `type-comparison` ([#9676](https://github.com/astral-sh/ruff/pull/9676))
- \[`pydocstyle`\] Re-implement `last-line-after-section` (`D413`) ([#9654](https://github.com/astral-sh/ruff/pull/9654))
### Documentation
- \[`flake8-pytest-style`\] Add fix safety documentation for `duplicate-parameterize-test-cases` ([#9678](https://github.com/astral-sh/ruff/pull/9678))
- \[`pylint`\] Document `literal-membership` fix safety conditions ([#9677](https://github.com/astral-sh/ruff/pull/9677))
- \[`isort`\] Fix reference to `isort` rule code ([#9598](https://github.com/astral-sh/ruff/pull/9598))
## 0.1.14
### Preview features
- \[`flake8-bugbear`\] Add fix for `duplicate-value` (`B033`) ([#9510](https://github.com/astral-sh/ruff/pull/9510))
- \[`flake8-simplify`\] Implement `enumerate-for-loop` (`SIM113`) ([#7777](https://github.com/astral-sh/ruff/pull/7777))
- \[`pygrep_hooks`\] Add fix for `deprecated-log-warn` (`PGH002`) ([#9519](https://github.com/astral-sh/ruff/pull/9519))
- \[`pylint`\] Implement `import-private-name` (`C2701`) ([#5920](https://github.com/astral-sh/ruff/pull/5920))
- \[`refurb`\] Implement `regex-flag-alias` with fix (`FURB167`) ([#9516](https://github.com/astral-sh/ruff/pull/9516))
- \[`ruff`\] Add rule and fix to sort contents of `__all__` (`RUF022`) ([#9474](https://github.com/astral-sh/ruff/pull/9474))
- \[`tryceratops`\] Add fix for `error-instead-of-exception` (`TRY400`) ([#9520](https://github.com/astral-sh/ruff/pull/9520))
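A hand-written sketch of what the new `TRY400` fix rewrites (the exact output may differ): inside an exception handler, `logger.error` discards the traceback that `logger.exception` would record:

```python
import logging

logger = logging.getLogger(__name__)


def read_config(path: str) -> str | None:
    try:
        with open(path) as f:
            return f.read()
    except OSError:
        # Flagged by TRY400; the fix replaces `error` with `exception`,
        # which also logs the active traceback.
        logger.error("failed to read %s", path)
        return None
```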
### Rule changes
- \[`flake8-pyi`\] Fix `PYI047` false negatives on PEP-695 type aliases ([#9566](https://github.com/astral-sh/ruff/pull/9566))
- \[`flake8-pyi`\] Fix `PYI049` false negatives on call-based `TypedDict`s ([#9567](https://github.com/astral-sh/ruff/pull/9567))
- \[`pylint`\] Exclude `self` and `cls` when counting method arguments (`PLR0917`) ([#9563](https://github.com/astral-sh/ruff/pull/9563))
### CLI
- `--show-settings` displays active settings in a far more readable format ([#9464](https://github.com/astral-sh/ruff/pull/9464))
- Add `--extension` support to the formatter ([#9483](https://github.com/astral-sh/ruff/pull/9483))
### Configuration
- Ignore preview status for fixable and unfixable selectors ([#9538](https://github.com/astral-sh/ruff/pull/9538))
- \[`pycodestyle`\] Use the configured tab size when expanding indents ([#9506](https://github.com/astral-sh/ruff/pull/9506))
### Bug fixes
- Recursively visit deferred AST nodes ([#9541](https://github.com/astral-sh/ruff/pull/9541))
- Visit deferred lambdas before type definitions ([#9540](https://github.com/astral-sh/ruff/pull/9540))
- \[`flake8-simplify`\] Avoid some more `enumerate-for-loop` false positives (`SIM113`) ([#9515](https://github.com/astral-sh/ruff/pull/9515))
- \[`pandas-vet`\] Limit inplace diagnostics to methods that accept inplace ([#9495](https://github.com/astral-sh/ruff/pull/9495))
- \[`pylint`\] Add the `__prepare__` method to the list of recognized dunder methods ([#9529](https://github.com/astral-sh/ruff/pull/9529))
- \[`pylint`\] Ignore unnecessary dunder calls within dunder definitions ([#9496](https://github.com/astral-sh/ruff/pull/9496))
- \[`refurb`\] Avoid bailing when `reimplemented-operator` is called on function (`FURB118`) ([#9556](https://github.com/astral-sh/ruff/pull/9556))
- \[`ruff`\] Avoid treating named expressions as static keys (`RUF011`) ([#9494](https://github.com/astral-sh/ruff/pull/9494))
### Documentation
- Add instructions on using `noqa` with isort rules ([#9555](https://github.com/astral-sh/ruff/pull/9555))
- Documentation update for URL giving 'page not found' ([#9565](https://github.com/astral-sh/ruff/pull/9565))
- Fix admonition in dark mode ([#9502](https://github.com/astral-sh/ruff/pull/9502))
- Update contributing docs to use `cargo bench -p ruff_benchmark` ([#9535](https://github.com/astral-sh/ruff/pull/9535))
- Update emacs integration section to include `emacs-ruff-format` ([#9403](https://github.com/astral-sh/ruff/pull/9403))
- \[`flake8-blind-except`\] Document exceptions to `blind-except` rule ([#9580](https://github.com/astral-sh/ruff/pull/9580))
## 0.1.13
### Bug fixes
- Include base pyproject when initializing cache settings ([#9480](https://github.com/astral-sh/ruff/pull/9480))
- \[`flake8-simplify`\] Account for possibly-empty f-string values in truthiness logic ([#9484](https://github.com/astral-sh/ruff/pull/9484))
- \[`pylint`\] Add the missing period in `unnecessary-dunder-call` ([#9485](https://github.com/astral-sh/ruff/pull/9485))
- \[`pylint`\] Fix `__aenter__` message in `unnecessary-dunder-call` ([#9492](https://github.com/astral-sh/ruff/pull/9492))
## 0.1.12
### Preview features
- Formatter: Hug multiline-strings in preview style ([#9243](https://github.com/astral-sh/ruff/pull/9243))
- \[`flake8-bandit`\] Add `ssl-with-no-version` (`S504`) ([#9384](https://github.com/astral-sh/ruff/pull/9384))
- \[`flake8-bandit`\] Implement `ssl-insecure-version` (`S502`) ([#9390](https://github.com/astral-sh/ruff/pull/9390))
- \[`flake8-bandit`\] Implement `ssl-with-bad-defaults` (`S503`) ([#9391](https://github.com/astral-sh/ruff/pull/9391))
- \[`flake8-bandit`\] Implement suspicious import rules (`S4XX`) ([#8831](https://github.com/astral-sh/ruff/pull/8831))
- \[`flake8-simplify`\] Implement `zip-dict-keys-and-values` (`SIM911`) ([#9460](https://github.com/astral-sh/ruff/pull/9460))
- \[`pyflakes`\] Add a fix for `redefined-while-unused` (`F811`) ([#9419](https://github.com/astral-sh/ruff/pull/9419))
- \[`pylint`\] Implement `unnecessary-dunder-call` (`C2801`) ([#9166](https://github.com/astral-sh/ruff/pull/9166))
- \[`ruff`\] Add `parenthesize-chained-operators` (`RUF021`) to enforce parentheses in `a or b and c` ([#9440](https://github.com/astral-sh/ruff/pull/9440))
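A minimal, hand-written example of the pattern `RUF021` targets: `and` binds more tightly than `or`, which is easy to misread without parentheses:

```python
is_admin, is_owner, is_active = False, True, True

# Flagged by RUF021: the grouping is implicit.
if is_admin or is_owner and is_active:
    print("allowed")

# With the intended precedence spelled out:
if is_admin or (is_owner and is_active):
    print("allowed")
```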
### Rule changes
- \[`flake8-boolean-trap`\] Allow Boolean positional arguments in setters ([#9429](https://github.com/astral-sh/ruff/pull/9429))
- \[`flake8-builtins`\] Restrict `builtin-attribute-shadowing` (`A003`) to actual shadowed references ([#9462](https://github.com/astral-sh/ruff/pull/9462))
- \[`flake8-pyi`\] Add fix for `generator-return-from-iter-method` (`PYI058`) ([#9355](https://github.com/astral-sh/ruff/pull/9355))
- \[`pyflakes`\] Don't flag `redefined-while-unused` (`F811`) in `if` branches ([#9418](https://github.com/astral-sh/ruff/pull/9418))
- \[`pyupgrade`\] Add some additional Python 3.12 typing members to `deprecated-import` ([#9445](https://github.com/astral-sh/ruff/pull/9445))
- \[`ruff`\] Add fix for `parenthesize-chained-operators` (`RUF021`) ([#9449](https://github.com/astral-sh/ruff/pull/9449))
- \[`ruff`\] Include subscripts and attributes in static key rule (`RUF011`) ([#9416](https://github.com/astral-sh/ruff/pull/9416))
- \[`ruff`\] Support variable keys in static dictionary key rule (`RUF011`) ([#9411](https://github.com/astral-sh/ruff/pull/9411))
### Formatter
- Generate deterministic IDs when formatting notebooks ([#9359](https://github.com/astral-sh/ruff/pull/9359))
- Allow `# fmt: skip` with interspersed same-line comments ([#9395](https://github.com/astral-sh/ruff/pull/9395))
- Parenthesize breaking named expressions in match guards ([#9396](https://github.com/astral-sh/ruff/pull/9396))
### Bug fixes
- Add cell indexes to all diagnostics ([#9387](https://github.com/astral-sh/ruff/pull/9387))
- Avoid infinite loop in constant vs. `None` comparisons ([#9376](https://github.com/astral-sh/ruff/pull/9376))
- Handle raises with implicit alternate branches ([#9377](https://github.com/astral-sh/ruff/pull/9377))
- Ignore trailing quotes for unclosed l-brace errors ([#9388](https://github.com/astral-sh/ruff/pull/9388))
- Respect multi-segment submodule imports when resolving qualified names ([#9382](https://github.com/astral-sh/ruff/pull/9382))
- Use `DisplayParseError` for stdin parser errors ([#9409](https://github.com/astral-sh/ruff/pull/9409))
- Use `comment_ranges` for isort directive extraction ([#9414](https://github.com/astral-sh/ruff/pull/9414))
- Use transformed source code for diagnostic locations ([#9408](https://github.com/astral-sh/ruff/pull/9408))
- \[`flake8-pyi`\] Exclude `warnings.deprecated` and `typing_extensions.deprecated` arguments ([#9423](https://github.com/astral-sh/ruff/pull/9423))
- \[`flake8-pyi`\] Fix false negative for `unused-private-protocol` (`PYI046`) with unused generic protocols ([#9405](https://github.com/astral-sh/ruff/pull/9405))
- \[`pydocstyle`\] Disambiguate argument descriptors from section headers ([#9427](https://github.com/astral-sh/ruff/pull/9427))
- \[`pylint`\] Homogenize `PLR0914` message to match other `PLR09XX` rules ([#9399](https://github.com/astral-sh/ruff/pull/9399))
- \[`ruff`\] Allow `Hashable = None` in type annotations (`RUF013`) ([#9442](https://github.com/astral-sh/ruff/pull/9442))
### Documentation
- Fix admonition hyperlink colouring ([#9385](https://github.com/astral-sh/ruff/pull/9385))
- Add missing preview link ([#9386](https://github.com/astral-sh/ruff/pull/9386))
## 0.1.11
### Preview features
- \[`pylint`\] Implement `super-without-brackets` (`W0245`) ([#9257](https://github.com/astral-sh/ruff/pull/9257))
### Bug fixes
- Check path string properly in `python -m ruff` invocations ([#9367](https://github.com/astral-sh/ruff/pull/9367))
### Documentation
- Tweak `relative-imports` message ([#9365](https://github.com/astral-sh/ruff/pull/9365))
- Add fix safety note for `yield-in-for-loop` ([#9364](https://github.com/astral-sh/ruff/pull/9364))
## 0.1.10
### Preview features
- Improve `dummy_implementations` preview style formatting ([#9240](https://github.com/astral-sh/ruff/pull/9240))
- Normalise hex and Unicode escape sequences in strings ([#9280](https://github.com/astral-sh/ruff/pull/9280))
- Parenthesize long type annotations in annotated assignments ([#9210](https://github.com/astral-sh/ruff/pull/9210))
- Parenthesize multi-context managers in `with` statements ([#9222](https://github.com/astral-sh/ruff/pull/9222))
- \[`flake8-pyi`\] Implement `generator-return-from-iter-method` (`PYI058`) ([#9313](https://github.com/astral-sh/ruff/pull/9313))
- \[`pylint`\] Implement `empty-comment` (`PLR2044`) ([#9174](https://github.com/astral-sh/ruff/pull/9174))
- \[`refurb`\] Implement `bit-count` (`FURB161`) ([#9265](https://github.com/astral-sh/ruff/pull/9265))
- \[`ruff`\] Add `never-union` rule to detect redundant `typing.NoReturn` and `typing.Never` ([#9217](https://github.com/astral-sh/ruff/pull/9217))
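A hand-written sketch of the redundancy the new `never-union` rule (`RUF020`) detects: `Never` (or `NoReturn`) contributes nothing to a union, so the union collapses to its other members:

```python
from typing import Never, Union  # `typing.Never` requires Python 3.11+


# Flagged: `Union[int, Never]` is equivalent to plain `int`.
def parse(raw: str) -> Union[int, Never]:
    return int(raw)


# Suggested simplification:
def parse_simplified(raw: str) -> int:
    return int(raw)
```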
### CLI
- Add paths to TOML parse errors ([#9358](https://github.com/astral-sh/ruff/pull/9358))
- Add row and column numbers to formatter parse errors ([#9321](https://github.com/astral-sh/ruff/pull/9321))
- Improve responsiveness when invoked via Python ([#9315](https://github.com/astral-sh/ruff/pull/9315))
- Short rule messages should not end with a period ([#9345](https://github.com/astral-sh/ruff/pull/9345))
### Configuration
- Respect runtime-required decorators on functions ([#9317](https://github.com/astral-sh/ruff/pull/9317))
### Bug fixes
- Avoid `asyncio-dangling-task` for nonlocal and global bindings ([#9263](https://github.com/astral-sh/ruff/pull/9263))
- Escape trailing placeholders in rule documentation ([#9301](https://github.com/astral-sh/ruff/pull/9301))
- Fix continuation detection following multi-line strings ([#9332](https://github.com/astral-sh/ruff/pull/9332))
- Fix scoping for generators in named expressions in classes ([#9248](https://github.com/astral-sh/ruff/pull/9248))
- Port from obsolete wsl crate to is-wsl ([#9356](https://github.com/astral-sh/ruff/pull/9356))
- Remove special pre-visit for module docstrings ([#9261](https://github.com/astral-sh/ruff/pull/9261))
- Respect `__str__` definitions from super classes ([#9338](https://github.com/astral-sh/ruff/pull/9338))
- Respect `unused-noqa` via `per-file-ignores` ([#9300](https://github.com/astral-sh/ruff/pull/9300))
- Respect attribute chains when resolving builtin call paths ([#9309](https://github.com/astral-sh/ruff/pull/9309))
- Treat all `typing_extensions` members as typing aliases ([#9335](https://github.com/astral-sh/ruff/pull/9335))
- Use `Display` for formatter parse errors ([#9316](https://github.com/astral-sh/ruff/pull/9316))
- Wrap subscripted dicts in parens for f-string conversion ([#9238](https://github.com/astral-sh/ruff/pull/9238))
- \[`flake8-annotations`\] Avoid adding return types to stub methods ([#9277](https://github.com/astral-sh/ruff/pull/9277))
- \[`flake8-annotations`\] Respect mixed `return` and `raise` cases in return-type analysis ([#9310](https://github.com/astral-sh/ruff/pull/9310))
- \[`flake8-bandit`\] Don't report violations when `SafeLoader` is imported from `yaml.loader` (`S506`) ([#9299](https://github.com/astral-sh/ruff/pull/9299))
- \[`pylint`\] Avoid panic when comment is preceded by Unicode ([#9331](https://github.com/astral-sh/ruff/pull/9331))
- \[`pylint`\] Change `PLR0917` error message to match other `PLR09XX` messages ([#9308](https://github.com/astral-sh/ruff/pull/9308))
- \[`refurb`\] Avoid false positives for `math-constant` (`FURB152`) ([#9290](https://github.com/astral-sh/ruff/pull/9290))
### Documentation
- Expand target name for better rule documentation ([#9302](https://github.com/astral-sh/ruff/pull/9302))
- Fix typos found by codespell ([#9346](https://github.com/astral-sh/ruff/pull/9346))
- \[`perflint`\] Document `PERF102` fix un-safety ([#9351](https://github.com/astral-sh/ruff/pull/9351))
- \[`pyupgrade`\] Document `UP007` fix un-safety ([#9306](https://github.com/astral-sh/ruff/pull/9306))
## 0.1.9
### Breaking changes
- Add site-packages to default exclusions ([#9188](https://github.com/astral-sh/ruff/pull/9188))
### Preview features
- Fix: Avoid parenthesizing subscript targets and values ([#9209](https://github.com/astral-sh/ruff/pull/9209))
- \[`pylint`\] Implement `too-many-locals` (`PLR0914`) ([#9163](https://github.com/astral-sh/ruff/pull/9163))
- Implement `reimplemented-operator` (`FURB118`) ([#9171](https://github.com/astral-sh/ruff/pull/9171))
- Add a rule to detect string members in runtime-evaluated unions ([#9143](https://github.com/astral-sh/ruff/pull/9143))
- Implement `no_blank_line_before_class_docstring` preview style ([#9154](https://github.com/astral-sh/ruff/pull/9154))
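A small before/after sketch of the last preview style above, assuming it mirrors Black's behavior of dropping the blank line between a class header and its docstring (hand-written, not taken from the formatter tests):

```python
# Input accepted by the formatter:
#
#     class Point:
#
#         """A point in 2D space."""
#
# Output with the `no_blank_line_before_class_docstring` preview style:
class Point:
    """A point in 2D space."""
```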
### Rule changes
- Fix improper flagging of `CONSTANT_CASE` variables as Yoda violations (`SIM300`) ([#9164](https://github.com/astral-sh/ruff/pull/9164))
- \[`flake8-pyi`\] Cover ParamSpecs and TypeVarTuples (`PYI018`) ([#9198](https://github.com/astral-sh/ruff/pull/9198))
- \[`flake8-bugbear`\] Add fix for `zip-without-explicit-strict` (`B905`) ([#9176](https://github.com/astral-sh/ruff/pull/9176))
- Add fix to automatically remove `print` and `pprint` statements (`T201`, `T203`) ([#9208](https://github.com/astral-sh/ruff/pull/9208))
- Prefer `Never` to `NoReturn` in auto-typing in Python >= 3.11 (`ANN201`) ([#9213](https://github.com/astral-sh/ruff/pull/9213))
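A hand-written illustration of the `ANN201` change above: for a public function that can never return normally, the suggested annotation on Python 3.11+ is now `Never` rather than `NoReturn`:

```python
from typing import Never  # available from Python 3.11


# Without a return annotation this is flagged by ANN201; because the function
# always raises, the suggested annotation (on Python >= 3.11) is `Never`.
def fail(message: str) -> Never:
    raise RuntimeError(message)
```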
### Formatter
- `can_omit_optional_parentheses`: Exit early for unparenthesized expressions ([#9125](https://github.com/astral-sh/ruff/pull/9125))
- Fix `dynamic` mode with doctests so that it doesn't exceed configured line width ([#9129](https://github.com/astral-sh/ruff/pull/9129))
- Fix `can_omit_optional_parentheses` for expressions with a rightmost f-string ([#9124](https://github.com/astral-sh/ruff/pull/9124))
- Add `target_version` to formatter options ([#9220](https://github.com/astral-sh/ruff/pull/9220))
### CLI
- Update `ruff format --check` to display message for already formatted files ([#9153](https://github.com/astral-sh/ruff/pull/9153))
### Bug fixes
- Reverse order of arguments for `operator.contains` ([#9192](https://github.com/astral-sh/ruff/pull/9192))
- Iterate over lambdas in deferred type annotations ([#9175](https://github.com/astral-sh/ruff/pull/9175))
- Fix panic in `D208` with multibyte indent ([#9147](https://github.com/astral-sh/ruff/pull/9147))
- Add support for `NoReturn` in auto-return-typing ([#9206](https://github.com/astral-sh/ruff/pull/9206))
- Allow removal of `typing` from `exempt-modules` ([#9214](https://github.com/astral-sh/ruff/pull/9214))
- Avoid `mutable-class-default` violations for Pydantic subclasses ([#9187](https://github.com/astral-sh/ruff/pull/9187))
- Fix dropped union expressions for piped non-types in `PYI055` autofix ([#9161](https://github.com/astral-sh/ruff/pull/9161))
- Enable annotation quoting for multi-line expressions ([#9142](https://github.com/astral-sh/ruff/pull/9142))
- Deduplicate edits when quoting annotations ([#9140](https://github.com/astral-sh/ruff/pull/9140))
- Prevent invalid utf8 indexing in cell magic detection ([#9146](https://github.com/astral-sh/ruff/pull/9146))
- Avoid nested quotations in auto-quoting fix ([#9168](https://github.com/astral-sh/ruff/pull/9168))
- Add base-class inheritance detection to flake8-django rules ([#9151](https://github.com/astral-sh/ruff/pull/9151))
- Avoid `asyncio-dangling-task` violations on shadowed bindings ([#9215](https://github.com/astral-sh/ruff/pull/9215))
### Documentation
- Fix blog post URL in changelog ([#9119](https://github.com/astral-sh/ruff/pull/9119))
- Add error suppression hint for multi-line strings ([#9205](https://github.com/astral-sh/ruff/pull/9205))
- Fix typo in SemanticModel.parent_expression docstring ([#9167](https://github.com/astral-sh/ruff/pull/9167))
- Document link between import sorting and formatter ([#9117](https://github.com/astral-sh/ruff/pull/9117))
## 0.1.8
This release includes opt-in support for formatting Python snippets within
docstrings via the `docstring-code-format` setting.
[Check out the blog post](https://astral.sh/blog/ruff-v0.1.8) for more details!
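As a rough, hand-written illustration: with `docstring-code-format = true` set under `[tool.ruff.format]` in `pyproject.toml`, `ruff format` also reformats code examples embedded in docstrings, such as the doctest below:

```python
def add(a: int, b: int) -> int:
    """Add two integers.

    Example:
        >>> add(1,   2)
        3
    """
    return a + b


# With the setting enabled, the doctest line is normalized by the formatter,
# e.g. `add(1,   2)` becomes `add(1, 2)`.
```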
### Preview features
- Add `"preserve"` quote-style to mimic Black's skip-string-normalization ([#8822](https://github.com/astral-sh/ruff/pull/8822))
- Implement `prefer_splitting_right_hand_side_of_assignments` preview style ([#8943](https://github.com/astral-sh/ruff/pull/8943))
- \[`pycodestyle`\] Add fix for `unexpected-spaces-around-keyword-parameter-equals` ([#9072](https://github.com/astral-sh/ruff/pull/9072))
- \[`pycodestyle`\] Add fix for comment-related whitespace rules ([#9075](https://github.com/astral-sh/ruff/pull/9075))
- \[`pycodestyle`\] Allow `sys.path` modifications between imports ([#9047](https://github.com/astral-sh/ruff/pull/9047))
- \[`refurb`\] Implement `hashlib-digest-hex` (`FURB181`) ([#9077](https://github.com/astral-sh/ruff/pull/9077))
### Rule changes
- Allow `flake8-type-checking` rules to automatically quote runtime-evaluated references ([#6001](https://github.com/astral-sh/ruff/pull/6001))
- Allow transparent cell magics in Jupyter Notebooks ([#8911](https://github.com/astral-sh/ruff/pull/8911))
- \[`flake8-annotations`\] Avoid `ANN2xx` fixes for abstract methods with empty bodies ([#9034](https://github.com/astral-sh/ruff/pull/9034))
- \[`flake8-self`\] Ignore underscore references in type annotations ([#9036](https://github.com/astral-sh/ruff/pull/9036))
- \[`pep8-naming`\] Allow class names when `apps.get_model` is a non-string ([#9065](https://github.com/astral-sh/ruff/pull/9065))
- \[`pycodestyle`\] Allow `matplotlib.use` calls to intersperse imports ([#9094](https://github.com/astral-sh/ruff/pull/9094))
- \[`pyflakes`\] Support fixing unused assignments in tuples by renaming variables (`F841`) ([#9107](https://github.com/astral-sh/ruff/pull/9107))
- \[`pylint`\] Add fix for `subprocess-run-without-check` (`PLW1510`) ([#6708](https://github.com/astral-sh/ruff/pull/6708))
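A minimal, hand-written example of the new `PLW1510` fix: `subprocess.run` without an explicit `check=` silently ignores the exit code, and the fix makes that behavior explicit:

```python
import subprocess

# Flagged by PLW1510: the return code is not checked.
subprocess.run(["git", "status"])

# After the fix, the existing behavior is spelled out; pass `check=True`
# instead if a non-zero exit should raise.
subprocess.run(["git", "status"], check=False)
```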
### Formatter
- Add `docstring-code-format` knob to enable docstring snippet formatting ([#8854](https://github.com/astral-sh/ruff/pull/8854))
- Use double quotes for all docstrings, including single-quoted docstrings ([#9020](https://github.com/astral-sh/ruff/pull/9020))
- Implement "dynamic" line width mode for docstring code formatting ([#9098](https://github.com/astral-sh/ruff/pull/9098))
- Support reformatting Markdown code blocks ([#9030](https://github.com/astral-sh/ruff/pull/9030))
- Add support for formatting reStructuredText code snippets ([#9003](https://github.com/astral-sh/ruff/pull/9003))
- Avoid trailing comma for single-argument with positional separator ([#9076](https://github.com/astral-sh/ruff/pull/9076))
- Fix handling of trailing target comment ([#9051](https://github.com/astral-sh/ruff/pull/9051))
### CLI
- Hide unsafe fix suggestions when explicitly disabled ([#9095](https://github.com/astral-sh/ruff/pull/9095))
- Add SARIF support to `--output-format` ([#9078](https://github.com/astral-sh/ruff/pull/9078))
### Bug fixes
- Apply unnecessary index rule prior to enumerate rewrite ([#9012](https://github.com/astral-sh/ruff/pull/9012))
- \[`flake8-errmsg`\] Allow `EM` fixes even if `msg` variable is defined ([#9059](https://github.com/astral-sh/ruff/pull/9059))
- \[`flake8-pie`\] Prevent keyword arguments duplication ([#8450](https://github.com/astral-sh/ruff/pull/8450))
- \[`flake8-pie`\] Respect trailing comma in `unnecessary-dict-kwargs` (`PIE804`) ([#9015](https://github.com/astral-sh/ruff/pull/9015))
- \[`flake8-raise`\] Avoid removing parentheses on ctypes.WinError ([#9027](https://github.com/astral-sh/ruff/pull/9027))
- \[`isort`\] Avoid invalid combination of `force-sort-within-types` and `lines-between-types` ([#9041](https://github.com/astral-sh/ruff/pull/9041))
- \[`isort`\] Ensure that from-style imports are always ordered first in `__future__` ([#9039](https://github.com/astral-sh/ruff/pull/9039))
- \[`pycodestyle`\] Allow tab indentation before keyword ([#9099](https://github.com/astral-sh/ruff/pull/9099))
- \[`pylint`\] Ignore `@overrides` and `@overloads` for `too-many-positional` ([#9000](https://github.com/astral-sh/ruff/pull/9000))
- \[`pyupgrade`\] Enable `printf-string-formatting` fix with comments on right-hand side ([#9037](https://github.com/astral-sh/ruff/pull/9037))
- \[`refurb`\] Make `math-constant` (`FURB152`) rule more targeted ([#9054](https://github.com/astral-sh/ruff/pull/9054))
- \[`refurb`\] Support floating-point base in `redundant-log-base` (`FURB163`) ([#9100](https://github.com/astral-sh/ruff/pull/9100))
- \[`ruff`\] Detect `unused-asyncio-dangling-task` (`RUF006`) on unused assignments ([#9060](https://github.com/astral-sh/ruff/pull/9060))
## 0.1.7
### Preview features

View File

@@ -26,10 +26,6 @@ Welcome! We're happy to have you here. Thank you in advance for your contributio
- [`cargo dev`](#cargo-dev)
- [Subsystems](#subsystems)
- [Compilation Pipeline](#compilation-pipeline)
- [Import Categorization](#import-categorization)
- [Project root](#project-root)
- [Package root](#package-root)
- [Import categorization](#import-categorization-1)
## The Basics
@@ -39,7 +35,7 @@ For small changes (e.g., bug fixes), feel free to submit a PR.
For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
You can also join us on [**Discord**](https://discord.com/invite/astral-sh) to discuss your idea with the
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5) to discuss your idea with the
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
@@ -67,7 +63,7 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
cargo install cargo-insta
```
And you'll need pre-commit to run some validation checks:
and pre-commit to run some validation checks:
```shell
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
@@ -80,22 +76,12 @@ when making a commit:
pre-commit install
```
We recommend [nextest](https://nexte.st/) to run Ruff's test suite (via `cargo nextest run`),
though it's not strictly necessary:
```shell
cargo install cargo-nextest --locked
```
Throughout this guide, any usages of `cargo test` can be replaced with `cargo nextest run`,
if you choose to install `nextest`.
### Development
After cloning the repository, run Ruff locally from the repository root with:
```shell
cargo run -p ruff -- check /path/to/file.py --no-cache
cargo run -p ruff_cli -- check /path/to/file.py --no-cache
```
Prior to opening a pull request, ensure that your code has been auto-formatted,
@@ -134,7 +120,7 @@ At the time of writing, the repository includes the following crates:
If you're working on a rule, this is the crate for you.
- `crates/ruff_benchmark`: binary crate for running micro-benchmarks.
- `crates/ruff_cache`: library crate for caching lint results.
- `crates/ruff`: binary crate containing Ruff's command-line interface.
- `crates/ruff_cli`: binary crate containing Ruff's command-line interface.
- `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g.,
`cargo dev generate-all`), see the [`cargo dev`](#cargo-dev) section below.
- `crates/ruff_diagnostics`: library crate for the rule-independent abstractions in the lint
@@ -245,7 +231,7 @@ Once you've completed the code for the rule itself, you can define tests with th
For example, if you're adding a new rule named `E402`, you would run:
```shell
cargo run -p ruff -- check crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py --no-cache --preview --select E402
cargo run -p ruff_cli -- check crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402
```
**Note:** Only a subset of rules are enabled by default. When testing a new rule, ensure that
@@ -266,7 +252,7 @@ Once you've completed the code for the rule itself, you can define tests with th
Ruff's user-facing settings live in a few different places.
First, the command-line options are defined via the `Args` struct in `crates/ruff/src/args.rs`.
First, the command-line options are defined via the `Args` struct in `crates/ruff_cli/src/args.rs`.
Second, the `pyproject.toml` options are defined in `crates/ruff_workspace/src/options.rs` (via the
`Options` struct), `crates/ruff_workspace/src/configuration.rs` (via the `Configuration` struct),
@@ -316,7 +302,7 @@ To preview any changes to the documentation locally:
```
The documentation should then be available locally at
[http://127.0.0.1:8000/ruff/](http://127.0.0.1:8000/ruff/).
[http://127.0.0.1:8000/docs/](http://127.0.0.1:8000/docs/).
## Release Process
@@ -329,29 +315,27 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
### Creating a new release
1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
1. Run `./scripts/release/bump.sh`; this command will:
- Generate a temporary virtual environment with `rooster`
We use an experimental in-house tool for managing releases.
1. Install `rooster`: `pip install git+https://github.com/zanieb/rooster@main`
1. Run `rooster release`; this command will:
- Generate a changelog entry in `CHANGELOG.md`
- Update versions in `pyproject.toml` and `Cargo.toml`
- Update references to versions in the `README.md` and documentation
- Display contributors for the release
1. The changelog should then be editorialized for consistency
- Labels are often missing from pull requests; they will need to be manually organized into the proper section
- Changes should be edited to be user-facing descriptions, avoiding internal details
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
1. Run `cargo check`. This should update the lock file with new versions.
1. Create a pull request with the changelog and version updates
1. Merge the PR
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yaml) with:
- The new version number (without starting `v`)
- The commit hash of the merged release pull request on `main`
1. Run the release workflow with the version number (without starting `v`) as input. Make sure
main has your merged PR as the last commit
1. The release workflow will do the following:
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
uploaded anything, so you can restart after pushing a fix.
1. Upload to PyPI.
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/charliermarsh/ruff/issues/4468)).
1. Attach artifacts to draft GitHub release
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
downstream jobs manually if needed.
@@ -359,11 +343,8 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
1. Open the draft release in the GitHub release section
1. Copy the changelog for the release into the GitHub release
- See previous releases for formatting of section headers
1. Append the contributors from the `bump.sh` script
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
1. One can determine if an update is needed when
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
1. Once run successfully, you should follow the link in the output to create a PR.
1. Generate the contributor list with `rooster contributors` and add to the release notes
1. If needed, [update the schemastore](https://github.com/charliermarsh/ruff/blob/main/scripts/update_schemastore.py)
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
## Ecosystem CI
@@ -384,14 +365,9 @@ See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/pyt
We have several ways of benchmarking and profiling Ruff:
- Our main performance benchmark comparing Ruff with other tools on the CPython codebase
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects
> \[!NOTE\]
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
> applications, like web browsers). You may also want to switch your CPU to a "performance"
> mode, if it exists, especially when benchmarking short-lived processes.
### CPython Benchmark
First, clone [CPython](https://github.com/python/cpython). It's a large and diverse Python codebase,
@@ -537,10 +513,10 @@ if the benchmark improved/regressed compared to that baseline.
```shell
# Run once on your "baseline" code
cargo bench -p ruff_benchmark -- --save-baseline=main
cargo benchmark --save-baseline=main
# Then iterate with
cargo bench -p ruff_benchmark -- --baseline=main
cargo benchmark --baseline=main
```
#### PR Summary
@@ -550,10 +526,10 @@ This is useful to illustrate the improvements of a PR.
```shell
# On main
cargo bench -p ruff_benchmark -- --save-baseline=main
cargo benchmark --save-baseline=main
# After applying your changes
cargo bench -p ruff_benchmark -- --save-baseline=pr
cargo benchmark --save-baseline=pr
critcmp main pr
```
@@ -566,10 +542,10 @@ cargo install critcmp
#### Tips
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
- Use `cargo benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark linter/pydantic`
to only run the pydantic tests.
- Use `cargo benchmark --quiet` for a more cleaned up output (without statistical relevance)
- Use `cargo benchmark --quick` to get faster results (more prone to noise)
### Profiling Projects
@@ -580,10 +556,10 @@ examples.
#### Linux
Install `perf` and build `ruff_benchmark` with the `profiling` profile and then run it with perf
Install `perf` and build `ruff_benchmark` with the `release-debug` profile and then run it with perf
```shell
cargo bench -p ruff_benchmark --no-run --profile=profiling && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=profiling -- --profile-time=1
cargo bench -p ruff_benchmark --no-run --profile=release-debug && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=release-debug -- --profile-time=1
```
You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to
@@ -591,8 +567,8 @@ gather enough samples for a good flamegraph (change the 999, the sample rate, an
of checks, to your liking)
```shell
cargo build --bin ruff_dev --profile=profiling
perf record -g -F 999 target/profiling/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
cargo build --bin ruff_dev --profile=release-debug
perf record -g -F 999 target/release-debug/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
```
Then convert the recorded profile
@@ -622,7 +598,7 @@ cargo install cargo-instruments
Then run the profiler with
```shell
cargo instruments -t time --bench linter --profile profiling -p ruff_benchmark -- --profile-time=1
cargo instruments -t time --bench linter --profile release-debug -p ruff_benchmark -- --profile-time=1
```
- `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc`
@@ -637,7 +613,7 @@ Otherwise, follow the instructions from the linux section.
utils with it:
- `cargo dev print-ast <file>`: Print the AST of a python file using the
[RustPython parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser) that is
[RustPython parser](https://github.com/astral-sh/RustPython-Parser/tree/main/parser) that is
mainly used in Ruff. For `if True: pass # comment`, you can see the syntax tree, the byte offsets
for start and stop of each node and also how the `:` token, the comment and whitespace are not
represented anymore:

Cargo.lock (generated, 1393 lines changed): file diff suppressed because it is too large.

View File

@@ -9,112 +9,51 @@ homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
license = "MIT"
license = "MIT2"
[workspace.dependencies]
aho-corasick = { version = "1.1.2" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
argfile = { version = "0.1.6" }
assert_cmd = { version = "2.0.13" }
bincode = { version = "1.3.3" }
anyhow = { version = "1.0.69" }
bitflags = { version = "2.4.1" }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.2", features = ["derive"] }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "2.0.0" }
codspeed-criterion-compat = { version = "2.4.0", default-features = false }
colored = { version = "2.1.0" }
configparser = { version = "3.0.3" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dirs = { version = "5.0.0" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.10.1" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
fs-err = { version = "2.11.0" }
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
clap = { version = "4.4.7", features = ["derive"] }
colored = { version = "2.0.0" }
filetime = { version = "0.2.20" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
hexf-parse = { version = "0.2.1" }
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1", feature = ["filters", "glob"] }
insta-cmd = { version = "0.4.0" }
is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.12.1" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "0.1.2" }
lalrpop-util = { version = "0.20.0", default-features = false }
lexical-parse-float = { version = "0.8.0", features = ["format"] }
libc = { version = "0.2.153" }
ignore = { version = "0.4.20" }
insta = { version = "1.34.0", feature = ["filters", "glob"] }
is-macro = { version = "0.3.0" }
itertools = { version = "0.11.0" }
libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { version = "0.95.0", features = ["proposed"] }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
memchr = { version = "2.6.4" }
once_cell = { version = "1.17.1" }
path-absolutize = { version = "3.1.1" }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.4.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.78" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.3.5" }
proc-macro2 = { version = "1.0.70" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.8.1" }
regex = { version = "1.10.2" }
result-like = { version = "0.5.0" }
rustc-hash = { version = "1.1.0" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.4" }
serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = ["macros"] }
serde = { version = "1.0.190", features = ["derive"] }
serde_json = { version = "1.0.108" }
shellexpand = { version = "3.0.0" }
shlex = { version = "1.3.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.1" }
similar = { version = "2.3.0", features = ["inline"] }
smallvec = { version = "1.11.2" }
static_assertions = "1.1.0"
strum = { version = "0.25.0", features = ["strum_macros"] }
strum_macros = { version = "0.25.3" }
syn = { version = "2.0.51" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "1.0.57" }
tikv-jemallocator = { version = "0.5.0" }
toml = { version = "0.8.9" }
syn = { version = "2.0.39" }
test-case = { version = "3.2.1" }
thiserror = { version = "1.0.50" }
toml = { version = "0.7.8" }
tracing = { version = "0.1.40" }
tracing-indicatif = { version = "0.3.6" }
tracing-indicatif = { version = "0.3.4" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.2.4" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
unicode_names2 = { version = "1.2.0" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.2" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.40" }
wild = { version = "2" }
wsl = { version = "0.1.0" }
[workspace.lints.rust]
unsafe_code = "warn"
@@ -149,20 +88,7 @@ rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
[profile.release]
# Note that we set these explicitly, and these values
# were chosen based on a trade-off between compile times
# and runtime performance[1].
#
# [1]: https://github.com/astral-sh/ruff/pull/9031
lto = "thin"
codegen-units = 16
# Some crates don't change as much but benefit more from
# more expensive optimization passes, so we selectively
# decrease codegen-units in some cases.
[profile.release.package.ruff_python_parser]
codegen-units = 1
[profile.release.package.ruff_python_ast]
lto = "fat"
codegen-units = 1
[profile.dev.package.insta]
@@ -176,8 +102,8 @@ opt-level = 3
[profile.dev.package.ruff_python_parser]
opt-level = 1
# Use the `--profile profiling` flag to show symbols in release mode.
# e.g. `cargo build --profile profiling`
[profile.profiling]
# Use the `--profile release-debug` flag to show symbols in release mode.
# e.g. `cargo build --profile release-debug`
[profile.release-debug]
inherits = "release"
debug = 1

View File

@@ -7,9 +7,8 @@
[![image](https://img.shields.io/pypi/l/ruff.svg)](https://pypi.python.org/pypi/ruff)
[![image](https://img.shields.io/pypi/pyversions/ruff.svg)](https://pypi.python.org/pypi/ruff)
[![Actions status](https://github.com/astral-sh/ruff/workflows/CI/badge.svg)](https://github.com/astral-sh/ruff/actions)
[![Discord](https://img.shields.io/badge/Discord-%235865F2.svg?logo=discord&logoColor=white)](https://discord.com/invite/astral-sh)
[**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
[**Discord**](https://discord.gg/c9MhzV8aU5) | [**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
An extremely fast Python linter and code formatter, written in Rust.
@@ -151,7 +150,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.3.2
rev: v0.1.7
hooks:
# Run the linter.
- id: ruff
@@ -173,7 +172,7 @@ jobs:
ruff:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: chartboost/ruff-action@v1
```
@@ -195,25 +194,20 @@ exclude = [
".git",
".git-rewrite",
".hg",
".ipynb_checkpoints",
".mypy_cache",
".nox",
".pants.d",
".pyenv",
".pytest_cache",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
".vscode",
"__pypackages__",
"_build",
"buck-out",
"build",
"dist",
"node_modules",
"site-packages",
"venv",
]
@@ -342,14 +336,14 @@ For a complete enumeration of the supported rules, see [_Rules_](https://docs.as
Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
## Support
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).
## Acknowledgements
@@ -379,7 +373,6 @@ Ruff is released under the MIT license.
Ruff is used by a number of major open-source projects and companies, including:
- [Albumentations](https://github.com/albumentations-team/albumentations)
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
- [Apache Airflow](https://github.com/apache/airflow)
@@ -388,7 +381,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- Benchling ([Refac](https://github.com/benchling/refac))
- [Bokeh](https://github.com/bokeh/bokeh)
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
- [DVC](https://github.com/iterative/dvc)
- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
@@ -404,9 +396,7 @@ Ruff is used by a number of major open-source projects and companies, including:
[Diffusers](https://github.com/huggingface/diffusers))
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [Kraken Tech](https://kraken.tech/)
- [LangChain](https://github.com/hwchase17/langchain)
- [Litestar](https://litestar.dev/)
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
@@ -422,7 +412,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
- [Neon](https://github.com/neondatabase/neon)
- [NoneBot](https://github.com/nonebot/nonebot2)
- [NumPyro](https://github.com/pyro-ppl/numpyro)
- [ONNX](https://github.com/onnx/onnx)
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
- [PDM](https://github.com/pdm-project/pdm)
@@ -434,13 +423,10 @@ Ruff is used by a number of major open-source projects and companies, including:
- [PostHog](https://github.com/PostHog/posthog)
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
- [PyMC](https://github.com/pymc-devs/pymc/)
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
- [pytest](https://github.com/pytest-dev/pytest)
- [PyTorch](https://github.com/pytorch/pytorch)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
- [PyVista](https://github.com/pyvista/pyvista)
- [Reflex](https://github.com/reflex-dev/reflex)
- [River](https://github.com/online-ml/river)
- [Rippling](https://rippling.com)
@@ -467,7 +453,7 @@ Ruff is used by a number of major open-source projects and companies, including:
### Show Your Support
If you're using Ruff, consider adding the Ruff badge to your project's `README.md`:
```md
[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
@@ -493,6 +479,6 @@ MIT
<div align="center">
<a target="_blank" href="https://astral.sh" style="background:none">
<img src="https://raw.githubusercontent.com/astral-sh/ruff/main/assets/svg/Astral.svg" alt="Made by Astral">
<img src="https://raw.githubusercontent.com/astral-sh/ruff/main/assets/svg/Astral.svg">
</a>
</div>

View File

@@ -0,0 +1,39 @@
[package]
name = "flake8-to-ruff"
version = "0.1.7"
description = """
Convert Flake8 configuration files to Ruff configuration files.
"""
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }
[dependencies]
ruff_linter = { path = "../ruff_linter", default-features = false }
ruff_workspace = { path = "../ruff_workspace" }
anyhow = { workspace = true }
clap = { workspace = true }
colored = { workspace = true }
configparser = { version = "3.0.3" }
itertools = { workspace = true }
log = { workspace = true }
once_cell = { workspace = true }
pep440_rs = { version = "0.3.12", features = ["serde"] }
regex = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
toml = { workspace = true }
[dev-dependencies]
pretty_assertions = "1.3.0"
[lints]
workspace = true

View File

@@ -0,0 +1,99 @@
# flake8-to-ruff
Convert existing Flake8 configuration files (`setup.cfg`, `tox.ini`, or `.flake8`) for use with
[Ruff](https://github.com/astral-sh/ruff).
Generates a Ruff-compatible `pyproject.toml` section.
## Installation and Usage
### Installation
Available as [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) on PyPI:
```shell
pip install flake8-to-ruff
```
### Usage
To run `flake8-to-ruff`:
```shell
flake8-to-ruff path/to/setup.cfg
flake8-to-ruff path/to/tox.ini
flake8-to-ruff path/to/.flake8
```
`flake8-to-ruff` will print the relevant `pyproject.toml` sections to standard output, like so:
```toml
[tool.ruff]
exclude = [
'.svn',
'CVS',
'.bzr',
'.hg',
'.git',
'__pycache__',
'.tox',
'.idea',
'.mypy_cache',
'.venv',
'node_modules',
'_state_machine.py',
'test_fstring.py',
'bad_coding2.py',
'badsyntax_*.py',
]
select = [
'A',
'E',
'F',
'Q',
]
ignore = []
[tool.ruff.flake8-quotes]
inline-quotes = 'single'
[tool.ruff.pep8-naming]
ignore-names = [
'foo',
'bar',
]
```
### Plugins
`flake8-to-ruff` will attempt to infer any activated plugins based on the settings provided in your
configuration file.
For example, if your `.flake8` file includes a `docstring-convention` property, `flake8-to-ruff`
will enable the appropriate [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
checks.
Alternatively, you can manually specify plugins on the command-line:
```shell
flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
```
## Limitations
1. Ruff only supports a subset of the Flake8 configuration options. `flake8-to-ruff` will warn on and
ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
configuration options that don't exist in Flake8.)
1. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
codes from unsupported plugins. (See the
[documentation](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8) for the complete
list of supported plugins.)
## License
MIT
## Contributing
Contributions are welcome and hugely appreciated. To get started, check out the
[contributing guidelines](https://github.com/astral-sh/ruff/blob/main/CONTRIBUTING.md).

View File

@@ -0,0 +1,65 @@
[build-system]
requires = [
# The minimum setuptools version is specific to the PEP 517 backend,
# and may be stricter than the version required in `setup.cfg`
"setuptools>=40.6.0,!=60.9.0",
"wheel",
# Must be kept in sync with the `install_requirements` in `setup.cfg`
"cffi>=1.12; platform_python_implementation != 'PyPy'",
"setuptools-rust>=0.11.4",
]
build-backend = "setuptools.build_meta"
[tool.black]
line-length = 79
target-version = ["py36"]
[tool.pytest.ini_options]
addopts = "-r s --capture=no --strict-markers --benchmark-disable"
markers = [
"skip_fips: this test is not executed in FIPS mode",
"supported: parametrized test requiring only_if and skip_message",
]
[tool.mypy]
show_error_codes = true
check_untyped_defs = true
no_implicit_reexport = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_unused_configs = true
strict_equality = true
[[tool.mypy.overrides]]
module = [
"pretend"
]
ignore_missing_imports = true
[tool.coverage.run]
branch = true
relative_files = true
source = [
"cryptography",
"tests/",
]
[tool.coverage.paths]
source = [
"src/cryptography",
"*.tox/*/lib*/python*/site-packages/cryptography",
"*.tox\\*\\Lib\\site-packages\\cryptography",
"*.tox/pypy/site-packages/cryptography",
]
tests =[
"tests/",
"*tests\\",
]
[tool.coverage.report]
exclude_lines = [
"@abc.abstractmethod",
"@abc.abstractproperty",
"@typing.overload",
"if typing.TYPE_CHECKING",
]

View File

@@ -0,0 +1,91 @@
[metadata]
name = cryptography
version = attr: cryptography.__version__
description = cryptography is a package which provides cryptographic recipes and primitives to Python developers.
long_description = file: README.rst
long_description_content_type = text/x-rst
license = BSD-3-Clause OR Apache-2.0
url = https://github.com/pyca/cryptography
author = The Python Cryptographic Authority and individual contributors
author_email = cryptography-dev@python.org
project_urls =
Documentation=https://cryptography.io/
Source=https://github.com/pyca/cryptography/
Issues=https://github.com/pyca/cryptography/issues
Changelog=https://cryptography.io/en/latest/changelog/
classifiers =
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: Apache Software License
License :: OSI Approved :: BSD License
Natural Language :: English
Operating System :: MacOS :: MacOS X
Operating System :: POSIX
Operating System :: POSIX :: BSD
Operating System :: POSIX :: Linux
Operating System :: Microsoft :: Windows
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: Implementation :: PyPy
Topic :: Security :: Cryptography
[options]
python_requires = >=3.6
include_package_data = True
zip_safe = False
package_dir =
=src
packages = find:
# `install_requires` must be kept in sync with `pyproject.toml`
install_requires =
cffi >=1.12
[options.packages.find]
where = src
exclude =
_cffi_src
_cffi_src.*
[options.extras_require]
test =
pytest>=6.2.0
pytest-benchmark
pytest-cov
pytest-subtests
pytest-xdist
pretend
iso8601
pytz
hypothesis>=1.11.4,!=3.79.2
docs =
sphinx >= 1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0
sphinx_rtd_theme
docstest =
pyenchant >= 1.6.11
twine >= 1.12.0
sphinxcontrib-spelling >= 4.0.1
sdist =
setuptools_rust >= 0.11.4
pep8test =
black
flake8
flake8-import-order
pep8-naming
# This extra is for OpenSSH private keys that use bcrypt KDF
# Versions: v3.1.3 - ignore_few_rounds, v3.1.5 - abi3
ssh =
bcrypt >= 3.1.5
[flake8]
ignore = E203,E211,W503,W504,N818
exclude = .tox,*.egg,.git,_build,.hypothesis
select = E,W,F,N,I
application-import-names = cryptography,cryptography_vectors,tests

View File

@@ -0,0 +1,19 @@
[flake8]
# Ignore style and complexity
# E: style errors
# W: style warnings
# C: complexity
# D: docstring warnings (unused pydocstyle extension)
# F841: local variable assigned but never used
ignore = E, C, W, D, F841
builtins = c, get_config
exclude =
.cache,
.github,
docs,
jupyterhub/alembic*,
onbuild,
scripts,
share,
tools,
setup.py

View File

@@ -0,0 +1,43 @@
[flake8]
# Exclude the grpc generated code
exclude = ./manim/grpc/gen/*
max-complexity = 15
max-line-length = 88
statistics = True
# Prevents some flake8-rst-docstrings errors
rst-roles = attr,class,func,meth,mod,obj,ref,doc,exc
rst-directives = manim, SEEALSO, seealso
docstring-convention=numpy
select = A,A00,B,B9,C4,C90,D,E,F,F,PT,RST,SIM,W
# General Compatibility
extend-ignore = E203, W503, D202, D212, D213, D404
# Misc
F401, F403, F405, F841, E501, E731, E402, F811, F821,
# Plug-in: flake8-builtins
A001, A002, A003,
# Plug-in: flake8-bugbear
B006, B007, B008, B009, B010, B903, B950,
# Plug-in: flake8-simplify
SIM105, SIM106, SIM119,
# Plug-in: flake8-comprehensions
C901
# Plug-in: flake8-pytest-style
PT001, PT004, PT006, PT011, PT018, PT022, PT023,
# Plug-in: flake8-docstrings
D100, D101, D102, D103, D104, D105, D106, D107,
D200, D202, D204, D205, D209,
D301,
D400, D401, D402, D403, D405, D406, D407, D409, D411, D412, D414,
# Plug-in: flake8-rst-docstrings
RST201, RST203, RST210, RST212, RST213, RST215,
RST301, RST303,

View File

@@ -0,0 +1,36 @@
[flake8]
min_python_version = 3.7.0
max-line-length = 88
ban-relative-imports = true
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
format-greedy = 1
inline-quotes = double
enable-extensions = TC, TC1
type-checking-strict = true
eradicate-whitelist-extend = ^-.*;
extend-ignore =
# E203: Whitespace before ':' (pycqa/pycodestyle#373)
E203,
# SIM106: Handle error-cases first
SIM106,
# ANN101: Missing type annotation for self in method
ANN101,
# ANN102: Missing type annotation for cls in classmethod
ANN102,
# PIE781: assign-and-return
PIE781,
# PIE798 no-unnecessary-class: Consider using a module for namespacing instead
PIE798,
per-file-ignores =
# TC002: Move third-party import '...' into a type-checking block
__init__.py:TC002,
# ANN201: Missing return type annotation for public function
tests/test_*:ANN201
tests/**/test_*:ANN201
extend-exclude =
# Frozen and not subject to change in this repo:
get-poetry.py,
install-poetry.py,
# External to the project's coding standards:
tests/fixtures/*,
tests/**/fixtures/*,

View File

@@ -0,0 +1,19 @@
[flake8]
max-line-length=120
docstring-convention=all
import-order-style=pycharm
application_import_names=bot,tests
exclude=.cache,.venv,.git,constants.py
extend-ignore=
B311,W503,E226,S311,T000,E731
# Missing Docstrings
D100,D104,D105,D107,
# Docstring Whitespace
D203,D212,D214,D215,
# Docstring Quotes
D301,D302,
# Docstring Content
D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
# Type Annotations
ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
per-file-ignores=tests/*:D,ANN

View File

@@ -0,0 +1,6 @@
[flake8]
ignore = E203, E501, W503
per-file-ignores =
requests/__init__.py:E402, F401
requests/compat.py:E402, F401
tests/compat.py:F401

View File

@@ -0,0 +1,34 @@
[project]
name = "flake8-to-ruff"
keywords = ["automation", "flake8", "pycodestyle", "pyflakes", "pylint", "clippy"]
classifiers = [
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3 :: Only",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
]
author = "Charlie Marsh"
author_email = "charlie.r.marsh@gmail.com"
description = "Convert existing Flake8 configuration to Ruff."
requires-python = ">=3.7"
[project.urls]
repository = "https://github.com/astral-sh/ruff#subdirectory=crates/flake8_to_ruff"
[build-system]
requires = ["maturin>=1.0,<2.0"]
build-backend = "maturin"
[tool.maturin]
bindings = "bin"
strip = true

View File

@@ -0,0 +1,13 @@
//! Extract Black configuration settings from a pyproject.toml.
use ruff_linter::line_width::LineLength;
use ruff_linter::settings::types::PythonVersion;
use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Black {
#[serde(alias = "line-length", alias = "line_length")]
pub(crate) line_length: Option<LineLength>,
#[serde(alias = "target-version", alias = "target_version")]
pub(crate) target_version: Option<Vec<PythonVersion>>,
}

View File

@@ -0,0 +1,687 @@
use std::collections::{HashMap, HashSet};
use std::str::FromStr;
use itertools::Itertools;
use ruff_linter::line_width::LineLength;
use ruff_linter::registry::Linter;
use ruff_linter::rule_selector::RuleSelector;
use ruff_linter::rules::flake8_pytest_style::types::{
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
};
use ruff_linter::rules::flake8_quotes::settings::Quote;
use ruff_linter::rules::flake8_tidy_imports::settings::Strictness;
use ruff_linter::rules::pydocstyle::settings::Convention;
use ruff_linter::settings::types::PythonVersion;
use ruff_linter::settings::DEFAULT_SELECTORS;
use ruff_linter::warn_user;
use ruff_workspace::options::{
Flake8AnnotationsOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, Flake8ErrMsgOptions,
Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, LintCommonOptions,
LintOptions, McCabeOptions, Options, Pep8NamingOptions, PydocstyleOptions,
};
use ruff_workspace::pyproject::Pyproject;
use super::external_config::ExternalConfig;
use super::plugin::Plugin;
use super::{parser, plugin};
pub(crate) fn convert(
config: &HashMap<String, HashMap<String, Option<String>>>,
external_config: &ExternalConfig,
plugins: Option<Vec<Plugin>>,
) -> Pyproject {
// Extract the Flake8 section.
let flake8 = config
.get("flake8")
.expect("Unable to find flake8 section in INI file");
// Extract all referenced rule code prefixes, to power plugin inference.
let mut referenced_codes: HashSet<RuleSelector> = HashSet::default();
for (key, value) in flake8 {
if let Some(value) = value {
match key.as_str() {
"select" | "ignore" | "extend-select" | "extend_select" | "extend-ignore"
| "extend_ignore" => {
referenced_codes.extend(parser::parse_prefix_codes(value.as_ref()));
}
"per-file-ignores" | "per_file_ignores" => {
if let Ok(per_file_ignores) =
parser::parse_files_to_codes_mapping(value.as_ref())
{
for (_, codes) in parser::collect_per_file_ignores(per_file_ignores) {
referenced_codes.extend(codes);
}
}
}
_ => {}
}
}
}
// Infer plugins, if not provided.
let plugins = plugins.unwrap_or_else(|| {
let from_options = plugin::infer_plugins_from_options(flake8);
if !from_options.is_empty() {
#[allow(clippy::print_stderr)]
{
eprintln!("Inferred plugins from settings: {from_options:#?}");
}
}
let from_codes = plugin::infer_plugins_from_codes(&referenced_codes);
if !from_codes.is_empty() {
#[allow(clippy::print_stderr)]
{
eprintln!("Inferred plugins from referenced codes: {from_codes:#?}");
}
}
from_options.into_iter().chain(from_codes).collect()
});
// Check if the user has specified a `select`. If not, we'll add our own
// default `select`, and populate it based on user plugins.
let mut select = flake8
.get("select")
.and_then(|value| {
value
.as_ref()
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
})
.unwrap_or_else(|| resolve_select(&plugins));
let mut ignore: HashSet<RuleSelector> = flake8
.get("ignore")
.and_then(|value| {
value
.as_ref()
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
})
.unwrap_or_default();
// Parse each supported option.
let mut options = Options::default();
let mut lint_options = LintCommonOptions::default();
let mut flake8_annotations = Flake8AnnotationsOptions::default();
let mut flake8_bugbear = Flake8BugbearOptions::default();
let mut flake8_builtins = Flake8BuiltinsOptions::default();
let mut flake8_errmsg = Flake8ErrMsgOptions::default();
let mut flake8_pytest_style = Flake8PytestStyleOptions::default();
let mut flake8_quotes = Flake8QuotesOptions::default();
let mut flake8_tidy_imports = Flake8TidyImportsOptions::default();
let mut mccabe = McCabeOptions::default();
let mut pep8_naming = Pep8NamingOptions::default();
let mut pydocstyle = PydocstyleOptions::default();
for (key, value) in flake8 {
if let Some(value) = value {
match key.as_str() {
// flake8
"builtins" => {
options.builtins = Some(parser::parse_strings(value.as_ref()));
}
"max-line-length" | "max_line_length" => match LineLength::from_str(value) {
Ok(line_length) => options.line_length = Some(line_length),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
"select" => {
// No-op (handled above).
select.extend(parser::parse_prefix_codes(value.as_ref()));
}
"ignore" => {
// No-op (handled above).
}
"extend-select" | "extend_select" => {
// Unlike Flake8, use a single explicit `select`.
select.extend(parser::parse_prefix_codes(value.as_ref()));
}
"extend-ignore" | "extend_ignore" => {
// Unlike Flake8, use a single explicit `ignore`.
ignore.extend(parser::parse_prefix_codes(value.as_ref()));
}
"exclude" => {
options.exclude = Some(parser::parse_strings(value.as_ref()));
}
"extend-exclude" | "extend_exclude" => {
options.extend_exclude = Some(parser::parse_strings(value.as_ref()));
}
"per-file-ignores" | "per_file_ignores" => {
match parser::parse_files_to_codes_mapping(value.as_ref()) {
Ok(per_file_ignores) => {
lint_options.per_file_ignores =
Some(parser::collect_per_file_ignores(per_file_ignores));
}
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-bugbear
"extend-immutable-calls" | "extend_immutable_calls" => {
flake8_bugbear.extend_immutable_calls =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-builtins
"builtins-ignorelist" | "builtins_ignorelist" => {
flake8_builtins.builtins_ignorelist =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-annotations
"suppress-none-returning" | "suppress_none_returning" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.suppress_none_returning = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"suppress-dummy-args" | "suppress_dummy_args" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.suppress_dummy_args = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"mypy-init-return" | "mypy_init_return" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.mypy_init_return = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"allow-star-arg-any" | "allow_star_arg_any" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.allow_star_arg_any = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-quotes
"quotes" | "inline-quotes" | "inline_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.inline_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.inline_quotes = Some(Quote::Double),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"multiline-quotes" | "multiline_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.multiline_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.multiline_quotes = Some(Quote::Double),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"docstring-quotes" | "docstring_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.docstring_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.docstring_quotes = Some(Quote::Double),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"avoid-escape" | "avoid_escape" => match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_quotes.avoid_escape = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
// pep8-naming
"ignore-names" | "ignore_names" => {
pep8_naming.ignore_names = Some(parser::parse_strings(value.as_ref()));
}
"classmethod-decorators" | "classmethod_decorators" => {
pep8_naming.classmethod_decorators =
Some(parser::parse_strings(value.as_ref()));
}
"staticmethod-decorators" | "staticmethod_decorators" => {
pep8_naming.staticmethod_decorators =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-tidy-imports
"ban-relative-imports" | "ban_relative_imports" => match value.trim() {
"true" => flake8_tidy_imports.ban_relative_imports = Some(Strictness::All),
"parents" => {
flake8_tidy_imports.ban_relative_imports = Some(Strictness::Parents);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
// flake8-docstrings
"docstring-convention" => match value.trim() {
"google" => pydocstyle.convention = Some(Convention::Google),
"numpy" => pydocstyle.convention = Some(Convention::Numpy),
"pep257" => pydocstyle.convention = Some(Convention::Pep257),
"all" => pydocstyle.convention = None,
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
// mccabe
"max-complexity" | "max_complexity" => match value.parse::<usize>() {
Ok(max_complexity) => mccabe.max_complexity = Some(max_complexity),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
// flake8-errmsg
"errmsg-max-string-length" | "errmsg_max_string_length" => {
match value.parse::<usize>() {
Ok(max_string_length) => {
flake8_errmsg.max_string_length = Some(max_string_length);
}
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-pytest-style
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_pytest_style.fixture_parentheses = Some(!bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
match value.trim() {
"csv" => {
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::Csv);
}
"tuple" => {
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::Tuple);
}
"list" => {
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::List);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
match value.trim() {
"tuple" => {
flake8_pytest_style.parametrize_values_type =
Some(ParametrizeValuesType::Tuple);
}
"list" => {
flake8_pytest_style.parametrize_values_type =
Some(ParametrizeValuesType::List);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
match value.trim() {
"tuple" => {
flake8_pytest_style.parametrize_values_row_type =
Some(ParametrizeValuesRowType::Tuple);
}
"list" => {
flake8_pytest_style.parametrize_values_row_type =
Some(ParametrizeValuesRowType::List);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
flake8_pytest_style.raises_require_match_for =
Some(parser::parse_strings(value.as_ref()));
}
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_pytest_style.mark_parentheses = Some(!bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// Unknown
_ => {
warn_user!("Skipping unsupported property: {}", key);
}
}
}
}
// Deduplicate and sort.
lint_options.select = Some(
select
.into_iter()
.sorted_by_key(RuleSelector::prefix_and_code)
.collect(),
);
lint_options.ignore = Some(
ignore
.into_iter()
.sorted_by_key(RuleSelector::prefix_and_code)
.collect(),
);
if flake8_annotations != Flake8AnnotationsOptions::default() {
lint_options.flake8_annotations = Some(flake8_annotations);
}
if flake8_bugbear != Flake8BugbearOptions::default() {
lint_options.flake8_bugbear = Some(flake8_bugbear);
}
if flake8_builtins != Flake8BuiltinsOptions::default() {
lint_options.flake8_builtins = Some(flake8_builtins);
}
if flake8_errmsg != Flake8ErrMsgOptions::default() {
lint_options.flake8_errmsg = Some(flake8_errmsg);
}
if flake8_pytest_style != Flake8PytestStyleOptions::default() {
lint_options.flake8_pytest_style = Some(flake8_pytest_style);
}
if flake8_quotes != Flake8QuotesOptions::default() {
lint_options.flake8_quotes = Some(flake8_quotes);
}
if flake8_tidy_imports != Flake8TidyImportsOptions::default() {
lint_options.flake8_tidy_imports = Some(flake8_tidy_imports);
}
if mccabe != McCabeOptions::default() {
lint_options.mccabe = Some(mccabe);
}
if pep8_naming != Pep8NamingOptions::default() {
lint_options.pep8_naming = Some(pep8_naming);
}
if pydocstyle != PydocstyleOptions::default() {
lint_options.pydocstyle = Some(pydocstyle);
}
// Extract any settings from the existing `pyproject.toml`.
if let Some(black) = &external_config.black {
if let Some(line_length) = &black.line_length {
options.line_length = Some(*line_length);
}
if let Some(target_version) = &black.target_version {
if let Some(target_version) = target_version.iter().min() {
options.target_version = Some(*target_version);
}
}
}
if let Some(isort) = &external_config.isort {
if let Some(src_paths) = &isort.src_paths {
match options.src.as_mut() {
Some(src) => {
src.extend_from_slice(src_paths);
}
None => {
options.src = Some(src_paths.clone());
}
}
}
}
if let Some(project) = &external_config.project {
if let Some(requires_python) = &project.requires_python {
if options.target_version.is_none() {
options.target_version =
PythonVersion::get_minimum_supported_version(requires_python);
}
}
}
if lint_options != LintCommonOptions::default() {
options.lint = Some(LintOptions {
common: lint_options,
..LintOptions::default()
});
}
// Create the pyproject.toml.
Pyproject::new(options)
}
/// Resolve the set of enabled `RuleSelector` values for the given
/// plugins.
fn resolve_select(plugins: &[Plugin]) -> HashSet<RuleSelector> {
let mut select: HashSet<_> = DEFAULT_SELECTORS.iter().cloned().collect();
select.extend(plugins.iter().map(|p| Linter::from(p).into()));
select
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::str::FromStr;
use anyhow::Result;
use itertools::Itertools;
use pep440_rs::VersionSpecifiers;
use pretty_assertions::assert_eq;
use ruff_linter::line_width::LineLength;
use ruff_linter::registry::Linter;
use ruff_linter::rule_selector::RuleSelector;
use ruff_linter::rules::flake8_quotes;
use ruff_linter::rules::pydocstyle::settings::Convention;
use ruff_linter::settings::types::PythonVersion;
use ruff_workspace::options::{
Flake8QuotesOptions, LintCommonOptions, LintOptions, Options, PydocstyleOptions,
};
use ruff_workspace::pyproject::Pyproject;
use crate::converter::DEFAULT_SELECTORS;
use crate::pep621::Project;
use crate::ExternalConfig;
use super::super::plugin::Plugin;
use super::convert;
fn lint_default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> LintCommonOptions {
LintCommonOptions {
ignore: Some(vec![]),
select: Some(
DEFAULT_SELECTORS
.iter()
.cloned()
.chain(plugins)
.sorted_by_key(RuleSelector::prefix_and_code)
.collect(),
),
..LintCommonOptions::default()
}
}
#[test]
fn it_converts_empty() {
let actual = convert(
&HashMap::from([("flake8".to_string(), HashMap::default())]),
&ExternalConfig::default(),
None,
);
let expected = Pyproject::new(Options {
lint: Some(LintOptions {
common: lint_default_options([]),
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_dashes() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("max-line-length".to_string(), Some("100".to_string()))]),
)]),
&ExternalConfig::default(),
Some(vec![]),
);
let expected = Pyproject::new(Options {
line_length: Some(LineLength::try_from(100).unwrap()),
lint: Some(LintOptions {
common: lint_default_options([]),
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_underscores() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("max_line_length".to_string(), Some("100".to_string()))]),
)]),
&ExternalConfig::default(),
Some(vec![]),
);
let expected = Pyproject::new(Options {
line_length: Some(LineLength::try_from(100).unwrap()),
lint: Some(LintOptions {
common: lint_default_options([]),
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_ignores_parse_errors() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("max_line_length".to_string(), Some("abc".to_string()))]),
)]),
&ExternalConfig::default(),
Some(vec![]),
);
let expected = Pyproject::new(Options {
lint: Some(LintOptions {
common: lint_default_options([]),
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_plugin_options() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
)]),
&ExternalConfig::default(),
Some(vec![]),
);
let expected = Pyproject::new(Options {
lint: Some(LintOptions {
common: LintCommonOptions {
flake8_quotes: Some(Flake8QuotesOptions {
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
multiline_quotes: None,
docstring_quotes: None,
avoid_escape: None,
}),
..lint_default_options([])
},
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_docstring_conventions() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([(
"docstring-convention".to_string(),
Some("numpy".to_string()),
)]),
)]),
&ExternalConfig::default(),
Some(vec![Plugin::Flake8Docstrings]),
);
let expected = Pyproject::new(Options {
lint: Some(LintOptions {
common: LintCommonOptions {
pydocstyle: Some(PydocstyleOptions {
convention: Some(Convention::Numpy),
ignore_decorators: None,
property_decorators: None,
}),
..lint_default_options([Linter::Pydocstyle.into()])
},
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_infers_plugins_if_omitted() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
)]),
&ExternalConfig::default(),
None,
);
let expected = Pyproject::new(Options {
lint: Some(LintOptions {
common: LintCommonOptions {
flake8_quotes: Some(Flake8QuotesOptions {
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
multiline_quotes: None,
docstring_quotes: None,
avoid_escape: None,
}),
..lint_default_options([Linter::Flake8Quotes.into()])
},
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_project_requires_python() -> Result<()> {
let actual = convert(
&HashMap::from([("flake8".to_string(), HashMap::default())]),
&ExternalConfig {
project: Some(&Project {
requires_python: Some(VersionSpecifiers::from_str(">=3.8.16, <3.11")?),
}),
..ExternalConfig::default()
},
Some(vec![]),
);
let expected = Pyproject::new(Options {
target_version: Some(PythonVersion::Py38),
lint: Some(LintOptions {
common: lint_default_options([]),
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
Ok(())
}
}

View File

@@ -0,0 +1,10 @@
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;
#[derive(Default)]
pub(crate) struct ExternalConfig<'a> {
pub(crate) black: Option<&'a Black>,
pub(crate) isort: Option<&'a Isort>,
pub(crate) project: Option<&'a Project>,
}

View File

@@ -0,0 +1,10 @@
//! Extract isort configuration settings from a pyproject.toml.
use serde::{Deserialize, Serialize};
/// The [isort configuration](https://pycqa.github.io/isort/docs/configuration/config_files.html).
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Isort {
#[serde(alias = "src-paths", alias = "src_paths")]
pub(crate) src_paths: Option<Vec<String>>,
}

View File

@@ -0,0 +1,80 @@
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.
mod black;
mod converter;
mod external_config;
mod isort;
mod parser;
mod pep621;
mod plugin;
mod pyproject;
use std::path::PathBuf;
use anyhow::Result;
use clap::Parser;
use configparser::ini::Ini;
use crate::converter::convert;
use crate::external_config::ExternalConfig;
use crate::plugin::Plugin;
use crate::pyproject::parse;
use ruff_linter::logging::{set_up_logging, LogLevel};
#[derive(Parser)]
#[command(
about = "Convert existing Flake8 configuration to Ruff.",
long_about = None
)]
struct Args {
/// Path to the Flake8 configuration file (e.g., `setup.cfg`, `tox.ini`, or
/// `.flake8`).
#[arg(required = true)]
file: PathBuf,
/// Optional path to a `pyproject.toml` file, used to ensure compatibility
/// with Black.
#[arg(long)]
pyproject: Option<PathBuf>,
/// List of plugins to enable.
#[arg(long, value_delimiter = ',')]
plugin: Option<Vec<Plugin>>,
}
fn main() -> Result<()> {
set_up_logging(&LogLevel::Default)?;
let args = Args::parse();
// Read the INI file.
let mut ini = Ini::new_cs();
ini.set_multiline(true);
let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;
// Read the pyproject.toml file.
let pyproject = args.pyproject.map(parse).transpose()?;
let external_config = pyproject
.as_ref()
.and_then(|pyproject| pyproject.tool.as_ref())
.map(|tool| ExternalConfig {
black: tool.black.as_ref(),
isort: tool.isort.as_ref(),
..Default::default()
})
.unwrap_or_default();
let external_config = ExternalConfig {
project: pyproject
.as_ref()
.and_then(|pyproject| pyproject.project.as_ref()),
..external_config
};
// Create Ruff's pyproject.toml section.
let pyproject = convert(&config, &external_config, args.plugin);
#[allow(clippy::print_stdout)]
{
println!("{}", toml::to_string_pretty(&pyproject)?);
}
Ok(())
}

View File

@@ -0,0 +1,391 @@
use std::str::FromStr;
use anyhow::{bail, Result};
use once_cell::sync::Lazy;
use regex::Regex;
use rustc_hash::FxHashMap;
use ruff_linter::settings::types::PatternPrefixPair;
use ruff_linter::{warn_user, RuleSelector};
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
/// Parse a comma-separated list of `RuleSelector` values (e.g.,
/// "F401,E501").
pub(crate) fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
let mut codes: Vec<RuleSelector> = vec![];
for code in COMMA_SEPARATED_LIST_RE.split(value) {
let code = code.trim();
if code.is_empty() {
continue;
}
if let Ok(code) = RuleSelector::from_str(code) {
codes.push(code);
} else {
warn_user!("Unsupported prefix code: {code}");
}
}
codes
}
/// Parse a comma-separated list of strings (e.g., "__init__.py,__main__.py").
pub(crate) fn parse_strings(value: &str) -> Vec<String> {
COMMA_SEPARATED_LIST_RE
.split(value)
.map(str::trim)
.filter(|part| !part.is_empty())
.map(String::from)
.collect()
}
/// Parse a boolean.
pub(crate) fn parse_bool(value: &str) -> Result<bool> {
match value.trim() {
"true" => Ok(true),
"false" => Ok(false),
_ => bail!("Unexpected boolean value: {value}"),
}
}
#[derive(Debug)]
struct Token {
token_name: TokenType,
src: String,
}
#[derive(Debug, Copy, Clone)]
enum TokenType {
Code,
File,
Colon,
Comma,
Ws,
Eof,
}
struct State {
seen_sep: bool,
seen_colon: bool,
filenames: Vec<String>,
codes: Vec<String>,
}
impl State {
const fn new() -> Self {
Self {
seen_sep: true,
seen_colon: false,
filenames: vec![],
codes: vec![],
}
}
/// Generate the list of `StrRuleCodePair` pairs for the current
/// state.
fn parse(&self) -> Vec<PatternPrefixPair> {
let mut codes: Vec<PatternPrefixPair> = vec![];
for code in &self.codes {
if let Ok(code) = RuleSelector::from_str(code) {
for filename in &self.filenames {
codes.push(PatternPrefixPair {
pattern: filename.clone(),
prefix: code.clone(),
});
}
} else {
warn_user!("Unsupported prefix code: {code}");
}
}
codes
}
}
/// Tokenize the raw 'files-to-codes' mapping.
fn tokenize_files_to_codes_mapping(value: &str) -> Vec<Token> {
let mut tokens = vec![];
let mut i = 0;
while i < value.len() {
for (token_re, token_name) in [
(
Regex::new(r"([A-Z]+[0-9]*)(?:$|\s|,)").unwrap(),
TokenType::Code,
),
(Regex::new(r"([^\s:,]+)").unwrap(), TokenType::File),
(Regex::new(r"(\s*:\s*)").unwrap(), TokenType::Colon),
(Regex::new(r"(\s*,\s*)").unwrap(), TokenType::Comma),
(Regex::new(r"(\s+)").unwrap(), TokenType::Ws),
] {
if let Some(cap) = token_re.captures(&value[i..]) {
let mat = cap.get(1).unwrap();
if mat.start() == 0 {
tokens.push(Token {
token_name,
src: mat.as_str().trim().to_string(),
});
i += mat.end();
break;
}
}
}
}
tokens.push(Token {
token_name: TokenType::Eof,
src: String::new(),
});
tokens
}
/// Parse a 'files-to-codes' mapping, mimicking Flake8's internal logic.
/// See: <https://github.com/PyCQA/flake8/blob/7dfe99616fc2f07c0017df2ba5fa884158f3ea8a/src/flake8/utils.py#L45>
pub(crate) fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
if value.trim().is_empty() {
return Ok(vec![]);
}
let mut codes: Vec<PatternPrefixPair> = vec![];
let mut state = State::new();
for token in tokenize_files_to_codes_mapping(value) {
if matches!(token.token_name, TokenType::Comma | TokenType::Ws) {
state.seen_sep = true;
} else if !state.seen_colon {
if matches!(token.token_name, TokenType::Colon) {
state.seen_colon = true;
state.seen_sep = true;
} else if state.seen_sep && matches!(token.token_name, TokenType::File) {
state.filenames.push(token.src);
state.seen_sep = false;
} else {
bail!("Unexpected token: {:?}", token.token_name);
}
} else {
if matches!(token.token_name, TokenType::Eof) {
codes.extend(state.parse());
state = State::new();
} else if state.seen_sep && matches!(token.token_name, TokenType::Code) {
state.codes.push(token.src);
state.seen_sep = false;
} else if state.seen_sep && matches!(token.token_name, TokenType::File) {
codes.extend(state.parse());
state = State::new();
state.filenames.push(token.src);
state.seen_sep = false;
} else {
bail!("Unexpected token: {:?}", token.token_name);
}
}
}
Ok(codes)
}
/// Collect a list of `PatternPrefixPair` structs as a `BTreeMap`.
pub(crate) fn collect_per_file_ignores(
pairs: Vec<PatternPrefixPair>,
) -> FxHashMap<String, Vec<RuleSelector>> {
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
for pair in pairs {
per_file_ignores
.entry(pair.pattern)
.or_default()
.push(pair.prefix);
}
per_file_ignores
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use ruff_linter::codes;
use ruff_linter::registry::Linter;
use ruff_linter::settings::types::PatternPrefixPair;
use ruff_linter::RuleSelector;
use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};
#[test]
fn it_parses_prefix_codes() {
let actual = parse_prefix_codes("");
let expected: Vec<RuleSelector> = vec![];
assert_eq!(actual, expected);
let actual = parse_prefix_codes(" ");
let expected: Vec<RuleSelector> = vec![];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401");
let expected = vec![codes::Pyflakes::_401.into()];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401,");
let expected = vec![codes::Pyflakes::_401.into()];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401,E501");
let expected = vec![
codes::Pyflakes::_401.into(),
codes::Pycodestyle::E501.into(),
];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401, E501");
let expected = vec![
codes::Pyflakes::_401.into(),
codes::Pycodestyle::E501.into(),
];
assert_eq!(actual, expected);
}
#[test]
fn it_parses_strings() {
let actual = parse_strings("");
let expected: Vec<String> = vec![];
assert_eq!(actual, expected);
let actual = parse_strings(" ");
let expected: Vec<String> = vec![];
assert_eq!(actual, expected);
let actual = parse_strings("__init__.py");
let expected = vec!["__init__.py".to_string()];
assert_eq!(actual, expected);
let actual = parse_strings("__init__.py,");
let expected = vec!["__init__.py".to_string()];
assert_eq!(actual, expected);
let actual = parse_strings("__init__.py,__main__.py");
let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
assert_eq!(actual, expected);
let actual = parse_strings("__init__.py, __main__.py");
let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
assert_eq!(actual, expected);
}
#[test]
fn it_parse_files_to_codes_mapping() -> Result<()> {
let actual = parse_files_to_codes_mapping("")?;
let expected: Vec<PatternPrefixPair> = vec![];
assert_eq!(actual, expected);
let actual = parse_files_to_codes_mapping(" ")?;
let expected: Vec<PatternPrefixPair> = vec![];
assert_eq!(actual, expected);
// Ex) locust
let actual = parse_files_to_codes_mapping(
"per-file-ignores =
locust/test/*: F841
examples/*: F841
*.pyi: E302,E704"
.strip_prefix("per-file-ignores =")
.unwrap(),
)?;
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "locust/test/*".to_string(),
prefix: codes::Pyflakes::_841.into(),
},
PatternPrefixPair {
pattern: "examples/*".to_string(),
prefix: codes::Pyflakes::_841.into(),
},
];
assert_eq!(actual, expected);
// Ex) celery
let actual = parse_files_to_codes_mapping(
"per-file-ignores =
t/*,setup.py,examples/*,docs/*,extra/*:
D,"
.strip_prefix("per-file-ignores =")
.unwrap(),
)?;
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "t/*".to_string(),
prefix: Linter::Pydocstyle.into(),
},
PatternPrefixPair {
pattern: "setup.py".to_string(),
prefix: Linter::Pydocstyle.into(),
},
PatternPrefixPair {
pattern: "examples/*".to_string(),
prefix: Linter::Pydocstyle.into(),
},
PatternPrefixPair {
pattern: "docs/*".to_string(),
prefix: Linter::Pydocstyle.into(),
},
PatternPrefixPair {
pattern: "extra/*".to_string(),
prefix: Linter::Pydocstyle.into(),
},
];
assert_eq!(actual, expected);
// Ex) scrapy
let actual = parse_files_to_codes_mapping(
"per-file-ignores =
scrapy/__init__.py:E402
scrapy/core/downloader/handlers/http.py:F401
scrapy/http/__init__.py:F401
scrapy/linkextractors/__init__.py:E402,F401
scrapy/selector/__init__.py:F401
scrapy/spiders/__init__.py:E402,F401
scrapy/utils/url.py:F403,F405
tests/test_loader.py:E741"
.strip_prefix("per-file-ignores =")
.unwrap(),
)?;
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "scrapy/__init__.py".to_string(),
prefix: codes::Pycodestyle::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/http/__init__.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/linkextractors/__init__.py".to_string(),
prefix: codes::Pycodestyle::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/linkextractors/__init__.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/selector/__init__.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/spiders/__init__.py".to_string(),
prefix: codes::Pycodestyle::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/spiders/__init__.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/utils/url.py".to_string(),
prefix: codes::Pyflakes::_403.into(),
},
PatternPrefixPair {
pattern: "scrapy/utils/url.py".to_string(),
prefix: codes::Pyflakes::_405.into(),
},
PatternPrefixPair {
pattern: "tests/test_loader.py".to_string(),
prefix: codes::Pycodestyle::E741.into(),
},
];
assert_eq!(actual, expected);
Ok(())
}
}

View File

@@ -0,0 +1,10 @@
//! Extract PEP 621 configuration settings from a pyproject.toml.
use pep440_rs::VersionSpecifiers;
use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Project {
#[serde(alias = "requires-python", alias = "requires_python")]
pub(crate) requires_python: Option<VersionSpecifiers>,
}

View File

@@ -0,0 +1,368 @@
use std::collections::{BTreeSet, HashMap, HashSet};
use std::fmt;
use std::str::FromStr;
use anyhow::anyhow;
use ruff_linter::registry::Linter;
use ruff_linter::rule_selector::PreviewOptions;
use ruff_linter::RuleSelector;
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Plugin {
Flake82020,
Flake8Annotations,
Flake8Bandit,
Flake8BlindExcept,
Flake8BooleanTrap,
Flake8Bugbear,
Flake8Builtins,
Flake8Commas,
Flake8Comprehensions,
Flake8Datetimez,
Flake8Debugger,
Flake8Docstrings,
Flake8Eradicate,
Flake8ErrMsg,
Flake8Executable,
Flake8ImplicitStrConcat,
Flake8ImportConventions,
Flake8NoPep420,
Flake8Pie,
Flake8Print,
Flake8PytestStyle,
Flake8Quotes,
Flake8Return,
Flake8Simplify,
Flake8TidyImports,
Flake8TypeChecking,
Flake8UnusedArguments,
Flake8UsePathlib,
McCabe,
PEP8Naming,
PandasVet,
Pyupgrade,
Tryceratops,
}
impl FromStr for Plugin {
type Err = anyhow::Error;
fn from_str(string: &str) -> Result<Self, Self::Err> {
match string {
"flake8-2020" => Ok(Plugin::Flake82020),
"flake8-annotations" => Ok(Plugin::Flake8Annotations),
"flake8-bandit" => Ok(Plugin::Flake8Bandit),
"flake8-blind-except" => Ok(Plugin::Flake8BlindExcept),
"flake8-boolean-trap" => Ok(Plugin::Flake8BooleanTrap),
"flake8-bugbear" => Ok(Plugin::Flake8Bugbear),
"flake8-builtins" => Ok(Plugin::Flake8Builtins),
"flake8-commas" => Ok(Plugin::Flake8Commas),
"flake8-comprehensions" => Ok(Plugin::Flake8Comprehensions),
"flake8-datetimez" => Ok(Plugin::Flake8Datetimez),
"flake8-debugger" => Ok(Plugin::Flake8Debugger),
"flake8-docstrings" => Ok(Plugin::Flake8Docstrings),
"flake8-eradicate" => Ok(Plugin::Flake8Eradicate),
"flake8-errmsg" => Ok(Plugin::Flake8ErrMsg),
"flake8-executable" => Ok(Plugin::Flake8Executable),
"flake8-implicit-str-concat" => Ok(Plugin::Flake8ImplicitStrConcat),
"flake8-import-conventions" => Ok(Plugin::Flake8ImportConventions),
"flake8-no-pep420" => Ok(Plugin::Flake8NoPep420),
"flake8-pie" => Ok(Plugin::Flake8Pie),
"flake8-print" => Ok(Plugin::Flake8Print),
"flake8-pytest-style" => Ok(Plugin::Flake8PytestStyle),
"flake8-quotes" => Ok(Plugin::Flake8Quotes),
"flake8-return" => Ok(Plugin::Flake8Return),
"flake8-simplify" => Ok(Plugin::Flake8Simplify),
"flake8-tidy-imports" => Ok(Plugin::Flake8TidyImports),
"flake8-type-checking" => Ok(Plugin::Flake8TypeChecking),
"flake8-unused-arguments" => Ok(Plugin::Flake8UnusedArguments),
"flake8-use-pathlib" => Ok(Plugin::Flake8UsePathlib),
"mccabe" => Ok(Plugin::McCabe),
"pep8-naming" => Ok(Plugin::PEP8Naming),
"pandas-vet" => Ok(Plugin::PandasVet),
"pyupgrade" => Ok(Plugin::Pyupgrade),
"tryceratops" => Ok(Plugin::Tryceratops),
_ => Err(anyhow!("Unknown plugin: {string}")),
}
}
}
impl fmt::Debug for Plugin {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}",
match self {
Plugin::Flake82020 => "flake8-2020",
Plugin::Flake8Annotations => "flake8-annotations",
Plugin::Flake8Bandit => "flake8-bandit",
Plugin::Flake8BlindExcept => "flake8-blind-except",
Plugin::Flake8BooleanTrap => "flake8-boolean-trap",
Plugin::Flake8Bugbear => "flake8-bugbear",
Plugin::Flake8Builtins => "flake8-builtins",
Plugin::Flake8Commas => "flake8-commas",
Plugin::Flake8Comprehensions => "flake8-comprehensions",
Plugin::Flake8Datetimez => "flake8-datetimez",
Plugin::Flake8Debugger => "flake8-debugger",
Plugin::Flake8Docstrings => "flake8-docstrings",
Plugin::Flake8Eradicate => "flake8-eradicate",
Plugin::Flake8ErrMsg => "flake8-errmsg",
Plugin::Flake8Executable => "flake8-executable",
Plugin::Flake8ImplicitStrConcat => "flake8-implicit-str-concat",
Plugin::Flake8ImportConventions => "flake8-import-conventions",
Plugin::Flake8NoPep420 => "flake8-no-pep420",
Plugin::Flake8Pie => "flake8-pie",
Plugin::Flake8Print => "flake8-print",
Plugin::Flake8PytestStyle => "flake8-pytest-style",
Plugin::Flake8Quotes => "flake8-quotes",
Plugin::Flake8Return => "flake8-return",
Plugin::Flake8Simplify => "flake8-simplify",
Plugin::Flake8TidyImports => "flake8-tidy-imports",
Plugin::Flake8TypeChecking => "flake8-type-checking",
Plugin::Flake8UnusedArguments => "flake8-unused-arguments",
Plugin::Flake8UsePathlib => "flake8-use-pathlib",
Plugin::McCabe => "mccabe",
Plugin::PEP8Naming => "pep8-naming",
Plugin::PandasVet => "pandas-vet",
Plugin::Pyupgrade => "pyupgrade",
Plugin::Tryceratops => "tryceratops",
}
)
}
}
impl From<&Plugin> for Linter {
fn from(plugin: &Plugin) -> Self {
match plugin {
Plugin::Flake82020 => Linter::Flake82020,
Plugin::Flake8Annotations => Linter::Flake8Annotations,
Plugin::Flake8Bandit => Linter::Flake8Bandit,
Plugin::Flake8BlindExcept => Linter::Flake8BlindExcept,
Plugin::Flake8BooleanTrap => Linter::Flake8BooleanTrap,
Plugin::Flake8Bugbear => Linter::Flake8Bugbear,
Plugin::Flake8Builtins => Linter::Flake8Builtins,
Plugin::Flake8Commas => Linter::Flake8Commas,
Plugin::Flake8Comprehensions => Linter::Flake8Comprehensions,
Plugin::Flake8Datetimez => Linter::Flake8Datetimez,
Plugin::Flake8Debugger => Linter::Flake8Debugger,
Plugin::Flake8Docstrings => Linter::Pydocstyle,
Plugin::Flake8Eradicate => Linter::Eradicate,
Plugin::Flake8ErrMsg => Linter::Flake8ErrMsg,
Plugin::Flake8Executable => Linter::Flake8Executable,
Plugin::Flake8ImplicitStrConcat => Linter::Flake8ImplicitStrConcat,
Plugin::Flake8ImportConventions => Linter::Flake8ImportConventions,
Plugin::Flake8NoPep420 => Linter::Flake8NoPep420,
Plugin::Flake8Pie => Linter::Flake8Pie,
Plugin::Flake8Print => Linter::Flake8Print,
Plugin::Flake8PytestStyle => Linter::Flake8PytestStyle,
Plugin::Flake8Quotes => Linter::Flake8Quotes,
Plugin::Flake8Return => Linter::Flake8Return,
Plugin::Flake8Simplify => Linter::Flake8Simplify,
Plugin::Flake8TidyImports => Linter::Flake8TidyImports,
Plugin::Flake8TypeChecking => Linter::Flake8TypeChecking,
Plugin::Flake8UnusedArguments => Linter::Flake8UnusedArguments,
Plugin::Flake8UsePathlib => Linter::Flake8UsePathlib,
Plugin::McCabe => Linter::McCabe,
Plugin::PEP8Naming => Linter::PEP8Naming,
Plugin::PandasVet => Linter::PandasVet,
Plugin::Pyupgrade => Linter::Pyupgrade,
Plugin::Tryceratops => Linter::Tryceratops,
}
}
}
/// Infer the enabled plugins based on user-provided options.
///
/// For example, if the user specified a `mypy-init-return` setting, we should
/// infer that `flake8-annotations` is active.
pub(crate) fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> Vec<Plugin> {
let mut plugins = BTreeSet::new();
for key in flake8.keys() {
match key.as_str() {
// flake8-annotations
"suppress-none-returning" | "suppress_none_returning" => {
plugins.insert(Plugin::Flake8Annotations);
}
"suppress-dummy-args" | "suppress_dummy_args" => {
plugins.insert(Plugin::Flake8Annotations);
}
"allow-untyped-defs" | "allow_untyped_defs" => {
plugins.insert(Plugin::Flake8Annotations);
}
"allow-untyped-nested" | "allow_untyped_nested" => {
plugins.insert(Plugin::Flake8Annotations);
}
"mypy-init-return" | "mypy_init_return" => {
plugins.insert(Plugin::Flake8Annotations);
}
"dispatch-decorators" | "dispatch_decorators" => {
plugins.insert(Plugin::Flake8Annotations);
}
"overload-decorators" | "overload_decorators" => {
plugins.insert(Plugin::Flake8Annotations);
}
"allow-star-arg-any" | "allow_star_arg_any" => {
plugins.insert(Plugin::Flake8Annotations);
}
// flake8-bugbear
"extend-immutable-calls" | "extend_immutable_calls" => {
plugins.insert(Plugin::Flake8Bugbear);
}
// flake8-builtins
"builtins-ignorelist" | "builtins_ignorelist" => {
plugins.insert(Plugin::Flake8Builtins);
}
// flake8-docstrings
"docstring-convention" | "docstring_convention" => {
plugins.insert(Plugin::Flake8Docstrings);
}
// flake8-eradicate
"eradicate-aggressive" | "eradicate_aggressive" => {
plugins.insert(Plugin::Flake8Eradicate);
}
"eradicate-whitelist" | "eradicate_whitelist" => {
plugins.insert(Plugin::Flake8Eradicate);
}
"eradicate-whitelist-extend" | "eradicate_whitelist_extend" => {
plugins.insert(Plugin::Flake8Eradicate);
}
// flake8-pytest-style
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
// flake8-quotes
"quotes" | "inline-quotes" | "inline_quotes" => {
plugins.insert(Plugin::Flake8Quotes);
}
"multiline-quotes" | "multiline_quotes" => {
plugins.insert(Plugin::Flake8Quotes);
}
"docstring-quotes" | "docstring_quotes" => {
plugins.insert(Plugin::Flake8Quotes);
}
"avoid-escape" | "avoid_escape" => {
plugins.insert(Plugin::Flake8Quotes);
}
// flake8-tidy-imports
"ban-relative-imports" | "ban_relative_imports" => {
plugins.insert(Plugin::Flake8TidyImports);
}
"banned-modules" | "banned_modules" => {
plugins.insert(Plugin::Flake8TidyImports);
}
// mccabe
"max-complexity" | "max_complexity" => {
plugins.insert(Plugin::McCabe);
}
// pep8-naming
"ignore-names" | "ignore_names" => {
plugins.insert(Plugin::PEP8Naming);
}
"classmethod-decorators" | "classmethod_decorators" => {
plugins.insert(Plugin::PEP8Naming);
}
"staticmethod-decorators" | "staticmethod_decorators" => {
plugins.insert(Plugin::PEP8Naming);
}
"max-string-length" | "max_string_length" => {
plugins.insert(Plugin::Flake8ErrMsg);
}
_ => {}
}
}
Vec::from_iter(plugins)
}
/// Infer the enabled plugins based on the referenced prefixes.
///
/// For example, if the user ignores `ANN101`, we should infer that
/// `flake8-annotations` is active.
pub(crate) fn infer_plugins_from_codes(selectors: &HashSet<RuleSelector>) -> Vec<Plugin> {
// Ignore cases in which we've knowingly changed rule prefixes.
[
Plugin::Flake82020,
Plugin::Flake8Annotations,
Plugin::Flake8Bandit,
// Plugin::Flake8BlindExcept,
Plugin::Flake8BooleanTrap,
Plugin::Flake8Bugbear,
Plugin::Flake8Builtins,
// Plugin::Flake8Commas,
Plugin::Flake8Comprehensions,
Plugin::Flake8Datetimez,
Plugin::Flake8Debugger,
Plugin::Flake8Docstrings,
// Plugin::Flake8Eradicate,
Plugin::Flake8ErrMsg,
Plugin::Flake8Executable,
Plugin::Flake8ImplicitStrConcat,
// Plugin::Flake8ImportConventions,
Plugin::Flake8NoPep420,
Plugin::Flake8Pie,
Plugin::Flake8Print,
Plugin::Flake8PytestStyle,
Plugin::Flake8Quotes,
Plugin::Flake8Return,
Plugin::Flake8Simplify,
// Plugin::Flake8TidyImports,
// Plugin::Flake8TypeChecking,
Plugin::Flake8UnusedArguments,
// Plugin::Flake8UsePathlib,
Plugin::McCabe,
Plugin::PEP8Naming,
Plugin::PandasVet,
Plugin::Tryceratops,
]
.into_iter()
.filter(|plugin| {
for selector in selectors {
if selector
.rules(&PreviewOptions::default())
.any(|rule| Linter::from(plugin).rules().any(|r| r == rule))
{
return true;
}
}
false
})
.collect()
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use super::{infer_plugins_from_options, Plugin};
#[test]
fn it_infers_plugins() {
let actual = infer_plugins_from_options(&HashMap::from([(
"inline-quotes".to_string(),
Some("single".to_string()),
)]));
let expected = vec![Plugin::Flake8Quotes];
assert_eq!(actual, expected);
let actual = infer_plugins_from_options(&HashMap::from([(
"staticmethod-decorators".to_string(),
Some("[]".to_string()),
)]));
let expected = vec![Plugin::PEP8Naming];
assert_eq!(actual, expected);
}
}

View File

@@ -0,0 +1,26 @@
use std::path::Path;
use anyhow::Result;
use serde::{Deserialize, Serialize};
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct Tools {
pub(crate) black: Option<Black>,
pub(crate) isort: Option<Isort>,
}
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct Pyproject {
pub(crate) tool: Option<Tools>,
pub(crate) project: Option<Project>,
}
pub(crate) fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
let contents = std::fs::read_to_string(path)?;
let pyproject = toml::from_str::<Pyproject>(&contents)?;
Ok(pyproject)
}

File diff suppressed because it is too large

View File

@@ -1,69 +0,0 @@
use crate::ExitStatus;
use anyhow::Result;
use ruff_linter::logging::LogLevel;
use ruff_server::Server;
use tracing::{level_filters::LevelFilter, metadata::Level, subscriber::Interest, Metadata};
use tracing_subscriber::{
layer::{Context, Filter, SubscriberExt},
Layer, Registry,
};
use tracing_tree::time::Uptime;
pub(crate) fn run_server(log_level: LogLevel) -> Result<ExitStatus> {
let trace_level = if log_level == LogLevel::Verbose {
Level::TRACE
} else {
Level::DEBUG
};
let subscriber = Registry::default().with(
tracing_tree::HierarchicalLayer::default()
.with_indent_lines(true)
.with_indent_amount(2)
.with_bracketed_fields(true)
.with_targets(true)
.with_writer(|| Box::new(std::io::stderr()))
.with_timer(Uptime::default())
.with_filter(LoggingFilter { trace_level }),
);
tracing::subscriber::set_global_default(subscriber)?;
let server = Server::new()?;
server.run().map(|()| ExitStatus::Success)
}
struct LoggingFilter {
trace_level: Level,
}
impl LoggingFilter {
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
let filter = if meta.target().starts_with("ruff") {
self.trace_level
} else {
Level::INFO
};
meta.level() <= &filter
}
}
impl<S> Filter<S> for LoggingFilter {
fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
self.is_enabled(meta)
}
fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
if self.is_enabled(meta) {
Interest::always()
} else {
Interest::never()
}
}
fn max_level_hint(&self) -> Option<LevelFilter> {
Some(LevelFilter::from_level(self.trace_level))
}
}

View File

@@ -1,5 +0,0 @@
---
source: crates/ruff/src/version.rs
expression: version
---
0.0.0

View File

@@ -1,150 +0,0 @@
//! A test suite that ensures deprecated command line options have appropriate warnings / behaviors
use ruff_linter::settings::types::SerializationFormat;
use std::process::Command;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
const BIN_NAME: &str = "ruff";
const STDIN: &str = "l = 1";
fn ruff_check(show_source: Option<bool>, output_format: Option<String>) -> Command {
let mut cmd = Command::new(get_cargo_bin(BIN_NAME));
let output_format = output_format.unwrap_or(format!("{}", SerializationFormat::default(false)));
cmd.arg("check")
.arg("--output-format")
.arg(output_format)
.arg("--no-cache");
match show_source {
Some(true) => {
cmd.arg("--show-source");
}
Some(false) => {
cmd.arg("--no-show-source");
}
None => {}
}
cmd.arg("-");
cmd
}
#[test]
fn ensure_show_source_is_deprecated() {
assert_cmd_snapshot!(ruff_check(Some(true), None).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: The `--show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
"###);
}
#[test]
fn ensure_no_show_source_is_deprecated() {
assert_cmd_snapshot!(ruff_check(Some(false), None).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
"###);
}
#[test]
fn ensure_output_format_is_deprecated() {
assert_cmd_snapshot!(ruff_check(None, Some("text".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: `--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.
"###);
}
#[test]
fn ensure_output_format_overrides_show_source() {
assert_cmd_snapshot!(ruff_check(Some(true), Some("concise".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: The `--show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
"###);
}
#[test]
fn ensure_full_output_format_overrides_no_show_source() {
assert_cmd_snapshot!(ruff_check(Some(false), Some("full".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
|
1 | l = 1
| ^ E741
|
Found 1 error.
----- stderr -----
warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=full`.
"###);
}
#[test]
fn ensure_output_format_uses_concise_over_no_show_source() {
assert_cmd_snapshot!(ruff_check(Some(false), Some("concise".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
"###);
}
#[test]
fn ensure_deprecated_output_format_overrides_show_source() {
assert_cmd_snapshot!(ruff_check(Some(true), Some("text".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: The `--show-source` argument is deprecated and has been ignored in favor of `--output-format=text`.
warning: `--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.
"###);
}
#[test]
fn ensure_deprecated_output_format_overrides_no_show_source() {
assert_cmd_snapshot!(ruff_check(Some(false), Some("text".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=text`.
warning: `--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.
"###);
}
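
The snapshots above pin down how the deprecated `--show-source`/`--no-show-source` flags interact with `--output-format`. As a reading aid, here is a minimal sketch of that precedence as a free function — this is not the code in `ruff_cli`, only the observable behaviour the tests assert, restricted to the formats they exercise:

```rust
/// Sketch of the precedence the tests above assert (hypothetical helper,
/// not the actual `ruff_cli` implementation).
fn resolve_output_format(
    output_format: Option<&str>,
    show_source: Option<bool>,
) -> (&'static str, Vec<String>) {
    let mut warnings = Vec::new();
    // The deprecated flags are ignored outright; they only influence the warning text.
    let requested = output_format.unwrap_or("concise");
    if let Some(show_source) = show_source {
        let flag = if show_source { "--show-source" } else { "--no-show-source" };
        warnings.push(format!(
            "The `{flag}` argument is deprecated and has been ignored in favor of `--output-format={requested}`."
        ));
    }
    let effective = match requested {
        // `text` is itself deprecated and rendered as `concise`.
        "text" => {
            warnings.push(
                "`--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.".to_string(),
            );
            "concise"
        }
        "full" => "full",
        // Only the formats exercised by the tests are modelled here.
        _ => "concise",
    };
    (effective, warnings)
}
```

In short: an explicit `--output-format` always wins, and the `--show-source` warning echoes the requested format, even when `text` is subsequently normalised to `concise`.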

File diff suppressed because it is too large.


@@ -1,33 +0,0 @@
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use std::path::Path;
use std::process::Command;
const BIN_NAME: &str = "ruff";
#[test]
fn display_default_settings() {
// Navigate from the crate directory to the workspace root.
let base_path = Path::new(env!("CARGO_MANIFEST_DIR"))
.parent()
.unwrap()
.parent()
.unwrap();
let base_path = base_path.to_string_lossy();
// Escape the backslashes for the regex.
let base_path = regex::escape(&base_path);
#[cfg(not(target_os = "windows"))]
let test_filters = &[(base_path.as_ref(), "[BASEPATH]")];
#[cfg(target_os = "windows")]
let test_filters = &[
(base_path.as_ref(), "[BASEPATH]"),
(r#"\\+(\w\w|\s|\.|")"#, "/$1"),
];
insta::with_settings!({ filters => test_filters.to_vec() }, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-settings", "unformatted.py"]).current_dir(Path::new("./resources/test/fixtures")));
});
}
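
The Windows-only filter above rewrites escaped backslashes so the snapshot matches the Unix output. A small standalone check of what that regex does (the sample path is illustrative, not taken from the test suite):

```rust
use regex::Regex;

fn main() {
    // One or more backslashes followed by two word characters, whitespace,
    // a dot, or a quote are replaced by a forward slash plus the capture.
    let re = Regex::new(r#"\\+(\w\w|\s|\.|")"#).unwrap();
    let windows = r#"[BASEPATH]\\crates\\ruff\\resources\\test\\fixtures\\unformatted.py"#;
    assert_eq!(
        re.replace_all(windows, "/$1").as_ref(),
        "[BASEPATH]/crates/ruff/resources/test/fixtures/unformatted.py"
    );
}
```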


@@ -1,383 +0,0 @@
---
source: crates/ruff/tests/show_settings.rs
info:
program: ruff
args:
- check
- "--show-settings"
- unformatted.py
---
success: true
exit_code: 0
----- stdout -----
Resolved settings for: "[BASEPATH]/crates/ruff/resources/test/fixtures/unformatted.py"
Settings path: "[BASEPATH]/pyproject.toml"
# General Settings
cache_dir = "[BASEPATH]/.ruff_cache"
fix = false
fix_only = false
output_format = concise
show_fixes = false
unsafe_fixes = hint
# File Resolver Settings
file_resolver.exclude = [
".bzr",
".direnv",
".eggs",
".git",
".git-rewrite",
".hg",
".ipynb_checkpoints",
".mypy_cache",
".nox",
".pants.d",
".pyenv",
".pytest_cache",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
".vscode",
"__pypackages__",
"_build",
"buck-out",
"dist",
"node_modules",
"site-packages",
"venv",
]
file_resolver.extend_exclude = [
"crates/ruff_linter/resources/",
"crates/ruff_python_formatter/resources/",
]
file_resolver.force_exclude = false
file_resolver.include = [
"*.py",
"*.pyi",
"**/pyproject.toml",
]
file_resolver.extend_include = []
file_resolver.respect_gitignore = true
file_resolver.project_root = "[BASEPATH]"
# Linter Settings
linter.exclude = []
linter.project_root = "[BASEPATH]"
linter.rules.enabled = [
MultipleImportsOnOneLine,
ModuleImportNotAtTopOfFile,
MultipleStatementsOnOneLineColon,
MultipleStatementsOnOneLineSemicolon,
UselessSemicolon,
NoneComparison,
TrueFalseComparison,
NotInTest,
NotIsTest,
TypeComparison,
BareExcept,
LambdaAssignment,
AmbiguousVariableName,
AmbiguousClassName,
AmbiguousFunctionName,
IOError,
SyntaxError,
UnusedImport,
ImportShadowedByLoopVar,
UndefinedLocalWithImportStar,
LateFutureImport,
UndefinedLocalWithImportStarUsage,
UndefinedLocalWithNestedImportStarUsage,
FutureFeatureNotDefined,
PercentFormatInvalidFormat,
PercentFormatExpectedMapping,
PercentFormatExpectedSequence,
PercentFormatExtraNamedArguments,
PercentFormatMissingArgument,
PercentFormatMixedPositionalAndNamed,
PercentFormatPositionalCountMismatch,
PercentFormatStarRequiresSequence,
PercentFormatUnsupportedFormatCharacter,
StringDotFormatInvalidFormat,
StringDotFormatExtraNamedArguments,
StringDotFormatExtraPositionalArguments,
StringDotFormatMissingArguments,
StringDotFormatMixingAutomatic,
FStringMissingPlaceholders,
MultiValueRepeatedKeyLiteral,
MultiValueRepeatedKeyVariable,
ExpressionsInStarAssignment,
MultipleStarredExpressions,
AssertTuple,
IsLiteral,
InvalidPrintSyntax,
IfTuple,
BreakOutsideLoop,
ContinueOutsideLoop,
YieldOutsideFunction,
ReturnOutsideFunction,
DefaultExceptNotLast,
ForwardAnnotationSyntaxError,
RedefinedWhileUnused,
UndefinedName,
UndefinedExport,
UndefinedLocal,
UnusedVariable,
UnusedAnnotation,
RaiseNotImplemented,
]
linter.rules.should_fix = [
MultipleImportsOnOneLine,
ModuleImportNotAtTopOfFile,
MultipleStatementsOnOneLineColon,
MultipleStatementsOnOneLineSemicolon,
UselessSemicolon,
NoneComparison,
TrueFalseComparison,
NotInTest,
NotIsTest,
TypeComparison,
BareExcept,
LambdaAssignment,
AmbiguousVariableName,
AmbiguousClassName,
AmbiguousFunctionName,
IOError,
SyntaxError,
UnusedImport,
ImportShadowedByLoopVar,
UndefinedLocalWithImportStar,
LateFutureImport,
UndefinedLocalWithImportStarUsage,
UndefinedLocalWithNestedImportStarUsage,
FutureFeatureNotDefined,
PercentFormatInvalidFormat,
PercentFormatExpectedMapping,
PercentFormatExpectedSequence,
PercentFormatExtraNamedArguments,
PercentFormatMissingArgument,
PercentFormatMixedPositionalAndNamed,
PercentFormatPositionalCountMismatch,
PercentFormatStarRequiresSequence,
PercentFormatUnsupportedFormatCharacter,
StringDotFormatInvalidFormat,
StringDotFormatExtraNamedArguments,
StringDotFormatExtraPositionalArguments,
StringDotFormatMissingArguments,
StringDotFormatMixingAutomatic,
FStringMissingPlaceholders,
MultiValueRepeatedKeyLiteral,
MultiValueRepeatedKeyVariable,
ExpressionsInStarAssignment,
MultipleStarredExpressions,
AssertTuple,
IsLiteral,
InvalidPrintSyntax,
IfTuple,
BreakOutsideLoop,
ContinueOutsideLoop,
YieldOutsideFunction,
ReturnOutsideFunction,
DefaultExceptNotLast,
ForwardAnnotationSyntaxError,
RedefinedWhileUnused,
UndefinedName,
UndefinedExport,
UndefinedLocal,
UnusedVariable,
UnusedAnnotation,
RaiseNotImplemented,
]
linter.per_file_ignores = {}
linter.safety_table.forced_safe = []
linter.safety_table.forced_unsafe = []
linter.target_version = Py37
linter.preview = disabled
linter.explicit_preview_rules = false
linter.extension.mapping = {}
linter.allowed_confusables = []
linter.builtins = []
linter.dummy_variable_rgx = ^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$
linter.external = []
linter.ignore_init_module_imports = false
linter.logger_objects = []
linter.namespace_packages = []
linter.src = [
"[BASEPATH]",
]
linter.tab_size = 4
linter.line_length = 88
linter.task_tags = [
TODO,
FIXME,
XXX,
]
linter.typing_modules = []
# Linter Plugins
linter.flake8_annotations.mypy_init_return = false
linter.flake8_annotations.suppress_dummy_args = false
linter.flake8_annotations.suppress_none_returning = false
linter.flake8_annotations.allow_star_arg_any = false
linter.flake8_annotations.ignore_fully_untyped = false
linter.flake8_bandit.hardcoded_tmp_directory = [
/tmp,
/var/tmp,
/dev/shm,
]
linter.flake8_bandit.check_typed_exception = false
linter.flake8_bugbear.extend_immutable_calls = []
linter.flake8_builtins.builtins_ignorelist = []
linter.flake8_comprehensions.allow_dict_calls_with_keyword_arguments = false
linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}(-\d{4})*
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
_,
gettext,
ngettext,
]
linter.flake8_implicit_str_concat.allow_multiline = true
linter.flake8_import_conventions.aliases = {
altair = alt,
holoviews = hv,
matplotlib = mpl,
matplotlib.pyplot = plt,
networkx = nx,
numpy = np,
pandas = pd,
panel = pn,
plotly.express = px,
polars = pl,
pyarrow = pa,
seaborn = sns,
tensorflow = tf,
tkinter = tk,
}
linter.flake8_import_conventions.banned_aliases = {}
linter.flake8_import_conventions.banned_from = []
linter.flake8_pytest_style.fixture_parentheses = true
linter.flake8_pytest_style.parametrize_names_type = tuple
linter.flake8_pytest_style.parametrize_values_type = list
linter.flake8_pytest_style.parametrize_values_row_type = tuple
linter.flake8_pytest_style.raises_require_match_for = [
BaseException,
Exception,
ValueError,
OSError,
IOError,
EnvironmentError,
socket.error,
]
linter.flake8_pytest_style.raises_extend_require_match_for = []
linter.flake8_pytest_style.mark_parentheses = true
linter.flake8_quotes.inline_quotes = double
linter.flake8_quotes.multiline_quotes = double
linter.flake8_quotes.docstring_quotes = double
linter.flake8_quotes.avoid_escape = true
linter.flake8_self.ignore_names = [
_make,
_asdict,
_replace,
_fields,
_field_defaults,
_name_,
_value_,
]
linter.flake8_tidy_imports.ban_relative_imports = "parents"
linter.flake8_tidy_imports.banned_api = {}
linter.flake8_tidy_imports.banned_module_level_imports = []
linter.flake8_type_checking.strict = false
linter.flake8_type_checking.exempt_modules = [
typing,
typing_extensions,
]
linter.flake8_type_checking.runtime_required_base_classes = []
linter.flake8_type_checking.runtime_required_decorators = []
linter.flake8_type_checking.quote_annotations = false
linter.flake8_unused_arguments.ignore_variadic_names = false
linter.isort.required_imports = []
linter.isort.combine_as_imports = false
linter.isort.force_single_line = false
linter.isort.force_sort_within_sections = false
linter.isort.detect_same_package = true
linter.isort.case_sensitive = false
linter.isort.force_wrap_aliases = false
linter.isort.force_to_top = []
linter.isort.known_modules = {}
linter.isort.order_by_type = true
linter.isort.relative_imports_order = furthest_to_closest
linter.isort.single_line_exclusions = []
linter.isort.split_on_trailing_comma = true
linter.isort.classes = []
linter.isort.constants = []
linter.isort.variables = []
linter.isort.no_lines_before = []
linter.isort.lines_after_imports = -1
linter.isort.lines_between_types = 0
linter.isort.forced_separate = []
linter.isort.section_order = [
known { type = future },
known { type = standard_library },
known { type = third_party },
known { type = first_party },
known { type = local_folder },
]
linter.isort.default_section = known { type = third_party }
linter.isort.no_sections = false
linter.isort.from_first = false
linter.isort.length_sort = false
linter.isort.length_sort_straight = false
linter.mccabe.max_complexity = 10
linter.pep8_naming.ignore_names = [
setUp,
tearDown,
setUpClass,
tearDownClass,
setUpModule,
tearDownModule,
asyncSetUp,
asyncTearDown,
setUpTestData,
failureException,
longMessage,
maxDiff,
]
linter.pep8_naming.classmethod_decorators = []
linter.pep8_naming.staticmethod_decorators = []
linter.pycodestyle.max_line_length = 88
linter.pycodestyle.max_doc_length = none
linter.pycodestyle.ignore_overlong_task_comments = false
linter.pyflakes.extend_generics = []
linter.pylint.allow_magic_value_types = [
str,
bytes,
]
linter.pylint.allow_dunder_method_names = []
linter.pylint.max_args = 5
linter.pylint.max_positional_args = 5
linter.pylint.max_returns = 6
linter.pylint.max_bool_expr = 5
linter.pylint.max_branches = 12
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pyupgrade.keep_runtime_typing = false
# Formatter Settings
formatter.exclude = []
formatter.target_version = Py37
formatter.preview = disabled
formatter.line_width = 88
formatter.line_ending = auto
formatter.indent_style = space
formatter.indent_width = 4
formatter.quote_style = double
formatter.magic_trailing_comma = respect
formatter.docstring_code_format = disabled
formatter.docstring_code_line_width = dynamic
----- stderr -----


@@ -1,105 +0,0 @@
//! Tests for the --version command
use std::fs;
use std::process::Command;
use anyhow::Result;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use tempfile::TempDir;
const BIN_NAME: &str = "ruff";
const VERSION_FILTER: [(&str, &str); 1] = [(
r"\d+\.\d+\.\d+(\+\d+)?( \(\w{9} \d\d\d\d-\d\d-\d\d\))?",
"[VERSION]",
)];
#[test]
fn version_basics() {
insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME)).arg("version"), @r###"
success: true
exit_code: 0
----- stdout -----
ruff [VERSION]
----- stderr -----
"###
);
});
}
/// `--config` is a global option,
/// so it's allowed to pass --config to subcommands such as `version`
/// -- the flag is simply ignored
#[test]
fn config_option_allowed_but_ignored() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_dot_toml = tempdir.path().join("ruff.toml");
fs::File::create(&ruff_dot_toml)?;
insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.arg("version")
.arg("--config")
.arg(&ruff_dot_toml)
.args(["--config", "lint.isort.extra-standard-library = ['foo', 'bar']"]), @r###"
success: true
exit_code: 0
----- stdout -----
ruff [VERSION]
----- stderr -----
"###
);
});
Ok(())
}
#[test]
fn config_option_ignored_but_validated() {
insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.arg("version")
.args(["--config", "foo = bar"]), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value 'foo = bar' for '--config <CONFIG_OPTION>'
tip: A `--config` flag must either be a path to a `.toml` configuration file
or a TOML `<KEY> = <VALUE>` pair overriding a specific configuration
option
The supplied argument is not valid TOML:
TOML parse error at line 1, column 7
|
1 | foo = bar
| ^
invalid string
expected `"`, `'`
For more information, try '--help'.
"###
);
});
}
/// `--isolated` is also a global option, so it's likewise allowed to be passed to subcommands such as `version`.
#[test]
fn isolated_option_allowed() {
insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME)).arg("version").arg("--isolated"), @r###"
success: true
exit_code: 0
----- stdout -----
ruff [VERSION]
----- stderr -----
"###
);
});
}
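
`VERSION_FILTER` normalises whatever `ruff version` prints so the snapshots stay stable across releases. A quick illustration with made-up version strings (not output captured from the binary):

```rust
use regex::Regex;

fn main() {
    let re = Regex::new(r"\d+\.\d+\.\d+(\+\d+)?( \(\w{9} \d\d\d\d-\d\d-\d\d\))?").unwrap();
    // A plain release version…
    assert_eq!(re.replace_all("ruff 0.1.7", "[VERSION]").as_ref(), "ruff [VERSION]");
    // …and a local build with a commit count, short hash, and date.
    assert_eq!(
        re.replace_all("ruff 0.1.7+15 (a1b2c3d4e 2024-01-01)", "[VERSION]").as_ref(),
        "ruff [VERSION]"
    );
}
```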


@@ -13,7 +13,6 @@ license = { workspace = true }
[lib]
bench = false
doctest = false
[[bench]]
name = "linter"
@@ -32,17 +31,17 @@ name = "formatter"
harness = false
[dependencies]
once_cell = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
url = { workspace = true }
ureq = { workspace = true }
criterion = { workspace = true, default-features = false }
codspeed-criterion-compat = { workspace = true, default-features = false, optional = true}
once_cell.workspace = true
serde.workspace = true
serde_json.workspace = true
url = "2.5.0"
ureq = "2.9.1"
criterion = { version = "0.5.1", default-features = false }
codspeed-criterion-compat = { version="2.3.3", default-features = false, optional = true}
[dev-dependencies]
ruff_linter = { path = "../ruff_linter" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_linter.path = "../ruff_linter"
ruff_python_ast.path = "../ruff_python_ast"
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_python_index = { path = "../ruff_python_index" }
ruff_python_parser = { path = "../ruff_python_parser" }
@@ -54,7 +53,7 @@ workspace = true
codspeed = ["codspeed-criterion-compat"]
[target.'cfg(target_os = "windows")'.dev-dependencies]
mimalloc = { workspace = true }
mimalloc = "0.1.39"
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dev-dependencies]
tikv-jemallocator = { workspace = true }
tikv-jemallocator = "0.5.0"


@@ -1,16 +1,5 @@
# Ruff Benchmarks
The `ruff_benchmark` crate benchmarks the linter and the formatter on individual files:
```shell
# Run once on the "baseline".
cargo bench -p ruff_benchmark -- --save-baseline=main
# Compare against the "baseline".
cargo bench -p ruff_benchmark -- --baseline=main
# Run the lexer benchmarks.
cargo bench -p ruff_benchmark lexer -- --baseline=main
```
The `ruff_benchmark` crate benchmarks the linter and the formatter on individual files.
See [CONTRIBUTING.md](../../CONTRIBUTING.md) on how to use these benchmarks.
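
For readers unfamiliar with criterion, a minimal benchmark in the same shape as the ones this crate defines might look like the sketch below. The file contents and benchmark name are placeholders; the real benches download larger test files and run the lexer, parser, linter, and formatter over them.

```rust
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};

// Placeholder source; the real benchmarks operate on downloaded test files.
const SAMPLE: &str = "def f(x):\n    return x + 1\n";

fn benchmark_example(criterion: &mut Criterion) {
    let mut group = criterion.benchmark_group("example");
    group.throughput(Throughput::Bytes(SAMPLE.len() as u64));
    group.bench_with_input(BenchmarkId::from_parameter("sample.py"), &SAMPLE, |b, code| {
        // Stand-in workload; a real bench would lex, parse, lint, or format `code`.
        b.iter(|| code.lines().count());
    });
    group.finish();
}

criterion_group!(benches, benchmark_example);
criterion_main!(benches);
```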


@@ -7,7 +7,7 @@ use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_python_formatter::{format_module_ast, PreviewMode, PyFormatOptions};
use ruff_python_index::CommentRangesBuilder;
use ruff_python_parser::lexer::lex;
use ruff_python_parser::{allocate_tokens_vec, parse_tokens, Mode};
use ruff_python_parser::{parse_tokens, Mode};
#[cfg(target_os = "windows")]
#[global_allocator]
@@ -52,7 +52,7 @@ fn benchmark_formatter(criterion: &mut Criterion) {
BenchmarkId::from_parameter(case.name()),
&case,
|b, case| {
let mut tokens = allocate_tokens_vec(case.code());
let mut tokens = Vec::new();
let mut comment_ranges = CommentRangesBuilder::default();
for result in lex(case.code(), Mode::Module) {
@@ -65,7 +65,7 @@ fn benchmark_formatter(criterion: &mut Criterion) {
let comment_ranges = comment_ranges.finish();
// Parse the AST.
let module = parse_tokens(tokens, case.code(), Mode::Module)
let module = parse_tokens(tokens, case.code(), Mode::Module, "<filename>")
.expect("Input to be a valid python program");
b.iter(|| {


@@ -2,7 +2,7 @@ use ruff_benchmark::criterion::{
criterion_group, criterion_main, BenchmarkGroup, BenchmarkId, Criterion, Throughput,
};
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_linter::linter::{lint_only, ParseSource};
use ruff_linter::linter::lint_only;
use ruff_linter::rule_selector::PreviewOptions;
use ruff_linter::settings::rule_table::RuleTable;
use ruff_linter::settings::types::PreviewMode;
@@ -10,7 +10,6 @@ use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::SourceKind;
use ruff_linter::{registry::Rule, RuleSelector};
use ruff_python_ast::PySourceType;
use ruff_python_parser::{lexer, parse_program_tokens, Mode};
#[cfg(target_os = "windows")]
#[global_allocator]
@@ -54,12 +53,7 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
BenchmarkId::from_parameter(case.name()),
&case,
|b, case| {
// Tokenize the source.
let tokens: Vec<_> = lexer::lex(case.code(), Mode::Module).collect();
// Parse the source.
let ast = parse_program_tokens(tokens.clone(), case.code(), false).unwrap();
let kind = SourceKind::Python(case.code().to_string());
b.iter(|| {
let path = case.path();
let result = lint_only(
@@ -67,12 +61,8 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
None,
settings,
flags::Noqa::Enabled,
&SourceKind::Python(case.code().to_string()),
&kind,
PySourceType::from(path.as_path()),
ParseSource::Precomputed {
tokens: &tokens,
ast: &ast,
},
);
// Assert that file contains no parse errors


@@ -60,7 +60,7 @@ fn benchmark_parser(criterion: &mut Criterion<WallTime>) {
&case,
|b, case| {
b.iter(|| {
let parsed = parse_suite(case.code()).unwrap();
let parsed = parse_suite(case.code(), case.name()).unwrap();
let mut visitor = CountVisitor { count: 0 };
visitor.visit_body(&parsed);


@@ -16,7 +16,7 @@ glob = { workspace = true }
globset = { workspace = true }
regex = { workspace = true }
filetime = { workspace = true }
seahash = { workspace = true }
seahash = "4.1.0"
[dev-dependencies]
ruff_macros = { path = "../ruff_macros" }


@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.3.2"
name = "ruff_cli"
version = "0.1.7"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -10,70 +10,72 @@ documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }
readme = "../../README.md"
default-run = "ruff"
[[bin]]
name = "ruff"
[dependencies]
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
ruff_cache = { path = "../ruff_cache" }
ruff_diagnostics = { path = "../ruff_diagnostics" }
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
ruff_macros = { path = "../ruff_macros" }
ruff_formatter = { path = "../ruff_formatter" }
ruff_notebook = { path = "../ruff_notebook" }
ruff_macros = { path = "../ruff_macros" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_server = { path = "../ruff_server" }
ruff_source_file = { path = "../ruff_source_file" }
ruff_text_size = { path = "../ruff_text_size" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_workspace = { path = "../ruff_workspace" }
ruff_text_size = { path = "../ruff_text_size" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { workspace = true }
argfile = { workspace = true }
bincode = { workspace = true }
argfile = { version = "0.1.6" }
bincode = { version = "1.3.3" }
bitflags = { workspace = true }
cachedir = { workspace = true }
cachedir = { version = "0.3.0" }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "env", "wrap_help"] }
clap_complete_command = { workspace = true }
clearscreen = { workspace = true }
clap = { workspace = true, features = ["derive", "env"] }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "2.0.0" }
colored = { workspace = true }
filetime = { workspace = true }
glob = { workspace = true }
ignore = { workspace = true }
is-macro = { workspace = true }
itertools = { workspace = true }
log = { workspace = true }
notify = { workspace = true }
notify = { version = "6.1.1" }
path-absolutize = { workspace = true, features = ["once_cell_cache"] }
rayon = { workspace = true }
rayon = { version = "1.8.0" }
regex = { workspace = true }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
shellexpand = { workspace = true }
similar = { workspace = true }
strum = { workspace = true, features = [] }
tempfile = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true, features = ["log"] }
tracing-subscriber = { workspace = true, features = ["registry"]}
tracing-tree = { workspace = true }
walkdir = { workspace = true }
wild = { workspace = true }
walkdir = { version = "2.3.2" }
wild = { version = "2" }
[dev-dependencies]
# Enable test rules during development
ruff_linter = { path = "../ruff_linter", features = ["clap", "test-rules"] }
assert_cmd = { workspace = true }
assert_cmd = { version = "2.0.8" }
# Avoid writing colored snapshots when running tests from the terminal
colored = { workspace = true, features = ["no-color"]}
insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { workspace = true }
tempfile = { workspace = true }
insta-cmd = { version = "0.4.0" }
tempfile = "3.8.1"
test-case = { workspace = true }
ureq = { version = "2.9.1", features = [] }
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true }
mimalloc = "0.1.39"
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
tikv-jemallocator = { workspace = true }
tikv-jemallocator = "0.5.0"
[lints]
workspace = true

crates/ruff_cli/src/args.rs (new file, 762 lines)

@@ -0,0 +1,762 @@
use std::path::PathBuf;
use clap::{command, Parser};
use regex::Regex;
use rustc_hash::FxHashMap;
use ruff_linter::line_width::LineLength;
use ruff_linter::logging::LogLevel;
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::{
ExtensionPair, FilePattern, PatternPrefixPair, PerFileIgnore, PreviewMode, PythonVersion,
SerializationFormat, UnsafeFixes,
};
use ruff_linter::{RuleParser, RuleSelector, RuleSelectorParser};
use ruff_workspace::configuration::{Configuration, RuleSelection};
use ruff_workspace::options::PycodestyleOptions;
use ruff_workspace::resolver::ConfigurationTransformer;
#[derive(Debug, Parser)]
#[command(
author,
name = "ruff",
about = "Ruff: An extremely fast Python linter.",
after_help = "For help with a specific command, see: `ruff help <command>`."
)]
#[command(version)]
pub struct Args {
#[command(subcommand)]
pub command: Command,
#[clap(flatten)]
pub log_level_args: LogLevelArgs,
}
#[allow(clippy::large_enum_variant)]
#[derive(Debug, clap::Subcommand)]
pub enum Command {
/// Run Ruff on the given files or directories (default).
Check(CheckCommand),
/// Explain a rule (or all rules).
#[clap(alias = "--explain")]
#[command(group = clap::ArgGroup::new("selector").multiple(false).required(true))]
Rule {
/// Rule to explain
#[arg(value_parser=RuleParser, group = "selector", hide_possible_values = true)]
rule: Option<Rule>,
/// Explain all rules
#[arg(long, conflicts_with = "rule", group = "selector")]
all: bool,
/// Output format
#[arg(long, value_enum, default_value = "text")]
output_format: HelpFormat,
/// Output format (Deprecated: Use `--output-format` instead).
#[arg(long, value_enum, conflicts_with = "output_format", hide = true)]
format: Option<HelpFormat>,
},
/// List or describe the available configuration options.
Config { option: Option<String> },
/// List all supported upstream linters.
Linter {
/// Output format
#[arg(long, value_enum, default_value = "text")]
output_format: HelpFormat,
/// Output format (Deprecated: Use `--output-format` instead).
#[arg(long, value_enum, conflicts_with = "output_format", hide = true)]
format: Option<HelpFormat>,
},
/// Clear any caches in the current directory and any subdirectories.
#[clap(alias = "--clean")]
Clean,
/// Generate shell completion.
#[clap(alias = "--generate-shell-completion", hide = true)]
GenerateShellCompletion { shell: clap_complete_command::Shell },
/// Run the Ruff formatter on the given files or directories.
Format(FormatCommand),
/// Display Ruff's version
Version {
#[arg(long, value_enum, default_value = "text")]
output_format: HelpFormat,
},
}
// The `Parser` derive is for ruff_dev; for ruff_cli, `Args` would be sufficient
#[derive(Clone, Debug, clap::Parser)]
#[allow(clippy::struct_excessive_bools)]
pub struct CheckCommand {
/// List of files or directories to check.
#[clap(help = "List of files or directories to check [default: .]")]
pub files: Vec<PathBuf>,
/// Apply fixes to resolve lint violations.
/// Use `--no-fix` to disable or `--unsafe-fixes` to include unsafe fixes.
#[arg(long, overrides_with("no_fix"))]
fix: bool,
#[clap(long, overrides_with("fix"), hide = true)]
no_fix: bool,
/// Include fixes that may not retain the original intent of the code.
/// Use `--no-unsafe-fixes` to disable.
#[arg(long, overrides_with("no_unsafe_fixes"))]
unsafe_fixes: bool,
#[arg(long, overrides_with("unsafe_fixes"), hide = true)]
no_unsafe_fixes: bool,
/// Show violations with source code.
/// Use `--no-show-source` to disable.
#[arg(long, overrides_with("no_show_source"))]
show_source: bool,
#[clap(long, overrides_with("show_source"), hide = true)]
no_show_source: bool,
/// Show an enumeration of all fixed lint violations.
/// Use `--no-show-fixes` to disable.
#[arg(long, overrides_with("no_show_fixes"))]
show_fixes: bool,
#[clap(long, overrides_with("show_fixes"), hide = true)]
no_show_fixes: bool,
/// Avoid writing any fixed files back; instead, output a diff for each changed file to stdout. Implies `--fix-only`.
#[arg(long, conflicts_with = "show_fixes")]
pub diff: bool,
/// Run in watch mode by re-running whenever files change.
#[arg(short, long)]
pub watch: bool,
/// Apply fixes to resolve lint violations, but don't report on leftover violations. Implies `--fix`.
/// Use `--no-fix-only` to disable or `--unsafe-fixes` to include unsafe fixes.
#[arg(long, overrides_with("no_fix_only"))]
fix_only: bool,
#[clap(long, overrides_with("fix_only"), hide = true)]
no_fix_only: bool,
/// Ignore any `# noqa` comments.
#[arg(long)]
ignore_noqa: bool,
/// Output serialization format for violations.
#[arg(long, value_enum, env = "RUFF_OUTPUT_FORMAT")]
pub output_format: Option<SerializationFormat>,
/// Specify file to write the linter output to (default: stdout).
#[arg(short, long)]
pub output_file: Option<PathBuf>,
/// The minimum Python version that should be supported.
#[arg(long, value_enum)]
pub target_version: Option<PythonVersion>,
/// Enable preview mode; checks will include unstable rules and fixes.
/// Use `--no-preview` to disable.
#[arg(long, overrides_with("no_preview"))]
preview: bool,
#[clap(long, overrides_with("preview"), hide = true)]
no_preview: bool,
/// Path to the `pyproject.toml` or `ruff.toml` file to use for
/// configuration.
#[arg(long, conflicts_with = "isolated")]
pub config: Option<PathBuf>,
/// Comma-separated list of rule codes to enable (or ALL, to enable all rules).
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide_possible_values = true
)]
pub select: Option<Vec<RuleSelector>>,
/// Comma-separated list of rule codes to disable.
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide_possible_values = true
)]
pub ignore: Option<Vec<RuleSelector>>,
/// Like --select, but adds additional rule codes on top of those already specified.
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide_possible_values = true
)]
pub extend_select: Option<Vec<RuleSelector>>,
/// Like --ignore. (Deprecated: You can just use --ignore instead.)
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide = true
)]
pub extend_ignore: Option<Vec<RuleSelector>>,
/// List of mappings from file pattern to code to exclude.
#[arg(long, value_delimiter = ',', help_heading = "Rule selection")]
pub per_file_ignores: Option<Vec<PatternPrefixPair>>,
/// Like `--per-file-ignores`, but adds additional ignores on top of those already specified.
#[arg(long, value_delimiter = ',', help_heading = "Rule selection")]
pub extend_per_file_ignores: Option<Vec<PatternPrefixPair>>,
/// List of paths, used to omit files and/or directories from analysis.
#[arg(
long,
value_delimiter = ',',
value_name = "FILE_PATTERN",
help_heading = "File selection"
)]
pub exclude: Option<Vec<FilePattern>>,
/// Like --exclude, but adds additional files and directories on top of those already excluded.
#[arg(
long,
value_delimiter = ',',
value_name = "FILE_PATTERN",
help_heading = "File selection"
)]
pub extend_exclude: Option<Vec<FilePattern>>,
/// List of rule codes to treat as eligible for fix. Only applicable when fix itself is enabled (e.g., via `--fix`).
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide_possible_values = true
)]
pub fixable: Option<Vec<RuleSelector>>,
/// List of rule codes to treat as ineligible for fix. Only applicable when fix itself is enabled (e.g., via `--fix`).
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide_possible_values = true
)]
pub unfixable: Option<Vec<RuleSelector>>,
/// Like --fixable, but adds additional rule codes on top of those already specified.
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide_possible_values = true
)]
pub extend_fixable: Option<Vec<RuleSelector>>,
/// Like --unfixable. (Deprecated: You can just use --unfixable instead.)
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide = true
)]
pub extend_unfixable: Option<Vec<RuleSelector>>,
/// Respect file exclusions via `.gitignore` and other standard ignore files.
/// Use `--no-respect-gitignore` to disable.
#[arg(
long,
overrides_with("no_respect_gitignore"),
help_heading = "File selection"
)]
respect_gitignore: bool,
#[clap(long, overrides_with("respect_gitignore"), hide = true)]
no_respect_gitignore: bool,
/// Enforce exclusions, even for paths passed to Ruff directly on the command-line.
/// Use `--no-force-exclude` to disable.
#[arg(
long,
overrides_with("no_force_exclude"),
help_heading = "File selection"
)]
force_exclude: bool,
#[clap(long, overrides_with("force_exclude"), hide = true)]
no_force_exclude: bool,
/// Set the line-length for length-associated rules and automatic formatting.
#[arg(long, help_heading = "Rule configuration", hide = true)]
pub line_length: Option<LineLength>,
/// Regular expression matching the name of dummy variables.
#[arg(long, help_heading = "Rule configuration", hide = true)]
pub dummy_variable_rgx: Option<Regex>,
/// Disable cache reads.
#[arg(short, long, env = "RUFF_NO_CACHE", help_heading = "Miscellaneous")]
pub no_cache: bool,
/// Ignore all configuration files.
#[arg(long, conflicts_with = "config", help_heading = "Miscellaneous")]
pub isolated: bool,
/// Path to the cache directory.
#[arg(long, env = "RUFF_CACHE_DIR", help_heading = "Miscellaneous")]
pub cache_dir: Option<PathBuf>,
/// The name of the file when passing it through stdin.
#[arg(long, help_heading = "Miscellaneous")]
pub stdin_filename: Option<PathBuf>,
/// Exit with status code "0", even upon detecting lint violations.
#[arg(
short,
long,
help_heading = "Miscellaneous",
conflicts_with = "exit_non_zero_on_fix"
)]
pub exit_zero: bool,
/// Exit with a non-zero status code if any files were modified via fix, even if no lint violations remain.
#[arg(long, help_heading = "Miscellaneous", conflicts_with = "exit_zero")]
pub exit_non_zero_on_fix: bool,
/// Show counts for every rule with at least one violation.
#[arg(
long,
// Unsupported default-command arguments.
conflicts_with = "diff",
conflicts_with = "show_source",
conflicts_with = "watch",
)]
pub statistics: bool,
/// Enable automatic additions of `noqa` directives to failing lines.
#[arg(
long,
// conflicts_with = "add_noqa",
conflicts_with = "show_files",
conflicts_with = "show_settings",
// Unsupported default-command arguments.
conflicts_with = "ignore_noqa",
conflicts_with = "statistics",
conflicts_with = "stdin_filename",
conflicts_with = "watch",
conflicts_with = "fix",
)]
pub add_noqa: bool,
/// See the files Ruff will be run against with the current settings.
#[arg(
long,
// Fake subcommands.
conflicts_with = "add_noqa",
// conflicts_with = "show_files",
conflicts_with = "show_settings",
// Unsupported default-command arguments.
conflicts_with = "ignore_noqa",
conflicts_with = "statistics",
conflicts_with = "stdin_filename",
conflicts_with = "watch",
)]
pub show_files: bool,
/// See the settings Ruff will use to lint a given Python file.
#[arg(
long,
// Fake subcommands.
conflicts_with = "add_noqa",
conflicts_with = "show_files",
// conflicts_with = "show_settings",
// Unsupported default-command arguments.
conflicts_with = "ignore_noqa",
conflicts_with = "statistics",
conflicts_with = "stdin_filename",
conflicts_with = "watch",
)]
pub show_settings: bool,
/// List of mappings from file extension to language (one of ["python", "ipynb", "pyi"]).
#[arg(long, value_delimiter = ',', hide = true)]
pub extension: Option<Vec<ExtensionPair>>,
/// Dev-only argument to show fixes
#[arg(long, hide = true)]
pub ecosystem_ci: bool,
}
#[derive(Clone, Debug, clap::Parser)]
#[allow(clippy::struct_excessive_bools)]
pub struct FormatCommand {
/// List of files or directories to format.
#[clap(help = "List of files or directories to format [default: .]")]
pub files: Vec<PathBuf>,
/// Avoid writing any formatted files back; instead, exit with a non-zero status code if any
/// files would have been modified, and zero otherwise.
#[arg(long)]
pub check: bool,
/// Avoid writing any formatted files back; instead, exit with a non-zero status code and the
/// difference between the current file and what the formatted file would look like.
#[arg(long)]
pub diff: bool,
/// Path to the `pyproject.toml` or `ruff.toml` file to use for configuration.
#[arg(long, conflicts_with = "isolated")]
pub config: Option<PathBuf>,
/// Disable cache reads.
#[arg(short, long, env = "RUFF_NO_CACHE", help_heading = "Miscellaneous")]
pub no_cache: bool,
/// Path to the cache directory.
#[arg(long, env = "RUFF_CACHE_DIR", help_heading = "Miscellaneous")]
pub cache_dir: Option<PathBuf>,
/// Respect file exclusions via `.gitignore` and other standard ignore files.
/// Use `--no-respect-gitignore` to disable.
#[arg(
long,
overrides_with("no_respect_gitignore"),
help_heading = "File selection"
)]
respect_gitignore: bool,
#[clap(long, overrides_with("respect_gitignore"), hide = true)]
no_respect_gitignore: bool,
/// List of paths, used to omit files and/or directories from analysis.
#[arg(
long,
value_delimiter = ',',
value_name = "FILE_PATTERN",
help_heading = "File selection"
)]
pub exclude: Option<Vec<FilePattern>>,
/// Enforce exclusions, even for paths passed to Ruff directly on the command-line.
/// Use `--no-force-exclude` to disable.
#[arg(
long,
overrides_with("no_force_exclude"),
help_heading = "File selection"
)]
force_exclude: bool,
#[clap(long, overrides_with("force_exclude"), hide = true)]
no_force_exclude: bool,
/// Set the line-length.
#[arg(long, help_heading = "Format configuration")]
pub line_length: Option<LineLength>,
/// Ignore all configuration files.
#[arg(long, conflicts_with = "config", help_heading = "Miscellaneous")]
pub isolated: bool,
/// The name of the file when passing it through stdin.
#[arg(long, help_heading = "Miscellaneous")]
pub stdin_filename: Option<PathBuf>,
/// The minimum Python version that should be supported.
#[arg(long, value_enum)]
pub target_version: Option<PythonVersion>,
/// Enable preview mode; enables unstable formatting.
/// Use `--no-preview` to disable.
#[arg(long, overrides_with("no_preview"))]
preview: bool,
#[clap(long, overrides_with("preview"), hide = true)]
no_preview: bool,
}
#[derive(Debug, Clone, Copy, clap::ValueEnum)]
pub enum HelpFormat {
Text,
Json,
}
#[allow(clippy::module_name_repetitions)]
#[derive(Debug, clap::Args)]
pub struct LogLevelArgs {
/// Enable verbose logging.
#[arg(
short,
long,
global = true,
group = "verbosity",
help_heading = "Log levels"
)]
pub verbose: bool,
/// Print diagnostics, but nothing else.
#[arg(
short,
long,
global = true,
group = "verbosity",
help_heading = "Log levels"
)]
pub quiet: bool,
/// Disable all logging (but still exit with status code "1" upon detecting diagnostics).
#[arg(
short,
long,
global = true,
group = "verbosity",
help_heading = "Log levels"
)]
pub silent: bool,
}
impl From<&LogLevelArgs> for LogLevel {
fn from(args: &LogLevelArgs) -> Self {
if args.silent {
Self::Silent
} else if args.quiet {
Self::Quiet
} else if args.verbose {
Self::Verbose
} else {
Self::Default
}
}
}
impl CheckCommand {
/// Partition the CLI into command-line arguments and configuration
/// overrides.
pub fn partition(self) -> (CheckArguments, CliOverrides) {
(
CheckArguments {
add_noqa: self.add_noqa,
config: self.config,
diff: self.diff,
ecosystem_ci: self.ecosystem_ci,
exit_non_zero_on_fix: self.exit_non_zero_on_fix,
exit_zero: self.exit_zero,
files: self.files,
ignore_noqa: self.ignore_noqa,
isolated: self.isolated,
no_cache: self.no_cache,
output_file: self.output_file,
show_files: self.show_files,
show_settings: self.show_settings,
statistics: self.statistics,
stdin_filename: self.stdin_filename,
watch: self.watch,
},
CliOverrides {
dummy_variable_rgx: self.dummy_variable_rgx,
exclude: self.exclude,
extend_exclude: self.extend_exclude,
extend_fixable: self.extend_fixable,
extend_ignore: self.extend_ignore,
extend_per_file_ignores: self.extend_per_file_ignores,
extend_select: self.extend_select,
extend_unfixable: self.extend_unfixable,
fixable: self.fixable,
ignore: self.ignore,
line_length: self.line_length,
per_file_ignores: self.per_file_ignores,
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
respect_gitignore: resolve_bool_arg(
self.respect_gitignore,
self.no_respect_gitignore,
),
select: self.select,
show_source: resolve_bool_arg(self.show_source, self.no_show_source),
target_version: self.target_version,
unfixable: self.unfixable,
// TODO(charlie): Included in `pyproject.toml`, but not inherited.
cache_dir: self.cache_dir,
fix: resolve_bool_arg(self.fix, self.no_fix),
fix_only: resolve_bool_arg(self.fix_only, self.no_fix_only),
unsafe_fixes: resolve_bool_arg(self.unsafe_fixes, self.no_unsafe_fixes)
.map(UnsafeFixes::from),
force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
output_format: self.output_format,
show_fixes: resolve_bool_arg(self.show_fixes, self.no_show_fixes),
extension: self.extension,
},
)
}
}
impl FormatCommand {
/// Partition the CLI into command-line arguments and configuration
/// overrides.
pub fn partition(self) -> (FormatArguments, CliOverrides) {
(
FormatArguments {
check: self.check,
diff: self.diff,
config: self.config,
files: self.files,
isolated: self.isolated,
no_cache: self.no_cache,
stdin_filename: self.stdin_filename,
},
CliOverrides {
line_length: self.line_length,
respect_gitignore: resolve_bool_arg(
self.respect_gitignore,
self.no_respect_gitignore,
),
exclude: self.exclude,
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
target_version: self.target_version,
cache_dir: self.cache_dir,
// Unsupported on the formatter CLI, but required on `Overrides`.
..CliOverrides::default()
},
)
}
}
fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
match (yes, no) {
(true, false) => Some(true),
(false, true) => Some(false),
(false, false) => None,
(..) => unreachable!("Clap should make this impossible"),
}
}
/// CLI settings that are distinct from configuration (commands, lists of files,
/// etc.).
#[allow(clippy::struct_excessive_bools)]
pub struct CheckArguments {
pub add_noqa: bool,
pub config: Option<PathBuf>,
pub diff: bool,
pub ecosystem_ci: bool,
pub exit_non_zero_on_fix: bool,
pub exit_zero: bool,
pub files: Vec<PathBuf>,
pub ignore_noqa: bool,
pub isolated: bool,
pub no_cache: bool,
pub output_file: Option<PathBuf>,
pub show_files: bool,
pub show_settings: bool,
pub statistics: bool,
pub stdin_filename: Option<PathBuf>,
pub watch: bool,
}
/// CLI settings that are distinct from configuration (commands, lists of files,
/// etc.).
#[allow(clippy::struct_excessive_bools)]
pub struct FormatArguments {
pub check: bool,
pub no_cache: bool,
pub diff: bool,
pub config: Option<PathBuf>,
pub files: Vec<PathBuf>,
pub isolated: bool,
pub stdin_filename: Option<PathBuf>,
}
/// CLI settings that function as configuration overrides.
#[derive(Clone, Default)]
#[allow(clippy::struct_excessive_bools)]
pub struct CliOverrides {
pub dummy_variable_rgx: Option<Regex>,
pub exclude: Option<Vec<FilePattern>>,
pub extend_exclude: Option<Vec<FilePattern>>,
pub extend_fixable: Option<Vec<RuleSelector>>,
pub extend_ignore: Option<Vec<RuleSelector>>,
pub extend_select: Option<Vec<RuleSelector>>,
pub extend_unfixable: Option<Vec<RuleSelector>>,
pub fixable: Option<Vec<RuleSelector>>,
pub ignore: Option<Vec<RuleSelector>>,
pub line_length: Option<LineLength>,
pub per_file_ignores: Option<Vec<PatternPrefixPair>>,
pub extend_per_file_ignores: Option<Vec<PatternPrefixPair>>,
pub preview: Option<PreviewMode>,
pub respect_gitignore: Option<bool>,
pub select: Option<Vec<RuleSelector>>,
pub show_source: Option<bool>,
pub target_version: Option<PythonVersion>,
pub unfixable: Option<Vec<RuleSelector>>,
// TODO(charlie): Captured in pyproject.toml as a default, but not part of `Settings`.
pub cache_dir: Option<PathBuf>,
pub fix: Option<bool>,
pub fix_only: Option<bool>,
pub unsafe_fixes: Option<UnsafeFixes>,
pub force_exclude: Option<bool>,
pub output_format: Option<SerializationFormat>,
pub show_fixes: Option<bool>,
pub extension: Option<Vec<ExtensionPair>>,
}
impl ConfigurationTransformer for CliOverrides {
fn transform(&self, mut config: Configuration) -> Configuration {
if let Some(cache_dir) = &self.cache_dir {
config.cache_dir = Some(cache_dir.clone());
}
if let Some(dummy_variable_rgx) = &self.dummy_variable_rgx {
config.lint.dummy_variable_rgx = Some(dummy_variable_rgx.clone());
}
if let Some(exclude) = &self.exclude {
config.exclude = Some(exclude.clone());
}
if let Some(extend_exclude) = &self.extend_exclude {
config.extend_exclude.extend(extend_exclude.clone());
}
if let Some(extend_per_file_ignores) = &self.extend_per_file_ignores {
config
.lint
.extend_per_file_ignores
.extend(collect_per_file_ignores(extend_per_file_ignores.clone()));
}
if let Some(fix) = &self.fix {
config.fix = Some(*fix);
}
if let Some(fix_only) = &self.fix_only {
config.fix_only = Some(*fix_only);
}
if self.unsafe_fixes.is_some() {
config.unsafe_fixes = self.unsafe_fixes;
}
config.lint.rule_selections.push(RuleSelection {
select: self.select.clone(),
ignore: self
.ignore
.iter()
.cloned()
.chain(self.extend_ignore.iter().cloned())
.flatten()
.collect(),
extend_select: self.extend_select.clone().unwrap_or_default(),
fixable: self.fixable.clone(),
unfixable: self
.unfixable
.iter()
.cloned()
.chain(self.extend_unfixable.iter().cloned())
.flatten()
.collect(),
extend_fixable: self.extend_fixable.clone().unwrap_or_default(),
});
if let Some(output_format) = &self.output_format {
config.output_format = Some(*output_format);
}
if let Some(force_exclude) = &self.force_exclude {
config.force_exclude = Some(*force_exclude);
}
if let Some(line_length) = self.line_length {
config.line_length = Some(line_length);
config.lint.pycodestyle = Some(PycodestyleOptions {
max_line_length: Some(line_length),
..config.lint.pycodestyle.unwrap_or_default()
});
}
if let Some(preview) = &self.preview {
config.preview = Some(*preview);
config.lint.preview = Some(*preview);
config.format.preview = Some(*preview);
}
if let Some(per_file_ignores) = &self.per_file_ignores {
config.lint.per_file_ignores = Some(collect_per_file_ignores(per_file_ignores.clone()));
}
if let Some(respect_gitignore) = &self.respect_gitignore {
config.respect_gitignore = Some(*respect_gitignore);
}
if let Some(show_source) = &self.show_source {
config.show_source = Some(*show_source);
}
if let Some(show_fixes) = &self.show_fixes {
config.show_fixes = Some(*show_fixes);
}
if let Some(target_version) = &self.target_version {
config.target_version = Some(*target_version);
}
if let Some(extension) = &self.extension {
config.lint.extension = Some(extension.clone().into_iter().collect());
}
config
}
}
/// Convert a list of `PatternPrefixPair` structs to `PerFileIgnore`.
pub fn collect_per_file_ignores(pairs: Vec<PatternPrefixPair>) -> Vec<PerFileIgnore> {
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
for pair in pairs {
per_file_ignores
.entry(pair.pattern)
.or_default()
.push(pair.prefix);
}
per_file_ignores
.into_iter()
.map(|(pattern, prefixes)| PerFileIgnore::new(pattern, &prefixes, None))
.collect()
}
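
Every boolean option in these structs comes as a `--flag`/`--no-flag` pair wired together with clap's `overrides_with`, and `resolve_bool_arg` collapses the pair into an `Option<bool>` so that "not given" can fall back to the configuration file. A small standalone illustration of that truth table, duplicating the function so the snippet is self-contained:

```rust
fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
    match (yes, no) {
        (true, false) => Some(true),
        (false, true) => Some(false),
        (false, false) => None,
        // clap's `overrides_with` keeps the two flags mutually exclusive.
        (..) => unreachable!("Clap should make this impossible"),
    }
}

fn main() {
    assert_eq!(resolve_bool_arg(true, false), Some(true)); // e.g. `--fix`
    assert_eq!(resolve_bool_arg(false, true), Some(false)); // e.g. `--no-fix`
    assert_eq!(resolve_bool_arg(false, false), None); // neither flag: defer to configuration
}
```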


@@ -3,8 +3,8 @@ use std::process::ExitCode;
use clap::{Parser, Subcommand};
use colored::Colorize;
use ruff::args::{Args, Command};
use ruff::{run, ExitStatus};
use ruff_cli::args::{Args, Command};
use ruff_cli::{run, ExitStatus};
#[cfg(target_os = "windows")]
#[global_allocator]
@@ -27,42 +27,26 @@ pub fn main() -> ExitCode {
let mut args =
argfile::expand_args_from(args, argfile::parse_fromfile, argfile::PREFIX).unwrap();
// We can't use `warn_user` here because logging isn't set up at this point
// and we also don't know if the user runs ruff with quiet.
// Keep the message and pass it to `run` that is responsible for emitting the warning.
let deprecated_alias_warning = match args.get(1).and_then(|arg| arg.to_str()) {
// Deprecated aliases that are handled by clap
Some("--explain") => {
Some("`ruff --explain <RULE>` is deprecated. Use `ruff rule <RULE>` instead.")
}
Some("--clean") => {
Some("`ruff --clean` is deprecated. Use `ruff clean` instead.")
}
Some("--generate-shell-completion") => {
Some("`ruff --generate-shell-completion <SHELL>` is deprecated. Use `ruff generate-shell-completion <SHELL>` instead.")
}
// Deprecated `ruff` alias to `ruff check`
// Clap doesn't support default subcommands but we want to run `check` by
// default for convenience and backwards-compatibility, so we just
// preprocess the arguments accordingly before passing them to Clap.
Some(arg) if !Command::has_subcommand(arg)
// Clap doesn't support default subcommands but we want to run `check` by
// default for convenience and backwards-compatibility, so we just
// preprocess the arguments accordingly before passing them to Clap.
if let Some(arg) = args.get(1) {
if arg
.to_str()
.is_some_and(|arg| !Command::has_subcommand(rewrite_legacy_subcommand(arg)))
&& arg != "-h"
&& arg != "--help"
&& arg != "-V"
&& arg != "--version"
&& arg != "help" => {
{
args.insert(1, "check".into());
Some("`ruff <path>` is deprecated. Use `ruff check <path>` instead.")
}
},
_ => None
};
&& arg != "help"
{
args.insert(1, "check".into());
}
}
let args = Args::parse_from(args);
match run(args, deprecated_alias_warning) {
match run(args) {
Ok(code) => code.into(),
Err(err) => {
#[allow(clippy::print_stderr)]
@@ -81,3 +65,12 @@ pub fn main() -> ExitCode {
}
}
}
fn rewrite_legacy_subcommand(cmd: &str) -> &str {
match cmd {
"--explain" => "rule",
"--clean" => "clean",
"--generate-shell-completion" => "generate-shell-completion",
cmd => cmd,
}
}
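
The argv preprocessing in `main` exists because clap has no notion of a default subcommand: legacy aliases are rewritten, and anything that is not a known subcommand (or a help/version flag) gets `check` inserted in front of it. A small illustration of the rewriting helper shown above, re-declared here so the snippet compiles on its own:

```rust
fn rewrite_legacy_subcommand(cmd: &str) -> &str {
    match cmd {
        "--explain" => "rule",
        "--clean" => "clean",
        "--generate-shell-completion" => "generate-shell-completion",
        cmd => cmd,
    }
}

fn main() {
    // `ruff --explain E501` is treated as `ruff rule E501`.
    assert_eq!(rewrite_legacy_subcommand("--explain"), "rule");
    // A bare path is not a subcommand, so the caller inserts `check`:
    // `ruff path/to/file.py` becomes `ruff check path/to/file.py`.
    assert_eq!(rewrite_legacy_subcommand("path/to/file.py"), "path/to/file.py");
}
```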


@@ -1,7 +1,7 @@
use std::fmt::Debug;
use std::fs::{self, File};
use std::hash::Hasher;
use std::io::{self, BufReader, Write};
use std::io::{self, BufReader, BufWriter, Write};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Mutex;
@@ -15,7 +15,6 @@ use rayon::iter::ParallelIterator;
use rayon::iter::{IntoParallelIterator, ParallelBridge};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use tempfile::NamedTempFile;
use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_diagnostics::{DiagnosticKind, Fix};
@@ -26,9 +25,10 @@ use ruff_notebook::NotebookIndex;
use ruff_python_ast::imports::ImportMap;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::resolver::Resolver;
use ruff_workspace::resolver::{PyprojectConfig, PyprojectDiscoveryStrategy, Resolver};
use ruff_workspace::Settings;
use crate::cache;
use crate::diagnostics::Diagnostics;
/// [`Path`] that is relative to the package root in [`PackageCache`].
@@ -86,7 +86,6 @@ pub(crate) struct Cache {
changes: Mutex<Vec<Change>>,
/// The "current" timestamp used as cache for the updates of
/// [`FileCache::last_seen`]
#[allow(clippy::struct_field_names)]
last_seen_cache: u64,
}
@@ -166,29 +165,15 @@ impl Cache {
return Ok(());
}
// Write the cache to a temporary file first and then rename it for an "atomic" write.
// Protects against data loss if the process is killed during the write and races between different ruff
// processes, resulting in a corrupted cache file. https://github.com/astral-sh/ruff/issues/8147#issuecomment-1943345964
let mut temp_file =
NamedTempFile::new_in(self.path.parent().expect("Write path must have a parent"))
.context("Failed to create temporary file")?;
// Serialize to in-memory buffer because hyperfine benchmark showed that it's faster than
// using a `BufWriter` and our cache files are small enough that streaming isn't necessary.
let serialized =
bincode::serialize(&self.package).context("Failed to serialize cache data")?;
temp_file
.write_all(&serialized)
.context("Failed to write serialized cache to temporary file.")?;
temp_file.persist(&self.path).with_context(|| {
let file = File::create(&self.path)
.with_context(|| format!("Failed to create cache file '{}'", self.path.display()))?;
let writer = BufWriter::new(file);
bincode::serialize_into(writer, &self.package).with_context(|| {
format!(
"Failed to rename temporary cache file to {}",
"Failed to serialise cache to file '{}'",
self.path.display()
)
})?;
Ok(())
})
}
/// Applies the pending changes without storing the cache to disk.
@@ -383,7 +368,7 @@ pub(crate) fn init(path: &Path) -> Result<()> {
let gitignore_path = path.join(".gitignore");
if !gitignore_path.exists() {
let mut file = fs::File::create(gitignore_path)?;
file.write_all(b"# Automatically created by ruff.\n*\n")?;
file.write_all(b"*")?;
}
Ok(())
@@ -457,7 +442,7 @@ pub(super) struct CacheMessage {
pub(crate) trait PackageCaches {
fn get(&self, package_root: &Path) -> Option<&Cache>;
fn persist(self) -> Result<()>;
fn persist(self) -> anyhow::Result<()>;
}
impl<T> PackageCaches for Option<T>
@@ -483,17 +468,27 @@ pub(crate) struct PackageCacheMap<'a>(FxHashMap<&'a Path, Cache>);
impl<'a> PackageCacheMap<'a> {
pub(crate) fn init(
pyproject_config: &PyprojectConfig,
package_roots: &FxHashMap<&'a Path, Option<&'a Path>>,
resolver: &Resolver,
) -> Self {
fn init_cache(path: &Path) {
if let Err(e) = init(path) {
if let Err(e) = cache::init(path) {
error!("Failed to initialize cache at {}: {e:?}", path.display());
}
}
for settings in resolver.settings() {
init_cache(&settings.cache_dir);
match pyproject_config.strategy {
PyprojectDiscoveryStrategy::Fixed => {
init_cache(&pyproject_config.settings.cache_dir);
}
PyprojectDiscoveryStrategy::Hierarchical => {
for settings in
std::iter::once(&pyproject_config.settings).chain(resolver.settings())
{
init_cache(&settings.cache_dir);
}
}
}
Self(
@@ -503,7 +498,7 @@ impl<'a> PackageCacheMap<'a> {
.unique()
.par_bridge()
.map(|cache_root| {
let settings = resolver.resolve(cache_root);
let settings = resolver.resolve(cache_root, pyproject_config);
let cache = Cache::open(cache_root.to_path_buf(), settings);
(cache_root, cache)
})
@@ -1065,7 +1060,6 @@ mod tests {
&self.settings.formatter,
PySourceType::Python,
FormatMode::Write,
None,
Some(cache),
)
}
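
One side of this hunk serialises straight into a `BufWriter`; the other writes to a temporary file in the same directory and renames it into place so that a crash or a concurrent ruff process can never leave a half-written cache behind. A minimal sketch of that temp-file-and-rename pattern, assuming the payload has already been serialised to bytes:

```rust
use std::io::Write;
use std::path::Path;

use anyhow::{Context, Result};
use tempfile::NamedTempFile;

fn write_atomically(path: &Path, data: &[u8]) -> Result<()> {
    let parent = path.parent().context("Write path must have a parent")?;
    let mut temp_file =
        NamedTempFile::new_in(parent).context("Failed to create temporary file")?;
    temp_file
        .write_all(data)
        .context("Failed to write serialized cache to temporary file")?;
    // Renaming is atomic on the same filesystem, so readers either see the old
    // cache or the complete new one, never a partial write.
    temp_file
        .persist(path)
        .with_context(|| format!("Failed to rename temporary cache file to {}", path.display()))?;
    Ok(())
}
```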


@@ -12,17 +12,17 @@ use ruff_linter::warn_user_once;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};
use crate::args::ConfigArguments;
use crate::args::CliOverrides;
/// Add `noqa` directives to a collection of files.
pub(crate) fn add_noqa(
files: &[PathBuf],
pyproject_config: &PyprojectConfig,
config_arguments: &ConfigArguments,
overrides: &CliOverrides,
) -> Result<usize> {
// Collect all the files to check.
let start = Instant::now();
let (paths, resolver) = python_files_in_path(files, pyproject_config, config_arguments)?;
let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
let duration = start.elapsed();
debug!("Identified files to lint in: {:?}", duration);
@@ -38,6 +38,7 @@ pub(crate) fn add_noqa(
.flatten()
.map(ResolvedFile::path)
.collect::<Vec<_>>(),
pyproject_config,
);
let start = Instant::now();
@@ -56,7 +57,7 @@ pub(crate) fn add_noqa(
.parent()
.and_then(|parent| package_roots.get(parent))
.and_then(|package| *package);
let settings = resolver.resolve(path);
let settings = resolver.resolve(path, pyproject_config);
let source_kind = match SourceKind::from_path(path, source_type) {
Ok(Some(source_kind)) => source_kind,
Ok(None) => return None,


@@ -24,7 +24,7 @@ use ruff_workspace::resolver::{
match_exclusion, python_files_in_path, PyprojectConfig, ResolvedFile,
};
use crate::args::ConfigArguments;
use crate::args::CliOverrides;
use crate::cache::{Cache, PackageCacheMap, PackageCaches};
use crate::diagnostics::Diagnostics;
use crate::panic::catch_unwind;
@@ -34,7 +34,7 @@ use crate::panic::catch_unwind;
pub(crate) fn check(
files: &[PathBuf],
pyproject_config: &PyprojectConfig,
config_arguments: &ConfigArguments,
overrides: &CliOverrides,
cache: flags::Cache,
noqa: flags::Noqa,
fix_mode: flags::FixMode,
@@ -42,7 +42,7 @@ pub(crate) fn check(
) -> Result<Diagnostics> {
// Collect all the Python files to check.
let start = Instant::now();
let (paths, resolver) = python_files_in_path(files, pyproject_config, config_arguments)?;
let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
debug!("Identified files to lint in: {:?}", start.elapsed());
if paths.is_empty() {
@@ -57,11 +57,16 @@ pub(crate) fn check(
.flatten()
.map(ResolvedFile::path)
.collect::<Vec<_>>(),
pyproject_config,
);
// Load the caches.
let caches = if bool::from(cache) {
Some(PackageCacheMap::init(&package_roots, &resolver))
Some(PackageCacheMap::init(
pyproject_config,
&package_roots,
&resolver,
))
} else {
None
};
@@ -76,7 +81,7 @@ pub(crate) fn check(
.and_then(|parent| package_roots.get(parent))
.and_then(|package| *package);
let settings = resolver.resolve(path);
let settings = resolver.resolve(path, pyproject_config);
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
&& match_exclusion(
@@ -123,7 +128,7 @@ pub(crate) fn check(
Some(result.unwrap_or_else(|(path, message)| {
if let Some(path) = &path {
let settings = resolver.resolve(path);
let settings = resolver.resolve(path, pyproject_config);
if settings.linter.rules.enabled(Rule::IOError) {
let dummy =
SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();
@@ -233,7 +238,7 @@ mod test {
use ruff_workspace::resolver::{PyprojectConfig, PyprojectDiscoveryStrategy};
use ruff_workspace::Settings;
use crate::args::ConfigArguments;
use crate::args::CliOverrides;
use super::check;
@@ -272,7 +277,7 @@ mod test {
// Notebooks are not included by default
&[tempdir.path().to_path_buf(), notebook],
&pyproject_config,
&ConfigArguments::default(),
&CliOverrides::default(),
flags::Cache::Disabled,
flags::Noqa::Disabled,
flags::FixMode::Generate,

View File

@@ -4,9 +4,9 @@ use anyhow::Result;
use ruff_linter::packaging;
use ruff_linter::settings::flags;
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, PyprojectConfig, Resolver};
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, PyprojectConfig};
use crate::args::ConfigArguments;
use crate::args::CliOverrides;
use crate::diagnostics::{lint_stdin, Diagnostics};
use crate::stdin::{parrot_stdin, read_from_stdin};
@@ -14,24 +14,24 @@ use crate::stdin::{parrot_stdin, read_from_stdin};
pub(crate) fn check_stdin(
filename: Option<&Path>,
pyproject_config: &PyprojectConfig,
overrides: &ConfigArguments,
overrides: &CliOverrides,
noqa: flags::Noqa,
fix_mode: flags::FixMode,
) -> Result<Diagnostics> {
let mut resolver = Resolver::new(pyproject_config);
if resolver.force_exclude() {
if pyproject_config.settings.file_resolver.force_exclude {
if let Some(filename) = filename {
if !python_file_at_path(filename, &mut resolver, overrides)? {
if !python_file_at_path(filename, pyproject_config, overrides)? {
if fix_mode.is_apply() {
parrot_stdin()?;
}
return Ok(Diagnostics::default());
}
if filename.file_name().is_some_and(|name| {
match_exclusion(filename, name, &resolver.base_settings().linter.exclude)
}) {
let lint_settings = &pyproject_config.settings.linter;
if filename
.file_name()
.is_some_and(|name| match_exclusion(filename, name, &lint_settings.exclude))
{
if fix_mode.is_apply() {
parrot_stdin()?;
}
@@ -41,13 +41,13 @@ pub(crate) fn check_stdin(
}
let stdin = read_from_stdin()?;
let package_root = filename.and_then(Path::parent).and_then(|path| {
packaging::detect_package_root(path, &resolver.base_settings().linter.namespace_packages)
packaging::detect_package_root(path, &pyproject_config.settings.linter.namespace_packages)
});
let mut diagnostics = lint_stdin(
filename,
package_root,
stdin,
resolver.base_settings(),
&pyproject_config.settings,
noqa,
fix_mode,
)?;
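The hunk above keeps the same exclusion behaviour for stdin as for on-disk files: when `force_exclude` is set and the `--stdin-filename` matches an exclude pattern, the input is echoed back unchanged (so editors piping a buffer through ruff don't lose it) and no diagnostics are produced. A minimal std-only sketch of that passthrough; `is_excluded` is a toy stand-in for ruff's `match_exclusion`, and the suffix list is purely illustrative:

```rust
use std::io::{self, Read, Write};
use std::path::Path;

/// Toy stand-in for ruff's `match_exclusion`: a plain suffix check.
fn is_excluded(filename: &Path, excluded_suffixes: &[&str]) -> bool {
    excluded_suffixes
        .iter()
        .any(|&suffix| filename.to_string_lossy().ends_with(suffix))
}

/// Echo stdin back untouched when the named file is excluded, mirroring the
/// `parrot_stdin()` early returns in the hunk above.
fn check_stdin(filename: Option<&Path>, force_exclude: bool, fix: bool) -> io::Result<()> {
    let mut source = String::new();
    io::stdin().read_to_string(&mut source)?;

    let excluded = force_exclude
        && filename.is_some_and(|name| is_excluded(name, &["_generated.py", ".pyi"]));
    if excluded {
        if fix {
            // In fix mode the caller expects the (possibly fixed) source on
            // stdout, so pass the input through unchanged.
            io::stdout().write_all(source.as_bytes())?;
        }
        return Ok(());
    }

    // ... lint `source` and emit diagnostics here ...
    Ok(())
}

fn main() -> io::Result<()> {
    check_stdin(Some(Path::new("models_generated.py")), true, true)
}
```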

View File

@@ -17,19 +17,20 @@ use tracing::debug;
use ruff_diagnostics::SourceMap;
use ruff_linter::fs;
use ruff_linter::logging::{DisplayParseError, LogLevel};
use ruff_linter::logging::LogLevel;
use ruff_linter::registry::Rule;
use ruff_linter::rules::flake8_quotes::settings::Quote;
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::warn_user_once;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_python_formatter::{format_module_source, format_range, FormatModuleError, QuoteStyle};
use ruff_source_file::LineIndex;
use ruff_python_formatter::{format_module_source, FormatModuleError, QuoteStyle};
use ruff_text_size::{TextLen, TextRange, TextSize};
use ruff_workspace::resolver::{match_exclusion, python_files_in_path, ResolvedFile, Resolver};
use ruff_workspace::resolver::{
match_exclusion, python_files_in_path, PyprojectConfig, ResolvedFile, Resolver,
};
use ruff_workspace::FormatterSettings;
use crate::args::{ConfigArguments, FormatArguments, FormatRange};
use crate::args::{CliOverrides, FormatArguments};
use crate::cache::{Cache, FileCacheKey, PackageCacheMap, PackageCaches};
use crate::panic::{catch_unwind, PanicError};
use crate::resolve::resolve;
@@ -60,26 +61,25 @@ impl FormatMode {
/// Format a set of files, and return the exit status.
pub(crate) fn format(
cli: FormatArguments,
config_arguments: &ConfigArguments,
overrides: &CliOverrides,
log_level: LogLevel,
) -> Result<ExitStatus> {
let pyproject_config = resolve(config_arguments, cli.stdin_filename.as_deref())?;
let pyproject_config = resolve(
cli.isolated,
cli.config.as_deref(),
overrides,
cli.stdin_filename.as_deref(),
)?;
let mode = FormatMode::from_cli(&cli);
let files = resolve_default_files(cli.files, false);
let (paths, resolver) = python_files_in_path(&files, &pyproject_config, config_arguments)?;
let (paths, resolver) = python_files_in_path(&files, &pyproject_config, overrides)?;
if paths.is_empty() {
warn_user_once!("No Python files found under the given path(s)");
return Ok(ExitStatus::Success);
}
if cli.range.is_some() && paths.len() > 1 {
return Err(anyhow::anyhow!(
"The `--range` option is only supported when formatting a single file but the specified paths resolve to {} files.",
paths.len()
));
}
warn_incompatible_formatter_settings(&resolver);
warn_incompatible_formatter_settings(&pyproject_config, Some(&resolver));
// Discover the package root for each Python file.
let package_roots = resolver.package_roots(
@@ -88,6 +88,7 @@ pub(crate) fn format(
.flatten()
.map(ResolvedFile::path)
.collect::<Vec<_>>(),
&pyproject_config,
);
let caches = if cli.no_cache {
@@ -98,7 +99,11 @@ pub(crate) fn format(
#[cfg(debug_assertions)]
crate::warn_user!("Detected debug build without --no-cache.");
Some(PackageCacheMap::init(&package_roots, &resolver))
Some(PackageCacheMap::init(
&pyproject_config,
&package_roots,
&resolver,
))
};
let start = Instant::now();
@@ -108,19 +113,13 @@ pub(crate) fn format(
match entry {
Ok(resolved_file) => {
let path = resolved_file.path();
let settings = resolver.resolve(path);
let source_type = match settings.formatter.extension.get(path) {
None => match SourceType::from(path) {
SourceType::Python(source_type) => source_type,
SourceType::Toml(_) => {
// Ignore any non-Python files.
return None;
}
},
Some(language) => PySourceType::from(language),
let SourceType::Python(source_type) = SourceType::from(&path) else {
// Ignore any non-Python files.
return None;
};
let settings = resolver.resolve(path, &pyproject_config);
// Ignore files that are excluded from formatting
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
&& match_exclusion(
@@ -141,14 +140,7 @@ pub(crate) fn format(
Some(
match catch_unwind(|| {
format_path(
path,
&settings.formatter,
source_type,
mode,
cli.range,
cache,
)
format_path(path, &settings.formatter, source_type, mode, cache)
}) {
Ok(inner) => inner.map(|result| FormatPathResult {
path: resolved_file.path().to_path_buf(),
@@ -197,7 +189,7 @@ pub(crate) fn format(
}
// Report on the formatting changes.
if config_arguments.log_level >= LogLevel::Default {
if log_level >= LogLevel::Default {
if mode.is_diff() {
// Allow piping the diff to e.g. a file by writing the summary to stderr
results.write_summary(&mut stderr().lock())?;
@@ -235,7 +227,6 @@ pub(crate) fn format_path(
settings: &FormatterSettings,
source_type: PySourceType,
mode: FormatMode,
range: Option<FormatRange>,
cache: Option<&Cache>,
) -> Result<FormatResult, FormatCommandError> {
if let Some(cache) = cache {
@@ -253,19 +244,15 @@ pub(crate) fn format_path(
// Extract the sources from the file.
let unformatted = match SourceKind::from_path(path, source_type) {
Ok(Some(source_kind)) => source_kind,
// Non-Python Jupyter notebook.
// Non Python Jupyter notebook
Ok(None) => return Ok(FormatResult::Skipped),
Err(err) => {
return Err(FormatCommandError::Read(Some(path.to_path_buf()), err));
}
};
// Don't write back to the cache if formatting a range.
let cache = cache.filter(|_| range.is_none());
// Format the source.
let format_result = match format_source(&unformatted, source_type, Some(path), settings, range)?
{
let format_result = match format_source(&unformatted, source_type, Some(path), settings)? {
FormattedSource::Formatted(formatted) => match mode {
FormatMode::Write => {
let mut writer = File::create(path).map_err(|err| {
@@ -333,43 +320,15 @@ pub(crate) fn format_source(
source_type: PySourceType,
path: Option<&Path>,
settings: &FormatterSettings,
range: Option<FormatRange>,
) -> Result<FormattedSource, FormatCommandError> {
match &source_kind {
match source_kind {
SourceKind::Python(unformatted) => {
let options = settings.to_format_options(source_type, unformatted);
let formatted = if let Some(range) = range {
let line_index = LineIndex::from_source_text(unformatted);
let byte_range = range.to_text_range(unformatted, &line_index);
format_range(unformatted, byte_range, options).map(|formatted_range| {
let mut formatted = unformatted.to_string();
formatted.replace_range(
std::ops::Range::<usize>::from(formatted_range.source_range()),
formatted_range.as_code(),
);
formatted
})
} else {
// Using `Printed::into_code` requires adding `ruff_formatter` as a direct dependency, and I suspect that Rust can optimize the closure away regardless.
#[allow(clippy::redundant_closure_for_method_calls)]
format_module_source(unformatted, options).map(|formatted| formatted.into_code())
};
let formatted = formatted.map_err(|err| {
if let FormatModuleError::ParseError(err) = err {
DisplayParseError::from_source_kind(
err,
path.map(Path::to_path_buf),
source_kind,
)
.into()
} else {
FormatCommandError::Format(path.map(Path::to_path_buf), err)
}
})?;
let formatted = format_module_source(unformatted, options)
.map_err(|err| FormatCommandError::Format(path.map(Path::to_path_buf), err))?;
let formatted = formatted.into_code();
if formatted.len() == unformatted.len() && formatted == *unformatted {
Ok(FormattedSource::Unchanged)
} else {
@@ -381,12 +340,6 @@ pub(crate) fn format_source(
return Ok(FormattedSource::Unchanged);
}
if range.is_some() {
return Err(FormatCommandError::RangeFormatNotebook(
path.map(Path::to_path_buf),
));
}
let options = settings.to_format_options(source_type, notebook.source_code());
let mut output: Option<String> = None;
@@ -399,19 +352,8 @@ pub(crate) fn format_source(
let unformatted = &notebook.source_code()[range];
// Format the cell.
let formatted =
format_module_source(unformatted, options.clone()).map_err(|err| {
if let FormatModuleError::ParseError(err) = err {
DisplayParseError::from_source_kind(
err,
path.map(Path::to_path_buf),
source_kind,
)
.into()
} else {
FormatCommandError::Format(path.map(Path::to_path_buf), err)
}
})?;
let formatted = format_module_source(unformatted, options.clone())
.map_err(|err| FormatCommandError::Format(path.map(Path::to_path_buf), err))?;
// If the cell is unchanged, skip it.
let formatted = formatted.as_code();
@@ -466,13 +408,11 @@ pub(crate) fn format_source(
pub(crate) enum FormatResult {
/// The file was formatted.
Formatted,
/// The file was formatted, [`SourceKind`] contains the formatted code
Diff {
unformatted: SourceKind,
formatted: SourceKind,
},
/// The file was unchanged, as the formatted contents matched the existing contents.
Unchanged,
@@ -527,7 +467,7 @@ impl<'a> FormatResults<'a> {
})
.sorted_unstable_by_key(|(path, _, _)| *path)
{
write!(f, "{}", unformatted.diff(formatted, Some(path)).unwrap())?;
unformatted.diff(formatted, Some(path), f)?;
}
Ok(())
@@ -575,7 +515,7 @@ impl<'a> FormatResults<'a> {
if changed > 0 && unchanged > 0 {
writeln!(
f,
"{} file{} {}, {} file{} {}",
"{} file{} {}, {} file{} left unchanged",
changed,
if changed == 1 { "" } else { "s" },
match self.mode {
@@ -584,10 +524,6 @@ impl<'a> FormatResults<'a> {
},
unchanged,
if unchanged == 1 { "" } else { "s" },
match self.mode {
FormatMode::Write => "left unchanged",
FormatMode::Check | FormatMode::Diff => "already formatted",
},
)
} else if changed > 0 {
writeln!(
@@ -603,13 +539,9 @@ impl<'a> FormatResults<'a> {
} else if unchanged > 0 {
writeln!(
f,
"{} file{} {}",
"{} file{} left unchanged",
unchanged,
if unchanged == 1 { "" } else { "s" },
match self.mode {
FormatMode::Write => "left unchanged",
FormatMode::Check | FormatMode::Diff => "already formatted",
},
)
} else {
Ok(())
@@ -621,13 +553,11 @@ impl<'a> FormatResults<'a> {
#[derive(Error, Debug)]
pub(crate) enum FormatCommandError {
Ignore(#[from] ignore::Error),
Parse(#[from] DisplayParseError),
Panic(Option<PathBuf>, PanicError),
Read(Option<PathBuf>, SourceError),
Format(Option<PathBuf>, FormatModuleError),
Write(Option<PathBuf>, SourceError),
Diff(Option<PathBuf>, io::Error),
RangeFormatNotebook(Option<PathBuf>),
}
impl FormatCommandError {
@@ -640,13 +570,11 @@ impl FormatCommandError {
None
}
}
Self::Parse(err) => err.path(),
Self::Panic(path, _)
| Self::Read(path, _)
| Self::Format(path, _)
| Self::Write(path, _)
| Self::Diff(path, _)
| Self::RangeFormatNotebook(path) => path.as_deref(),
| Self::Diff(path, _) => path.as_deref(),
}
}
}
@@ -668,17 +596,13 @@ impl Display for FormatCommandError {
} else {
write!(
f,
"{header} {error}",
header = "Encountered error:".bold(),
error = err
.io_error()
"{} {}",
"Encountered error:".bold(),
err.io_error()
.map_or_else(|| err.to_string(), std::string::ToString::to_string)
)
}
}
Self::Parse(err) => {
write!(f, "{err}")
}
Self::Read(path, err) => {
if let Some(path) = path {
write!(
@@ -689,7 +613,7 @@ impl Display for FormatCommandError {
":".bold()
)
} else {
write!(f, "{header} {err}", header = "Failed to read:".bold())
write!(f, "{}{} {err}", "Failed to read".bold(), ":".bold())
}
}
Self::Write(path, err) => {
@@ -702,7 +626,7 @@ impl Display for FormatCommandError {
":".bold()
)
} else {
write!(f, "{header} {err}", header = "Failed to write:".bold())
write!(f, "{}{} {err}", "Failed to write".bold(), ":".bold())
}
}
Self::Format(path, err) => {
@@ -715,7 +639,7 @@ impl Display for FormatCommandError {
":".bold()
)
} else {
write!(f, "{header} {err}", header = "Failed to format:".bold())
write!(f, "{}{} {err}", "Failed to format".bold(), ":".bold())
}
}
Self::Diff(path, err) => {
@@ -730,25 +654,9 @@ impl Display for FormatCommandError {
} else {
write!(
f,
"{header} {err}",
header = "Failed to generate diff:".bold(),
)
}
}
Self::RangeFormatNotebook(path) => {
if let Some(path) = path {
write!(
f,
"{header}{path}{colon} Range formatting isn't supported for notebooks.",
header = "Failed to format ".bold(),
path = fs::relativize_path(path).bold(),
colon = ":".bold()
)
} else {
write!(
f,
"{header} Range formatting isn't supported for notebooks",
header = "Failed to format:".bold()
"{}{} {err}",
"Failed to generate diff".bold(),
":".bold()
)
}
}
@@ -779,10 +687,15 @@ impl Display for FormatCommandError {
}
}
pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
pub(super) fn warn_incompatible_formatter_settings(
pyproject_config: &PyprojectConfig,
resolver: Option<&Resolver>,
) {
// First, collect all rules that are incompatible regardless of the linter-specific settings.
let mut incompatible_rules = FxHashSet::default();
for setting in resolver.settings() {
for setting in std::iter::once(&pyproject_config.settings)
.chain(resolver.iter().flat_map(|resolver| resolver.settings()))
{
for rule in [
// The formatter might collapse implicit string concatenation on a single line.
Rule::SingleLineImplicitStringConcatenation,
@@ -811,7 +724,9 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
}
// Next, validate settings-specific incompatibilities.
for setting in resolver.settings() {
for setting in std::iter::once(&pyproject_config.settings)
.chain(resolver.iter().flat_map(|resolver| resolver.settings()))
{
// Validate all rules that rely on tab styles.
if setting.linter.rules.enabled(Rule::TabIndentation)
&& setting.formatter.indent_style.is_tab()
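The removed `--range` support above hinges on one splice: format only a sub-range of the source, then write the result back into the original text with `String::replace_range`. A minimal sketch of that pattern, assuming a toy `format_snippet` stub in place of the removed `format_range` call (which may also widen the range it actually formatted):

```rust
use std::ops::Range;

/// Toy stand-in for a range formatter: returns the replacement text for
/// `source[range]` plus the byte range it applies to.
fn format_snippet(source: &str, range: Range<usize>) -> (Range<usize>, String) {
    // "Formatting" here just collapses runs of whitespace.
    let formatted = source[range.clone()]
        .split_whitespace()
        .collect::<Vec<_>>()
        .join(" ");
    (range, formatted)
}

/// Splice the formatted snippet back into the unformatted source, the same
/// `replace_range` pattern as the removed hunk above.
fn apply_range_format(unformatted: &str, range: Range<usize>) -> String {
    let (applied_range, snippet) = format_snippet(unformatted, range);
    let mut result = unformatted.to_string();
    result.replace_range(applied_range, &snippet);
    result
}

fn main() {
    let source = "x = 1\ny   =   2\nz = 3\n";
    // Byte range covering the second line only; the rest is left untouched.
    let start = source.find('y').unwrap();
    let end = start + source[start..].find('\n').unwrap();
    assert_eq!(apply_range_format(source, start..end), "x = 1\ny = 2\nz = 3\n");
}
```

The removed code also skips the cache whenever a range is supplied, which follows from the same splice: the cache stores whole-file results, and a partially formatted file is not one.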

View File

@@ -6,10 +6,10 @@ use log::error;
use ruff_linter::source_kind::SourceKind;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, Resolver};
use ruff_workspace::resolver::{match_exclusion, python_file_at_path};
use ruff_workspace::FormatterSettings;
use crate::args::{ConfigArguments, FormatArguments, FormatRange};
use crate::args::{CliOverrides, FormatArguments};
use crate::commands::format::{
format_source, warn_incompatible_formatter_settings, FormatCommandError, FormatMode,
FormatResult, FormattedSource,
@@ -19,29 +19,32 @@ use crate::stdin::{parrot_stdin, read_from_stdin};
use crate::ExitStatus;
/// Run the formatter over a single file, read from `stdin`.
pub(crate) fn format_stdin(
cli: &FormatArguments,
config_arguments: &ConfigArguments,
) -> Result<ExitStatus> {
let pyproject_config = resolve(config_arguments, cli.stdin_filename.as_deref())?;
pub(crate) fn format_stdin(cli: &FormatArguments, overrides: &CliOverrides) -> Result<ExitStatus> {
let pyproject_config = resolve(
cli.isolated,
cli.config.as_deref(),
overrides,
cli.stdin_filename.as_deref(),
)?;
let mut resolver = Resolver::new(&pyproject_config);
warn_incompatible_formatter_settings(&resolver);
warn_incompatible_formatter_settings(&pyproject_config, None);
let mode = FormatMode::from_cli(cli);
if resolver.force_exclude() {
if pyproject_config.settings.file_resolver.force_exclude {
if let Some(filename) = cli.stdin_filename.as_deref() {
if !python_file_at_path(filename, &mut resolver, config_arguments)? {
if !python_file_at_path(filename, &pyproject_config, overrides)? {
if mode.is_write() {
parrot_stdin()?;
}
return Ok(ExitStatus::Success);
}
if filename.file_name().is_some_and(|name| {
match_exclusion(filename, name, &resolver.base_settings().formatter.exclude)
}) {
let format_settings = &pyproject_config.settings.formatter;
if filename
.file_name()
.is_some_and(|name| match_exclusion(filename, name, &format_settings.exclude))
{
if mode.is_write() {
parrot_stdin()?;
}
@@ -51,23 +54,21 @@ pub(crate) fn format_stdin(
}
let path = cli.stdin_filename.as_deref();
let settings = &resolver.base_settings().formatter;
let source_type = match path.and_then(|path| settings.extension.get(path)) {
None => match path.map(SourceType::from).unwrap_or_default() {
SourceType::Python(source_type) => source_type,
SourceType::Toml(_) => {
if mode.is_write() {
parrot_stdin()?;
}
return Ok(ExitStatus::Success);
}
},
Some(language) => PySourceType::from(language),
let SourceType::Python(source_type) = path.map(SourceType::from).unwrap_or_default() else {
if mode.is_write() {
parrot_stdin()?;
}
return Ok(ExitStatus::Success);
};
// Format the file.
match format_source_code(path, cli.range, settings, source_type, mode) {
match format_source_code(
path,
&pyproject_config.settings.formatter,
source_type,
mode,
) {
Ok(result) => match mode {
FormatMode::Write => Ok(ExitStatus::Success),
FormatMode::Check | FormatMode::Diff => {
@@ -88,7 +89,6 @@ pub(crate) fn format_stdin(
/// Format source code read from `stdin`.
fn format_source_code(
path: Option<&Path>,
range: Option<FormatRange>,
settings: &FormatterSettings,
source_type: PySourceType,
mode: FormatMode,
@@ -106,7 +106,7 @@ fn format_source_code(
};
// Format the source.
let formatted = format_source(&source_kind, source_type, path, settings, range)?;
let formatted = format_source(&source_kind, source_type, path, settings)?;
match &formatted {
FormattedSource::Formatted(formatted) => match mode {
@@ -118,13 +118,9 @@ fn format_source_code(
}
FormatMode::Check => {}
FormatMode::Diff => {
use std::io::Write;
write!(
&mut stdout().lock(),
"{}",
source_kind.diff(formatted, path).unwrap()
)
.map_err(|err| FormatCommandError::Diff(path.map(Path::to_path_buf), err))?;
source_kind
.diff(formatted, path, &mut stdout().lock())
.map_err(|err| FormatCommandError::Diff(path.map(Path::to_path_buf), err))?;
}
},
FormattedSource::Unchanged => {
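The `FormatMode::Diff` arm above switches between two ways of emitting a diff: the restored code has `SourceKind::diff` write straight into an `io::Write` sink, while the removed code returns a value implementing `Display` and leaves the `write!` to the caller. A generic sketch of the two styles; the `Diff` type and its line-by-line output are illustrative, not a real diff algorithm:

```rust
use std::fmt::{self, Display, Formatter};
use std::io::{self, Write};

struct Diff<'a> {
    before: &'a str,
    after: &'a str,
}

// Style used by the removed code: expose the diff as a `Display` value and
// let the caller `write!` it wherever it wants.
impl Display for Diff<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        for line in self.before.lines() {
            writeln!(f, "-{line}")?;
        }
        for line in self.after.lines() {
            writeln!(f, "+{line}")?;
        }
        Ok(())
    }
}

// Style restored by the hunk: push the diff straight into an `io::Write` sink.
fn write_diff(diff: &Diff, writer: &mut dyn Write) -> io::Result<()> {
    write!(writer, "{diff}")
}

fn main() -> io::Result<()> {
    let diff = Diff { before: "x=1", after: "x = 1" };
    // `Display` style: composes freely with other formatting.
    print!("{diff}");
    // Writer style: target stdout, a file, or a buffer explicitly.
    write_diff(&diff, &mut io::stdout().lock())
}
```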

View File

@@ -7,7 +7,6 @@ pub(crate) mod format;
pub(crate) mod format_stdin;
pub(crate) mod linter;
pub(crate) mod rule;
pub(crate) mod server;
pub(crate) mod show_files;
pub(crate) mod show_settings;
pub(crate) mod version;

View File

@@ -18,7 +18,6 @@ struct Explanation<'a> {
summary: &'a str,
message_formats: &'a [&'a str],
fix: String,
#[allow(clippy::struct_field_names)]
explanation: Option<&'a str>,
preview: bool,
}

View File

@@ -7,17 +7,17 @@ use itertools::Itertools;
use ruff_linter::warn_user_once;
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};
use crate::args::ConfigArguments;
use crate::args::CliOverrides;
/// Show the list of files to be checked based on current settings.
pub(crate) fn show_files(
files: &[PathBuf],
pyproject_config: &PyprojectConfig,
config_arguments: &ConfigArguments,
overrides: &CliOverrides,
writer: &mut impl Write,
) -> Result<()> {
// Collect all files in the hierarchy.
let (paths, _resolver) = python_files_in_path(files, pyproject_config, config_arguments)?;
let (paths, _resolver) = python_files_in_path(files, pyproject_config, overrides)?;
if paths.is_empty() {
warn_user_once!("No Python files found under the given path(s)");

View File

@@ -6,17 +6,17 @@ use itertools::Itertools;
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};
use crate::args::ConfigArguments;
use crate::args::CliOverrides;
/// Print the user-facing configuration settings.
pub(crate) fn show_settings(
files: &[PathBuf],
pyproject_config: &PyprojectConfig,
config_arguments: &ConfigArguments,
overrides: &CliOverrides,
writer: &mut impl Write,
) -> Result<()> {
// Collect all files in the hierarchy.
let (paths, resolver) = python_files_in_path(files, pyproject_config, config_arguments)?;
let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
// Print the list of files.
let Some(path) = paths
@@ -29,13 +29,13 @@ pub(crate) fn show_settings(
bail!("No files found under the given path");
};
let settings = resolver.resolve(&path);
let settings = resolver.resolve(&path, pyproject_config);
writeln!(writer, "Resolved settings for: \"{}\"", path.display())?;
writeln!(writer, "Resolved settings for: {path:?}")?;
if let Some(settings_path) = pyproject_config.path.as_ref() {
writeln!(writer, "Settings path: \"{}\"", settings_path.display())?;
writeln!(writer, "Settings path: {settings_path:?}")?;
}
write!(writer, "{settings}")?;
writeln!(writer, "{settings:#?}")?;
Ok(())
}

View File

@@ -1,5 +1,5 @@
---
source: crates/ruff/src/commands/check.rs
source: crates/ruff_cli/src/commands/check.rs
---
/home/ferris/project/code.py:1:1: E902 Permission denied (os error 13)
/home/ferris/project/notebook.ipynb:1:1: E902 Permission denied (os error 13)

View File

@@ -1,9 +1,7 @@
#![cfg_attr(target_family = "wasm", allow(dead_code))]
use std::borrow::Cow;
use std::fs::File;
use std::io;
use std::io::Write;
use std::ops::{Add, AddAssign};
use std::path::Path;
@@ -12,25 +10,24 @@ use colored::Colorize;
use log::{debug, error, warn};
use rustc_hash::FxHashMap;
use crate::cache::{Cache, FileCacheKey, LintCacheData};
use ruff_diagnostics::Diagnostic;
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult, ParseSource};
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult};
use ruff_linter::logging::DisplayParseError;
use ruff_linter::message::Message;
use ruff_linter::pyproject_toml::lint_pyproject_toml;
use ruff_linter::registry::AsRule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::types::{ExtensionMapping, UnsafeFixes};
use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::{fs, IOError, SyntaxError};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::imports::ImportMap;
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
use ruff_source_file::{LineIndex, SourceCode, SourceFileBuilder};
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::Settings;
use crate::cache::{Cache, FileCacheKey, LintCacheData};
#[derive(Debug, Default, PartialEq)]
pub(crate) struct Diagnostics {
pub(crate) messages: Vec<Message>,
@@ -180,6 +177,11 @@ impl AddAssign for FixMap {
}
}
fn override_source_type(path: Option<&Path>, extension: &ExtensionMapping) -> Option<PySourceType> {
let ext = path?.extension()?.to_str()?;
extension.get(ext).map(PySourceType::from)
}
/// Lint the source code at the given `Path`.
pub(crate) fn lint_path(
path: &Path,
@@ -224,7 +226,7 @@ pub(crate) fn lint_path(
debug!("Checking: {}", path.display());
let source_type = match settings.extension.get(path).map(PySourceType::from) {
let source_type = match override_source_type(Some(path), &settings.extension) {
Some(source_type) => source_type,
None => match SourceType::from(path) {
SourceType::Toml(TomlSourceType::Pyproject) => {
@@ -270,7 +272,6 @@ pub(crate) fn lint_path(
data: (messages, imports),
error: parse_error,
},
transformed,
fixed,
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
if let Ok(FixerResult {
@@ -290,49 +291,26 @@ pub(crate) fn lint_path(
match fix_mode {
flags::FixMode::Apply => transformed.write(&mut File::create(path)?)?,
flags::FixMode::Diff => {
write!(
source_kind.diff(
transformed.as_ref(),
Some(path),
&mut io::stdout().lock(),
"{}",
source_kind.diff(&transformed, Some(path)).unwrap()
)?;
}
flags::FixMode::Generate => {}
}
}
let transformed = if let Cow::Owned(transformed) = transformed {
transformed
} else {
source_kind
};
(result, transformed, fixed)
(result, fixed)
} else {
// If we fail to fix, lint the original source code.
let result = lint_only(
path,
package,
settings,
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let transformed = source_kind;
let result = lint_only(path, package, settings, noqa, &source_kind, source_type);
let fixed = FxHashMap::default();
(result, transformed, fixed)
(result, fixed)
}
} else {
let result = lint_only(
path,
package,
settings,
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let transformed = source_kind;
let result = lint_only(path, package, settings, noqa, &source_kind, source_type);
let fixed = FxHashMap::default();
(result, transformed, fixed)
(result, fixed)
};
let imports = imports.unwrap_or_default();
@@ -340,7 +318,7 @@ pub(crate) fn lint_path(
if let Some((cache, relative_path, key)) = caching {
// We don't cache parsing errors.
if parse_error.is_none() {
// `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
// `FixMode::Generate` and `FixMode::Diff` rely on side-effects (writing to disk,
// and writing the diff to stdout, respectively). If a file has diagnostics, we
// need to avoid reading from and writing to the cache in these modes.
if match fix_mode {
@@ -355,21 +333,28 @@ pub(crate) fn lint_path(
LintCacheData::from_messages(
&messages,
imports.clone(),
transformed.as_ipy_notebook().map(Notebook::index).cloned(),
source_kind.as_ipy_notebook().map(Notebook::index).cloned(),
),
);
}
}
}
if let Some(error) = parse_error {
if let Some(err) = parse_error {
error!(
"{}",
DisplayParseError::from_source_kind(error, Some(path.to_path_buf()), &transformed)
DisplayParseError::new(
err,
SourceCode::new(
source_kind.source_code(),
&LineIndex::from_source_text(source_kind.source_code())
),
&source_kind,
)
);
}
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = source_kind {
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook.into_index())])
} else {
FxHashMap::default()
@@ -394,14 +379,15 @@ pub(crate) fn lint_stdin(
fix_mode: flags::FixMode,
) -> Result<Diagnostics> {
// TODO(charlie): Support `pyproject.toml`.
let source_type = match path.and_then(|path| settings.linter.extension.get(path)) {
None => match path.map(SourceType::from).unwrap_or_default() {
SourceType::Python(source_type) => source_type,
SourceType::Toml(_) => {
return Ok(Diagnostics::default());
}
},
Some(language) => PySourceType::from(language),
let source_type = if let Some(source_type) =
override_source_type(path, &settings.linter.extension)
{
source_type
} else {
let SourceType::Python(source_type) = path.map(SourceType::from).unwrap_or_default() else {
return Ok(Diagnostics::default());
};
source_type
};
// Extract the sources from the file.
@@ -419,7 +405,6 @@ pub(crate) fn lint_stdin(
data: (messages, imports),
error: parse_error,
},
transformed,
fixed,
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
if let Ok(FixerResult {
@@ -443,21 +428,13 @@ pub(crate) fn lint_stdin(
flags::FixMode::Diff => {
// But only write a diff if it's non-empty.
if !fixed.is_empty() {
write!(
&mut io::stdout().lock(),
"{}",
source_kind.diff(&transformed, path).unwrap()
)?;
source_kind.diff(transformed.as_ref(), path, &mut io::stdout().lock())?;
}
}
flags::FixMode::Generate => {}
}
let transformed = if let Cow::Owned(transformed) = transformed {
transformed
} else {
source_kind
};
(result, transformed, fixed)
(result, fixed)
} else {
// If we fail to fix, lint the original source code.
let result = lint_only(
@@ -467,17 +444,15 @@ pub(crate) fn lint_stdin(
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let fixed = FxHashMap::default();
// Write the contents to stdout anyway.
if fix_mode.is_apply() {
source_kind.write(&mut io::stdout().lock())?;
}
let transformed = source_kind;
let fixed = FxHashMap::default();
(result, transformed, fixed)
(result, fixed)
}
} else {
let result = lint_only(
@@ -487,23 +462,21 @@ pub(crate) fn lint_stdin(
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let transformed = source_kind;
let fixed = FxHashMap::default();
(result, transformed, fixed)
(result, fixed)
};
let imports = imports.unwrap_or_default();
if let Some(error) = parse_error {
if let Some(err) = parse_error {
error!(
"{}",
DisplayParseError::from_source_kind(error, path.map(Path::to_path_buf), &transformed)
"Failed to parse {}: {err}",
path.map_or_else(|| "-".into(), fs::relativize_path).bold()
);
}
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = source_kind {
FxHashMap::from_iter([(
path.map_or_else(|| "-".into(), |path| path.to_string_lossy().to_string()),
notebook.into_index(),
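The `override_source_type` helper added above consults the user's `extension` mapping before falling back to path-based detection, which is how `--extension ipy:ipynb` can make ruff treat an `.ipy` file as a notebook. A simplified, std-only sketch of the same lookup; the `PySource` enum, the `detect_from_path` fallback, and the plain `HashMap` stand in for ruff's actual types:

```rust
use std::collections::HashMap;
use std::path::Path;

#[derive(Clone, Copy, Debug, PartialEq)]
enum PySource {
    Python,
    Stub,
    Notebook,
}

/// Mirror of the `override_source_type` helper in the hunk above:
/// take the file's extension and look it up in the user-supplied mapping.
fn override_source_type(
    path: Option<&Path>,
    extension: &HashMap<String, PySource>,
) -> Option<PySource> {
    let ext = path?.extension()?.to_str()?;
    extension.get(ext).copied()
}

/// Fallback detection from the path alone (illustrative).
fn detect_from_path(path: &Path) -> PySource {
    match path.extension().and_then(|ext| ext.to_str()) {
        Some("pyi") => PySource::Stub,
        Some("ipynb") => PySource::Notebook,
        _ => PySource::Python,
    }
}

fn main() {
    let mapping = HashMap::from([("ipy".to_string(), PySource::Notebook)]);
    let path = Path::new("analysis.ipy");
    let source_type =
        override_source_type(Some(path), &mapping).unwrap_or_else(|| detect_from_path(path));
    assert_eq!(source_type, PySource::Notebook);
}
```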

View File

@@ -7,7 +7,6 @@ use std::process::ExitCode;
use std::sync::mpsc::channel;
use anyhow::Result;
use args::{GlobalConfigArgs, ServerCommand};
use clap::CommandFactory;
use colored::Colorize;
use log::warn;
@@ -19,7 +18,7 @@ use ruff_linter::settings::types::SerializationFormat;
use ruff_linter::{fs, warn_user, warn_user_once};
use ruff_workspace::Settings;
use crate::args::{Args, CheckCommand, Command, FormatCommand};
use crate::args::{Args, CheckCommand, Command, FormatCommand, HelpFormat};
use crate::printer::{Flags as PrinterFlags, Printer};
pub mod args;
@@ -115,12 +114,20 @@ fn resolve_default_files(files: Vec<PathBuf>, is_stdin: bool) -> Vec<PathBuf> {
}
}
/// Resolve the desired output format from either `output_format` or the
/// deprecated `format` argument, warning the user if the deprecated form is used.
fn resolve_help_output_format(output_format: HelpFormat, format: Option<HelpFormat>) -> HelpFormat {
if format.is_some() {
warn_user!("The `--format` argument is deprecated. Use `--output-format` instead.");
}
format.unwrap_or(output_format)
}
pub fn run(
Args {
command,
global_options,
log_level_args,
}: Args,
deprecated_alias_warning: Option<&'static str>,
) -> Result<ExitStatus> {
{
let default_panic_hook = std::panic::take_hook();
@@ -148,11 +155,8 @@ pub fn run(
#[cfg(windows)]
assert!(colored::control::set_virtual_terminal(true).is_ok());
set_up_logging(global_options.log_level())?;
if let Some(deprecated_alias_warning) = deprecated_alias_warning {
warn_user!("{}", deprecated_alias_warning);
}
let log_level = LogLevel::from(&log_level_args);
set_up_logging(&log_level)?;
match command {
Command::Version { output_format } => {
@@ -162,8 +166,10 @@ pub fn run(
Command::Rule {
rule,
all,
output_format,
format,
mut output_format,
} => {
output_format = resolve_help_output_format(output_format, format);
if all {
commands::rule::rules(output_format)?;
}
@@ -176,46 +182,48 @@ pub fn run(
commands::config::config(option.as_deref())?;
Ok(ExitStatus::Success)
}
Command::Linter { output_format } => {
Command::Linter {
format,
mut output_format,
} => {
output_format = resolve_help_output_format(output_format, format);
commands::linter::linter(output_format)?;
Ok(ExitStatus::Success)
}
Command::Clean => {
commands::clean::clean(global_options.log_level())?;
commands::clean::clean(log_level)?;
Ok(ExitStatus::Success)
}
Command::GenerateShellCompletion { shell } => {
shell.generate(&mut Args::command(), &mut stdout());
Ok(ExitStatus::Success)
}
Command::Check(args) => check(args, global_options),
Command::Format(args) => format(args, global_options),
Command::Server(args) => server(args, global_options.log_level()),
Command::Check(args) => check(args, log_level),
Command::Format(args) => format(args, log_level),
}
}
fn format(args: FormatCommand, global_options: GlobalConfigArgs) -> Result<ExitStatus> {
let (cli, config_arguments) = args.partition(global_options)?;
fn format(args: FormatCommand, log_level: LogLevel) -> Result<ExitStatus> {
let (cli, overrides) = args.partition();
if is_stdin(&cli.files, cli.stdin_filename.as_deref()) {
commands::format_stdin::format_stdin(&cli, &config_arguments)
commands::format_stdin::format_stdin(&cli, &overrides)
} else {
commands::format::format(cli, &config_arguments)
commands::format::format(cli, &overrides, log_level)
}
}
#[allow(clippy::needless_pass_by_value)] // TODO: remove once we start taking arguments from here
fn server(args: ServerCommand, log_level: LogLevel) -> Result<ExitStatus> {
let ServerCommand {} = args;
commands::server::run_server(log_level)
}
pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<ExitStatus> {
let (cli, config_arguments) = args.partition(global_options)?;
pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
let (cli, overrides) = args.partition();
// Construct the "default" settings. These are used when no `pyproject.toml`
// files are present, or files are injected from outside of the hierarchy.
let pyproject_config = resolve::resolve(&config_arguments, cli.stdin_filename.as_deref())?;
let pyproject_config = resolve::resolve(
cli.isolated,
cli.config.as_deref(),
&overrides,
cli.stdin_filename.as_deref(),
)?;
let mut writer: Box<dyn Write> = match cli.output_file {
Some(path) if !cli.watch => {
@@ -231,21 +239,11 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
let files = resolve_default_files(cli.files, is_stdin);
if cli.show_settings {
commands::show_settings::show_settings(
&files,
&pyproject_config,
&config_arguments,
&mut writer,
)?;
commands::show_settings::show_settings(&files, &pyproject_config, &overrides, &mut writer)?;
return Ok(ExitStatus::Success);
}
if cli.show_files {
commands::show_files::show_files(
&files,
&pyproject_config,
&config_arguments,
&mut writer,
)?;
commands::show_files::show_files(&files, &pyproject_config, &overrides, &mut writer)?;
return Ok(ExitStatus::Success);
}
@@ -257,6 +255,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
unsafe_fixes,
output_format,
show_fixes,
show_source,
..
} = pyproject_config.settings;
@@ -285,6 +284,9 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
if show_fixes {
printer_flags |= PrinterFlags::SHOW_FIX_SUMMARY;
}
if show_source {
printer_flags |= PrinterFlags::SHOW_SOURCE;
}
if cli.ecosystem_ci {
warn_user!(
"The formatting of fixes emitted by this option is a work-in-progress, subject to \
@@ -304,9 +306,8 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
if !fix_mode.is_generate() {
warn_user!("--fix is incompatible with --add-noqa.");
}
let modifications =
commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?;
if modifications > 0 && config_arguments.log_level >= LogLevel::Default {
let modifications = commands::add_noqa::add_noqa(&files, &pyproject_config, &overrides)?;
if modifications > 0 && log_level >= LogLevel::Default {
let s = if modifications == 1 { "" } else { "s" };
#[allow(clippy::print_stderr)]
{
@@ -318,24 +319,15 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
let printer = Printer::new(
output_format,
config_arguments.log_level,
log_level,
fix_mode,
unsafe_fixes,
printer_flags,
);
// the settings should already be combined with the CLI overrides at this point
// TODO(jane): let's make this `PreviewMode`
// TODO: this should reference the global preview mode once https://github.com/astral-sh/ruff/issues/8232
// is resolved.
let preview = pyproject_config.settings.linter.preview.is_enabled();
if cli.watch {
if output_format != SerializationFormat::default(preview) {
warn_user!(
"`--output-format {}` is always used in watch mode.",
SerializationFormat::default(preview)
);
if output_format != SerializationFormat::Text {
warn_user!("`--output-format text` is always used in watch mode.");
}
// Configure the file watcher.
@@ -355,13 +347,13 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
let messages = commands::check::check(
&files,
&pyproject_config,
&config_arguments,
&overrides,
cache.into(),
noqa.into(),
fix_mode,
unsafe_fixes,
)?;
printer.write_continuously(&mut writer, &messages, preview)?;
printer.write_continuously(&mut writer, &messages)?;
// In watch mode, we may need to re-resolve the configuration.
// TODO(charlie): Re-compute other derivative values, like the `printer`.
@@ -375,8 +367,12 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
};
if matches!(change_kind, ChangeKind::Configuration) {
pyproject_config =
resolve::resolve(&config_arguments, cli.stdin_filename.as_deref())?;
pyproject_config = resolve::resolve(
cli.isolated,
cli.config.as_deref(),
&overrides,
cli.stdin_filename.as_deref(),
)?;
}
Printer::clear_screen()?;
printer.write_to_user("File change detected...\n");
@@ -384,13 +380,13 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
let messages = commands::check::check(
&files,
&pyproject_config,
&config_arguments,
&overrides,
cache.into(),
noqa.into(),
fix_mode,
unsafe_fixes,
)?;
printer.write_continuously(&mut writer, &messages, preview)?;
printer.write_continuously(&mut writer, &messages)?;
}
Err(err) => return Err(err.into()),
}
@@ -401,7 +397,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
commands::check_stdin::check_stdin(
cli.stdin_filename.map(fs::normalize_path).as_deref(),
&pyproject_config,
&config_arguments,
&overrides,
noqa.into(),
fix_mode,
)?
@@ -409,7 +405,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
commands::check::check(
&files,
&pyproject_config,
&config_arguments,
&overrides,
cache.into(),
noqa.into(),
fix_mode,
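`resolve_help_output_format` above implements a common deprecation pattern: accept both the old `--format` and the new `--output-format`, prefer an explicitly given deprecated value, and warn so users migrate. A generic sketch of that pattern; the `OutputFormat` enum and the warning text are illustrative rather than ruff's actual types or message:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum OutputFormat {
    Text,
    Json,
}

/// Prefer the deprecated flag if it was given, but warn so users migrate.
fn resolve_output_format(
    output_format: OutputFormat,
    deprecated_format: Option<OutputFormat>,
) -> OutputFormat {
    if deprecated_format.is_some() {
        eprintln!("warning: `--format` is deprecated. Use `--output-format` instead.");
    }
    deprecated_format.unwrap_or(output_format)
}

fn main() {
    // `--format json` wins over the default `--output-format text`, with a warning.
    assert_eq!(
        resolve_output_format(OutputFormat::Text, Some(OutputFormat::Json)),
        OutputFormat::Json
    );
    // No deprecated flag: the `--output-format` value is used as-is.
    assert_eq!(resolve_output_format(OutputFormat::Text, None), OutputFormat::Text);
}
```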

View File

@@ -13,7 +13,7 @@ use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, SarifEmitter, TextEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::registry::{AsRule, Rule};
@@ -27,6 +27,8 @@ bitflags! {
pub(crate) struct Flags: u8 {
/// Whether to show violations when emitting diagnostics.
const SHOW_VIOLATIONS = 0b0000_0001;
/// Whether to show the source code when emitting diagnostics.
const SHOW_SOURCE = 0b000_0010;
/// Whether to show a summary of the fixed violations when emitting diagnostics.
const SHOW_FIX_SUMMARY = 0b0000_0100;
/// Whether to show a diff of each fixed violation when emitting diagnostics.
@@ -123,7 +125,15 @@ impl Printer {
if let Some(fixables) = fixables {
let fix_prefix = format!("[{}]", "*".cyan());
if self.unsafe_fixes.is_hint() {
if self.unsafe_fixes.is_enabled() {
if fixables.applicable > 0 {
writeln!(
writer,
"{fix_prefix} {} fixable with the --fix option.",
fixables.applicable
)?;
}
} else {
if fixables.applicable > 0 && fixables.unapplicable_unsafe > 0 {
let es = if fixables.unapplicable_unsafe == 1 {
""
@@ -153,14 +163,6 @@ impl Printer {
fixables.unapplicable_unsafe
)?;
}
} else {
if fixables.applicable > 0 {
writeln!(
writer,
"{fix_prefix} {} fixable with the --fix option.",
fixables.applicable
)?;
}
}
}
} else {
@@ -216,10 +218,7 @@ impl Printer {
if !self.flags.intersects(Flags::SHOW_VIOLATIONS) {
if matches!(
self.format,
SerializationFormat::Text
| SerializationFormat::Full
| SerializationFormat::Concise
| SerializationFormat::Grouped
SerializationFormat::Text | SerializationFormat::Grouped
) {
if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
if !diagnostics.fixed.is_empty() {
@@ -246,12 +245,11 @@ impl Printer {
SerializationFormat::Junit => {
JunitEmitter.emit(writer, &diagnostics.messages, &context)?;
}
SerializationFormat::Concise
| SerializationFormat::Full => {
SerializationFormat::Text => {
TextEmitter::default()
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
.with_show_fix_diff(self.flags.intersects(Flags::SHOW_FIX_DIFF))
.with_show_source(self.format == SerializationFormat::Full)
.with_show_source(self.flags.intersects(Flags::SHOW_SOURCE))
.with_unsafe_fixes(self.unsafe_fixes)
.emit(writer, &diagnostics.messages, &context)?;
@@ -267,6 +265,7 @@ impl Printer {
}
SerializationFormat::Grouped => {
GroupedEmitter::default()
.with_show_source(self.flags.intersects(Flags::SHOW_SOURCE))
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
.with_unsafe_fixes(self.unsafe_fixes)
.emit(writer, &diagnostics.messages, &context)?;
@@ -292,10 +291,6 @@ impl Printer {
SerializationFormat::Azure => {
AzureEmitter.emit(writer, &diagnostics.messages, &context)?;
}
SerializationFormat::Sarif => {
SarifEmitter.emit(writer, &diagnostics.messages, &context)?;
}
SerializationFormat::Text => unreachable!("Text is deprecated and should have been automatically converted to the default serialization format")
}
writer.flush()?;
@@ -344,9 +339,7 @@ impl Printer {
}
match self.format {
SerializationFormat::Text
| SerializationFormat::Full
| SerializationFormat::Concise => {
SerializationFormat::Text => {
// Compute the maximum number of digits in the count and code, for all messages,
// to enable pretty-printing.
let count_width = num_digits(
@@ -407,7 +400,6 @@ impl Printer {
&self,
writer: &mut dyn Write,
diagnostics: &Diagnostics,
preview: bool,
) -> Result<()> {
if matches!(self.log_level, LogLevel::Silent) {
return Ok(());
@@ -435,7 +427,7 @@ impl Printer {
let context = EmitterContext::new(&diagnostics.notebook_indexes);
TextEmitter::default()
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
.with_show_source(preview)
.with_show_source(self.flags.intersects(Flags::SHOW_SOURCE))
.with_unsafe_fixes(self.unsafe_fixes)
.emit(writer, &diagnostics.messages, &context)?;
}
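The printer hunk reintroduces a `SHOW_SOURCE` bit in the `bitflags!`-defined `Flags` and gates source snippets on `flags.intersects(Flags::SHOW_SOURCE)`. The same composition can be sketched with plain integer bit operations and no extra dependency; the `Printer` shape and the output strings here are illustrative:

```rust
/// Plain-u8 stand-ins for the `bitflags!` constants in the hunk above.
const SHOW_VIOLATIONS: u8 = 0b0000_0001;
const SHOW_SOURCE: u8 = 0b0000_0010;
const SHOW_FIX_SUMMARY: u8 = 0b0000_0100;

struct Printer {
    flags: u8,
}

impl Printer {
    fn emit(&self) {
        // Equivalent of `self.flags.intersects(Flags::SHOW_SOURCE)`.
        if (self.flags & SHOW_SOURCE) != 0 {
            println!("emitting diagnostics with source snippets");
        } else {
            println!("emitting diagnostics without source snippets");
        }
        if (self.flags & SHOW_FIX_SUMMARY) != 0 {
            println!("appending a fix summary");
        }
    }
}

fn main() {
    // Compose flags the way the CLI does when `show_source` is enabled
    // in the resolved settings.
    let printer = Printer {
        flags: SHOW_VIOLATIONS | SHOW_SOURCE,
    };
    printer.emit();
}
```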

View File

@@ -1,4 +1,4 @@
use std::path::Path;
use std::path::{Path, PathBuf};
use anyhow::Result;
use log::debug;
@@ -11,17 +11,19 @@ use ruff_workspace::resolver::{
Relativity,
};
use crate::args::ConfigArguments;
use crate::args::CliOverrides;
/// Resolve the relevant settings strategy and defaults for the current
/// invocation.
pub fn resolve(
config_arguments: &ConfigArguments,
isolated: bool,
config: Option<&Path>,
overrides: &CliOverrides,
stdin_filename: Option<&Path>,
) -> Result<PyprojectConfig> {
// First priority: if we're running in isolated mode, use the default settings.
if config_arguments.isolated {
let config = config_arguments.transform(Configuration::default());
if isolated {
let config = overrides.transform(Configuration::default());
let settings = config.into_settings(&path_dedot::CWD)?;
debug!("Isolated mode, not reading any pyproject.toml");
return Ok(PyprojectConfig::new(
@@ -34,8 +36,12 @@ pub fn resolve(
// Second priority: the user specified a `pyproject.toml` file. Use that
// `pyproject.toml` for _all_ configuration, and resolve paths relative to the
// current working directory. (This matches ESLint's behavior.)
if let Some(pyproject) = config_arguments.config_file() {
let settings = resolve_root_settings(pyproject, Relativity::Cwd, config_arguments)?;
if let Some(pyproject) = config
.map(|config| config.display().to_string())
.map(|config| shellexpand::full(&config).map(|config| PathBuf::from(config.as_ref())))
.transpose()?
{
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, overrides)?;
debug!(
"Using user-specified configuration file at: {}",
pyproject.display()
@@ -43,7 +49,7 @@ pub fn resolve(
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Fixed,
settings,
Some(pyproject.to_path_buf()),
Some(pyproject),
));
}
@@ -61,7 +67,7 @@ pub fn resolve(
"Using configuration file (via parent) at: {}",
pyproject.display()
);
let settings = resolve_root_settings(&pyproject, Relativity::Parent, config_arguments)?;
let settings = resolve_root_settings(&pyproject, Relativity::Parent, overrides)?;
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
settings,
@@ -78,7 +84,7 @@ pub fn resolve(
"Using configuration file (via cwd) at: {}",
pyproject.display()
);
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, config_arguments)?;
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, overrides)?;
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
settings,
@@ -91,7 +97,7 @@ pub fn resolve(
// "closest" `pyproject.toml` file for every Python file later on, so these act
// as the "default" settings.)
debug!("Using Ruff default settings");
let config = config_arguments.transform(Configuration::default());
let config = overrides.transform(Configuration::default());
let settings = config.into_settings(&path_dedot::CWD)?;
Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
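Whichever signature `resolve` ends up with, the priority order in the hunk is unchanged: isolated mode wins, then an explicitly passed configuration file, then a `pyproject.toml` discovered upward from the stdin filename or the current directory, and finally the built-in defaults. A condensed, std-only sketch of that chain; the `ConfigSource` enum and `discover_pyproject` helper are illustrative, and the real resolution also expands `~` and environment variables in a user-supplied path:

```rust
use std::path::{Path, PathBuf};

#[derive(Debug)]
enum ConfigSource {
    Isolated,
    UserSpecified(PathBuf),
    Discovered(PathBuf),
    Defaults,
}

/// Walk up from `start` looking for a `pyproject.toml` (illustrative discovery).
fn discover_pyproject(start: &Path) -> Option<PathBuf> {
    start
        .ancestors()
        .map(|dir| dir.join("pyproject.toml"))
        .find(|candidate| candidate.is_file())
}

/// Mirror of the priority order in `resolve` above.
fn resolve_config(
    isolated: bool,
    config: Option<&Path>,
    stdin_filename: Option<&Path>,
) -> ConfigSource {
    if isolated {
        return ConfigSource::Isolated;
    }
    if let Some(path) = config {
        return ConfigSource::UserSpecified(path.to_path_buf());
    }
    let start = stdin_filename
        .and_then(Path::parent)
        .map(Path::to_path_buf)
        .or_else(|| std::env::current_dir().ok());
    if let Some(found) = start.as_deref().and_then(discover_pyproject) {
        return ConfigSource::Discovered(found);
    }
    ConfigSource::Defaults
}

fn main() {
    println!("{:?}", resolve_config(false, None, Some(Path::new("src/app.py"))));
}
```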

View File

@@ -0,0 +1,5 @@
---
source: crates/ruff_cli/src/version.rs
expression: version
---
0.0.0

View File

@@ -1,5 +1,5 @@
---
source: crates/ruff/src/version.rs
source: crates/ruff_cli/src/version.rs
expression: version
---
0.0.0 (53b0f5d92 2023-10-19)

View File

@@ -1,5 +1,5 @@
---
source: crates/ruff/src/version.rs
source: crates/ruff_cli/src/version.rs
expression: version
---
0.0.0+24 (53b0f5d92 2023-10-19)

View File

@@ -1,5 +1,5 @@
---
source: crates/ruff/src/version.rs
source: crates/ruff_cli/src/version.rs
expression: version
---
{

View File

@@ -7,15 +7,10 @@ use std::str;
use anyhow::Result;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use regex::escape;
use tempfile::TempDir;
const BIN_NAME: &str = "ruff";
fn tempdir_filter(tempdir: &TempDir) -> String {
format!(r"{}\\?/?", escape(tempdir.path().to_str().unwrap()))
}
#[test]
fn default_options() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
@@ -95,182 +90,6 @@ fn format_warn_stdin_filename_with_files() {
"###);
}
#[test]
fn nonexistent_config_file() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--config", "foo.toml", "."]), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value 'foo.toml' for '--config <CONFIG_OPTION>'
tip: A `--config` flag must either be a path to a `.toml` configuration file
or a TOML `<KEY> = <VALUE>` pair overriding a specific configuration
option
It looks like you were trying to pass a path to a configuration file.
The path `foo.toml` does not point to a configuration file
For more information, try '--help'.
"###);
}
#[test]
fn config_override_rejected_if_invalid_toml() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--config", "foo = bar", "."]), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value 'foo = bar' for '--config <CONFIG_OPTION>'
tip: A `--config` flag must either be a path to a `.toml` configuration file
or a TOML `<KEY> = <VALUE>` pair overriding a specific configuration
option
The supplied argument is not valid TOML:
TOML parse error at line 1, column 7
|
1 | foo = bar
| ^
invalid string
expected `"`, `'`
For more information, try '--help'.
"###);
}
#[test]
fn too_many_config_files() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_dot_toml = tempdir.path().join("ruff.toml");
let ruff2_dot_toml = tempdir.path().join("ruff2.toml");
fs::File::create(&ruff_dot_toml)?;
fs::File::create(&ruff2_dot_toml)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("format")
.arg("--config")
.arg(&ruff_dot_toml)
.arg("--config")
.arg(&ruff2_dot_toml)
.arg("."), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
ruff failed
Cause: You cannot specify more than one configuration file on the command line.
tip: remove either `--config=[TMP]/ruff.toml` or `--config=[TMP]/ruff2.toml`.
For more information, try `--help`.
"###);
});
Ok(())
}
#[test]
fn config_file_and_isolated() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_dot_toml = tempdir.path().join("ruff.toml");
fs::File::create(&ruff_dot_toml)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("format")
.arg("--config")
.arg(&ruff_dot_toml)
.arg("--isolated")
.arg("."), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
ruff failed
Cause: The argument `--config=[TMP]/ruff.toml` cannot be used with `--isolated`
tip: You cannot specify a configuration file and also specify `--isolated`,
as `--isolated` causes ruff to ignore all configuration files.
For more information, try `--help`.
"###);
});
Ok(())
}
#[test]
fn config_override_via_cli() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(&ruff_toml, "line-length = 100")?;
let fixture = r#"
def foo():
print("looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong string")
"#;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("format")
.arg("--config")
.arg(&ruff_toml)
// This overrides the long line length set in the config file
.args(["--config", "line-length=80"])
.arg("-")
.pass_stdin(fixture), @r###"
success: true
exit_code: 0
----- stdout -----
def foo():
print(
"looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong string"
)
----- stderr -----
"###);
Ok(())
}
#[test]
fn config_doubly_overridden_via_cli() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(&ruff_toml, "line-length = 70")?;
let fixture = r#"
def foo():
print("looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong string")
"#;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("format")
.arg("--config")
.arg(&ruff_toml)
// This overrides the long line length set in the config file...
.args(["--config", "line-length=80"])
// ...but this overrides them both:
.args(["--line-length", "100"])
.arg("-")
.pass_stdin(fixture), @r###"
success: true
exit_code: 0
----- stdout -----
def foo():
print("looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong string")
----- stderr -----
"###);
Ok(())
}
#[test]
fn format_options() -> Result<()> {
let tempdir = TempDir::new()?;
@@ -320,93 +139,6 @@ if condition:
Ok(())
}
#[test]
fn docstring_options() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
[format]
docstring-code-format = true
docstring-code-line-length = 20
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--config"])
.arg(&ruff_toml)
.arg("-")
.pass_stdin(r#"
def f(x):
'''
Something about `f`. And an example:
.. code-block:: python
foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
Another example:
```py
foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
```
And another:
>>> foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
'''
pass
"#), @r###"
success: true
exit_code: 0
----- stdout -----
def f(x):
"""
Something about `f`. And an example:
.. code-block:: python
foo, bar, quux = (
this_is_a_long_line(
lion,
hippo,
lemur,
bear,
)
)
Another example:
```py
foo, bar, quux = (
this_is_a_long_line(
lion,
hippo,
lemur,
bear,
)
)
```
And another:
>>> foo, bar, quux = (
... this_is_a_long_line(
... lion,
... hippo,
... lemur,
... bear,
... )
... )
"""
pass
----- stderr -----
"###);
Ok(())
}
#[test]
fn mixed_line_endings() -> Result<()> {
let tempdir = TempDir::new()?;
@@ -430,7 +162,7 @@ fn mixed_line_endings() -> Result<()> {
----- stdout -----
----- stderr -----
2 files already formatted
2 files left unchanged
"###);
Ok(())
}
@@ -503,86 +235,6 @@ OTHER = "OTHER"
Ok(())
}
#[test]
fn syntax_error() -> Result<()> {
let tempdir = TempDir::new()?;
fs::write(
tempdir.path().join("main.py"),
r#"
from module import =
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.args(["format", "--no-cache", "--isolated", "--check"])
.arg("main.py"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse main.py:2:20: Unexpected token '='
"###);
Ok(())
}
#[test]
fn messages() -> Result<()> {
let tempdir = TempDir::new()?;
fs::write(
tempdir.path().join("main.py"),
r#"
from test import say_hy
if __name__ == "__main__":
say_hy("dear Ruff contributor")
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.args(["format", "--no-cache", "--isolated", "--check"])
.arg("main.py"), @r###"
success: false
exit_code: 1
----- stdout -----
Would reformat: main.py
1 file would be reformatted
----- stderr -----
"###);
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.args(["format", "--no-cache", "--isolated"])
.arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
"###);
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.args(["format", "--no-cache", "--isolated"])
.arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
1 file left unchanged
----- stderr -----
"###);
Ok(())
}
#[test]
fn force_exclude() -> Result<()> {
let tempdir = TempDir::new()?;
@@ -683,9 +335,6 @@ if __name__ == '__main__':
say_hy("dear Ruff contributor")
----- stderr -----
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `ruff.toml`:
- 'extend-select' -> 'lint.extend-select'
- 'ignore' -> 'lint.ignore'
"###);
Ok(())
}
@@ -724,9 +373,6 @@ if __name__ == '__main__':
say_hy("dear Ruff contributor")
----- stderr -----
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `ruff.toml`:
- 'extend-select' -> 'lint.extend-select'
- 'ignore' -> 'lint.ignore'
"###);
Ok(())
}
@@ -853,8 +499,7 @@ format = "json"
----- stderr -----
ruff failed
Cause: Failed to parse [RUFF-TOML-PATH]
Cause: TOML parse error at line 2, column 10
Cause: Failed to parse `[RUFF-TOML-PATH]`: TOML parse error at line 2, column 10
|
2 | format = "json"
| ^^^^^^
@@ -1138,7 +783,7 @@ fn test_diff() {
----- stderr -----
2 files would be reformatted, 1 file already formatted
2 files would be reformatted, 1 file left unchanged
"###);
});
}
@@ -1649,392 +1294,3 @@ fn test_notebook_trailing_semicolon() {
----- stderr -----
"###);
}
#[test]
fn extension() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
include = ["*.ipy"]
"#,
)?;
fs::write(
tempdir.path().join("main.ipy"),
r#"
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "ad6f36d9-4b7d-4562-8d00-f15a0f1fbb6d",
"metadata": {},
"outputs": [],
"source": [
"x=1"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.arg("format")
.arg("--no-cache")
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
.args(["--extension", "ipy:ipynb"])
.arg("."), @r###"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
"###);
Ok(())
}
#[test]
fn range_formatting() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "test.py", "--range=2:8-2:14"])
.arg("-")
.pass_stdin(r#"
def foo(arg1, arg2,):
print("Shouldn't format this" )
"#), @r###"
success: true
exit_code: 0
----- stdout -----
def foo(
arg1,
arg2,
):
print("Shouldn't format this" )
----- stderr -----
"###);
}
#[test]
fn range_formatting_unicode() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "test.py", "--range=2:21-3"])
.arg("-")
.pass_stdin(r#"
def foo(arg1="👋🏽" ): print("Format this" )
"#), @r###"
success: true
exit_code: 0
----- stdout -----
def foo(arg1="👋🏽" ):
print("Format this")
----- stderr -----
"###);
}
#[test]
fn range_formatting_multiple_files() -> std::io::Result<()> {
let tempdir = TempDir::new()?;
let file1 = tempdir.path().join("file1.py");
fs::write(
&file1,
r#"
def file1(arg1, arg2,):
print("Shouldn't format this" )
"#,
)?;
let file2 = tempdir.path().join("file2.py");
fs::write(
&file2,
r#"
def file2(arg1, arg2,):
print("Shouldn't format this" )
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--range=1:8-1:15"])
.arg(file1)
.arg(file2), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
ruff failed
Cause: The `--range` option is only supported when formatting a single file but the specified paths resolve to 2 files.
"###);
Ok(())
}
#[test]
fn range_formatting_out_of_bounds() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "test.py", "--range=100:40-200:1"])
.arg("-")
.pass_stdin(r#"
def foo(arg1, arg2,):
print("Shouldn't format this" )
"#), @r###"
success: true
exit_code: 0
----- stdout -----
def foo(arg1, arg2,):
print("Shouldn't format this" )
----- stderr -----
"###);
}
#[test]
fn range_start_larger_than_end() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "test.py", "--range=90-50"])
.arg("-")
.pass_stdin(r#"
def foo(arg1, arg2,):
print("Shouldn't format this" )
"#), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value '90-50' for '--range <RANGE>': the start position '90:1' is greater than the end position '50:1'.
tip: Try switching start and end: '50:1-90:1'
For more information, try '--help'.
"###);
}
#[test]
fn range_line_numbers_only() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "test.py", "--range=2-3"])
.arg("-")
.pass_stdin(r#"
def foo(arg1, arg2,):
print("Shouldn't format this" )
"#), @r###"
success: true
exit_code: 0
----- stdout -----
def foo(
arg1,
arg2,
):
print("Shouldn't format this" )
----- stderr -----
"###);
}
#[test]
fn range_start_only() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "test.py", "--range=3"])
.arg("-")
.pass_stdin(r#"
def foo(arg1, arg2,):
print("Should format this" )
"#), @r###"
success: true
exit_code: 0
----- stdout -----
def foo(arg1, arg2,):
print("Should format this")
----- stderr -----
"###);
}
#[test]
fn range_end_only() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "test.py", "--range=-3"])
.arg("-")
.pass_stdin(r#"
def foo(arg1, arg2,):
print("Should format this" )
"#), @r###"
success: true
exit_code: 0
----- stdout -----
def foo(
arg1,
arg2,
):
print("Should format this" )
----- stderr -----
"###);
}
#[test]
fn range_missing_line() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "test.py", "--range=1-:20"])
.arg("-")
.pass_stdin(r#"
def foo(arg1, arg2,):
print("Should format this" )
"#), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value '1-:20' for '--range <RANGE>': the end line is not a valid number (cannot parse integer from empty string)
tip: The format is 'line:column'.
For more information, try '--help'.
"###);
}
#[test]
fn zero_line_number() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "test.py", "--range=0:2"])
.arg("-")
.pass_stdin(r#"
def foo(arg1, arg2,):
print("Should format this" )
"#), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value '0:2' for '--range <RANGE>': the start line is 0, but it should be 1 or greater.
tip: The line numbers start at 1.
tip: Try 1:2 instead.
For more information, try '--help'.
"###);
}
#[test]
fn column_and_line_zero() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "test.py", "--range=0:0"])
.arg("-")
.pass_stdin(r#"
def foo(arg1, arg2,):
print("Should format this" )
"#), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value '0:0' for '--range <RANGE>': the start line and column are both 0, but they should be 1 or greater.
tip: The line and column numbers start at 1.
tip: Try 1:1 instead.
For more information, try '--help'.
"###);
}
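Taken together, these cases pin down the `--range` grammar exercised here: positions are written as `line:column` and are 1-indexed; the column may be omitted and defaults to 1 (the `90:1-50:1` hint above shows this); a bare start such as `3` formats from that line to the end of the file, while `-3` formats from the start of the file through that line; and a start greater than the end, a line of 0, or a column of 0 are rejected with the errors shown above.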
#[test]
fn range_formatting_notebook() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--no-cache", "--stdin-filename", "main.ipynb", "--range=1-2"])
.arg("-")
.pass_stdin(r#"
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "ad6f36d9-4b7d-4562-8d00-f15a0f1fbb6d",
"metadata": {},
"outputs": [],
"source": [
"x=1"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
"#), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to format main.ipynb: Range formatting isn't supported for notebooks.
"###);
}

View File

@@ -0,0 +1,438 @@
//! Tests the interaction of the `lint` configuration section
#![cfg(not(target_family = "wasm"))]
use std::fs;
use std::process::Command;
use std::str;
use anyhow::Result;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use tempfile::TempDir;
const BIN_NAME: &str = "ruff";
const STDIN_BASE_OPTIONS: &[&str] = &["--no-cache", "--output-format", "text"];
#[test]
fn top_level_options() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
extend-select = ["B", "Q"]
[flake8-quotes]
inline-quotes = "single"
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.arg("--config")
.arg(&ruff_toml)
.args(["--stdin-filename", "test.py"])
.arg("-")
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r###"
success: false
exit_code: 1
----- stdout -----
test.py:1:5: Q000 [*] Double quotes found but single quotes preferred
test.py:1:5: B005 Using `.strip()` with multi-character strings is misleading
test.py:1:19: Q000 [*] Double quotes found but single quotes preferred
Found 3 errors.
[*] 2 fixable with the `--fix` option.
----- stderr -----
"###);
Ok(())
}
#[test]
fn lint_options() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
[lint]
extend-select = ["B", "Q"]
[lint.flake8-quotes]
inline-quotes = "single"
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.arg("--config")
.arg(&ruff_toml)
.arg("-")
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:5: Q000 [*] Double quotes found but single quotes preferred
-:1:5: B005 Using `.strip()` with multi-character strings is misleading
-:1:19: Q000 [*] Double quotes found but single quotes preferred
Found 3 errors.
[*] 2 fixable with the `--fix` option.
----- stderr -----
"###);
Ok(())
}
/// Tests that configurations from the top-level and `lint` section are merged together.
#[test]
fn mixed_levels() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
extend-select = ["B", "Q"]
[lint.flake8-quotes]
inline-quotes = "single"
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.arg("--config")
.arg(&ruff_toml)
.arg("-")
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:5: Q000 [*] Double quotes found but single quotes preferred
-:1:5: B005 Using `.strip()` with multi-character strings is misleading
-:1:19: Q000 [*] Double quotes found but single quotes preferred
Found 3 errors.
[*] 2 fixable with the `--fix` option.
----- stderr -----
"###);
Ok(())
}
/// Tests that options in the `lint` section have higher precedence than top-level options (because they are more specific).
#[test]
fn precedence() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
[lint]
extend-select = ["B", "Q"]
[flake8-quotes]
inline-quotes = "double"
[lint.flake8-quotes]
inline-quotes = "single"
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.arg("--config")
.arg(&ruff_toml)
.arg("-")
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:5: Q000 [*] Double quotes found but single quotes preferred
-:1:5: B005 Using `.strip()` with multi-character strings is misleading
-:1:19: Q000 [*] Double quotes found but single quotes preferred
Found 3 errors.
[*] 2 fixable with the `--fix` option.
----- stderr -----
"###);
Ok(())
}
#[test]
fn exclude() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
extend-select = ["B", "Q"]
extend-exclude = ["out"]
[lint]
exclude = ["test.py", "generated.py"]
[lint.flake8-quotes]
inline-quotes = "single"
"#,
)?;
fs::write(
tempdir.path().join("main.py"),
r#"
from test import say_hy
if __name__ == "__main__":
say_hy("dear Ruff contributor")
"#,
)?;
    // Excluded by `lint.exclude`, but passed directly on the CLI, so it should still be linted
let test_path = tempdir.path().join("test.py");
fs::write(
&test_path,
r#"
def say_hy(name: str):
print(f"Hy {name}")"#,
)?;
fs::write(
tempdir.path().join("generated.py"),
r#"NUMBERS = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19
]
OTHER = "OTHER"
"#,
)?;
let out_dir = tempdir.path().join("out");
fs::create_dir(&out_dir)?;
fs::write(out_dir.join("a.py"), r#"a = "a""#)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.arg("check")
.args(STDIN_BASE_OPTIONS)
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
        // Explicitly pass test.py; it should be linted even though `lint.exclude` excludes it
.arg(test_path.file_name().unwrap())
        // Lint all other files in the directory; these should respect both the `exclude` and `lint.exclude` options
.arg("."), @r###"
success: false
exit_code: 1
----- stdout -----
main.py:4:16: Q000 [*] Double quotes found but single quotes preferred
main.py:5:12: Q000 [*] Double quotes found but single quotes preferred
test.py:3:15: Q000 [*] Double quotes found but single quotes preferred
Found 3 errors.
[*] 3 fixable with the `--fix` option.
----- stderr -----
"###);
Ok(())
}
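The comments above rely on explicitly-passed paths bypassing `lint.exclude`; the `force_exclude` test earlier in this diff covers the opposite case. A rough sketch of that case follows (hypothetical test body, assuming `force-exclude = true` makes exclusions apply even to paths named on the command line):

```rust
// Hypothetical test body: with `force-exclude`, even an explicitly-passed,
// excluded file should be skipped, so the F401 below is never reported.
#[test]
fn force_exclude_explicit_path() -> Result<()> {
    let tempdir = TempDir::new()?;
    fs::write(
        tempdir.path().join("ruff.toml"),
        r#"
force-exclude = true
[lint]
exclude = ["test.py"]
"#,
    )?;
    fs::write(tempdir.path().join("test.py"), "import os\n")?;
    let output = Command::new(get_cargo_bin(BIN_NAME))
        .current_dir(tempdir.path())
        .args(["check", "--no-cache", "--config", "ruff.toml"])
        .arg("test.py")
        .output()?;
    // If the exclusion is honored, no diagnostics are emitted and the exit code is 0.
    assert!(output.status.success());
    Ok(())
}
```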
#[test]
fn exclude_stdin() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
extend-select = ["B", "Q"]
[lint]
exclude = ["generated.py"]
[lint.flake8-quotes]
inline-quotes = "single"
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.arg("check")
.args(STDIN_BASE_OPTIONS)
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
.args(["--stdin-filename", "generated.py"])
.arg("-")
.pass_stdin(r#"
from test import say_hy
if __name__ == "__main__":
say_hy("dear Ruff contributor")
"#), @r###"
success: false
exit_code: 1
----- stdout -----
generated.py:4:16: Q000 [*] Double quotes found but single quotes preferred
generated.py:5:12: Q000 [*] Double quotes found but single quotes preferred
Found 2 errors.
[*] 2 fixable with the `--fix` option.
----- stderr -----
"###);
Ok(())
}
#[test]
fn line_too_long_width_override() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
line-length = 80
select = ["E501"]
[pycodestyle]
max-line-length = 100
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.arg("--config")
.arg(&ruff_toml)
.args(["--stdin-filename", "test.py"])
.arg("-")
.pass_stdin(r#"
# longer than 80, but less than 100
_ = "---------------------------------------------------------------------------亜亜亜亜亜亜"
# longer than 100
_ = "---------------------------------------------------------------------------亜亜亜亜亜亜亜亜亜亜亜亜亜亜"
"#), @r###"
success: false
exit_code: 1
----- stdout -----
test.py:5:91: E501 Line too long (109 > 100)
Found 1 error.
----- stderr -----
"###);
Ok(())
}
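The same override can presumably be written with the nested `lint` layout introduced in this diff; a sketch of the fixture only (key names assumed to mirror their deprecated top-level counterparts, with `line-length` staying at the top level):

```rust
// Sketch of the equivalent settings using the nested `lint` layout.
fs::write(
    &ruff_toml,
    r#"
line-length = 80
[lint]
select = ["E501"]
[lint.pycodestyle]
max-line-length = 100
"#,
)?;
```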
#[test]
fn per_file_ignores_stdin() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
extend-select = ["B", "Q"]
[lint.flake8-quotes]
inline-quotes = "single"
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.arg("check")
.args(STDIN_BASE_OPTIONS)
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
.args(["--stdin-filename", "generated.py"])
.args(["--per-file-ignores", "generated.py:Q"])
.arg("-")
.pass_stdin(r#"
import os
from test import say_hy
if __name__ == "__main__":
say_hy("dear Ruff contributor")
"#), @r###"
success: false
exit_code: 1
----- stdout -----
generated.py:2:8: F401 [*] `os` imported but unused
Found 1 error.
[*] 1 fixable with the `--fix` option.
----- stderr -----
"###);
Ok(())
}
#[test]
fn extend_per_file_ignores_stdin() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
extend-select = ["B", "Q"]
[lint.flake8-quotes]
inline-quotes = "single"
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.arg("check")
.args(STDIN_BASE_OPTIONS)
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
.args(["--stdin-filename", "generated.py"])
.args(["--extend-per-file-ignores", "generated.py:Q"])
.arg("-")
.pass_stdin(r#"
import os
from test import say_hy
if __name__ == "__main__":
say_hy("dear Ruff contributor")
"#), @r###"
success: false
exit_code: 1
----- stdout -----
generated.py:2:8: F401 [*] `os` imported but unused
Found 1 error.
[*] 1 fixable with the `--fix` option.
----- stderr -----
"###);
Ok(())
}
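Both tests drive the ignores through CLI flags; the equivalent configuration can also live in `ruff.toml`. A sketch of that fixture (the `lint.per-file-ignores` table name is an assumption here, not shown elsewhere in this diff):

```rust
// Sketch: the same per-file ignore expressed in `ruff.toml` rather than on the command line.
fs::write(
    &ruff_toml,
    r#"
[lint]
extend-select = ["B", "Q"]
[lint.flake8-quotes]
inline-quotes = "single"
[lint.per-file-ignores]
"generated.py" = ["Q"]
"#,
)?;
```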
/// Regression test for [#8858](https://github.com/astral-sh/ruff/issues/8858)
#[test]
fn parent_configuration_override() -> Result<()> {
let tempdir = TempDir::new()?;
let root_ruff = tempdir.path().join("ruff.toml");
fs::write(
root_ruff,
r#"
[lint]
select = ["ALL"]
"#,
)?;
let sub_dir = tempdir.path().join("subdirectory");
fs::create_dir(&sub_dir)?;
let subdirectory_ruff = sub_dir.join("ruff.toml");
fs::write(
subdirectory_ruff,
r#"
[lint]
ignore = ["D203", "D212"]
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(sub_dir)
.arg("check")
.args(STDIN_BASE_OPTIONS)
, @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
warning: No Python files found under the given path(s)
"###);
Ok(())
}

View File

@@ -39,8 +39,6 @@ fn check_project_include_defaults() {
[BASEPATH]/include-test/subdirectory/c.py
----- stderr -----
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `nested-project/pyproject.toml`:
- 'select' -> 'lint.select'
"###);
});
}

View File

@@ -1,9 +1,9 @@
---
source: crates/ruff/tests/integration_test.rs
source: crates/ruff_cli/tests/integration_test.rs
info:
program: ruff
args:
- rule
- "--explain"
- F401
---
success: true
@@ -25,15 +25,6 @@ import cycles. They also increase the cognitive load of reading the code.
If an import statement is used to check for the availability or existence
of a module, consider using `importlib.util.find_spec` instead.
If an import statement is used to re-export a symbol as part of a module's
public interface, consider using a "redundant" import alias, which
instructs Ruff (and other tools) to respect the re-export, and avoid
marking it as unused, as in:
```python
from module import member as member
```
## Example
```python
import numpy as np # unused import
@@ -60,11 +51,11 @@ else:
```
## Options
- `lint.ignore-init-module-imports`
- `pyflakes.extend-generics`
## References
- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)
- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)
- [Typing documentation: interface conventions](https://typing.readthedocs.io/en/latest/source/libraries.html#library-interface-public-and-private-symbols)
----- stderr -----

View File

@@ -1,5 +1,5 @@
---
source: crates/ruff/tests/integration_test.rs
source: crates/ruff_cli/tests/integration_test.rs
info:
program: ruff
args:

View File

@@ -11,43 +11,45 @@ repository = { workspace = true }
license = { workspace = true }
[dependencies]
ruff = { path = "../ruff" }
ruff_linter = { path = "../ruff_linter", features = ["schemars"] }
ruff_cli = { path = "../ruff_cli" }
ruff_diagnostics = { path = "../ruff_diagnostics" }
ruff_formatter = { path = "../ruff_formatter" }
ruff_linter = { path = "../ruff_linter", features = ["schemars"] }
ruff_notebook = { path = "../ruff_notebook" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_codegen = { path = "../ruff_python_codegen" }
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_notebook = { path = "../ruff_notebook" }
ruff_python_literal = { path = "../ruff_python_literal" }
ruff_python_parser = { path = "../ruff_python_parser" }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_workspace = { path = "../ruff_workspace", features = ["schemars"]}
anyhow = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
clap = { workspace = true }
ignore = { workspace = true }
imara-diff = { workspace = true }
indicatif = { workspace = true }
indicatif = "0.17.7"
itertools = { workspace = true }
libcst = { workspace = true }
once_cell = { workspace = true }
pretty_assertions = { workspace = true }
rayon = { workspace = true }
pretty_assertions = { version = "1.3.0" }
rayon = "1.8.0"
regex = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
similar = { workspace = true }
strum = { workspace = true }
tempfile = { workspace = true }
strum_macros = { workspace = true }
tempfile = "3.8.1"
toml = { workspace = true, features = ["parse"] }
tracing = { workspace = true }
tracing-indicatif = { workspace = true }
tracing-subscriber = { workspace = true, features = ["env-filter"] }
imara-diff = "0.1.5"
[dev-dependencies]
indoc = { workspace = true }
indoc = "2.0.4"
[features]
# Turn off rayon for profiling

Some files were not shown because too many files have changed in this diff.