Compare commits


6 Commits

Author / SHA1 / Message / Date
konstin  de239ace74  Add test cases  2023-09-22 15:50:34 +02:00
konstin  53b5121f30  .  2023-09-21 16:58:54 +02:00
konstin  be93983e8e  Basic nested range formatting  2023-09-21 15:23:17 +02:00
konstin  a1239b8f2d  Make basic range formatting work in vs code  2023-09-21 13:38:05 +02:00
konstin  d1b12acb3c  Add byte offset indexing  2023-09-21 13:37:52 +02:00
konstin  1aabf59f77  Formatter and parser refactoring ("I got confused and refactored a bit")  2023-09-21 13:28:59 +02:00
3666 changed files with 92882 additions and 282745 deletions


@@ -1,3 +1,37 @@
[alias]
dev = "run --package ruff_dev --bin ruff_dev"
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
[target.'cfg(all())']
rustflags = [
# CLIPPY LINT SETTINGS
# This is a workaround to configure lints for the entire workspace, pending the ability to configure this via TOML.
# See: `https://github.com/rust-lang/cargo/issues/5034`
# `https://github.com/EmbarkStudios/rust-ecosystem/issues/22#issuecomment-947011395`
"-Dunsafe_code",
"-Wclippy::pedantic",
# Allowed pedantic lints
"-Wclippy::char_lit_as_u8",
"-Aclippy::collapsible_else_if",
"-Aclippy::collapsible_if",
"-Aclippy::implicit_hasher",
"-Aclippy::match_same_arms",
"-Aclippy::missing_errors_doc",
"-Aclippy::missing_panics_doc",
"-Aclippy::module_name_repetitions",
"-Aclippy::must_use_candidate",
"-Aclippy::similar_names",
"-Aclippy::too_many_lines",
# Disallowed restriction lints
"-Wclippy::print_stdout",
"-Wclippy::print_stderr",
"-Wclippy::dbg_macro",
"-Wclippy::empty_drop",
"-Wclippy::empty_structs_with_brackets",
"-Wclippy::exit",
"-Wclippy::get_unwrap",
"-Wclippy::rc_buffer",
"-Wclippy::rc_mutex",
"-Wclippy::rest_pat_in_fully_bound_structs",
"-Wunreachable_pub"
]
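Since these flags live under `[target.'cfg(all())']`, they apply to every `rustc` invocation in the workspace; a plain clippy run (the same command the CI workflow below uses) already enforces the configured lint levels:

```shell
cargo clippy --workspace --all-targets --all-features -- -D warnings
```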


@@ -1,8 +0,0 @@
[profile.ci]
# Print output for failing tests as soon as they fail, and also at the end
# of the run (for easy scrollability).
failure-output = "immediate-final"
# Do not cancel the test run on the first failure.
fail-fast = false
status-level = "skip"
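For reference, a profile defined this way is selected when invoking nextest; the CI workflow further down does exactly this:

```shell
# Select the "ci" profile from .config/nextest.toml
cargo nextest run --all-features --profile ci
# or via the environment variable that the Windows test job sets
NEXTEST_PROFILE=ci cargo insta test --all-features --unreferenced reject --test-runner nextest
```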

.gitattributes

@@ -2,11 +2,6 @@
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf
crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
ruff.schema.json linguist-generated=true text=auto eol=lf
*.md.snap linguist-language=Markdown

.github/CODEOWNERS

@@ -7,9 +7,3 @@
# Jupyter
/crates/ruff_linter/src/jupyter/ @dhruvmanila
/crates/ruff_formatter/ @MichaReiser
/crates/ruff_python_formatter/ @MichaReiser
/crates/ruff_python_parser/ @MichaReiser
# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood


@@ -3,8 +3,6 @@ Thank you for taking the time to report an issue! We're glad to have you involved!
If you're filing a bug report, please consider including the following information:
* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
e.g. "RUF001", "unused variable", "Jupyter notebook"
* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
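As a sketch, a reproduction command following the last two bullets might look like (the path is a placeholder):

```shell
# --isolated ignores any local configuration, keeping the reproduction minimal
ruff /path/to/file.py --fix --isolated
```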

.github/dependabot.yml (new file)

@@ -0,0 +1,13 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
labels: ["internal"]
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "daily"
labels: ["internal"]

.github/release.yml (new file)

@@ -0,0 +1,28 @@
# https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes#configuring-automatically-generated-release-notes
changelog:
exclude:
labels:
- internal
- documentation
- formatter
categories:
- title: Breaking Changes
labels:
- breaking
- title: Rules
labels:
- rule
- autofix
- title: Settings
labels:
- configuration
- cli
- title: Bug Fixes
labels:
- bug
- title: Preview
labels:
- preview
- title: Other Changes
labels:
- "*"


@@ -1,60 +0,0 @@
{
$schema: "https://docs.renovatebot.com/renovate-schema.json",
dependencyDashboard: true,
suppressNotifications: ["prEditedNotification"],
extends: ["config:recommended"],
labels: ["internal"],
schedule: ["on Monday"],
separateMajorMinor: false,
prHourlyLimit: 10,
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
cargo: {
// See https://docs.renovatebot.com/configuration-options/#rangestrategy
rangeStrategy: "update-lockfile",
},
pep621: {
fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
},
npm: {
fileMatch: ["^playground/.*package\\.json$"],
},
"pre-commit": {
enabled: true,
},
packageRules: [
{
// Group upload/download artifact updates; their versions are interdependent
groupName: "Artifact GitHub Actions dependencies",
matchManagers: ["github-actions"],
matchPackagePatterns: ["actions/.*-artifact"],
description: "Weekly update of artifact-related GitHub Actions dependencies",
},
{
groupName: "pre-commit dependencies",
matchManagers: ["pre-commit"],
description: "Weekly update of pre-commit dependencies",
},
{
groupName: "NPM Development dependencies",
matchManagers: ["npm"],
matchDepTypes: ["devDependencies"],
description: "Weekly update of NPM development dependencies",
},
{
groupName: "Monaco",
matchManagers: ["npm"],
matchPackagePatterns: ["monaco"],
description: "Weekly update of the Monaco editor",
},
{
groupName: "strum",
matchManagers: ["cargo"],
matchPackagePatterns: ["strum"],
description: "Weekly update of strum dependencies",
},
],
vulnerabilityAlerts: {
commitMessageSuffix: "",
labels: ["internal", "security"],
},
}


@@ -23,19 +23,14 @@ jobs:
name: "Determine changes"
runs-on: ubuntu-latest
outputs:
# Flag that is raised when any code that affects the linter is changed
linter: ${{ steps.changed.outputs.linter_any_changed }}
# Flag that is raised when any code that affects the formatter is changed
formatter: ${{ steps.changed.outputs.formatter_any_changed }}
# Flag that is raised when any code is changed
# This is a superset of the linter and formatter
code: ${{ steps.changed.outputs.code_any_changed }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: tj-actions/changed-files@v44
- uses: tj-actions/changed-files@v39
id: changed
with:
files_yaml: |
@@ -48,8 +43,6 @@ jobs:
- "!crates/ruff_dev/**"
- "!crates/ruff_shrinking/**"
- scripts/*
- python/**
- .github/workflows/ci.yaml
formatter:
- Cargo.toml
@@ -64,19 +57,10 @@ jobs:
- crates/ruff_python_parser/**
- crates/ruff_dev/**
- scripts/*
- python/**
- .github/workflows/ci.yaml
code:
- "**/*"
- "!**/*.md"
- "!docs/**"
- "!assets/**"
cargo-fmt:
name: "cargo fmt"
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -86,9 +70,6 @@ jobs:
cargo-clippy:
name: "cargo clippy"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -97,79 +78,73 @@ jobs:
rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: "Clippy"
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
run: cargo clippy --workspace --all-targets --all-features -- -D warnings
- name: "Clippy (wasm)"
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings
cargo-test-linux:
name: "cargo test (linux)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
cargo-test:
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
runs-on: ${{ matrix.os }}
name: "cargo test | ${{ matrix.os }}"
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- run: pip install black[d]==23.1.0
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
- name: "Run tests (Ubuntu)"
if: ${{ matrix.os == 'ubuntu-latest' }}
run: cargo insta test --all --all-features --unreferenced reject
- name: "Run tests (Windows)"
if: ${{ matrix.os == 'windows-latest' }}
shell: bash
env:
NEXTEST_PROFILE: "ci"
run: cargo insta test --all-features --unreferenced reject --test-runner nextest
# We can't reject unreferenced snapshots on Windows because flake8_executable can't run on Windows
run: cargo insta test --all --all-features
- run: cargo test --package ruff_cli --test black_compatibility_test -- --ignored
# TODO: Skipped as it's currently broken. The resources were moved from the
# ruff_cli crate to the ruff crate, but this test was not updated.
if: false
# Check for broken links in the documentation.
- run: cargo doc --all --no-deps
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: ${{ matrix.os == 'ubuntu-latest' }}
with:
name: ruff
path: target/debug/ruff
cargo-test-windows:
name: "cargo test (windows)"
runs-on: windows-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
cargo-fuzz:
runs-on: ubuntu-latest
name: "cargo fuzz"
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
- uses: Swatinem/rust-cache@v2
with:
workspaces: "fuzz -> target"
- name: "Install cargo-fuzz"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
run: |
cargo nextest run --all-features --profile ci
cargo test --all-features --doc
tool: cargo-fuzz@0.11
- run: cargo fuzz build -s none
cargo-test-wasm:
name: "cargo test (wasm)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
name: "cargo test (wasm)"
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: 18
cache: "npm"
@@ -181,31 +156,9 @@ jobs:
cd crates/ruff_wasm
wasm-pack test --node
cargo-fuzz:
name: "cargo fuzz"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
with:
workspaces: "fuzz -> target"
- name: "Install cargo-fuzz"
uses: taiki-e/install-action@v2
with:
tool: cargo-fuzz@0.11.2
- run: cargo fuzz build -s none
scripts:
name: "test scripts"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 5
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -224,114 +177,47 @@ jobs:
name: "ecosystem"
runs-on: ubuntu-latest
needs:
- cargo-test-linux
- cargo-test
- determine_changes
# Only runs on pull requests, since that is the only way we can find the base version for comparison.
# Ecosystem check needs linter and/or formatter changes.
if: github.event_name == 'pull_request' && ${{
needs.determine_changes.outputs.code == 'true'
}}
timeout-minutes: 20
if: github.event_name == 'pull_request' && needs.determine_changes.outputs.linter == 'true'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: actions/download-artifact@v4
name: Download comparison Ruff binary
- uses: actions/download-artifact@v3
name: Download Ruff binary
id: ruff-target
with:
name: ruff
path: target/debug
- uses: dawidd6/action-download-artifact@v3
name: Download baseline Ruff binary
- uses: dawidd6/action-download-artifact@v2
name: Download base results
with:
name: ruff
branch: ${{ github.event.pull_request.base.ref }}
workflow: "ci.yaml"
check_artifacts: true
- name: Install ruff-ecosystem
run: |
pip install ./python/ruff-ecosystem
- name: Run `ruff check` stable ecosystem check
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
- name: Run ecosystem check
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
chmod +x ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
scripts/check_ecosystem.py ruff ${{ steps.ruff-target.outputs.download-path }}/ruff | tee ecosystem-result
cat ecosystem-result > $GITHUB_STEP_SUMMARY
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
cat ecosystem-result-check-stable > $GITHUB_STEP_SUMMARY
echo "### Linter (stable)" > ecosystem-result
cat ecosystem-result-check-stable >> ecosystem-result
echo "" >> ecosystem-result
- name: Run `ruff check` preview ecosystem check
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
cat ecosystem-result-check-preview > $GITHUB_STEP_SUMMARY
echo "### Linter (preview)" >> ecosystem-result
cat ecosystem-result-check-preview >> ecosystem-result
echo "" >> ecosystem-result
- name: Run `ruff format` stable ecosystem check
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
cat ecosystem-result-format-stable > $GITHUB_STEP_SUMMARY
echo "### Formatter (stable)" >> ecosystem-result
cat ecosystem-result-format-stable >> ecosystem-result
echo "" >> ecosystem-result
- name: Run `ruff format` preview ecosystem check
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
cat ecosystem-result-format-preview > $GITHUB_STEP_SUMMARY
echo "### Formatter (preview)" >> ecosystem-result
cat ecosystem-result-format-preview >> ecosystem-result
echo "" >> ecosystem-result
- name: Export pull request number
run: |
echo ${{ github.event.number }} > pr-number
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
name: Upload PR Number
with:
name: pr-number
path: pr-number
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
name: Upload Results
with:
name: ecosystem-result
@@ -340,27 +226,23 @@ jobs:
cargo-udeps:
name: "cargo udeps"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- name: "Install nightly Rust toolchain"
# Only pinned to make caching work; update freely
run: rustup toolchain install nightly-2023-10-15
run: rustup toolchain install nightly-2023-06-08
- uses: Swatinem/rust-cache@v2
- name: "Install cargo-udeps"
uses: taiki-e/install-action@cargo-udeps
- name: "Run cargo-udeps"
run: cargo +nightly-2023-10-15 udeps
run: cargo +nightly-2023-06-08 udeps
python-package:
name: "python package"
runs-on: ubuntu-latest
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -382,10 +264,9 @@ jobs:
pre-commit:
name: "pre-commit"
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Install Rust toolchain"
@@ -394,7 +275,7 @@ jobs:
- name: "Install pre-commit"
run: pip install pre-commit
- name: "Cache pre-commit"
uses: actions/cache@v4
uses: actions/cache@v3
with:
path: ~/.cache/pre-commit
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
@@ -411,15 +292,14 @@ jobs:
docs:
name: "mkdocs"
runs-on: ubuntu-latest
timeout-minutes: 10
env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
uses: webfactory/ssh-agent@v0.8.0
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
@@ -442,14 +322,13 @@ jobs:
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
run: mkdocs build --strict -f mkdocs.generated.yml
check-formatter-instability-and-black-similarity:
name: "formatter instabilities and black similarity"
check-formatter-ecosystem:
name: "Formatter ecosystem and progress checks"
runs-on: ubuntu-latest
needs: determine_changes
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -463,54 +342,8 @@ jobs:
- name: "Remove checkouts from cache"
run: rm -r target/progress_projects
check-ruff-lsp:
name: "test ruff-lsp"
runs-on: ubuntu-latest
timeout-minutes: 5
needs:
- cargo-test-linux
- determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: extractions/setup-just@v2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/checkout@v4
name: "Download ruff-lsp source"
with:
repository: "astral-sh/ruff-lsp"
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: actions/download-artifact@v4
name: Download development ruff binary
id: ruff-target
with:
name: ruff
path: target/debug
- name: Install ruff-lsp dependencies
run: |
just install
- name: Run ruff-lsp tests
run: |
# Setup development binary
pip uninstall --yes ruff
chmod +x ${{ steps.ruff-target.outputs.download-path }}/ruff
export PATH=${{ steps.ruff-target.outputs.download-path }}:$PATH
ruff version
just test
benchmarks:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- name: "Checkout Branch"
uses: actions/checkout@v4
@@ -525,26 +358,11 @@ jobs:
- uses: Swatinem/rust-cache@v2
# Codspeed comes with a very old cargo version (1.66) that resolves feature flags differently than what we use now.
# This can result in build failures; see https://github.com/astral-sh/ruff/pull/10700.
# There's a pending codspeed PR to upgrade to a newer cargo version, but until that's merged, we need to use the workaround below.
# https://github.com/CodSpeedHQ/codspeed-rust/pull/31
# Instead of `cargo codspeed build`, we call cargo build manually with the correct feature flags and RUSTC settings. We'll have to
# manually maintain the list of benchmarks to run with codspeed (the upside is that we can detect which benchmarks to build and run based on the changes).
# This is inspired by https://github.com/oxc-project/oxc/blob/a0532adc654039a0c7ead7b35216dfa0b0cb8e8f/.github/workflows/benchmark.yml
- name: "Build benchmarks"
env:
RUSTFLAGS: "-C debuginfo=2 -C strip=none -g --cfg codspeed"
shell: bash
# Build all benchmarks, copy the binary to the codspeed directory, remove any `*.d` files that might have been created.
run: |
cargo build --release -p ruff_benchmark --bench parser --bench linter --bench formatter --bench lexer --features=codspeed
mkdir -p ./target/codspeed/ruff_benchmark
cp ./target/release/deps/{lexer,parser,linter,formatter}* target/codspeed/ruff_benchmark/
rm -rf ./target/codspeed/ruff_benchmark/*.d
run: cargo codspeed build --features codspeed -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@v2
uses: CodSpeedHQ/action@v1
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}


@@ -20,10 +20,10 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
uses: webfactory/ssh-agent@v0.8.0
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
@@ -44,10 +44,10 @@ jobs:
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
run: mkdocs build --strict -f mkdocs.generated.yml
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.4.1
uses: cloudflare/wrangler-action@v3.1.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}

.github/workflows/flake8-to-ruff.yaml (new file)

@@ -0,0 +1,247 @@
name: "[flake8-to-ruff] Release"
on: workflow_dispatch
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
PACKAGE_NAME: flake8-to-ruff
CRATE_NAME: flake8_to_ruff
PYTHON_VERSION: "3.11"
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
macos-x86_64:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --out dist --sdist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel - x86_64"
run: |
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
macos-universal:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels - universal2"
uses: PyO3/maturin-action@v1
with:
args: --release --target universal2-apple-darwin --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel - universal2"
run: |
pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
windows:
runs-on: windows-latest
strategy:
matrix:
target: [x64, x86]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.target }}
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
shell: bash
run: |
python -m pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
linux:
runs-on: ubuntu-latest
strategy:
matrix:
target: [x86_64, i686]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
if: matrix.target == 'x86_64'
run: |
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
linux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
target: [aarch64, armv7, s390x, ppc64le, ppc64]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- uses: uraimo/run-on-arch-action@v2
if: matrix.target != 'ppc64'
name: Install built wheel
with:
arch: ${{ matrix.target }}
distro: ubuntu20.04
githubToken: ${{ github.token }}
install: |
apt-get update
apt-get install -y --no-install-recommends python3 python3-pip
pip3 install -U pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
musllinux:
runs-on: ubuntu-latest
strategy:
matrix:
target:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
with:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add py3-pip
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
musllinux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
platform:
- target: aarch64-unknown-linux-musl
arch: aarch64
- target: armv7-unknown-linux-musleabihf
arch: armv7
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- uses: uraimo/run-on-arch-action@v2
name: Install built wheel
with:
arch: ${{ matrix.platform.arch }}
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add py3-pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
release:
name: Release
runs-on: ubuntu-latest
needs:
- macos-universal
- macos-x86_64
- windows
- linux
- linux-cross
- musllinux
- musllinux-cross
steps:
- uses: actions/download-artifact@v3
with:
name: wheels
- uses: actions/setup-python@v4
- name: "Publish to PyPi"
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.FLAKE8_TO_RUFF_TOKEN }}
run: |
pip install --upgrade twine
twine upload --skip-existing *


@@ -20,7 +20,7 @@ jobs:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: 18
cache: "npm"
@@ -40,9 +40,8 @@ jobs:
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.4.1
uses: cloudflare/wrangler-action@v3.1.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
# `github.head_ref` is only set during pull requests; for manual runs or tags, we use `main` to deploy to production
command: pages deploy playground/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
command: pages deploy playground/dist --project-name=ruff-playground --branch ${GITHUB_HEAD_REF} --commit-hash ${GITHUB_SHA}


@@ -1,4 +1,4 @@
name: Ecosystem check comment
name: PR Check Comment
on:
workflow_run:
@@ -17,14 +17,14 @@ jobs:
comment:
runs-on: ubuntu-latest
steps:
- uses: dawidd6/action-download-artifact@v3
name: Download pull request number
- uses: dawidd6/action-download-artifact@v2
name: Download PR Number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
- name: Parse pull request number
- name: Extract PR Number
id: pr-number
run: |
if [[ -f pr-number ]]
@@ -32,8 +32,8 @@ jobs:
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
fi
- uses: dawidd6/action-download-artifact@v3
name: "Download ecosystem results"
- uses: dawidd6/action-download-artifact@v2
name: "Download Ecosystem Result"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
with:
@@ -41,18 +41,15 @@ jobs:
workflow: ci.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/ecosystem
workflow_conclusion: completed
if_no_artifact_found: ignore
- name: Generate comment content
- name: Generate Comment
id: generate-comment
if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
run: |
# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment ecosystem -->' >> comment.txt
echo '## PR Check Results' >> comment.txt
echo '## `ruff-ecosystem` results' >> comment.txt
echo "### Ecosystem" >> comment.txt
cat pr/ecosystem/ecosystem-result >> comment.txt
echo "" >> comment.txt
@@ -60,18 +57,18 @@ jobs:
cat comment.txt >> $GITHUB_OUTPUT
echo 'EOF' >> $GITHUB_OUTPUT
- name: Find existing comment
uses: peter-evans/find-comment@v3
- name: Find Comment
uses: peter-evans/find-comment@v2
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment ecosystem -->"
body-includes: PR Check Results
- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@v4
uses: peter-evans/create-or-update-comment@v3
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}


@@ -36,7 +36,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -48,13 +48,14 @@ jobs:
args: --out dist
- name: "Test sdist"
run: |
rustup default $(cat rust-toolchain)
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload sdist"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-sdist
name: wheels
path: dist
macos-x86_64:
@@ -63,7 +64,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -73,26 +74,26 @@ jobs:
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel - x86_64"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-macos-x86_64
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz
ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-macos-x86_64
name: binaries
path: |
*.tar.gz
*.sha256
@@ -103,7 +104,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -112,26 +113,26 @@ jobs:
- name: "Build wheels - universal2"
uses: PyO3/maturin-action@v1
with:
args: --release --locked --target universal2-apple-darwin --out dist
args: --release --target universal2-apple-darwin --out dist
- name: "Test wheel - universal2"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-aarch64-apple-darwin
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz
ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-aarch64-apple-darwin
name: binaries
path: |
*.tar.gz
*.sha256
@@ -151,7 +152,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.platform.arch }}
@@ -161,7 +162,7 @@ jobs:
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel"
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
shell: bash
@@ -170,20 +171,20 @@ jobs:
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.platform.target }}
name: wheels
path: dist
- name: "Archive binary"
shell: bash
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.platform.target }}
name: binaries
path: |
*.zip
*.sha256
@@ -199,7 +200,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -210,7 +211,7 @@ jobs:
with:
target: ${{ matrix.target }}
manylinux: auto
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel"
if: ${{ startsWith(matrix.target, 'x86_64') }}
run: |
@@ -218,19 +219,19 @@ jobs:
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.target }}
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.target }}
name: binaries
path: |
*.tar.gz
*.sha256
@@ -251,18 +252,14 @@ jobs:
arch: s390x
- target: powerpc64le-unknown-linux-gnu
arch: ppc64le
# see https://github.com/astral-sh/ruff/issues/10073
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: powerpc64-unknown-linux-gnu
arch: ppc64
# see https://github.com/astral-sh/ruff/issues/10073
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -273,7 +270,7 @@ jobs:
target: ${{ matrix.platform.target }}
manylinux: auto
docker-options: ${{ matrix.platform.maturin_docker_options }}
args: --release --locked --out dist
args: --release --out dist
- uses: uraimo/run-on-arch-action@v2
if: matrix.platform.arch != 'ppc64'
name: Test wheel
@@ -289,19 +286,19 @@ jobs:
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.platform.target }}
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.platform.target }}
name: binaries
path: |
*.tar.gz
*.sha256
@@ -317,7 +314,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -328,7 +325,7 @@ jobs:
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
@@ -336,24 +333,24 @@ jobs:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add python3
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/ruff check --help
apk add py3-pip
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.target }}
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.target }}
name: binaries
path: |
*.tar.gz
*.sha256
@@ -373,7 +370,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -383,7 +380,7 @@ jobs:
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
args: --release --out dist
docker-options: ${{ matrix.platform.maturin_docker_options }}
- uses: uraimo/run-on-arch-action@v2
name: Test wheel
@@ -392,25 +389,24 @@ jobs:
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add python3
apk add py3-pip
run: |
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/ruff check --help
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff check --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.platform.target }}
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.platform.target }}
name: binaries
path: |
*.tar.gz
*.sha256
@@ -467,11 +463,10 @@ jobs:
# For pypi trusted publishing
id-token: write
steps:
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
pattern: wheels-*
name: wheels
path: wheels
merge-multiple: true
- name: Publish to PyPi
uses: pypa/gh-action-pypi-publish@release/v1
with:
@@ -511,74 +506,17 @@ jobs:
# For GitHub release publishing
contents: write
steps:
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
pattern: binaries-*
name: binaries
path: binaries
merge-multiple: true
- name: "Publish to GitHub"
uses: softprops/action-gh-release@v2
uses: softprops/action-gh-release@v1
with:
draft: true
files: binaries/*
tag_name: v${{ inputs.tag }}
docker-publish:
# This action doesn't need to wait on any other task; it's easy to re-tag if something failed, and we're validating
# the tag here as well
name: Push Docker image ghcr.io/astral-sh/ruff
runs-on: ubuntu-latest
environment:
name: release
permissions:
# For the docker push
packages: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/astral-sh/ruff
- name: Check tag consistency
# Unlike validate-tag, we don't check whether the commit is on the main branch, but that seems good enough since we can
# change Docker tags
if: ${{ inputs.tag }}
run: |
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${{ inputs.tag }}" != "${version}" ]; then
echo "The input tag does not match the version from pyproject.toml:" >&2
echo "${{ inputs.tag }}" >&2
echo "${version}" >&2
exit 1
else
echo "Releasing ${version}"
fi
- name: "Build and push Docker image"
uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm64
# Reuse the builder
cache-from: type=gha
cache-to: type=gha,mode=max
push: ${{ inputs.tag != '' }}
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ inputs.tag || 'dry-run' }}
labels: ${{ steps.meta.outputs.labels }}
# After the release has been published, we update downstream repositories.
# This is separate because if this step fails, the release is still fine; we just need to trigger some workflows manually
update-dependents:
@@ -587,7 +525,7 @@ jobs:
needs: publish-release
steps:
- name: "Update pre-commit mirror"
uses: actions/github-script@v7
uses: actions/github-script@v6
with:
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
script: |

.gitignore

@@ -92,7 +92,6 @@ coverage.xml
.hypothesis/
.pytest_cache/
cover/
repos/
# Translations
*.mo
@@ -209,9 +208,3 @@ cython_debug/
# VIM
.*.sw?
.sw?
# Custom re-inclusions for the resolver test cases
!crates/ruff_python_resolver/resources/test/airflow/venv/
!crates/ruff_python_resolver/resources/test/airflow/venv/lib
!crates/ruff_python_resolver/resources/test/airflow/venv/lib/python3.11/site-packages/_watchdog_fsevents.cpython-311-darwin.so
!crates/ruff_python_resolver/resources/test/airflow/venv/lib/python3.11/site-packages/orjson/orjson.cpython-311-darwin.so


@@ -13,8 +13,3 @@ MD041: false
# MD013/line-length
MD013: false
# MD024/no-duplicate-heading
MD024:
# Allow when nested under different parents e.g. CHANGELOG.md
siblings_only: true


@@ -4,7 +4,7 @@ exclude: |
(?x)^(
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
crates/ruff/resources/.*|
crates/ruff_cli/resources/.*|
crates/ruff_python_formatter/resources/.*|
crates/ruff_python_formatter/tests/snapshots/.*|
crates/ruff_python_resolver/resources/.*|
@@ -13,35 +13,24 @@ exclude: |
repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.16
rev: v0.12.1
hooks:
- id: validate-pyproject
- repo: https://github.com/executablebooks/mdformat
rev: 0.7.17
rev: 0.7.16
hooks:
- id: mdformat
additional_dependencies:
- mdformat-mkdocs
- mdformat-admon
exclude: |
(?x)^(
docs/formatter/black\.md
| docs/\w+\.md
)$
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.39.0
rev: v0.33.0
hooks:
- id: markdownlint-fix
exclude: |
(?x)^(
docs/formatter/black\.md
| docs/\w+\.md
)$
- repo: https://github.com/crate-ci/typos
rev: v1.20.4
rev: v1.14.12
hooks:
- id: typos
@@ -53,13 +42,10 @@ repos:
language: system
types: [rust]
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.5
hooks:
- id: ruff-format
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
name: ruff
entry: cargo run --bin ruff -- check --no-cache --force-exclude --fix --exit-non-zero-on-fix
language: system
types_or: [python, pyi]
require_serial: true
exclude: |
@@ -68,9 +54,15 @@ repos:
crates/ruff_python_formatter/resources/.*
)$
# Black
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black
# Prettier
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0
rev: v3.0.0
hooks:
- id: prettier
types: [yaml]


@@ -1,123 +1,5 @@
# Breaking Changes
## 0.3.0
### Ruff 2024.2 style
The formatter now formats code according to the Ruff 2024.2 style guide. Read the [changelog](./CHANGELOG.md#030) for a detailed list of stabilized style changes.
### `isort`: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))
Previously, Ruff used one or two blank lines (or the number configured by `isort.lines-after-imports`) after imports in typing stub files (`.pyi` files).
The [typing style guide for stubs](https://typing.readthedocs.io/en/latest/source/stubs.html#style-guide) recommends using at most 1 blank line for grouping.
As of this release, `isort` now always uses one blank line after imports in stub files, the same as the formatter.
### `build` is no longer excluded by default ([#10093](https://github.com/astral-sh/ruff/pull/10093))
Ruff maintains a list of directories and files that are excluded by default. This list now consists of the following patterns:
- `.bzr`
- `.direnv`
- `.eggs`
- `.git`
- `.git-rewrite`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `dist`
- `node_modules`
- `site-packages`
- `venv`
Previously, the `build` directory was included in this list. However, `build` is a fairly common directory
name, and excluding it by default caused confusion. Ruff no longer excludes `build` unless it is excluded by a `.gitignore` file
or listed in `extend-exclude`.
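A minimal sketch of opting back in to the old exclusion on the command line (the same setting can live under `extend-exclude` in `pyproject.toml`):

```shell
# Re-exclude build output explicitly
ruff check --extend-exclude build .
```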
### `--format` is no longer a valid `rule` or `linter` command option
Previously, `ruff rule` and `ruff linter` accepted the `--format <FORMAT>` option as an alias for `--output-format`. Ruff no longer
supports this alias. Please use `ruff rule --output-format <FORMAT>` and `ruff linter --output-format <FORMAT>` instead.
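For example (using `E501` purely as an illustration):

```shell
# No longer supported:
ruff rule E501 --format json
# Use instead:
ruff rule E501 --output-format json
```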
## 0.1.9
### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))
Ruff maintains a list of default exclusions, which now consists of the following patterns:
- `.bzr`
- `.direnv`
- `.eggs`
- `.git-rewrite`
- `.git`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `build`
- `dist`
- `node_modules`
- `site-packages`
- `venv`
Previously, the `site-packages` directory was not excluded by default. While `site-packages` tends
to be excluded anyway by virtue of the `.venv` exclusion, this may not be the case when using Ruff
from VS Code outside a virtual environment.
## 0.1.0
### The deprecated `format` setting has been removed
Ruff previously used the `format` setting, `--format` CLI option, and `RUFF_FORMAT` environment variable to
configure the output format of the CLI. This usage was deprecated in `v0.0.291` — the `format` setting is now used
to control Ruff's code formatting. As of this release:
- The `format` setting cannot be used to configure the output format, use `output-format` instead
- The `RUFF_FORMAT` environment variable is ignored, use `RUFF_OUTPUT_FORMAT` instead
- The `--format` option has been removed from `ruff check`, use `--output-format` instead
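In practice, both replacements look like this:

```shell
# CLI flag: --format was removed in favor of --output-format
ruff check --output-format json path/to/file.py
# Environment variable: RUFF_FORMAT is ignored in favor of RUFF_OUTPUT_FORMAT
RUFF_OUTPUT_FORMAT=json ruff check path/to/file.py
```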
### Unsafe fixes are not applied by default ([#7769](https://github.com/astral-sh/ruff/pull/7769))
Ruff labels fixes as "safe" and "unsafe". The meaning and intent of your code will be retained when applying safe
fixes, but the meaning could be changed when applying unsafe fixes. Previously, unsafe fixes were always displayed
and applied when fixing was enabled. Now, unsafe fixes are hidden by default and not applied. The `--unsafe-fixes`
flag or `unsafe-fixes` configuration option can be used to enable unsafe fixes.
See the [docs](https://docs.astral.sh/ruff/configuration/#fix-safety) for details.
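For example:

```shell
# Safe fixes only (the new default):
ruff check --fix .
# Opt in to unsafe fixes as well:
ruff check --fix --unsafe-fixes .
```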
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
Previously, Ruff enabled all implemented rules in Pycodestyle (`E`) by default. Ruff now only includes the
Pycodestyle prefixes `E4`, `E7`, and `E9` to exclude rules that conflict with automatic formatters. Consequently,
the stable rule set no longer includes `line-too-long` (`E501`) and `mixed-spaces-and-tabs` (`E101`). Other
excluded Pycodestyle rules include whitespace enforcement in `E1` and `E2`; these rules are currently in preview, and are already omitted by default.
This change only affects those using Ruff under its default rule set. Users that include `E` in their `select` will experience no change in behavior.
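Concretely, affected users can opt back in by selecting the whole category:

```shell
# Re-enable all stable pycodestyle rules, including E501 and E101
ruff check --select E .
```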
## 0.0.288
### Remove support for emoji identifiers ([#7212](https://github.com/astral-sh/ruff/pull/7212))

File diff suppressed because it is too large.


@@ -72,7 +72,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
<charlie.r.marsh@gmail.com>.
charlie.r.marsh@gmail.com.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the


@@ -26,25 +26,30 @@ Welcome! We're happy to have you here. Thank you in advance for your contribution!
- [`cargo dev`](#cargo-dev)
- [Subsystems](#subsystems)
- [Compilation Pipeline](#compilation-pipeline)
- [Import Categorization](#import-categorization)
- [Project root](#project-root)
- [Package root](#package-root)
- [Import categorization](#import-categorization-1)
## The Basics
Ruff welcomes contributions in the form of pull requests.
Ruff welcomes contributions in the form of Pull Requests.
For small changes (e.g., bug fixes), feel free to submit a PR.
For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
You can also join us on [Discord](https://discord.com/invite/astral-sh) to discuss your idea with the
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5) to discuss your idea with the
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
that are ready for contributions.
If you're looking for a place to start, we recommend implementing a new lint rule (see:
[_Adding a new lint rule_](#example-adding-a-new-lint-rule)), which will allow you to learn from and
pattern-match against the examples in the existing codebase. Many lint rules are inspired by
existing Python plugins, which can be used as a reference implementation.
As a concrete example: consider taking on one of the rules from the [`flake8-pyi`](https://github.com/astral-sh/ruff/issues/848)
plugin, and looking to the originating [Python source](https://github.com/PyCQA/flake8-pyi) for
guidance.
If you have suggestions on how we might improve the contributing documentation, [let us know](https://github.com/astral-sh/ruff/discussions/5693)!
### Prerequisites
@@ -58,7 +63,7 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
cargo install cargo-insta
```
And you'll need pre-commit to run some validation checks:
and pre-commit to run some validation checks:
```shell
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
@@ -71,22 +76,12 @@ when making a commit:
pre-commit install
```
We recommend [nextest](https://nexte.st/) to run Ruff's test suite (via `cargo nextest run`),
though it's not strictly necessary:
```shell
cargo install cargo-nextest --locked
```
Throughout this guide, any usages of `cargo test` can be replaced with `cargo nextest run`,
if you choose to install `nextest`.
### Development
After cloning the repository, run Ruff locally from the repository root with:
```shell
cargo run -p ruff -- check /path/to/file.py --no-cache
cargo run -p ruff_cli -- check /path/to/file.py --no-cache
```
Prior to opening a pull request, ensure that your code has been auto-formatted,
@@ -98,7 +93,7 @@ RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
```
These checks will run on GitHub Actions when you open your pull request, but running them locally
These checks will run on GitHub Actions when you open your Pull Request, but running them locally
will save you time and expedite the merge process.
Note that many code changes also require updating the snapshot tests, which is done interactively
@@ -108,14 +103,7 @@ after running `cargo test` like so:
cargo insta review
```
If your pull request relates to a specific lint rule, include the category and rule code in the
title, as in the following examples:
- \[`flake8-bugbear`\] Avoid false positive for usage after `continue` (`B031`)
- \[`flake8-simplify`\] Detect implicit `else` cases in `needless-bool` (`SIM103`)
- \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)
Your pull request will be reviewed by a maintainer, which may involve a few rounds of iteration
Your Pull Request will be reviewed by a maintainer, which may involve a few rounds of iteration
prior to merging.
### Project Structure
@@ -123,16 +111,16 @@ prior to merging.
Ruff is structured as a monorepo with a [flat crate structure](https://matklad.github.io/2021/08/22/large-rust-workspaces.html),
such that all crates are contained in a flat `crates` directory.
The vast majority of the code, including all lint rules, lives in the `ruff_linter` crate (located
at `crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
The vast majority of the code, including all lint rules, lives in the `ruff` crate (located at
`crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
At the time of writing, the repository includes the following crates:
At time of writing, the repository includes the following crates:
- `crates/ruff_linter`: library crate containing all lint rules and the core logic for running them.
If you're working on a rule, this is the crate for you.
- `crates/ruff_benchmark`: binary crate for running micro-benchmarks.
- `crates/ruff_cache`: library crate for caching lint results.
- `crates/ruff`: binary crate containing Ruff's command-line interface.
- `crates/ruff_cli`: binary crate containing Ruff's command-line interface.
- `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g.,
`cargo dev generate-all`), see the [`cargo dev`](#cargo-dev) section below.
- `crates/ruff_diagnostics`: library crate for the rule-independent abstractions in the lint
@@ -182,8 +170,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
statements, like imports) or `analyze/expression.rs` (if your rule is based on analyzing
expressions, like function calls).
1. Map the violation struct to a rule code in `crates/ruff_linter/src/codes.rs` (e.g., `B011`). New rules
should be added in `RuleGroup::Preview`.
1. Map the violation struct to a rule code in `crates/ruff_linter/src/codes.rs` (e.g., `B011`).
1. Add proper [testing](#rule-testing-fixtures-and-snapshots) for your rule.
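For orientation, the violation struct itself is usually just a marker type with a message. The following is a hedged sketch rather than code copied from the repository: `RedundantBackslash` is used purely as an illustrative name here, and the exact macro and trait paths can differ between versions:

```rust
use ruff_diagnostics::Violation;
use ruff_macros::{derive_message_formats, violation};

/// Illustrative violation type; real definitions live under
/// `crates/ruff_linter/src/rules/<plugin>/rules/`.
#[violation]
pub struct RedundantBackslash;

impl Violation for RedundantBackslash {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("Redundant backslash")
    }
}
```

The analyzer function then constructs a `Diagnostic::new(RedundantBackslash, range)` for each offending node, which is how the rule's results surface to the user.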
@@ -197,14 +184,11 @@ and calling out to lint rule analyzer functions as it goes.
If you need to inspect the AST, you can run `cargo dev print-ast` with a Python file. Grep
for the `Diagnostic::new` invocations to understand how other, similar rules are implemented.
Once you're satisfied with your code, add tests for your rule
(see: [rule testing](#rule-testing-fixtures-and-snapshots)), and regenerate the documentation and
associated assets (like our JSON Schema) with `cargo dev generate-all`.
Once you're satisfied with your code, add tests for your rule. See [rule testing](#rule-testing-fixtures-and-snapshots)
for more details.
Finally, submit a pull request, and include the category, rule name, and rule code in the title, as
in:
> \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)
Finally, regenerate the documentation and other generated assets (like our JSON Schema) with:
`cargo dev generate-all`.
#### Rule naming convention
@@ -220,7 +204,7 @@ As such, rule names should...
For example, `AssertFalse` guards against `assert False` statements.
- _Not_ contain instructions on how to fix the violation, which instead belong in the rule
documentation and the `fix_title`.
documentation and the `autofix_title`.
- _Not_ contain a redundant prefix, like `Disallow` or `Banned`, which are already implied by the
convention.
@@ -246,7 +230,7 @@ Once you've completed the code for the rule itself, you can define tests with th
For example, if you're adding a new rule named `E402`, you would run:
```shell
cargo run -p ruff -- check crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py --no-cache --preview --select E402
cargo run -p ruff_cli -- check crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402
```
**Note:** Only a subset of rules are enabled by default. When testing a new rule, ensure that
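For reference, fixture tests are typically wired up with a small `test_case`-driven harness along the following lines. This is a hedged sketch: helper names such as `test_path`, `assert_messages!`, and `LinterSettings::for_rule` vary between versions, so mirror a neighboring test module rather than copying this verbatim:

```rust
use std::path::Path;

use anyhow::Result;
use test_case::test_case;

use crate::registry::Rule;
use crate::test::test_path;
use crate::{assert_messages, settings};

#[test_case(Rule::ModuleImportNotAtTopOfFile, Path::new("E402.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
    // Snapshot names combine the rule's code with the fixture file name.
    let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
    // Run the linter over the fixture with only the rule under test enabled.
    let diagnostics = test_path(
        Path::new("pycodestyle").join(path).as_path(),
        &settings::LinterSettings::for_rule(rule_code),
    )?;
    assert_messages!(snapshot, diagnostics);
    Ok(())
}
```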
@@ -267,7 +251,7 @@ Once you've completed the code for the rule itself, you can define tests with th
Ruff's user-facing settings live in a few different places.
First, the command-line options are defined via the `Args` struct in `crates/ruff/src/args.rs`.
First, the command-line options are defined via the `Args` struct in `crates/ruff_cli/src/args.rs`.
Second, the `pyproject.toml` options are defined in `crates/ruff_workspace/src/options.rs` (via the
`Options` struct), `crates/ruff_workspace/src/configuration.rs` (via the `Configuration` struct),
@@ -310,14 +294,14 @@ To preview any changes to the documentation locally:
```shell
# For contributors.
mkdocs serve -f mkdocs.public.yml
mkdocs serve -f mkdocs.generated.yml
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
mkdocs serve -f mkdocs.insiders.yml
```
The documentation should then be available locally at
[http://127.0.0.1:8000/ruff/](http://127.0.0.1:8000/ruff/).
[http://127.0.0.1:8000/docs/](http://127.0.0.1:8000/docs/).
## Release Process
@@ -330,69 +314,47 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
### Creating a new release
1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
1. Run `./scripts/release/bump.sh`; this command will:
- Generate a temporary virtual environment with `rooster`
- Generate a changelog entry in `CHANGELOG.md`
- Update versions in `pyproject.toml` and `Cargo.toml`
- Update references to versions in the `README.md` and documentation
- Display contributors for the release
1. The changelog should then be editorialized for consistency
- Often labels will be missing from pull requests, so they will need to be manually organized into the proper section
- Changes should be edited to be user-facing descriptions, avoiding internal details
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
1. Run `cargo check`. This should update the lock file with new versions.
1. Create a pull request with the changelog and version updates
1. Update the version with `rg 0.0.269 --files-with-matches | xargs sed -i 's/0.0.269/0.0.270/g'`
1. Update `BREAKING_CHANGES.md`
1. Create a PR with the version and `BREAKING_CHANGES.md` updated
1. Merge the PR
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yaml) with:
- The new version number (without starting `v`)
- The commit hash of the merged release pull request on `main`
1. Run the release workflow with the version number (without starting `v`) as input. Make sure
main has your merged PR as the last commit
1. The release workflow will do the following:
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
uploaded anything, so you can restart after pushing a fix.
1. Upload to PyPI.
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/charliermarsh/ruff/issues/4468)).
1. Attach artifacts to draft GitHub release
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
downstream jobs manually if needed.
1. Publish the GitHub release
1. Open the draft release in the GitHub release section
1. Copy the changelog for the release into the GitHub release
- See previous releases for formatting of section headers
1. Append the contributors from the `bump.sh` script
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
1. An update is needed when
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
1. Once run successfully, you should follow the link in the output to create a PR.
1. Create release notes in GitHub UI and promote from draft.
1. If needed, [update the schemastore](https://github.com/charliermarsh/ruff/blob/main/scripts/update_schemastore.py)
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
## Ecosystem CI
GitHub Actions will run your changes against a number of real-world projects from GitHub and
report on any linter or formatter differences. You can also run those checks locally via:
report on any diagnostic differences. You can also run those checks locally via:
```shell
pip install -e ./python/ruff-ecosystem
ruff-ecosystem check ruff "./target/debug/ruff"
ruff-ecosystem format ruff "./target/debug/ruff"
python scripts/check_ecosystem.py path/to/your/ruff path/to/older/ruff
```
See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
You can also run the Ecosystem CI check in a Docker container across a larger set of projects by
downloading the [`known-github-tomls.json`](https://github.com/akx/ruff-usage-aggregate/blob/master/data/known-github-tomls.jsonl)
as `github_search.jsonl` and following the instructions in [scripts/Dockerfile.ecosystem](https://github.com/astral-sh/ruff/blob/main/scripts/Dockerfile.ecosystem).
Note that this check will take a while to run.
## Benchmarking and Profiling
We have several ways of benchmarking and profiling Ruff:
- Our main performance benchmark comparing Ruff with other tools on the CPython codebase
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Microbenchmarks that run the linter or the formatter on individual files. These run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects
> \[!NOTE\]
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
> applications, like web browsers). You may also want to switch your CPU to a "performance"
> mode, if it exists, especially when benchmarking short-lived processes.
### CPython Benchmark
First, clone [CPython](https://github.com/python/cpython). It's a large and diverse Python codebase,
@@ -538,10 +500,10 @@ if the benchmark improved/regressed compared to that baseline.
```shell
# Run once on your "baseline" code
cargo bench -p ruff_benchmark -- --save-baseline=main
cargo benchmark --save-baseline=main
# Then iterate with
cargo bench -p ruff_benchmark -- --baseline=main
cargo benchmark --baseline=main
```
#### PR Summary
@@ -551,10 +513,10 @@ This is useful to illustrate the improvements of a PR.
```shell
# On main
cargo bench -p ruff_benchmark -- --save-baseline=main
cargo benchmark --save-baseline=main
# After applying your changes
cargo bench -p ruff_benchmark -- --save-baseline=pr
cargo benchmark --save-baseline=pr
critcmp main pr
```
@@ -567,10 +529,10 @@ cargo install critcmp
#### Tips
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
- Use `cargo benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark linter/pydantic`
to only run the pydantic tests.
- Use `cargo benchmark --quiet` for a more cleaned up output (without statistical relevance)
- Use `cargo benchmark --quick` to get faster results (more prone to noise)
### Profiling Projects
@@ -581,10 +543,10 @@ examples.
#### Linux
Install `perf` and build `ruff_benchmark` with the `profiling` profile and then run it with perf
Install `perf` and build `ruff_benchmark` with the `release-debug` profile and then run it with perf
```shell
cargo bench -p ruff_benchmark --no-run --profile=profiling && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=profiling -- --profile-time=1
cargo bench -p ruff_benchmark --no-run --profile=release-debug && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=release-debug -- --profile-time=1
```
You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to
@@ -592,8 +554,8 @@ gather enough samples for a good flamegraph (change the 999, the sample rate, an
of checks, to your liking)
```shell
cargo build --bin ruff_dev --profile=profiling
perf record -g -F 999 target/profiling/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
cargo build --bin ruff_dev --profile=release-debug
perf record -g -F 999 target/release-debug/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
```
Then convert the recorded profile
@@ -623,7 +585,7 @@ cargo install cargo-instruments
Then run the profiler with
```shell
cargo instruments -t time --bench linter --profile profiling -p ruff_benchmark -- --profile-time=1
cargo instruments -t time --bench linter --profile release-debug -p ruff_benchmark -- --profile-time=1
```
- `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc`
@@ -638,7 +600,7 @@ Otherwise, follow the instructions from the linux section.
utils with it:
- `cargo dev print-ast <file>`: Print the AST of a python file using the
[RustPython parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser) that is
[RustPython parser](https://github.com/astral-sh/RustPython-Parser/tree/main/parser) that is
mainly used in Ruff. For `if True: pass # comment`, you can see the syntax tree, the byte offsets
for start and stop of each node and also how the `:` token, the comment and whitespace are not
represented anymore:
@@ -814,8 +776,8 @@ To understand Ruff's import categorization system, we first need to define two c
"project root".)
- "Package root": The top-most directory defining the Python package that includes a given Python
file. To find the package root for a given Python file, traverse up its parent directories until
you reach a parent directory that doesn't contain an `__init__.py` file (and isn't in a subtree
marked as a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
you reach a parent directory that doesn't contain an `__init__.py` file (and isn't marked as
a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
just before that, i.e., the first directory in the package.
For example, given:
@@ -914,5 +876,5 @@ By default, `src` is set to the project root. In the above example, we'd want to
`src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
explicitly.
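
The traversal described above is mechanical enough to sketch directly. The function below is a simplified illustration of the package-root lookup, not Ruff's actual implementation; in particular, it ignores the `namespace-packages` setting mentioned earlier:

```rust
use std::path::{Path, PathBuf};

/// Walk up from a Python file until a parent directory no longer contains
/// an `__init__.py`, and return the last directory that did contain one:
/// the "package root".
fn package_root(file: &Path) -> Option<PathBuf> {
    let mut package: Option<&Path> = None;
    let mut dir = file.parent();
    while let Some(parent) = dir {
        if parent.join("__init__.py").is_file() {
            package = Some(parent);
            dir = parent.parent();
        } else {
            break;
        }
    }
    package.map(Path::to_path_buf)
}
```

For `baz.py` in the example above, this walks up through the `__init__.py`-bearing directories inside `foo` and stops at `src` (which has no `__init__.py`), returning `foo` as the package root.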

Cargo.lock (generated; 1661 changed lines)
File diff suppressed because it is too large


@@ -12,157 +12,54 @@ authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
license = "MIT"
[workspace.dependencies]
aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
argfile = { version = "0.1.6" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "3.0.0" }
codspeed-criterion-compat = { version = "2.4.0", default-features = false }
colored = { version = "2.1.0" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dirs = { version = "5.0.0" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
fs-err = { version = "2.11.0" }
anyhow = { version = "1.0.69" }
bitflags = { version = "2.3.1" }
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
clap = { version = "4.4.4", features = ["derive"] }
colored = { version = "2.0.0" }
filetime = { version = "0.2.20" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
hexf-parse = { version = "0.2.1" }
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1", feature = ["filters", "glob"] }
insta-cmd = { version = "0.5.0" }
is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.12.1" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "0.1.2" }
lalrpop-util = { version = "0.20.0", default-features = false }
lexical-parse-float = { version = "0.8.0", features = ["format"] }
libc = { version = "0.2.153" }
libcst = { version = "1.1.0", default-features = false }
globset = { version = "0.4.10" }
ignore = { version = "0.4.20" }
insta = { version = "1.32.0", feature = ["filters", "glob"] }
is-macro = { version = "0.3.0" }
itertools = { version = "0.11.0" }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { version = "0.95.0", features = ["proposed"] }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "6.1.1" }
num_cpus = { version = "1.16.0" }
once_cell = { version = "1.19.0" }
memchr = "2.6.3"
num-bigint = { version = "0.4.3" }
num-traits = { version = "0.2.15" }
once_cell = { version = "1.17.1" }
path-absolutize = { version = "3.1.1" }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.5.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.3.5" }
proc-macro2 = { version = "1.0.67" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
result-like = { version = "0.5.0" }
regex = { version = "1.9.5" }
rustc-hash = { version = "1.1.0" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.4" }
serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = ["macros"] }
schemars = { version = "0.8.15" }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = { version = "1.0.107" }
shellexpand = { version = "3.0.0" }
shlex = { version = "1.3.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
similar = { version = "2.2.1", features = ["inline"] }
smallvec = { version = "1.11.1" }
static_assertions = "1.1.0"
strum = { version = "0.26.0", features = ["strum_macros"] }
strum_macros = { version = "0.26.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "1.0.58" }
tikv-jemallocator = { version = "0.5.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.3.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
strum = { version = "0.25.0", features = ["strum_macros"] }
strum_macros = { version = "0.25.2" }
syn = { version = "2.0.37" }
test-case = { version = "3.2.1" }
thiserror = { version = "1.0.48" }
toml = { version = "0.7.8" }
tracing = "0.1.37"
tracing-indicatif = "0.3.4"
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
unicode-ident = "1.0.12"
unicode-width = "0.1.11"
uuid = { version = "1.4.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
wsl = { version = "0.1.0" }
[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
# Allowed pedantic lints
char_lit_as_u8 = "allow"
collapsible_else_if = "allow"
collapsible_if = "allow"
implicit_hasher = "allow"
match_same_arms = "allow"
missing_errors_doc = "allow"
missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
too_many_lines = "allow"
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
print_stdout = "warn"
print_stderr = "warn"
dbg_macro = "warn"
empty_drop = "warn"
empty_structs_with_brackets = "warn"
exit = "warn"
get_unwrap = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
# v1.0.1
libcst = { version = "0.1.0", default-features = false }
[profile.release]
# Note that we set these explicitly, and these values
# were chosen based on a trade-off between compile times
# and runtime performance[1].
#
# [1]: https://github.com/astral-sh/ruff/pull/9031
lto = "thin"
codegen-units = 16
# Some crates don't change as much but benefit more from
# more expensive optimization passes, so we selectively
# decrease codegen-units in some cases.
[profile.release.package.ruff_python_parser]
codegen-units = 1
[profile.release.package.ruff_python_ast]
lto = "fat"
codegen-units = 1
[profile.dev.package.insta]
@@ -176,8 +73,8 @@ opt-level = 3
[profile.dev.package.ruff_python_parser]
opt-level = 1
# Use the `--profile profiling` flag to show symbols in release mode.
# e.g. `cargo build --profile profiling`
[profile.profiling]
# Use the `--profile release-debug` flag to show symbols in release mode.
# e.g. `cargo build --profile release-debug`
[profile.release-debug]
inherits = "release"
debug = 1


@@ -1,38 +0,0 @@
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME
RUN apt update && apt install -y build-essential curl python3-venv
# Setup zig as cross compiling linker
RUN python3 -m venv $HOME/.venv
RUN .venv/bin/pip install cargo-zigbuild
ENV PATH="$HOME/.venv/bin:$PATH"
# Install rust
ARG TARGETPLATFORM
RUN case "$TARGETPLATFORM" in \
"linux/arm64") echo "aarch64-unknown-linux-musl" > rust_target.txt ;; \
"linux/amd64") echo "x86_64-unknown-linux-musl" > rust_target.txt ;; \
*) exit 1 ;; \
esac
# Update rustup whenever we bump the rust version
COPY rust-toolchain.toml rust-toolchain.toml
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
ENV PATH="$HOME/.cargo/bin:$PATH"
# Installs the correct toolchain version from rust-toolchain.toml and then the musl target
RUN rustup target add $(cat rust_target.txt)
# Build
COPY crates crates
COPY Cargo.toml Cargo.toml
COPY Cargo.lock Cargo.lock
RUN cargo zigbuild --bin ruff --target $(cat rust_target.txt) --release
RUN cp target/$(cat rust_target.txt)/release/ruff /ruff
# TODO: Optimize binary size, with a version that also works when cross compiling
# RUN strip --strip-all /ruff
FROM scratch
COPY --from=build /ruff /ruff
WORKDIR /io
ENTRYPOINT ["/ruff"]

LICENSE (25 changed lines)

@@ -1269,31 +1269,6 @@ are:
SOFTWARE.
"""
- flake8-trio, licensed as follows:
"""
MIT License
Copyright (c) 2022 Zac Hatfield-Dodds
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
- Pyright, licensed as follows:
"""
MIT License

README.md (169 changed lines)

@@ -7,11 +7,10 @@
[![image](https://img.shields.io/pypi/l/ruff.svg)](https://pypi.python.org/pypi/ruff)
[![image](https://img.shields.io/pypi/pyversions/ruff.svg)](https://pypi.python.org/pypi/ruff)
[![Actions status](https://github.com/astral-sh/ruff/workflows/CI/badge.svg)](https://github.com/astral-sh/ruff/actions)
[![Discord](https://img.shields.io/badge/Discord-%235865F2.svg?logo=discord&logoColor=white)](https://discord.com/invite/astral-sh)
[**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
[**Discord**](https://discord.gg/c9MhzV8aU5) | [**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
An extremely fast Python linter and code formatter, written in Rust.
An extremely fast Python linter, written in Rust.
<p align="center">
<picture align="center">
@@ -25,16 +24,17 @@ An extremely fast Python linter and code formatter, written in Rust.
<i>Linting the CPython codebase from scratch.</i>
</p>
- ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
- ⚡️ 10-100x faster than existing linters
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.12 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- 🤝 Python 3.11 compatibility
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
- 🔧 Autofix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [600 built-in rules](https://docs.astral.sh/ruff/rules/)
- ⚖️ [Near-parity](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8) with the
built-in Flake8 rule set
- 🔌 Native re-implementations of dozens of Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/editor-integrations/) for
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)
@@ -42,10 +42,10 @@ Ruff aims to be orders of magnitude faster than alternative tools while integrat
functionality behind a single, common interface.
Ruff can be used to replace [Flake8](https://pypi.org/project/flake8/) (plus dozens of plugins),
[Black](https://github.com/psf/black), [isort](https://pypi.org/project/isort/),
[pydocstyle](https://pypi.org/project/pydocstyle/), [pyupgrade](https://pypi.org/project/pyupgrade/),
[autoflake](https://pypi.org/project/autoflake/), and more, all while executing tens or hundreds of
times faster than any individual tool.
[isort](https://pypi.org/project/isort/), [pydocstyle](https://pypi.org/project/pydocstyle/),
[yesqa](https://github.com/asottile/yesqa), [eradicate](https://pypi.org/project/eradicate/),
[pyupgrade](https://pypi.org/project/pyupgrade/), and [autoflake](https://pypi.org/project/autoflake/),
all while executing tens or hundreds of times faster than any individual tool.
Ruff is extremely actively developed and used in major open-source projects like:
@@ -55,7 +55,7 @@ Ruff is extremely actively developed and used in major open-source projects like
- [Pandas](https://github.com/pandas-dev/pandas)
- [SciPy](https://github.com/scipy/scipy)
...and [many more](#whos-using-ruff).
...and many more.
Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
@@ -126,38 +126,23 @@ and with [a variety of other package managers](https://docs.astral.sh/ruff/insta
### Usage
To run Ruff as a linter, try any of the following:
To run Ruff, try any of the following:
```shell
ruff check # Lint all files in the current directory (and any subdirectories).
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories).
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`.
ruff check path/to/code/to/file.py # Lint `file.py`.
ruff check @arguments.txt # Lint using an input file, treating its contents as newline-delimited command-line arguments.
ruff check . # Lint all files in the current directory (and any subdirectories)
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories)
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`
ruff check path/to/code/to/file.py # Lint `file.py`
```
Or, to run Ruff as a formatter:
```shell
ruff format # Format all files in the current directory (and any subdirectories).
ruff format path/to/code/ # Format all files in `/path/to/code` (and any subdirectories).
ruff format path/to/code/*.py # Format all `.py` files in `/path/to/code`.
ruff format path/to/code/to/file.py # Format `file.py`.
ruff format @arguments.txt # Format using an input file, treating its contents as newline-delimited command-line arguments.
```
Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff-pre-commit`](https://github.com/astral-sh/ruff-pre-commit):
Ruff can also be used as a [pre-commit](https://pre-commit.com) hook:
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.3.7
rev: v0.0.290
hooks:
# Run the linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format
```
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
@@ -173,7 +158,7 @@ jobs:
ruff:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: chartboost/ruff-action@v1
```
@@ -183,9 +168,18 @@ Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml`
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
for a complete list of all configuration options).
If left unspecified, Ruff's default configuration is equivalent to the following `ruff.toml` file:
If left unspecified, the default configuration is equivalent to:
```toml
[tool.ruff]
# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default.
select = ["E", "F"]
ignore = []
# Allow autofix for all enabled rules (when `--fix`) is provided.
fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"]
unfixable = []
# Exclude a variety of commonly ignored directories.
exclude = [
".bzr",
@@ -194,90 +188,58 @@ exclude = [
".git",
".git-rewrite",
".hg",
".ipynb_checkpoints",
".mypy_cache",
".nox",
".pants.d",
".pyenv",
".pytest_cache",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
".vscode",
"__pypackages__",
"_build",
"buck-out",
"build",
"dist",
"node_modules",
"site-packages",
"venv",
]
# Same as Black.
line-length = 88
indent-width = 4
# Assume Python 3.8
target-version = "py38"
[lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
select = ["E4", "E7", "E9", "F"]
ignore = []
# Allow fix for all enabled rules (when `--fix`) is provided.
fixable = ["ALL"]
unfixable = []
# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
[format]
# Like Black, use double quotes for strings.
quote-style = "double"
# Assume Python 3.8
target-version = "py38"
# Like Black, indent with spaces, rather than tabs.
indent-style = "space"
# Like Black, respect magic trailing commas.
skip-magic-trailing-comma = false
# Like Black, automatically detect the appropriate line ending.
line-ending = "auto"
[tool.ruff.mccabe]
# Unlike Flake8, default to a complexity level of 10.
max-complexity = 10
```
Note that, in a `pyproject.toml`, each section header should be prefixed with `tool.ruff`. For
example, `[lint]` should be replaced with `[tool.ruff.lint]`.
Some configuration options can be provided via dedicated command-line arguments, such as those
related to rule enablement and disablement, file discovery, and logging level:
Some configuration options can be provided via the command-line, such as those related to
rule enablement and disablement, file discovery, logging level, and more:
```shell
ruff check --select F401 --select F403 --quiet
ruff check path/to/code/ --select F401 --select F403 --quiet
```
The remaining configuration options can be provided through a catch-all `--config` argument:
```shell
ruff check --config "lint.per-file-ignores = {'some_file.py' = ['F841']}"
```
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` for more on the
linting command.
## Rules
<!-- Begin section: Rules -->
**Ruff supports over 800 lint rules**, many of which are inspired by popular tools like Flake8,
**Ruff supports over 600 lint rules**, many of which are inspired by popular tools like Flake8,
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
Rust as a first-party feature.
By default, Ruff enables Flake8's `F` rules, along with a subset of the `E` rules, omitting any
stylistic rules that overlap with the use of a formatter, like `ruff format` or
By default, Ruff enables Flake8's `E` and `F` rules. Ruff supports all rules from the `F` category,
and a [subset](https://docs.astral.sh/ruff/rules/#error-e) of the `E` category, omitting those
stylistic rules made obsolete by the use of an autoformatter, like
[Black](https://github.com/psf/black).
If you're just getting started with Ruff, **the default rule set is a great place to start**: it
@@ -328,7 +290,6 @@ quality tools, including:
- [flake8-super](https://pypi.org/project/flake8-super/)
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
- [flake8-todos](https://pypi.org/project/flake8-todos/)
- [flake8-trio](https://pypi.org/project/flake8-trio/)
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/astral-sh/ruff/issues/2102))
@@ -350,14 +311,14 @@ For a complete enumeration of the supported rules, see [_Rules_](https://docs.as
Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
## Support
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).
## Acknowledgements
@@ -370,7 +331,7 @@ In some cases, Ruff includes a "direct" Rust port of the corresponding tool.
We're grateful to the maintainers of these tools for their work, and for all
the value they've provided to the Python community.
Ruff's formatter is built on a fork of Rome's [`rome_formatter`](https://github.com/rome/tools/tree/main/crates/rome_formatter),
Ruff's autoformatter is built on a fork of Rome's [`rome_formatter`](https://github.com/rome/tools/tree/main/crates/rome_formatter),
and again draws on both API and implementation details from [Rome](https://github.com/rome/tools),
[Prettier](https://github.com/prettier/prettier), and [Black](https://github.com/psf/black).
@@ -387,16 +348,14 @@ Ruff is released under the MIT license.
Ruff is used by a number of major open-source projects and companies, including:
- [Albumentations](https://github.com/albumentations-team/albumentations)
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
- [Apache Airflow](https://github.com/apache/airflow)
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Babel](https://github.com/python-babel/babel)
- [Bokeh](https://github.com/bokeh/bokeh)
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
- [DVC](https://github.com/iterative/dvc)
- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
@@ -405,18 +364,15 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Gradio](https://github.com/gradio-app/gradio)
- [Great Expectations](https://github.com/great-expectations/great_expectations)
- [HTTPX](https://github.com/encode/httpx)
- [Hatch](https://github.com/pypa/hatch)
- [Home Assistant](https://github.com/home-assistant/core)
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
[Datasets](https://github.com/huggingface/datasets),
[Diffusers](https://github.com/huggingface/diffusers))
- [Hatch](https://github.com/pypa/hatch)
- [Home Assistant](https://github.com/home-assistant/core)
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [Kraken Tech](https://kraken.tech/)
- [LangChain](https://github.com/hwchase17/langchain)
- [Litestar](https://litestar.dev/)
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
- Matrix ([Synapse](https://github.com/matrix-org/synapse))
- [MegaLinter](https://github.com/oxsecurity/megalinter)
@@ -429,38 +385,29 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Mypy](https://github.com/python/mypy)
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
- [Neon](https://github.com/neondatabase/neon)
- [Nokia](https://nokia.com/)
- [NoneBot](https://github.com/nonebot/nonebot2)
- [NumPyro](https://github.com/pyro-ppl/numpyro)
- [ONNX](https://github.com/onnx/onnx)
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
- [PDM](https://github.com/pdm-project/pdm)
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
- [Pandas](https://github.com/pandas-dev/pandas)
- [Pillow](https://github.com/python-pillow/Pillow)
- [Poetry](https://github.com/python-poetry/poetry)
- [Polars](https://github.com/pola-rs/polars)
- [PostHog](https://github.com/PostHog/posthog)
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
- [PyMC](https://github.com/pymc-devs/pymc/)
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
- [pytest](https://github.com/pytest-dev/pytest)
- [PyTorch](https://github.com/pytorch/pytorch)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
- [PyVista](https://github.com/pyvista/pyvista)
- [Reflex](https://github.com/reflex-dev/reflex)
- [River](https://github.com/online-ml/river)
- [Rippling](https://rippling.com)
- [Robyn](https://github.com/sansyrox/robyn)
- [Saleor](https://github.com/saleor/saleor)
- Scale AI ([Launch SDK](https://github.com/scaleapi/launch-python-client))
- [SciPy](https://github.com/scipy/scipy)
- Snowflake ([SnowCLI](https://github.com/Snowflake-Labs/snowcli))
- [Saleor](https://github.com/saleor/saleor)
- [SciPy](https://github.com/scipy/scipy)
- [Sphinx](https://github.com/sphinx-doc/sphinx)
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
- [Starlette](https://github.com/encode/starlette)
- [Litestar](https://litestar.dev/)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Vega-Altair](https://github.com/altair-viz/altair)
- WordPress ([Openverse](https://github.com/WordPress/openverse))
@@ -476,7 +423,7 @@ Ruff is used by a number of major open-source projects and companies, including:
### Show Your Support
If you're using Ruff, consider adding the Ruff badge to your project's `README.md`:
If you're using Ruff, consider adding the Ruff badge to your project's `README.md`:
```md
[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
@@ -502,6 +449,6 @@ MIT
<div align="center">
<a target="_blank" href="https://astral.sh" style="background:none">
<img src="https://raw.githubusercontent.com/astral-sh/ruff/main/assets/svg/Astral.svg" alt="Made by Astral">
<img src="https://raw.githubusercontent.com/astral-sh/ruff/main/assets/svg/Astral.svg">
</a>
</div>


@@ -1,13 +1,10 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = ["resources", "snapshots"]
[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
hel = "hel"
whos = "whos"
spawnve = "spawnve"
ned = "ned"
pn = "pn" # `import panel as pd` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.


@@ -1,8 +0,0 @@
{
"label": "code style",
"message": "Ruff",
"logoSvg": "<svg width=\"510\" height=\"622\" viewBox=\"0 0 510 622\" fill=\"none\" xmlns=\"http://www.w3.org/2000/svg\"><path fill-rule=\"evenodd\" clip-rule=\"evenodd\" d=\"M206.701 0C200.964 0 196.314 4.64131 196.314 10.3667V41.4667C196.314 47.192 191.663 51.8333 185.927 51.8333H156.843C151.107 51.8333 146.456 56.4746 146.456 62.2V145.133C146.456 150.859 141.806 155.5 136.069 155.5H106.986C101.249 155.5 96.5988 160.141 96.5988 165.867V222.883C96.5988 228.609 91.9484 233.25 86.2118 233.25H57.1283C51.3917 233.25 46.7413 237.891 46.7413 243.617V300.633C46.7413 306.359 42.0909 311 36.3544 311H10.387C4.6504 311 0 315.641 0 321.367V352.467C0 358.192 4.6504 362.833 10.387 362.833H145.418C151.154 362.833 155.804 367.475 155.804 373.2V430.217C155.804 435.942 151.154 440.583 145.418 440.583H116.334C110.597 440.583 105.947 445.225 105.947 450.95V507.967C105.947 513.692 101.297 518.333 95.5601 518.333H66.4766C60.74 518.333 56.0896 522.975 56.0896 528.7V611.633C56.0896 617.359 60.74 622 66.4766 622H149.572C155.309 622 159.959 617.359 159.959 611.633V570.167H201.507C207.244 570.167 211.894 565.525 211.894 559.8V528.7C211.894 522.975 216.544 518.333 222.281 518.333H251.365C257.101 518.333 261.752 513.692 261.752 507.967V476.867C261.752 471.141 266.402 466.5 272.138 466.5H301.222C306.959 466.5 311.609 461.859 311.609 456.133V425.033C311.609 419.308 316.259 414.667 321.996 414.667H351.079C356.816 414.667 361.466 410.025 361.466 404.3V373.2C361.466 367.475 366.117 362.833 371.853 362.833H400.937C406.673 362.833 411.324 358.192 411.324 352.467V321.367C411.324 315.641 415.974 311 421.711 311H450.794C456.531 311 461.181 306.359 461.181 300.633V217.7C461.181 211.975 456.531 207.333 450.794 207.333H420.672C414.936 207.333 410.285 202.692 410.285 196.967V165.867C410.285 160.141 414.936 155.5 420.672 155.5H449.756C455.492 155.5 460.143 150.859 460.143 145.133V114.033C460.143 108.308 464.793 103.667 470.53 103.667H499.613C505.35 103.667 510 99.0253 510 93.3V10.3667C510 4.64132 505.35 0 499.613 0H206.701ZM168.269 440.583C162.532 440.583 157.882 445.225 157.882 450.95V507.967C157.882 513.692 153.231 518.333 147.495 518.333H118.411C112.675 518.333 108.024 522.975 108.024 528.7V559.8C108.024 565.525 112.675 570.167 118.411 570.167H159.959V528.7C159.959 522.975 164.61 518.333 170.346 518.333H199.43C205.166 518.333 209.817 513.692 209.817 507.967V476.867C209.817 471.141 214.467 466.5 220.204 466.5H249.287C255.024 466.5 259.674 461.859 259.674 456.133V425.033C259.674 419.308 264.325 414.667 270.061 414.667H299.145C304.881 414.667 309.532 410.025 309.532 404.3V373.2C309.532 367.475 314.182 362.833 319.919 362.833H349.002C354.739 362.833 359.389 358.192 359.389 352.467V321.367C359.389 315.641 364.039 311 369.776 311H398.859C404.596 311 409.246 306.359 409.246 300.633V269.533C409.246 263.808 404.596 259.167 398.859 259.167H318.88C313.143 259.167 308.493 254.525 308.493 248.8V217.7C308.493 211.975 313.143 207.333 318.88 207.333H347.963C353.7 207.333 358.35 202.692 358.35 196.967V165.867C358.35 160.141 363.001 155.5 368.737 155.5H397.821C403.557 155.5 408.208 150.859 408.208 145.133V114.033C408.208 108.308 412.858 103.667 418.595 103.667H447.678C453.415 103.667 458.065 99.0253 458.065 93.3V62.2C458.065 56.4746 453.415 51.8333 447.678 51.8333H208.778C203.041 51.8333 198.391 56.4746 198.391 62.2V145.133C198.391 150.859 193.741 155.5 188.004 155.5H158.921C153.184 155.5 148.534 160.141 148.534 165.867V222.883C148.534 228.609 143.883 233.25 138.147 233.25H109.063C103.327 233.25 98.6762 237.891 98.6762 243.617V300.633C98.6762 
306.359 103.327 311 109.063 311H197.352C203.089 311 207.739 315.641 207.739 321.367V430.217C207.739 435.942 203.089 440.583 197.352 440.583H168.269Z\" fill=\"#D7FF64\"/></svg>",
"logoWidth": 10,
"labelColor": "grey",
"color": "#261230"
}


@@ -0,0 +1,36 @@
[package]
name = "flake8-to-ruff"
version = "0.0.290"
description = """
Convert Flake8 configuration files to Ruff configuration files.
"""
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }
[dependencies]
ruff_linter = { path = "../ruff_linter", default-features = false }
ruff_workspace = { path = "../ruff_workspace" }
anyhow = { workspace = true }
clap = { workspace = true }
colored = { workspace = true }
configparser = { version = "3.0.2" }
itertools = { workspace = true }
log = { workspace = true }
once_cell = { workspace = true }
pep440_rs = { version = "0.3.1", features = ["serde"] }
regex = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
toml = { workspace = true }
[dev-dependencies]
pretty_assertions = "1.3.0"


@@ -0,0 +1,99 @@
# flake8-to-ruff
Convert existing Flake8 configuration files (`setup.cfg`, `tox.ini`, or `.flake8`) for use with
[Ruff](https://github.com/astral-sh/ruff).
Generates a Ruff-compatible `pyproject.toml` section.
## Installation and Usage
### Installation
Available as [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) on PyPI:
```shell
pip install flake8-to-ruff
```
### Usage
To run `flake8-to-ruff`:
```shell
flake8-to-ruff path/to/setup.cfg
flake8-to-ruff path/to/tox.ini
flake8-to-ruff path/to/.flake8
```
`flake8-to-ruff` will print the relevant `pyproject.toml` sections to standard output, like so:
```toml
[tool.ruff]
exclude = [
'.svn',
'CVS',
'.bzr',
'.hg',
'.git',
'__pycache__',
'.tox',
'.idea',
'.mypy_cache',
'.venv',
'node_modules',
'_state_machine.py',
'test_fstring.py',
'bad_coding2.py',
'badsyntax_*.py',
]
select = [
'A',
'E',
'F',
'Q',
]
ignore = []
[tool.ruff.flake8-quotes]
inline-quotes = 'single'
[tool.ruff.pep8-naming]
ignore-names = [
'foo',
'bar',
]
```
### Plugins
`flake8-to-ruff` will attempt to infer any activated plugins based on the settings provided in your
configuration file.
For example, if your `.flake8` file includes a `docstring-convention` property, `flake8-to-ruff`
will enable the appropriate [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
checks.
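
As a rough illustration of that inference (a hypothetical sketch, not the actual `flake8-to-ruff` source; the key-to-plugin pairs beyond `docstring-convention` are assumptions):

```rust
/// Map a Flake8 configuration key to the plugin whose presence it implies.
fn infer_plugin_from_key(key: &str) -> Option<&'static str> {
    match key {
        "docstring-convention" => Some("flake8-docstrings"),
        "inline-quotes" | "multiline-quotes" => Some("flake8-quotes"),
        "max-complexity" => Some("mccabe"),
        _ => None,
    }
}

fn main() {
    for key in ["docstring-convention", "max-line-length"] {
        match infer_plugin_from_key(key) {
            Some(plugin) => println!("`{key}` implies `{plugin}`"),
            None => println!("`{key}` implies no particular plugin"),
        }
    }
}
```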
Alternatively, you can manually specify plugins on the command-line:
```shell
flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
```
## Limitations
1. Ruff only supports a subset of the Flake8 configuration options. `flake8-to-ruff` will warn on and
ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
configuration options that don't exist in Flake8.)
1. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
codes from unsupported plugins. (See the
[documentation](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8) for the complete
list of supported plugins.)
## License
MIT
## Contributing
Contributions are welcome and hugely appreciated. To get started, check out the
[contributing guidelines](https://github.com/astral-sh/ruff/blob/main/CONTRIBUTING.md).


@@ -0,0 +1,65 @@
[build-system]
requires = [
# The minimum setuptools version is specific to the PEP 517 backend,
# and may be stricter than the version required in `setup.cfg`
"setuptools>=40.6.0,!=60.9.0",
"wheel",
# Must be kept in sync with the `install_requirements` in `setup.cfg`
"cffi>=1.12; platform_python_implementation != 'PyPy'",
"setuptools-rust>=0.11.4",
]
build-backend = "setuptools.build_meta"
[tool.black]
line-length = 79
target-version = ["py36"]
[tool.pytest.ini_options]
addopts = "-r s --capture=no --strict-markers --benchmark-disable"
markers = [
"skip_fips: this test is not executed in FIPS mode",
"supported: parametrized test requiring only_if and skip_message",
]
[tool.mypy]
show_error_codes = true
check_untyped_defs = true
no_implicit_reexport = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_unused_configs = true
strict_equality = true
[[tool.mypy.overrides]]
module = [
"pretend"
]
ignore_missing_imports = true
[tool.coverage.run]
branch = true
relative_files = true
source = [
"cryptography",
"tests/",
]
[tool.coverage.paths]
source = [
"src/cryptography",
"*.tox/*/lib*/python*/site-packages/cryptography",
"*.tox\\*\\Lib\\site-packages\\cryptography",
"*.tox/pypy/site-packages/cryptography",
]
tests =[
"tests/",
"*tests\\",
]
[tool.coverage.report]
exclude_lines = [
"@abc.abstractmethod",
"@abc.abstractproperty",
"@typing.overload",
"if typing.TYPE_CHECKING",
]


@@ -0,0 +1,91 @@
[metadata]
name = cryptography
version = attr: cryptography.__version__
description = cryptography is a package which provides cryptographic recipes and primitives to Python developers.
long_description = file: README.rst
long_description_content_type = text/x-rst
license = BSD-3-Clause OR Apache-2.0
url = https://github.com/pyca/cryptography
author = The Python Cryptographic Authority and individual contributors
author_email = cryptography-dev@python.org
project_urls =
Documentation=https://cryptography.io/
Source=https://github.com/pyca/cryptography/
Issues=https://github.com/pyca/cryptography/issues
Changelog=https://cryptography.io/en/latest/changelog/
classifiers =
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: Apache Software License
License :: OSI Approved :: BSD License
Natural Language :: English
Operating System :: MacOS :: MacOS X
Operating System :: POSIX
Operating System :: POSIX :: BSD
Operating System :: POSIX :: Linux
Operating System :: Microsoft :: Windows
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: Implementation :: PyPy
Topic :: Security :: Cryptography
[options]
python_requires = >=3.6
include_package_data = True
zip_safe = False
package_dir =
=src
packages = find:
# `install_requires` must be kept in sync with `pyproject.toml`
install_requires =
cffi >=1.12
[options.packages.find]
where = src
exclude =
_cffi_src
_cffi_src.*
[options.extras_require]
test =
pytest>=6.2.0
pytest-benchmark
pytest-cov
pytest-subtests
pytest-xdist
pretend
iso8601
pytz
hypothesis>=1.11.4,!=3.79.2
docs =
sphinx >= 1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0
sphinx_rtd_theme
docstest =
pyenchant >= 1.6.11
twine >= 1.12.0
sphinxcontrib-spelling >= 4.0.1
sdist =
setuptools_rust >= 0.11.4
pep8test =
black
flake8
flake8-import-order
pep8-naming
# This extra is for OpenSSH private keys that use bcrypt KDF
# Versions: v3.1.3 - ignore_few_rounds, v3.1.5 - abi3
ssh =
bcrypt >= 3.1.5
[flake8]
ignore = E203,E211,W503,W504,N818
exclude = .tox,*.egg,.git,_build,.hypothesis
select = E,W,F,N,I
application-import-names = cryptography,cryptography_vectors,tests


@@ -0,0 +1,19 @@
[flake8]
# Ignore style and complexity
# E: style errors
# W: style warnings
# C: complexity
# D: docstring warnings (unused pydocstyle extension)
# F841: local variable assigned but never used
ignore = E, C, W, D, F841
builtins = c, get_config
exclude =
.cache,
.github,
docs,
jupyterhub/alembic*,
onbuild,
scripts,
share,
tools,
setup.py


@@ -0,0 +1,43 @@
[flake8]
# Exclude the grpc generated code
exclude = ./manim/grpc/gen/*
max-complexity = 15
max-line-length = 88
statistics = True
# Prevents some flake8-rst-docstrings errors
rst-roles = attr,class,func,meth,mod,obj,ref,doc,exc
rst-directives = manim, SEEALSO, seealso
docstring-convention=numpy
select = A,A00,B,B9,C4,C90,D,E,F,F,PT,RST,SIM,W
# General Compatibility
extend-ignore = E203, W503, D202, D212, D213, D404
# Misc
F401, F403, F405, F841, E501, E731, E402, F811, F821,
# Plug-in: flake8-builtins
A001, A002, A003,
# Plug-in: flake8-bugbear
B006, B007, B008, B009, B010, B903, B950,
# Plug-in: flake8-simplify
SIM105, SIM106, SIM119,
# Plug-in: flake8-comprehensions
C901
# Plug-in: flake8-pytest-style
PT001, PT004, PT006, PT011, PT018, PT022, PT023,
# Plug-in: flake8-docstrings
D100, D101, D102, D103, D104, D105, D106, D107,
D200, D202, D204, D205, D209,
D301,
D400, D401, D402, D403, D405, D406, D407, D409, D411, D412, D414,
# Plug-in: flake8-rst-docstrings
RST201, RST203, RST210, RST212, RST213, RST215,
RST301, RST303,


@@ -0,0 +1,36 @@
[flake8]
min_python_version = 3.7.0
max-line-length = 88
ban-relative-imports = true
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
format-greedy = 1
inline-quotes = double
enable-extensions = TC, TC1
type-checking-strict = true
eradicate-whitelist-extend = ^-.*;
extend-ignore =
# E203: Whitespace before ':' (pycqa/pycodestyle#373)
E203,
# SIM106: Handle error-cases first
SIM106,
# ANN101: Missing type annotation for self in method
ANN101,
# ANN102: Missing type annotation for cls in classmethod
ANN102,
# PIE781: assign-and-return
PIE781,
# PIE798 no-unnecessary-class: Consider using a module for namespacing instead
PIE798,
per-file-ignores =
# TC002: Move third-party import '...' into a type-checking block
__init__.py:TC002,
# ANN201: Missing return type annotation for public function
tests/test_*:ANN201
tests/**/test_*:ANN201
extend-exclude =
# Frozen and not subject to change in this repo:
get-poetry.py,
install-poetry.py,
# External to the project's coding standards:
tests/fixtures/*,
tests/**/fixtures/*,


@@ -0,0 +1,19 @@
[flake8]
max-line-length=120
docstring-convention=all
import-order-style=pycharm
application_import_names=bot,tests
exclude=.cache,.venv,.git,constants.py
extend-ignore=
B311,W503,E226,S311,T000,E731
# Missing Docstrings
D100,D104,D105,D107,
# Docstring Whitespace
D203,D212,D214,D215,
# Docstring Quotes
D301,D302,
# Docstring Content
D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
# Type Annotations
ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
per-file-ignores=tests/*:D,ANN

View File

@@ -0,0 +1,6 @@
[flake8]
ignore = E203, E501, W503
per-file-ignores =
requests/__init__.py:E402, F401
requests/compat.py:E402, F401
tests/compat.py:F401


@@ -0,0 +1,34 @@
[project]
name = "flake8-to-ruff"
keywords = ["automation", "flake8", "pycodestyle", "pyflakes", "pylint", "clippy"]
classifiers = [
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3 :: Only",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
]
author = "Charlie Marsh"
author_email = "charlie.r.marsh@gmail.com"
description = "Convert existing Flake8 configuration to Ruff."
requires-python = ">=3.7"
[project.urls]
repository = "https://github.com/astral-sh/ruff#subdirectory=crates/flake8_to_ruff"
[build-system]
requires = ["maturin>=1.0,<2.0"]
build-backend = "maturin"
[tool.maturin]
bindings = "bin"
strip = true

View File

@@ -0,0 +1,13 @@
//! Extract Black configuration settings from a pyproject.toml.
use ruff_linter::line_width::LineLength;
use ruff_linter::settings::types::PythonVersion;
use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Black {
#[serde(alias = "line-length", alias = "line_length")]
pub(crate) line_length: Option<LineLength>,
#[serde(alias = "target-version", alias = "target_version")]
pub(crate) target_version: Option<Vec<PythonVersion>>,
}
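
A minimal deserialization sketch for the struct above (illustrative only, not part of the diff; it assumes the `toml` crate this utility already uses elsewhere):

// Hypothetical usage: parse a `[tool.black]` table body into `Black`.
let black: Black = toml::from_str("line-length = 100").unwrap();
assert_eq!(black.line_length, Some(LineLength::try_from(100).unwrap()));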

View File

@@ -0,0 +1,639 @@
use std::collections::{HashMap, HashSet};
use std::str::FromStr;
use itertools::Itertools;
use ruff_linter::line_width::LineLength;
use ruff_linter::registry::Linter;
use ruff_linter::rule_selector::RuleSelector;
use ruff_linter::rules::flake8_pytest_style::types::{
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
};
use ruff_linter::rules::flake8_quotes::settings::Quote;
use ruff_linter::rules::flake8_tidy_imports::settings::Strictness;
use ruff_linter::rules::pydocstyle::settings::Convention;
use ruff_linter::settings::types::PythonVersion;
use ruff_linter::warn_user;
use ruff_workspace::options::{
Flake8AnnotationsOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, Flake8ErrMsgOptions,
Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, McCabeOptions,
Options, Pep8NamingOptions, PydocstyleOptions,
};
use ruff_workspace::pyproject::Pyproject;
use super::external_config::ExternalConfig;
use super::plugin::Plugin;
use super::{parser, plugin};
const DEFAULT_SELECTORS: &[RuleSelector] = &[
RuleSelector::Linter(Linter::Pyflakes),
RuleSelector::Linter(Linter::Pycodestyle),
];
pub(crate) fn convert(
config: &HashMap<String, HashMap<String, Option<String>>>,
external_config: &ExternalConfig,
plugins: Option<Vec<Plugin>>,
) -> Pyproject {
// Extract the Flake8 section.
let flake8 = config
.get("flake8")
.expect("Unable to find flake8 section in INI file");
// Extract all referenced rule code prefixes, to power plugin inference.
let mut referenced_codes: HashSet<RuleSelector> = HashSet::default();
for (key, value) in flake8 {
if let Some(value) = value {
match key.as_str() {
"select" | "ignore" | "extend-select" | "extend_select" | "extend-ignore"
| "extend_ignore" => {
referenced_codes.extend(parser::parse_prefix_codes(value.as_ref()));
}
"per-file-ignores" | "per_file_ignores" => {
if let Ok(per_file_ignores) =
parser::parse_files_to_codes_mapping(value.as_ref())
{
for (_, codes) in parser::collect_per_file_ignores(per_file_ignores) {
referenced_codes.extend(codes);
}
}
}
_ => {}
}
}
}
// Infer plugins, if not provided.
let plugins = plugins.unwrap_or_else(|| {
let from_options = plugin::infer_plugins_from_options(flake8);
if !from_options.is_empty() {
#[allow(clippy::print_stderr)]
{
eprintln!("Inferred plugins from settings: {from_options:#?}");
}
}
let from_codes = plugin::infer_plugins_from_codes(&referenced_codes);
if !from_codes.is_empty() {
#[allow(clippy::print_stderr)]
{
eprintln!("Inferred plugins from referenced codes: {from_codes:#?}");
}
}
from_options.into_iter().chain(from_codes).collect()
});
// Check if the user has specified a `select`. If not, we'll add our own
// default `select`, and populate it based on user plugins.
let mut select = flake8
.get("select")
.and_then(|value| {
value
.as_ref()
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
})
.unwrap_or_else(|| resolve_select(&plugins));
let mut ignore: HashSet<RuleSelector> = flake8
.get("ignore")
.and_then(|value| {
value
.as_ref()
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
})
.unwrap_or_default();
// Parse each supported option.
let mut options = Options::default();
let mut flake8_annotations = Flake8AnnotationsOptions::default();
let mut flake8_bugbear = Flake8BugbearOptions::default();
let mut flake8_builtins = Flake8BuiltinsOptions::default();
let mut flake8_errmsg = Flake8ErrMsgOptions::default();
let mut flake8_pytest_style = Flake8PytestStyleOptions::default();
let mut flake8_quotes = Flake8QuotesOptions::default();
let mut flake8_tidy_imports = Flake8TidyImportsOptions::default();
let mut mccabe = McCabeOptions::default();
let mut pep8_naming = Pep8NamingOptions::default();
let mut pydocstyle = PydocstyleOptions::default();
for (key, value) in flake8 {
if let Some(value) = value {
match key.as_str() {
// flake8
"builtins" => {
options.builtins = Some(parser::parse_strings(value.as_ref()));
}
"max-line-length" | "max_line_length" => match LineLength::from_str(value) {
Ok(line_length) => options.line_length = Some(line_length),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
"select" => {
// Already parsed above when initializing `select`; re-extending the set is a harmless no-op.
select.extend(parser::parse_prefix_codes(value.as_ref()));
}
"ignore" => {
// No-op (handled above).
}
"extend-select" | "extend_select" => {
// Unlike Flake8, use a single explicit `select`.
select.extend(parser::parse_prefix_codes(value.as_ref()));
}
"extend-ignore" | "extend_ignore" => {
// Unlike Flake8, use a single explicit `ignore`.
ignore.extend(parser::parse_prefix_codes(value.as_ref()));
}
"exclude" => {
options.exclude = Some(parser::parse_strings(value.as_ref()));
}
"extend-exclude" | "extend_exclude" => {
options.extend_exclude = Some(parser::parse_strings(value.as_ref()));
}
"per-file-ignores" | "per_file_ignores" => {
match parser::parse_files_to_codes_mapping(value.as_ref()) {
Ok(per_file_ignores) => {
options.per_file_ignores =
Some(parser::collect_per_file_ignores(per_file_ignores));
}
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-bugbear
"extend-immutable-calls" | "extend_immutable_calls" => {
flake8_bugbear.extend_immutable_calls =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-builtins
"builtins-ignorelist" | "builtins_ignorelist" => {
flake8_builtins.builtins_ignorelist =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-annotations
"suppress-none-returning" | "suppress_none_returning" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.suppress_none_returning = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"suppress-dummy-args" | "suppress_dummy_args" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.suppress_dummy_args = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"mypy-init-return" | "mypy_init_return" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.mypy_init_return = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"allow-star-arg-any" | "allow_star_arg_any" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.allow_star_arg_any = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-quotes
"quotes" | "inline-quotes" | "inline_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.inline_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.inline_quotes = Some(Quote::Double),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"multiline-quotes" | "multiline_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.multiline_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.multiline_quotes = Some(Quote::Double),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"docstring-quotes" | "docstring_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.docstring_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.docstring_quotes = Some(Quote::Double),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"avoid-escape" | "avoid_escape" => match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_quotes.avoid_escape = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
// pep8-naming
"ignore-names" | "ignore_names" => {
pep8_naming.ignore_names = Some(parser::parse_strings(value.as_ref()));
}
"classmethod-decorators" | "classmethod_decorators" => {
pep8_naming.classmethod_decorators =
Some(parser::parse_strings(value.as_ref()));
}
"staticmethod-decorators" | "staticmethod_decorators" => {
pep8_naming.staticmethod_decorators =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-tidy-imports
"ban-relative-imports" | "ban_relative_imports" => match value.trim() {
"true" => flake8_tidy_imports.ban_relative_imports = Some(Strictness::All),
"parents" => {
flake8_tidy_imports.ban_relative_imports = Some(Strictness::Parents);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
// flake8-docstrings
"docstring-convention" => match value.trim() {
"google" => pydocstyle.convention = Some(Convention::Google),
"numpy" => pydocstyle.convention = Some(Convention::Numpy),
"pep257" => pydocstyle.convention = Some(Convention::Pep257),
"all" => pydocstyle.convention = None,
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
// mccabe
"max-complexity" | "max_complexity" => match value.parse::<usize>() {
Ok(max_complexity) => mccabe.max_complexity = Some(max_complexity),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
// flake8-errmsg
"errmsg-max-string-length" | "errmsg_max_string_length" => {
match value.parse::<usize>() {
Ok(max_string_length) => {
flake8_errmsg.max_string_length = Some(max_string_length);
}
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-pytest-style
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_pytest_style.fixture_parentheses = Some(!bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
match value.trim() {
"csv" => {
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::Csv);
}
"tuple" => {
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::Tuple);
}
"list" => {
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::List);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
match value.trim() {
"tuple" => {
flake8_pytest_style.parametrize_values_type =
Some(ParametrizeValuesType::Tuple);
}
"list" => {
flake8_pytest_style.parametrize_values_type =
Some(ParametrizeValuesType::List);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
match value.trim() {
"tuple" => {
flake8_pytest_style.parametrize_values_row_type =
Some(ParametrizeValuesRowType::Tuple);
}
"list" => {
flake8_pytest_style.parametrize_values_row_type =
Some(ParametrizeValuesRowType::List);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
flake8_pytest_style.raises_require_match_for =
Some(parser::parse_strings(value.as_ref()));
}
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_pytest_style.mark_parentheses = Some(!bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// Unknown
_ => {
warn_user!("Skipping unsupported property: {}", key);
}
}
}
}
// Deduplicate and sort.
options.select = Some(
select
.into_iter()
.sorted_by_key(RuleSelector::prefix_and_code)
.collect(),
);
options.ignore = Some(
ignore
.into_iter()
.sorted_by_key(RuleSelector::prefix_and_code)
.collect(),
);
if flake8_annotations != Flake8AnnotationsOptions::default() {
options.flake8_annotations = Some(flake8_annotations);
}
if flake8_bugbear != Flake8BugbearOptions::default() {
options.flake8_bugbear = Some(flake8_bugbear);
}
if flake8_builtins != Flake8BuiltinsOptions::default() {
options.flake8_builtins = Some(flake8_builtins);
}
if flake8_errmsg != Flake8ErrMsgOptions::default() {
options.flake8_errmsg = Some(flake8_errmsg);
}
if flake8_pytest_style != Flake8PytestStyleOptions::default() {
options.flake8_pytest_style = Some(flake8_pytest_style);
}
if flake8_quotes != Flake8QuotesOptions::default() {
options.flake8_quotes = Some(flake8_quotes);
}
if flake8_tidy_imports != Flake8TidyImportsOptions::default() {
options.flake8_tidy_imports = Some(flake8_tidy_imports);
}
if mccabe != McCabeOptions::default() {
options.mccabe = Some(mccabe);
}
if pep8_naming != Pep8NamingOptions::default() {
options.pep8_naming = Some(pep8_naming);
}
if pydocstyle != PydocstyleOptions::default() {
options.pydocstyle = Some(pydocstyle);
}
// Extract any settings from the existing `pyproject.toml`.
if let Some(black) = &external_config.black {
if let Some(line_length) = &black.line_length {
options.line_length = Some(*line_length);
}
if let Some(target_version) = &black.target_version {
if let Some(target_version) = target_version.iter().min() {
options.target_version = Some(*target_version);
}
}
}
if let Some(isort) = &external_config.isort {
if let Some(src_paths) = &isort.src_paths {
match options.src.as_mut() {
Some(src) => {
src.extend_from_slice(src_paths);
}
None => {
options.src = Some(src_paths.clone());
}
}
}
}
if let Some(project) = &external_config.project {
if let Some(requires_python) = &project.requires_python {
if options.target_version.is_none() {
options.target_version =
PythonVersion::get_minimum_supported_version(requires_python);
}
}
}
// Create the pyproject.toml.
Pyproject::new(options)
}
/// Resolve the set of enabled `RuleSelector` values for the given
/// plugins.
fn resolve_select(plugins: &[Plugin]) -> HashSet<RuleSelector> {
let mut select: HashSet<_> = DEFAULT_SELECTORS.iter().cloned().collect();
select.extend(plugins.iter().map(|p| Linter::from(p).into()));
select
}
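// Illustrative example: `resolve_select(&[Plugin::Flake8Docstrings])` returns the
// default Pyflakes and pycodestyle selectors plus the pydocstyle (`D`) selector.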
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::str::FromStr;
use anyhow::Result;
use itertools::Itertools;
use pep440_rs::VersionSpecifiers;
use pretty_assertions::assert_eq;
use ruff_linter::line_width::LineLength;
use ruff_linter::registry::Linter;
use ruff_linter::rule_selector::RuleSelector;
use ruff_linter::rules::flake8_quotes;
use ruff_linter::rules::pydocstyle::settings::Convention;
use ruff_linter::settings::types::PythonVersion;
use ruff_workspace::options::{Flake8QuotesOptions, Options, PydocstyleOptions};
use ruff_workspace::pyproject::Pyproject;
use crate::converter::DEFAULT_SELECTORS;
use crate::pep621::Project;
use crate::ExternalConfig;
use super::super::plugin::Plugin;
use super::convert;
fn default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> Options {
Options {
ignore: Some(vec![]),
select: Some(
DEFAULT_SELECTORS
.iter()
.cloned()
.chain(plugins)
.sorted_by_key(RuleSelector::prefix_and_code)
.collect(),
),
..Options::default()
}
}
#[test]
fn it_converts_empty() {
let actual = convert(
&HashMap::from([("flake8".to_string(), HashMap::default())]),
&ExternalConfig::default(),
None,
);
let expected = Pyproject::new(default_options([]));
assert_eq!(actual, expected);
}
#[test]
fn it_converts_dashes() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("max-line-length".to_string(), Some("100".to_string()))]),
)]),
&ExternalConfig::default(),
Some(vec![]),
);
let expected = Pyproject::new(Options {
line_length: Some(LineLength::try_from(100).unwrap()),
..default_options([])
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_underscores() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("max_line_length".to_string(), Some("100".to_string()))]),
)]),
&ExternalConfig::default(),
Some(vec![]),
);
let expected = Pyproject::new(Options {
line_length: Some(LineLength::try_from(100).unwrap()),
..default_options([])
});
assert_eq!(actual, expected);
}
#[test]
fn it_ignores_parse_errors() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("max_line_length".to_string(), Some("abc".to_string()))]),
)]),
&ExternalConfig::default(),
Some(vec![]),
);
let expected = Pyproject::new(default_options([]));
assert_eq!(actual, expected);
}
#[test]
fn it_converts_plugin_options() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
)]),
&ExternalConfig::default(),
Some(vec![]),
);
let expected = Pyproject::new(Options {
flake8_quotes: Some(Flake8QuotesOptions {
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
multiline_quotes: None,
docstring_quotes: None,
avoid_escape: None,
}),
..default_options([])
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_docstring_conventions() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([(
"docstring-convention".to_string(),
Some("numpy".to_string()),
)]),
)]),
&ExternalConfig::default(),
Some(vec![Plugin::Flake8Docstrings]),
);
let expected = Pyproject::new(Options {
pydocstyle: Some(PydocstyleOptions {
convention: Some(Convention::Numpy),
ignore_decorators: None,
property_decorators: None,
}),
..default_options([Linter::Pydocstyle.into()])
});
assert_eq!(actual, expected);
}
#[test]
fn it_infers_plugins_if_omitted() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
)]),
&ExternalConfig::default(),
None,
);
let expected = Pyproject::new(Options {
flake8_quotes: Some(Flake8QuotesOptions {
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
multiline_quotes: None,
docstring_quotes: None,
avoid_escape: None,
}),
..default_options([Linter::Flake8Quotes.into()])
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_project_requires_python() -> Result<()> {
let actual = convert(
&HashMap::from([("flake8".to_string(), HashMap::default())]),
&ExternalConfig {
project: Some(&Project {
requires_python: Some(VersionSpecifiers::from_str(">=3.8.16, <3.11")?),
}),
..ExternalConfig::default()
},
Some(vec![]),
);
let expected = Pyproject::new(Options {
target_version: Some(PythonVersion::Py38),
..default_options([])
});
assert_eq!(actual, expected);
Ok(())
}
}

View File

@@ -0,0 +1,10 @@
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;
#[derive(Default)]
pub(crate) struct ExternalConfig<'a> {
pub(crate) black: Option<&'a Black>,
pub(crate) isort: Option<&'a Isort>,
pub(crate) project: Option<&'a Project>,
}

View File

@@ -0,0 +1,10 @@
//! Extract isort configuration settings from a pyproject.toml.
use serde::{Deserialize, Serialize};
/// The [isort configuration](https://pycqa.github.io/isort/docs/configuration/config_files.html).
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Isort {
#[serde(alias = "src-paths", alias = "src_paths")]
pub(crate) src_paths: Option<Vec<String>>,
}
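// Illustrative example: `src-paths = ["src", "tests"]` under `[tool.isort]`
// deserializes into `Isort { src_paths: Some(vec!["src".into(), "tests".into()]) }`.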

View File

@@ -0,0 +1,80 @@
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.
mod black;
mod converter;
mod external_config;
mod isort;
mod parser;
mod pep621;
mod plugin;
mod pyproject;
use std::path::PathBuf;
use anyhow::Result;
use clap::Parser;
use configparser::ini::Ini;
use crate::converter::convert;
use crate::external_config::ExternalConfig;
use crate::plugin::Plugin;
use crate::pyproject::parse;
use ruff_linter::logging::{set_up_logging, LogLevel};
#[derive(Parser)]
#[command(
about = "Convert existing Flake8 configuration to Ruff.",
long_about = None
)]
struct Args {
/// Path to the Flake8 configuration file (e.g., `setup.cfg`, `tox.ini`, or
/// `.flake8`).
#[arg(required = true)]
file: PathBuf,
/// Optional path to a `pyproject.toml` file, used to ensure compatibility
/// with Black.
#[arg(long)]
pyproject: Option<PathBuf>,
/// List of plugins to enable.
#[arg(long, value_delimiter = ',')]
plugin: Option<Vec<Plugin>>,
}
fn main() -> Result<()> {
set_up_logging(&LogLevel::Default)?;
let args = Args::parse();
// Read the INI file.
let mut ini = Ini::new_cs();
ini.set_multiline(true);
let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;
// Read the pyproject.toml file.
let pyproject = args.pyproject.map(parse).transpose()?;
let external_config = pyproject
.as_ref()
.and_then(|pyproject| pyproject.tool.as_ref())
.map(|tool| ExternalConfig {
black: tool.black.as_ref(),
isort: tool.isort.as_ref(),
..Default::default()
})
.unwrap_or_default();
let external_config = ExternalConfig {
project: pyproject
.as_ref()
.and_then(|pyproject| pyproject.project.as_ref()),
..external_config
};
// Create Ruff's pyproject.toml section.
let pyproject = convert(&config, &external_config, args.plugin);
#[allow(clippy::print_stdout)]
{
println!("{}", toml::to_string_pretty(&pyproject)?);
}
Ok(())
}
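
Based on the `Args` struct above, a typical invocation would be `flake8-to-ruff setup.cfg --pyproject pyproject.toml --plugin flake8-quotes` (hypothetical example; the binary name follows the package name in the `pyproject.toml` above), printing the generated `[tool.ruff]` table to stdout.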

View File

@@ -0,0 +1,391 @@
use std::str::FromStr;
use anyhow::{bail, Result};
use once_cell::sync::Lazy;
use regex::Regex;
use rustc_hash::FxHashMap;
use ruff_linter::settings::types::PatternPrefixPair;
use ruff_linter::{warn_user, RuleSelector};
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
/// Parse a comma-separated list of `RuleSelector` values (e.g.,
/// "F401,E501").
pub(crate) fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
let mut codes: Vec<RuleSelector> = vec![];
for code in COMMA_SEPARATED_LIST_RE.split(value) {
let code = code.trim();
if code.is_empty() {
continue;
}
if let Ok(code) = RuleSelector::from_str(code) {
codes.push(code);
} else {
warn_user!("Unsupported prefix code: {code}");
}
}
codes
}
/// Parse a comma-separated list of strings (e.g., "__init__.py,__main__.py").
pub(crate) fn parse_strings(value: &str) -> Vec<String> {
COMMA_SEPARATED_LIST_RE
.split(value)
.map(str::trim)
.filter(|part| !part.is_empty())
.map(String::from)
.collect()
}
/// Parse a boolean.
pub(crate) fn parse_bool(value: &str) -> Result<bool> {
match value.trim() {
"true" => Ok(true),
"false" => Ok(false),
_ => bail!("Unexpected boolean value: {value}"),
}
}
#[derive(Debug)]
struct Token {
token_name: TokenType,
src: String,
}
#[derive(Debug, Copy, Clone)]
enum TokenType {
Code,
File,
Colon,
Comma,
Ws,
Eof,
}
struct State {
seen_sep: bool,
seen_colon: bool,
filenames: Vec<String>,
codes: Vec<String>,
}
impl State {
const fn new() -> Self {
Self {
seen_sep: true,
seen_colon: false,
filenames: vec![],
codes: vec![],
}
}
/// Generate the list of `PatternPrefixPair` pairs for the current
/// state.
fn parse(&self) -> Vec<PatternPrefixPair> {
let mut codes: Vec<PatternPrefixPair> = vec![];
for code in &self.codes {
if let Ok(code) = RuleSelector::from_str(code) {
for filename in &self.filenames {
codes.push(PatternPrefixPair {
pattern: filename.clone(),
prefix: code.clone(),
});
}
} else {
warn_user!("Unsupported prefix code: {code}");
}
}
codes
}
}
/// Tokenize the raw 'files-to-codes' mapping.
fn tokenize_files_to_codes_mapping(value: &str) -> Vec<Token> {
let mut tokens = vec![];
let mut i = 0;
while i < value.len() {
for (token_re, token_name) in [
(
Regex::new(r"([A-Z]+[0-9]*)(?:$|\s|,)").unwrap(),
TokenType::Code,
),
(Regex::new(r"([^\s:,]+)").unwrap(), TokenType::File),
(Regex::new(r"(\s*:\s*)").unwrap(), TokenType::Colon),
(Regex::new(r"(\s*,\s*)").unwrap(), TokenType::Comma),
(Regex::new(r"(\s+)").unwrap(), TokenType::Ws),
] {
if let Some(cap) = token_re.captures(&value[i..]) {
let mat = cap.get(1).unwrap();
if mat.start() == 0 {
tokens.push(Token {
token_name,
src: mat.as_str().trim().to_string(),
});
i += mat.end();
break;
}
}
}
}
tokens.push(Token {
token_name: TokenType::Eof,
src: String::new(),
});
tokens
}
/// Parse a 'files-to-codes' mapping, mimicking Flake8's internal logic.
/// See: <https://github.com/PyCQA/flake8/blob/7dfe99616fc2f07c0017df2ba5fa884158f3ea8a/src/flake8/utils.py#L45>
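///
/// For example (illustrative), the input `"docs/*: D, setup.py: E501"` yields
/// the pairs `("docs/*", "D")` and `("setup.py", "E501")`.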
pub(crate) fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
if value.trim().is_empty() {
return Ok(vec![]);
}
let mut codes: Vec<PatternPrefixPair> = vec![];
let mut state = State::new();
for token in tokenize_files_to_codes_mapping(value) {
if matches!(token.token_name, TokenType::Comma | TokenType::Ws) {
state.seen_sep = true;
} else if !state.seen_colon {
if matches!(token.token_name, TokenType::Colon) {
state.seen_colon = true;
state.seen_sep = true;
} else if state.seen_sep && matches!(token.token_name, TokenType::File) {
state.filenames.push(token.src);
state.seen_sep = false;
} else {
bail!("Unexpected token: {:?}", token.token_name);
}
} else {
if matches!(token.token_name, TokenType::Eof) {
codes.extend(state.parse());
state = State::new();
} else if state.seen_sep && matches!(token.token_name, TokenType::Code) {
state.codes.push(token.src);
state.seen_sep = false;
} else if state.seen_sep && matches!(token.token_name, TokenType::File) {
codes.extend(state.parse());
state = State::new();
state.filenames.push(token.src);
state.seen_sep = false;
} else {
bail!("Unexpected token: {:?}", token.token_name);
}
}
}
Ok(codes)
}
/// Collect a list of `PatternPrefixPair` structs as a map from file pattern to rule selectors.
pub(crate) fn collect_per_file_ignores(
pairs: Vec<PatternPrefixPair>,
) -> FxHashMap<String, Vec<RuleSelector>> {
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
for pair in pairs {
per_file_ignores
.entry(pair.pattern)
.or_insert_with(Vec::new)
.push(pair.prefix);
}
per_file_ignores
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use ruff_linter::codes;
use ruff_linter::registry::Linter;
use ruff_linter::settings::types::PatternPrefixPair;
use ruff_linter::RuleSelector;
use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};
#[test]
fn it_parses_prefix_codes() {
let actual = parse_prefix_codes("");
let expected: Vec<RuleSelector> = vec![];
assert_eq!(actual, expected);
let actual = parse_prefix_codes(" ");
let expected: Vec<RuleSelector> = vec![];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401");
let expected = vec![codes::Pyflakes::_401.into()];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401,");
let expected = vec![codes::Pyflakes::_401.into()];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401,E501");
let expected = vec![
codes::Pyflakes::_401.into(),
codes::Pycodestyle::E501.into(),
];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401, E501");
let expected = vec![
codes::Pyflakes::_401.into(),
codes::Pycodestyle::E501.into(),
];
assert_eq!(actual, expected);
}
#[test]
fn it_parses_strings() {
let actual = parse_strings("");
let expected: Vec<String> = vec![];
assert_eq!(actual, expected);
let actual = parse_strings(" ");
let expected: Vec<String> = vec![];
assert_eq!(actual, expected);
let actual = parse_strings("__init__.py");
let expected = vec!["__init__.py".to_string()];
assert_eq!(actual, expected);
let actual = parse_strings("__init__.py,");
let expected = vec!["__init__.py".to_string()];
assert_eq!(actual, expected);
let actual = parse_strings("__init__.py,__main__.py");
let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
assert_eq!(actual, expected);
let actual = parse_strings("__init__.py, __main__.py");
let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
assert_eq!(actual, expected);
}
#[test]
fn it_parse_files_to_codes_mapping() -> Result<()> {
let actual = parse_files_to_codes_mapping("")?;
let expected: Vec<PatternPrefixPair> = vec![];
assert_eq!(actual, expected);
let actual = parse_files_to_codes_mapping(" ")?;
let expected: Vec<PatternPrefixPair> = vec![];
assert_eq!(actual, expected);
// Ex) locust
let actual = parse_files_to_codes_mapping(
"per-file-ignores =
locust/test/*: F841
examples/*: F841
*.pyi: E302,E704"
.strip_prefix("per-file-ignores =")
.unwrap(),
)?;
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "locust/test/*".to_string(),
prefix: codes::Pyflakes::_841.into(),
},
PatternPrefixPair {
pattern: "examples/*".to_string(),
prefix: codes::Pyflakes::_841.into(),
},
];
assert_eq!(actual, expected);
// Ex) celery
let actual = parse_files_to_codes_mapping(
"per-file-ignores =
t/*,setup.py,examples/*,docs/*,extra/*:
D,"
.strip_prefix("per-file-ignores =")
.unwrap(),
)?;
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "t/*".to_string(),
prefix: Linter::Pydocstyle.into(),
},
PatternPrefixPair {
pattern: "setup.py".to_string(),
prefix: Linter::Pydocstyle.into(),
},
PatternPrefixPair {
pattern: "examples/*".to_string(),
prefix: Linter::Pydocstyle.into(),
},
PatternPrefixPair {
pattern: "docs/*".to_string(),
prefix: Linter::Pydocstyle.into(),
},
PatternPrefixPair {
pattern: "extra/*".to_string(),
prefix: Linter::Pydocstyle.into(),
},
];
assert_eq!(actual, expected);
// Ex) scrapy
let actual = parse_files_to_codes_mapping(
"per-file-ignores =
scrapy/__init__.py:E402
scrapy/core/downloader/handlers/http.py:F401
scrapy/http/__init__.py:F401
scrapy/linkextractors/__init__.py:E402,F401
scrapy/selector/__init__.py:F401
scrapy/spiders/__init__.py:E402,F401
scrapy/utils/url.py:F403,F405
tests/test_loader.py:E741"
.strip_prefix("per-file-ignores =")
.unwrap(),
)?;
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "scrapy/__init__.py".to_string(),
prefix: codes::Pycodestyle::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/http/__init__.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/linkextractors/__init__.py".to_string(),
prefix: codes::Pycodestyle::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/linkextractors/__init__.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/selector/__init__.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/spiders/__init__.py".to_string(),
prefix: codes::Pycodestyle::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/spiders/__init__.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/utils/url.py".to_string(),
prefix: codes::Pyflakes::_403.into(),
},
PatternPrefixPair {
pattern: "scrapy/utils/url.py".to_string(),
prefix: codes::Pyflakes::_405.into(),
},
PatternPrefixPair {
pattern: "tests/test_loader.py".to_string(),
prefix: codes::Pycodestyle::E741.into(),
},
];
assert_eq!(actual, expected);
Ok(())
}
}

View File

@@ -0,0 +1,10 @@
//! Extract PEP 621 configuration settings from a pyproject.toml.
use pep440_rs::VersionSpecifiers;
use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Project {
#[serde(alias = "requires-python", alias = "requires_python")]
pub(crate) requires_python: Option<VersionSpecifiers>,
}
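
A small sketch of how this feeds target-version inference (illustrative only; it mirrors the converter test above that maps `">=3.8.16, <3.11"` to `Py38`):

// Hypothetical usage, with the same imports as the converter tests.
let specifiers = VersionSpecifiers::from_str(">=3.8.16, <3.11").unwrap();
assert_eq!(
    PythonVersion::get_minimum_supported_version(&specifiers),
    Some(PythonVersion::Py38)
);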

View File

@@ -0,0 +1,368 @@
use std::collections::{BTreeSet, HashMap, HashSet};
use std::fmt;
use std::str::FromStr;
use anyhow::anyhow;
use ruff_linter::registry::Linter;
use ruff_linter::settings::types::PreviewMode;
use ruff_linter::RuleSelector;
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Plugin {
Flake82020,
Flake8Annotations,
Flake8Bandit,
Flake8BlindExcept,
Flake8BooleanTrap,
Flake8Bugbear,
Flake8Builtins,
Flake8Commas,
Flake8Comprehensions,
Flake8Datetimez,
Flake8Debugger,
Flake8Docstrings,
Flake8Eradicate,
Flake8ErrMsg,
Flake8Executable,
Flake8ImplicitStrConcat,
Flake8ImportConventions,
Flake8NoPep420,
Flake8Pie,
Flake8Print,
Flake8PytestStyle,
Flake8Quotes,
Flake8Return,
Flake8Simplify,
Flake8TidyImports,
Flake8TypeChecking,
Flake8UnusedArguments,
Flake8UsePathlib,
McCabe,
PEP8Naming,
PandasVet,
Pyupgrade,
Tryceratops,
}
impl FromStr for Plugin {
type Err = anyhow::Error;
fn from_str(string: &str) -> Result<Self, Self::Err> {
match string {
"flake8-2020" => Ok(Plugin::Flake82020),
"flake8-annotations" => Ok(Plugin::Flake8Annotations),
"flake8-bandit" => Ok(Plugin::Flake8Bandit),
"flake8-blind-except" => Ok(Plugin::Flake8BlindExcept),
"flake8-boolean-trap" => Ok(Plugin::Flake8BooleanTrap),
"flake8-bugbear" => Ok(Plugin::Flake8Bugbear),
"flake8-builtins" => Ok(Plugin::Flake8Builtins),
"flake8-commas" => Ok(Plugin::Flake8Commas),
"flake8-comprehensions" => Ok(Plugin::Flake8Comprehensions),
"flake8-datetimez" => Ok(Plugin::Flake8Datetimez),
"flake8-debugger" => Ok(Plugin::Flake8Debugger),
"flake8-docstrings" => Ok(Plugin::Flake8Docstrings),
"flake8-eradicate" => Ok(Plugin::Flake8Eradicate),
"flake8-errmsg" => Ok(Plugin::Flake8ErrMsg),
"flake8-executable" => Ok(Plugin::Flake8Executable),
"flake8-implicit-str-concat" => Ok(Plugin::Flake8ImplicitStrConcat),
"flake8-import-conventions" => Ok(Plugin::Flake8ImportConventions),
"flake8-no-pep420" => Ok(Plugin::Flake8NoPep420),
"flake8-pie" => Ok(Plugin::Flake8Pie),
"flake8-print" => Ok(Plugin::Flake8Print),
"flake8-pytest-style" => Ok(Plugin::Flake8PytestStyle),
"flake8-quotes" => Ok(Plugin::Flake8Quotes),
"flake8-return" => Ok(Plugin::Flake8Return),
"flake8-simplify" => Ok(Plugin::Flake8Simplify),
"flake8-tidy-imports" => Ok(Plugin::Flake8TidyImports),
"flake8-type-checking" => Ok(Plugin::Flake8TypeChecking),
"flake8-unused-arguments" => Ok(Plugin::Flake8UnusedArguments),
"flake8-use-pathlib" => Ok(Plugin::Flake8UsePathlib),
"mccabe" => Ok(Plugin::McCabe),
"pep8-naming" => Ok(Plugin::PEP8Naming),
"pandas-vet" => Ok(Plugin::PandasVet),
"pyupgrade" => Ok(Plugin::Pyupgrade),
"tryceratops" => Ok(Plugin::Tryceratops),
_ => Err(anyhow!("Unknown plugin: {string}")),
}
}
}
impl fmt::Debug for Plugin {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}",
match self {
Plugin::Flake82020 => "flake8-2020",
Plugin::Flake8Annotations => "flake8-annotations",
Plugin::Flake8Bandit => "flake8-bandit",
Plugin::Flake8BlindExcept => "flake8-blind-except",
Plugin::Flake8BooleanTrap => "flake8-boolean-trap",
Plugin::Flake8Bugbear => "flake8-bugbear",
Plugin::Flake8Builtins => "flake8-builtins",
Plugin::Flake8Commas => "flake8-commas",
Plugin::Flake8Comprehensions => "flake8-comprehensions",
Plugin::Flake8Datetimez => "flake8-datetimez",
Plugin::Flake8Debugger => "flake8-debugger",
Plugin::Flake8Docstrings => "flake8-docstrings",
Plugin::Flake8Eradicate => "flake8-eradicate",
Plugin::Flake8ErrMsg => "flake8-errmsg",
Plugin::Flake8Executable => "flake8-executable",
Plugin::Flake8ImplicitStrConcat => "flake8-implicit-str-concat",
Plugin::Flake8ImportConventions => "flake8-import-conventions",
Plugin::Flake8NoPep420 => "flake8-no-pep420",
Plugin::Flake8Pie => "flake8-pie",
Plugin::Flake8Print => "flake8-print",
Plugin::Flake8PytestStyle => "flake8-pytest-style",
Plugin::Flake8Quotes => "flake8-quotes",
Plugin::Flake8Return => "flake8-return",
Plugin::Flake8Simplify => "flake8-simplify",
Plugin::Flake8TidyImports => "flake8-tidy-imports",
Plugin::Flake8TypeChecking => "flake8-type-checking",
Plugin::Flake8UnusedArguments => "flake8-unused-arguments",
Plugin::Flake8UsePathlib => "flake8-use-pathlib",
Plugin::McCabe => "mccabe",
Plugin::PEP8Naming => "pep8-naming",
Plugin::PandasVet => "pandas-vet",
Plugin::Pyupgrade => "pyupgrade",
Plugin::Tryceratops => "tryceratops",
}
)
}
}
impl From<&Plugin> for Linter {
fn from(plugin: &Plugin) -> Self {
match plugin {
Plugin::Flake82020 => Linter::Flake82020,
Plugin::Flake8Annotations => Linter::Flake8Annotations,
Plugin::Flake8Bandit => Linter::Flake8Bandit,
Plugin::Flake8BlindExcept => Linter::Flake8BlindExcept,
Plugin::Flake8BooleanTrap => Linter::Flake8BooleanTrap,
Plugin::Flake8Bugbear => Linter::Flake8Bugbear,
Plugin::Flake8Builtins => Linter::Flake8Builtins,
Plugin::Flake8Commas => Linter::Flake8Commas,
Plugin::Flake8Comprehensions => Linter::Flake8Comprehensions,
Plugin::Flake8Datetimez => Linter::Flake8Datetimez,
Plugin::Flake8Debugger => Linter::Flake8Debugger,
Plugin::Flake8Docstrings => Linter::Pydocstyle,
Plugin::Flake8Eradicate => Linter::Eradicate,
Plugin::Flake8ErrMsg => Linter::Flake8ErrMsg,
Plugin::Flake8Executable => Linter::Flake8Executable,
Plugin::Flake8ImplicitStrConcat => Linter::Flake8ImplicitStrConcat,
Plugin::Flake8ImportConventions => Linter::Flake8ImportConventions,
Plugin::Flake8NoPep420 => Linter::Flake8NoPep420,
Plugin::Flake8Pie => Linter::Flake8Pie,
Plugin::Flake8Print => Linter::Flake8Print,
Plugin::Flake8PytestStyle => Linter::Flake8PytestStyle,
Plugin::Flake8Quotes => Linter::Flake8Quotes,
Plugin::Flake8Return => Linter::Flake8Return,
Plugin::Flake8Simplify => Linter::Flake8Simplify,
Plugin::Flake8TidyImports => Linter::Flake8TidyImports,
Plugin::Flake8TypeChecking => Linter::Flake8TypeChecking,
Plugin::Flake8UnusedArguments => Linter::Flake8UnusedArguments,
Plugin::Flake8UsePathlib => Linter::Flake8UsePathlib,
Plugin::McCabe => Linter::McCabe,
Plugin::PEP8Naming => Linter::PEP8Naming,
Plugin::PandasVet => Linter::PandasVet,
Plugin::Pyupgrade => Linter::Pyupgrade,
Plugin::Tryceratops => Linter::Tryceratops,
}
}
}
/// Infer the enabled plugins based on user-provided options.
///
/// For example, if the user specified a `mypy-init-return` setting, we should
/// infer that `flake8-annotations` is active.
pub(crate) fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> Vec<Plugin> {
let mut plugins = BTreeSet::new();
for key in flake8.keys() {
match key.as_str() {
// flake8-annotations
"suppress-none-returning" | "suppress_none_returning" => {
plugins.insert(Plugin::Flake8Annotations);
}
"suppress-dummy-args" | "suppress_dummy_args" => {
plugins.insert(Plugin::Flake8Annotations);
}
"allow-untyped-defs" | "allow_untyped_defs" => {
plugins.insert(Plugin::Flake8Annotations);
}
"allow-untyped-nested" | "allow_untyped_nested" => {
plugins.insert(Plugin::Flake8Annotations);
}
"mypy-init-return" | "mypy_init_return" => {
plugins.insert(Plugin::Flake8Annotations);
}
"dispatch-decorators" | "dispatch_decorators" => {
plugins.insert(Plugin::Flake8Annotations);
}
"overload-decorators" | "overload_decorators" => {
plugins.insert(Plugin::Flake8Annotations);
}
"allow-star-arg-any" | "allow_star_arg_any" => {
plugins.insert(Plugin::Flake8Annotations);
}
// flake8-bugbear
"extend-immutable-calls" | "extend_immutable_calls" => {
plugins.insert(Plugin::Flake8Bugbear);
}
// flake8-builtins
"builtins-ignorelist" | "builtins_ignorelist" => {
plugins.insert(Plugin::Flake8Builtins);
}
// flake8-docstrings
"docstring-convention" | "docstring_convention" => {
plugins.insert(Plugin::Flake8Docstrings);
}
// flake8-eradicate
"eradicate-aggressive" | "eradicate_aggressive" => {
plugins.insert(Plugin::Flake8Eradicate);
}
"eradicate-whitelist" | "eradicate_whitelist" => {
plugins.insert(Plugin::Flake8Eradicate);
}
"eradicate-whitelist-extend" | "eradicate_whitelist_extend" => {
plugins.insert(Plugin::Flake8Eradicate);
}
// flake8-pytest-style
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
// flake8-quotes
"quotes" | "inline-quotes" | "inline_quotes" => {
plugins.insert(Plugin::Flake8Quotes);
}
"multiline-quotes" | "multiline_quotes" => {
plugins.insert(Plugin::Flake8Quotes);
}
"docstring-quotes" | "docstring_quotes" => {
plugins.insert(Plugin::Flake8Quotes);
}
"avoid-escape" | "avoid_escape" => {
plugins.insert(Plugin::Flake8Quotes);
}
// flake8-tidy-imports
"ban-relative-imports" | "ban_relative_imports" => {
plugins.insert(Plugin::Flake8TidyImports);
}
"banned-modules" | "banned_modules" => {
plugins.insert(Plugin::Flake8TidyImports);
}
// mccabe
"max-complexity" | "max_complexity" => {
plugins.insert(Plugin::McCabe);
}
// pep8-naming
"ignore-names" | "ignore_names" => {
plugins.insert(Plugin::PEP8Naming);
}
"classmethod-decorators" | "classmethod_decorators" => {
plugins.insert(Plugin::PEP8Naming);
}
"staticmethod-decorators" | "staticmethod_decorators" => {
plugins.insert(Plugin::PEP8Naming);
}
"max-string-length" | "max_string_length" => {
plugins.insert(Plugin::Flake8ErrMsg);
}
_ => {}
}
}
Vec::from_iter(plugins)
}
/// Infer the enabled plugins based on the referenced prefixes.
///
/// For example, if the user ignores `ANN101`, we should infer that
/// `flake8-annotations` is active.
pub(crate) fn infer_plugins_from_codes(selectors: &HashSet<RuleSelector>) -> Vec<Plugin> {
// Ignore cases in which we've knowingly changed rule prefixes.
[
Plugin::Flake82020,
Plugin::Flake8Annotations,
Plugin::Flake8Bandit,
// Plugin::Flake8BlindExcept,
Plugin::Flake8BooleanTrap,
Plugin::Flake8Bugbear,
Plugin::Flake8Builtins,
// Plugin::Flake8Commas,
Plugin::Flake8Comprehensions,
Plugin::Flake8Datetimez,
Plugin::Flake8Debugger,
Plugin::Flake8Docstrings,
// Plugin::Flake8Eradicate,
Plugin::Flake8ErrMsg,
Plugin::Flake8Executable,
Plugin::Flake8ImplicitStrConcat,
// Plugin::Flake8ImportConventions,
Plugin::Flake8NoPep420,
Plugin::Flake8Pie,
Plugin::Flake8Print,
Plugin::Flake8PytestStyle,
Plugin::Flake8Quotes,
Plugin::Flake8Return,
Plugin::Flake8Simplify,
// Plugin::Flake8TidyImports,
// Plugin::Flake8TypeChecking,
Plugin::Flake8UnusedArguments,
// Plugin::Flake8UsePathlib,
Plugin::McCabe,
Plugin::PEP8Naming,
Plugin::PandasVet,
Plugin::Tryceratops,
]
.into_iter()
.filter(|plugin| {
for selector in selectors {
if selector
.rules(PreviewMode::Disabled)
.any(|rule| Linter::from(plugin).rules().any(|r| r == rule))
{
return true;
}
}
false
})
.collect()
}
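// Illustrative example: a Flake8 config that ignores `B008` references a
// flake8-bugbear code, so `Plugin::Flake8Bugbear` is inferred even when no
// bugbear-specific option is set.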
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use super::{infer_plugins_from_options, Plugin};
#[test]
fn it_infers_plugins() {
let actual = infer_plugins_from_options(&HashMap::from([(
"inline-quotes".to_string(),
Some("single".to_string()),
)]));
let expected = vec![Plugin::Flake8Quotes];
assert_eq!(actual, expected);
let actual = infer_plugins_from_options(&HashMap::from([(
"staticmethod-decorators".to_string(),
Some("[]".to_string()),
)]));
let expected = vec![Plugin::PEP8Naming];
assert_eq!(actual, expected);
}
}

View File

@@ -0,0 +1,26 @@
use std::path::Path;
use anyhow::Result;
use serde::{Deserialize, Serialize};
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct Tools {
pub(crate) black: Option<Black>,
pub(crate) isort: Option<Isort>,
}
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct Pyproject {
pub(crate) tool: Option<Tools>,
pub(crate) project: Option<Project>,
}
pub(crate) fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
let contents = std::fs::read_to_string(path)?;
let pyproject = toml::from_str::<Pyproject>(&contents)?;
Ok(pyproject)
}
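// Illustrative example: `parse("pyproject.toml")` surfaces the `[tool.black]`,
// `[tool.isort]`, and `[project]` tables consumed by the converter above.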

View File

@@ -1,80 +0,0 @@
use std::{fs, path::Path, process::Command};
fn main() {
// The workspace root directory is not available without walking up the tree
// https://github.com/rust-lang/cargo/issues/3946
let workspace_root = Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap())
.join("..")
.join("..");
commit_info(&workspace_root);
#[allow(clippy::disallowed_methods)]
let target = std::env::var("TARGET").unwrap();
println!("cargo:rustc-env=RUST_HOST_TARGET={target}");
}
fn commit_info(workspace_root: &Path) {
// If not in a git repository, do not attempt to retrieve commit information
let git_dir = workspace_root.join(".git");
if !git_dir.exists() {
return;
}
let git_head_path = git_dir.join("HEAD");
println!(
"cargo:rerun-if-changed={}",
git_head_path.as_path().display()
);
let git_head_contents = fs::read_to_string(git_head_path);
if let Ok(git_head_contents) = git_head_contents {
// The contents are either a commit or a reference in the following formats
// - "<commit>" when the head is detached
// - "ref <ref>" when working on a branch
// If a commit, checking if the HEAD file has changed is sufficient
// If a ref, we need to add the head file for that ref to rebuild on commit
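// For example (illustrative values): on a branch, HEAD contains
// "ref: refs/heads/main", so the rerun trigger below watches
// ".git/refs/heads/main"; when detached, HEAD holds a bare commit hash.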
let mut git_ref_parts = git_head_contents.split_whitespace();
git_ref_parts.next();
if let Some(git_ref) = git_ref_parts.next() {
let git_ref_path = git_dir.join(git_ref);
println!(
"cargo:rerun-if-changed={}",
git_ref_path.as_path().display()
);
}
}
let output = match Command::new("git")
.arg("log")
.arg("-1")
.arg("--date=short")
.arg("--abbrev=9")
.arg("--format=%H %h %cd %(describe)")
.output()
{
Ok(output) if output.status.success() => output,
_ => return,
};
let stdout = String::from_utf8(output.stdout).unwrap();
let mut parts = stdout.split_whitespace();
let mut next = || parts.next().unwrap();
println!("cargo:rustc-env=RUFF_COMMIT_HASH={}", next());
println!("cargo:rustc-env=RUFF_COMMIT_SHORT_HASH={}", next());
println!("cargo:rustc-env=RUFF_COMMIT_DATE={}", next());
// Describe can fail for some commits
// https://git-scm.com/docs/pretty-formats#Documentation/pretty-formats.txt-emdescribeoptionsem
if let Some(describe) = parts.next() {
let mut describe_parts = describe.split('-');
println!(
"cargo:rustc-env=RUFF_LAST_TAG={}",
describe_parts.next().unwrap()
);
// If this is the tagged commit, this component will be missing
println!(
"cargo:rustc-env=RUFF_LAST_TAG_DISTANCE={}",
describe_parts.next().unwrap_or("0")
);
}
}

View File

@@ -1 +0,0 @@
print("All formatted!")

View File

@@ -1,2 +0,0 @@
[tool.ruff]
select = []

View File

@@ -1,2 +0,0 @@
[tool.ruff]
include = ["a.py", "subdirectory/c.py"]

View File

@@ -1,413 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "4f8ce941-1492-4d4e-8ab5-70d733fe891a",
"metadata": {},
"outputs": [],
"source": [
"%config ZMQInteractiveShell.ast_node_interactivity=\"last_expr_or_assign\""
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "721ec705-0c65-4bfb-9809-7ed8bc534186",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Assignment statement without a semicolon\n",
"x = 1"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "de50e495-17e5-41cc-94bd-565757555d7e",
"metadata": {},
"outputs": [],
"source": [
"# Assignment statement with a semicolon\n",
"x = 1;\n",
"x = 1;"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "39e31201-23da-44eb-8684-41bba3663991",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"2"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Augmented assignment without a semicolon\n",
"x += 1"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "6b73d3dd-c73a-4697-9e97-e109a6c1fbab",
"metadata": {},
"outputs": [],
"source": [
"# Augmented assignment without a semicolon\n",
"x += 1;\n",
"x += 1; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "2a3e5b86-aa5b-46ba-b9c6-0386d876f58c",
"metadata": {},
"outputs": [],
"source": [
"# Multiple assignment without a semicolon\n",
"x = y = 1"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "07f89e51-9357-4cfb-8fc5-76fb75e35949",
"metadata": {},
"outputs": [],
"source": [
"# Multiple assignment with a semicolon\n",
"x = y = 1;\n",
"x = y = 1;"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "c22b539d-473e-48f8-a236-625e58c47a00",
"metadata": {},
"outputs": [],
"source": [
"# Tuple unpacking without a semicolon\n",
"x, y = 1, 2"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "12c87940-a0d5-403b-a81c-7507eb06dc7e",
"metadata": {},
"outputs": [],
"source": [
"# Tuple unpacking with a semicolon (irrelevant)\n",
"x, y = 1, 2;\n",
"x, y = 1, 2; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "5a768c76-6bc4-470c-b37e-8cc14bc6caf4",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Annotated assignment statement without a semicolon\n",
"x: int = 1"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "21bfda82-1a9a-4ba1-9078-74ac480804b5",
"metadata": {},
"outputs": [],
"source": [
"# Annotated assignment statement without a semicolon\n",
"x: int = 1;\n",
"x: int = 1; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "09929999-ff29-4d10-ad2b-e665af15812d",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Assignment expression without a semicolon\n",
"(x := 1)"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "32a83217-1bad-4f61-855e-ffcdb119c763",
"metadata": {},
"outputs": [],
"source": [
"# Assignment expression with a semicolon\n",
"(x := 1);\n",
"(x := 1); # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "61b81865-277e-4964-b03e-eb78f1f318eb",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x = 1\n",
"# Expression without a semicolon\n",
"x"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "974c29be-67e1-4000-95fa-6ca118a63bad",
"metadata": {},
"outputs": [],
"source": [
"x = 1\n",
"# Expression with a semicolon\n",
"x;"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "cfeb1757-46d6-4f13-969f-a283b6d0304f",
"metadata": {},
"outputs": [],
"source": [
"class Point:\n",
" def __init__(self, x, y):\n",
" self.x = x\n",
" self.y = y\n",
"\n",
"\n",
"p = Point(0, 0);"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "2ee7f1a5-ccfe-4004-bfa4-ef834a58da97",
"metadata": {},
"outputs": [],
"source": [
"# Assignment statement where the left is an attribute access doesn't\n",
"# print the value.\n",
"p.x = 1;"
]
},
{
"cell_type": "code",
"execution_count": 18,
"id": "3e49370a-048b-474d-aa0a-3d1d4a73ad37",
"metadata": {},
"outputs": [],
"source": [
"data = {}\n",
"\n",
"# Neither does the subscript node\n",
"data[\"foo\"] = 1;"
]
},
{
"cell_type": "code",
"execution_count": 19,
"id": "d594bdd3-eaa9-41ef-8cda-cf01bc273b2d",
"metadata": {},
"outputs": [],
"source": [
"if (x := 1):\n",
" # It should be the top level statement\n",
" x"
]
},
{
"cell_type": "code",
"execution_count": 20,
"id": "e532f0cf-80c7-42b7-8226-6002fcf74fb6",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Parentheses with comments\n",
"(\n",
" x := 1 # comment\n",
") # comment"
]
},
{
"cell_type": "code",
"execution_count": 21,
"id": "473c5d62-871b-46ed-8a34-27095243f462",
"metadata": {},
"outputs": [],
"source": [
"# Parentheses with comments\n",
"(\n",
" x := 1 # comment\n",
"); # comment"
]
},
{
"cell_type": "code",
"execution_count": 22,
"id": "8c3c2361-f49f-45fe-bbe3-7e27410a8a86",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'Hello world!'"
]
},
"execution_count": 22,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\"\"\"Hello world!\"\"\""
]
},
{
"cell_type": "code",
"execution_count": 23,
"id": "23dbe9b5-3f68-4890-ab2d-ab0dbfd0712a",
"metadata": {},
"outputs": [],
"source": [
"\"\"\"Hello world!\"\"\"; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 24,
"id": "3ce33108-d95d-4c70-83d1-0d4fd36a2951",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'x = 1'"
]
},
"execution_count": 24,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x = 1\n",
"f\"x = {x}\""
]
},
{
"cell_type": "code",
"execution_count": 25,
"id": "654a4a67-de43-4684-824a-9451c67db48f",
"metadata": {},
"outputs": [],
"source": [
"x = 1\n",
"f\"x = {x}\";\n",
"f\"x = {x}\"; # comment\n",
"# comment"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python (ruff-playground)",
"language": "python",
"name": "ruff-playground"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -1,72 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "98e1dd71-14a2-454d-9be0-061dde560b07",
"metadata": {},
"outputs": [],
"source": [
"import numpy\n",
"maths = (numpy.arange(100)**2).sum()\n",
"stats= numpy.asarray([1,2,3,4]).median()"
]
},
{
"cell_type": "markdown",
"id": "83a0b1b8",
"metadata": {},
"source": [
"A markdown cell"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ae12f012",
"metadata": {},
"outputs": [],
"source": [
"# A cell with IPython escape command\n",
"def some_function(foo, bar):\n",
" pass\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "10f3bbf9",
"metadata": {},
"outputs": [],
"source": [
"foo = %pwd\n",
"def some_function(foo,bar,):\n",
" # Another cell with IPython escape command\n",
" foo = %pwd\n",
" print(foo)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.12"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -1,3 +0,0 @@
x = 1
y=2
z = 3

File diff suppressed because it is too large

View File

@@ -1,56 +0,0 @@
use std::path::Path;
use anyhow::Result;
use ruff_linter::packaging;
use ruff_linter::settings::flags;
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, PyprojectConfig, Resolver};
use crate::args::ConfigArguments;
use crate::diagnostics::{lint_stdin, Diagnostics};
use crate::stdin::{parrot_stdin, read_from_stdin};
/// Run the linter over a single file, read from `stdin`.
pub(crate) fn check_stdin(
filename: Option<&Path>,
pyproject_config: &PyprojectConfig,
overrides: &ConfigArguments,
noqa: flags::Noqa,
fix_mode: flags::FixMode,
) -> Result<Diagnostics> {
let mut resolver = Resolver::new(pyproject_config);
if resolver.force_exclude() {
if let Some(filename) = filename {
if !python_file_at_path(filename, &mut resolver, overrides)? {
if fix_mode.is_apply() {
parrot_stdin()?;
}
return Ok(Diagnostics::default());
}
if filename.file_name().is_some_and(|name| {
match_exclusion(filename, name, &resolver.base_settings().linter.exclude)
}) {
if fix_mode.is_apply() {
parrot_stdin()?;
}
return Ok(Diagnostics::default());
}
}
}
let stdin = read_from_stdin()?;
let package_root = filename.and_then(Path::parent).and_then(|path| {
packaging::detect_package_root(path, &resolver.base_settings().linter.namespace_packages)
});
let mut diagnostics = lint_stdin(
filename,
package_root,
stdin,
resolver.base_settings(),
noqa,
fix_mode,
)?;
diagnostics.messages.sort_unstable();
Ok(diagnostics)
}
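// Behavior sketch (illustrative): when the stdin filename is excluded and a fix
// is requested, `parrot_stdin` echoes the input back unchanged, so editor
// integrations that pipe buffers through stdin never lose content.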

View File

@@ -1,898 +0,0 @@
use std::fmt::{Display, Formatter};
use std::fs::File;
use std::io;
use std::io::{stderr, stdout, Write};
use std::path::{Path, PathBuf};
use std::time::Instant;
use anyhow::Result;
use colored::Colorize;
use itertools::Itertools;
use log::{error, warn};
use rayon::iter::Either::{Left, Right};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use rustc_hash::FxHashSet;
use thiserror::Error;
use tracing::debug;
use ruff_diagnostics::SourceMap;
use ruff_linter::fs;
use ruff_linter::logging::{DisplayParseError, LogLevel};
use ruff_linter::registry::Rule;
use ruff_linter::rules::flake8_quotes::settings::Quote;
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::warn_user_once;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_python_formatter::{format_module_source, format_range, FormatModuleError, QuoteStyle};
use ruff_source_file::LineIndex;
use ruff_text_size::{TextLen, TextRange, TextSize};
use ruff_workspace::resolver::{match_exclusion, python_files_in_path, ResolvedFile, Resolver};
use ruff_workspace::FormatterSettings;
use crate::args::{ConfigArguments, FormatArguments, FormatRange};
use crate::cache::{Cache, FileCacheKey, PackageCacheMap, PackageCaches};
use crate::panic::{catch_unwind, PanicError};
use crate::resolve::resolve;
use crate::{resolve_default_files, ExitStatus};
#[derive(Debug, Copy, Clone, is_macro::Is)]
pub(crate) enum FormatMode {
/// Write the formatted contents back to the file.
Write,
/// Check if the file is formatted, but do not write the formatted contents back.
Check,
/// Check if the file is formatted, show a diff if not.
Diff,
}
impl FormatMode {
pub(crate) fn from_cli(cli: &FormatArguments) -> Self {
if cli.diff {
FormatMode::Diff
} else if cli.check {
FormatMode::Check
} else {
FormatMode::Write
}
}
}
/// Format a set of files, and return the exit status.
pub(crate) fn format(
cli: FormatArguments,
config_arguments: &ConfigArguments,
) -> Result<ExitStatus> {
let pyproject_config = resolve(config_arguments, cli.stdin_filename.as_deref())?;
let mode = FormatMode::from_cli(&cli);
let files = resolve_default_files(cli.files, false);
let (paths, resolver) = python_files_in_path(&files, &pyproject_config, config_arguments)?;
if paths.is_empty() {
warn_user_once!("No Python files found under the given path(s)");
return Ok(ExitStatus::Success);
}
if cli.range.is_some() && paths.len() > 1 {
return Err(anyhow::anyhow!(
"The `--range` option is only supported when formatting a single file but the specified paths resolve to {} files.",
paths.len()
));
}
warn_incompatible_formatter_settings(&resolver);
// Discover the package root for each Python file.
let package_roots = resolver.package_roots(
&paths
.iter()
.flatten()
.map(ResolvedFile::path)
.collect::<Vec<_>>(),
);
let caches = if cli.no_cache {
None
} else {
        // Caching doesn't respect changes to Ruff's own code, which is often
        // confusing during development, so warn on debug builds.
#[cfg(debug_assertions)]
crate::warn_user!("Detected debug build without --no-cache.");
Some(PackageCacheMap::init(&package_roots, &resolver))
};
let start = Instant::now();
let (results, mut errors): (Vec<_>, Vec<_>) = paths
.par_iter()
.filter_map(|entry| {
match entry {
Ok(resolved_file) => {
let path = resolved_file.path();
let settings = resolver.resolve(path);
let source_type = match settings.formatter.extension.get(path) {
None => match SourceType::from(path) {
SourceType::Python(source_type) => source_type,
SourceType::Toml(_) => {
// Ignore any non-Python files.
return None;
}
},
Some(language) => PySourceType::from(language),
};
// Ignore files that are excluded from formatting
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
&& match_exclusion(
path,
resolved_file.file_name(),
&settings.formatter.exclude,
)
{
return None;
}
let package = path
.parent()
.and_then(|parent| package_roots.get(parent).copied())
.flatten();
let cache_root = package.unwrap_or_else(|| path.parent().unwrap_or(path));
let cache = caches.get(cache_root);
Some(
match catch_unwind(|| {
format_path(
path,
&settings.formatter,
source_type,
mode,
cli.range,
cache,
)
}) {
Ok(inner) => inner.map(|result| FormatPathResult {
path: resolved_file.path().to_path_buf(),
result,
}),
Err(error) => Err(FormatCommandError::Panic(
Some(resolved_file.path().to_path_buf()),
error,
)),
},
)
}
Err(err) => Some(Err(FormatCommandError::Ignore(err.clone()))),
}
})
.partition_map(|result| match result {
Ok(diagnostic) => Left(diagnostic),
Err(err) => Right(err),
});
let duration = start.elapsed();
debug!(
"Formatted {} files in {:.2?}",
results.len() + errors.len(),
duration
);
caches.persist()?;
// Report on any errors.
errors.sort_unstable_by(|a, b| a.path().cmp(&b.path()));
for error in &errors {
error!("{error}");
}
let results = FormatResults::new(results.as_slice(), mode);
match mode {
FormatMode::Write => {}
FormatMode::Check => {
results.write_changed(&mut stdout().lock())?;
}
FormatMode::Diff => {
results.write_diff(&mut stdout().lock())?;
}
}
// Report on the formatting changes.
if config_arguments.log_level >= LogLevel::Default {
if mode.is_diff() {
// Allow piping the diff to e.g. a file by writing the summary to stderr
results.write_summary(&mut stderr().lock())?;
} else {
results.write_summary(&mut stdout().lock())?;
}
}
match mode {
FormatMode::Write => {
if errors.is_empty() {
Ok(ExitStatus::Success)
} else {
Ok(ExitStatus::Error)
}
}
FormatMode::Check | FormatMode::Diff => {
if errors.is_empty() {
if results.any_formatted() {
Ok(ExitStatus::Failure)
} else {
Ok(ExitStatus::Success)
}
} else {
Ok(ExitStatus::Error)
}
}
}
}
/// Format the file at the given [`Path`].
#[tracing::instrument(level="debug", skip_all, fields(path = %path.display()))]
pub(crate) fn format_path(
path: &Path,
settings: &FormatterSettings,
source_type: PySourceType,
mode: FormatMode,
range: Option<FormatRange>,
cache: Option<&Cache>,
) -> Result<FormatResult, FormatCommandError> {
if let Some(cache) = cache {
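        // Skip files whose cache entry is still valid; the cache key is
        // derived from the file's metadata, so unchanged files are cheap to skip.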
let relative_path = cache
.relative_path(path)
.expect("wrong package cache for file");
if let Ok(cache_key) = FileCacheKey::from_path(path) {
if cache.is_formatted(relative_path, &cache_key) {
return Ok(FormatResult::Unchanged);
}
}
}
// Extract the sources from the file.
let unformatted = match SourceKind::from_path(path, source_type) {
Ok(Some(source_kind)) => source_kind,
// Non-Python Jupyter notebook.
Ok(None) => return Ok(FormatResult::Skipped),
Err(err) => {
return Err(FormatCommandError::Read(Some(path.to_path_buf()), err));
}
};
// Don't write back to the cache if formatting a range.
let cache = cache.filter(|_| range.is_none());
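    // A range-formatted file is not necessarily fully formatted, so recording it
    // as formatted would wrongly skip it on a subsequent full-format run.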
// Format the source.
let format_result = match format_source(&unformatted, source_type, Some(path), settings, range)?
{
FormattedSource::Formatted(formatted) => match mode {
FormatMode::Write => {
let mut writer = File::create(path).map_err(|err| {
FormatCommandError::Write(Some(path.to_path_buf()), err.into())
})?;
formatted
.write(&mut writer)
.map_err(|err| FormatCommandError::Write(Some(path.to_path_buf()), err))?;
if let Some(cache) = cache {
if let Ok(cache_key) = FileCacheKey::from_path(path) {
let relative_path = cache
.relative_path(path)
.expect("wrong package cache for file");
cache.set_formatted(relative_path.to_path_buf(), &cache_key);
}
}
FormatResult::Formatted
}
FormatMode::Check => FormatResult::Formatted,
FormatMode::Diff => FormatResult::Diff {
unformatted,
formatted,
},
},
FormattedSource::Unchanged => {
if let Some(cache) = cache {
if let Ok(cache_key) = FileCacheKey::from_path(path) {
let relative_path = cache
.relative_path(path)
.expect("wrong package cache for file");
cache.set_formatted(relative_path.to_path_buf(), &cache_key);
}
}
FormatResult::Unchanged
}
};
Ok(format_result)
}
#[derive(Debug)]
pub(crate) enum FormattedSource {
/// The source was formatted, and the [`SourceKind`] contains the transformed source code.
Formatted(SourceKind),
/// The source was unchanged.
Unchanged,
}
impl From<FormattedSource> for FormatResult {
fn from(value: FormattedSource) -> Self {
match value {
FormattedSource::Formatted(_) => FormatResult::Formatted,
FormattedSource::Unchanged => FormatResult::Unchanged,
}
}
}
/// Format a [`SourceKind`], returning the transformed [`SourceKind`], or
/// [`FormattedSource::Unchanged`] if the source was unchanged.
pub(crate) fn format_source(
source_kind: &SourceKind,
source_type: PySourceType,
path: Option<&Path>,
settings: &FormatterSettings,
range: Option<FormatRange>,
) -> Result<FormattedSource, FormatCommandError> {
match &source_kind {
SourceKind::Python(unformatted) => {
let options = settings.to_format_options(source_type, unformatted);
let formatted = if let Some(range) = range {
let line_index = LineIndex::from_source_text(unformatted);
let byte_range = range.to_text_range(unformatted, &line_index);
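                // Convert the user-facing line/column range into a byte-offset
                // `TextRange`, then splice the formatted slice back into the source.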
format_range(unformatted, byte_range, options).map(|formatted_range| {
let mut formatted = unformatted.to_string();
formatted.replace_range(
std::ops::Range::<usize>::from(formatted_range.source_range()),
formatted_range.as_code(),
);
formatted
})
} else {
// Using `Printed::into_code` requires adding `ruff_formatter` as a direct dependency, and I suspect that Rust can optimize the closure away regardless.
#[allow(clippy::redundant_closure_for_method_calls)]
format_module_source(unformatted, options).map(|formatted| formatted.into_code())
};
let formatted = formatted.map_err(|err| {
if let FormatModuleError::ParseError(err) = err {
DisplayParseError::from_source_kind(
err,
path.map(Path::to_path_buf),
source_kind,
)
.into()
} else {
FormatCommandError::Format(path.map(Path::to_path_buf), err)
}
})?;
if formatted.len() == unformatted.len() && formatted == *unformatted {
Ok(FormattedSource::Unchanged)
} else {
Ok(FormattedSource::Formatted(SourceKind::Python(formatted)))
}
}
SourceKind::IpyNotebook(notebook) => {
if !notebook.is_python_notebook() {
return Ok(FormattedSource::Unchanged);
}
if range.is_some() {
return Err(FormatCommandError::RangeFormatNotebook(
path.map(Path::to_path_buf),
));
}
let options = settings.to_format_options(source_type, notebook.source_code());
let mut output: Option<String> = None;
let mut last: Option<TextSize> = None;
let mut source_map = SourceMap::default();
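            // The source map records how offsets in the original concatenated
            // source correspond to offsets in the rebuilt output, so the
            // notebook's cell boundaries can be remapped afterwards.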
// Format each cell individually.
for (start, end) in notebook.cell_offsets().iter().tuple_windows::<(_, _)>() {
let range = TextRange::new(*start, *end);
let unformatted = &notebook.source_code()[range];
// Format the cell.
let formatted =
format_module_source(unformatted, options.clone()).map_err(|err| {
if let FormatModuleError::ParseError(err) = err {
DisplayParseError::from_source_kind(
err,
path.map(Path::to_path_buf),
source_kind,
)
.into()
} else {
FormatCommandError::Format(path.map(Path::to_path_buf), err)
}
})?;
// If the cell is unchanged, skip it.
let formatted = formatted.as_code();
if formatted.len() == unformatted.len() && formatted == unformatted {
continue;
}
// If this is the first newly-formatted cell, initialize the output.
let output = output
.get_or_insert_with(|| String::with_capacity(notebook.source_code().len()));
// Add all contents from `last` to the current cell.
let slice = &notebook.source_code()
[TextRange::new(last.unwrap_or_default(), range.start())];
output.push_str(slice);
// Add the start source marker for the cell.
source_map.push_marker(*start, output.text_len());
// Add the cell itself.
output.push_str(formatted);
// Add the end source marker for the added cell.
source_map.push_marker(*end, output.text_len());
// Track that the cell was formatted.
last = Some(*end);
}
            // If no cell was formatted, the file is unchanged.
let (Some(mut output), Some(last)) = (output, last) else {
return Ok(FormattedSource::Unchanged);
};
// Add the remaining content.
let slice = &notebook.source_code()[usize::from(last)..];
output.push_str(slice);
// Update the notebook.
let mut formatted = notebook.clone();
formatted.update(&source_map, output);
Ok(FormattedSource::Formatted(SourceKind::IpyNotebook(
formatted,
)))
}
}
}
/// The result of an individual formatting operation.
#[derive(Debug, Clone, is_macro::Is)]
pub(crate) enum FormatResult {
/// The file was formatted.
Formatted,
    /// The file would be reformatted; holds both the unformatted and formatted [`SourceKind`]s for diffing.
Diff {
unformatted: SourceKind,
formatted: SourceKind,
},
/// The file was unchanged, as the formatted contents matched the existing contents.
Unchanged,
    /// Skipped formatting because it's an unsupported file format.
Skipped,
}
/// The coupling of a [`FormatResult`] with the path of the file that was analyzed.
#[derive(Debug)]
struct FormatPathResult {
path: PathBuf,
result: FormatResult,
}
/// The results of formatting a set of files
#[derive(Debug)]
struct FormatResults<'a> {
/// The individual formatting results.
results: &'a [FormatPathResult],
/// The format mode that was used.
mode: FormatMode,
}
impl<'a> FormatResults<'a> {
fn new(results: &'a [FormatPathResult], mode: FormatMode) -> Self {
Self { results, mode }
}
/// Returns `true` if any of the files require formatting.
fn any_formatted(&self) -> bool {
self.results.iter().any(|result| match result.result {
FormatResult::Formatted | FormatResult::Diff { .. } => true,
FormatResult::Unchanged | FormatResult::Skipped => false,
})
}
/// Write a diff of the formatting changes to the given writer.
fn write_diff(&self, f: &mut impl Write) -> io::Result<()> {
for (path, unformatted, formatted) in self
.results
.iter()
.filter_map(|result| {
if let FormatResult::Diff {
unformatted,
formatted,
} = &result.result
{
Some((result.path.as_path(), unformatted, formatted))
} else {
None
}
})
.sorted_unstable_by_key(|(path, _, _)| *path)
{
write!(f, "{}", unformatted.diff(formatted, Some(path)).unwrap())?;
}
Ok(())
}
/// Write a list of the files that would be changed to the given writer.
fn write_changed(&self, f: &mut impl Write) -> io::Result<()> {
for path in self
.results
.iter()
.filter_map(|result| {
if result.result.is_formatted() {
Some(result.path.as_path())
} else {
None
}
})
.sorted_unstable()
{
writeln!(f, "Would reformat: {}", fs::relativize_path(path).bold())?;
}
Ok(())
}
/// Write a summary of the formatting results to the given writer.
fn write_summary(&self, f: &mut impl Write) -> io::Result<()> {
// Compute the number of changed and unchanged files.
let mut changed = 0u32;
let mut unchanged = 0u32;
for result in self.results {
match &result.result {
FormatResult::Formatted => {
changed += 1;
}
FormatResult::Unchanged => unchanged += 1,
FormatResult::Diff { .. } => {
changed += 1;
}
FormatResult::Skipped => {}
}
}
// Write out a summary of the formatting results.
if changed > 0 && unchanged > 0 {
writeln!(
f,
"{} file{} {}, {} file{} {}",
changed,
if changed == 1 { "" } else { "s" },
match self.mode {
FormatMode::Write => "reformatted",
FormatMode::Check | FormatMode::Diff => "would be reformatted",
},
unchanged,
if unchanged == 1 { "" } else { "s" },
match self.mode {
FormatMode::Write => "left unchanged",
FormatMode::Check | FormatMode::Diff => "already formatted",
},
)
} else if changed > 0 {
writeln!(
f,
"{} file{} {}",
changed,
if changed == 1 { "" } else { "s" },
match self.mode {
FormatMode::Write => "reformatted",
FormatMode::Check | FormatMode::Diff => "would be reformatted",
}
)
} else if unchanged > 0 {
writeln!(
f,
"{} file{} {}",
unchanged,
if unchanged == 1 { "" } else { "s" },
match self.mode {
FormatMode::Write => "left unchanged",
FormatMode::Check | FormatMode::Diff => "already formatted",
},
)
} else {
Ok(())
}
}
}
/// An error that can occur while formatting a set of files.
#[derive(Error, Debug)]
pub(crate) enum FormatCommandError {
Ignore(#[from] ignore::Error),
Parse(#[from] DisplayParseError),
Panic(Option<PathBuf>, PanicError),
Read(Option<PathBuf>, SourceError),
Format(Option<PathBuf>, FormatModuleError),
Write(Option<PathBuf>, SourceError),
Diff(Option<PathBuf>, io::Error),
RangeFormatNotebook(Option<PathBuf>),
}
impl FormatCommandError {
fn path(&self) -> Option<&Path> {
match self {
Self::Ignore(err) => {
if let ignore::Error::WithPath { path, .. } = err {
Some(path.as_path())
} else {
None
}
}
Self::Parse(err) => err.path(),
Self::Panic(path, _)
| Self::Read(path, _)
| Self::Format(path, _)
| Self::Write(path, _)
| Self::Diff(path, _)
| Self::RangeFormatNotebook(path) => path.as_deref(),
}
}
}
impl Display for FormatCommandError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
Self::Ignore(err) => {
if let ignore::Error::WithPath { path, .. } = err {
write!(
f,
"{}{}{} {}",
"Failed to format ".bold(),
fs::relativize_path(path).bold(),
":".bold(),
err.io_error()
.map_or_else(|| err.to_string(), std::string::ToString::to_string)
)
} else {
write!(
f,
"{header} {error}",
header = "Encountered error:".bold(),
error = err
.io_error()
.map_or_else(|| err.to_string(), std::string::ToString::to_string)
)
}
}
Self::Parse(err) => {
write!(f, "{err}")
}
Self::Read(path, err) => {
if let Some(path) = path {
write!(
f,
"{}{}{} {err}",
"Failed to read ".bold(),
fs::relativize_path(path).bold(),
":".bold()
)
} else {
write!(f, "{header} {err}", header = "Failed to read:".bold())
}
}
Self::Write(path, err) => {
if let Some(path) = path {
write!(
f,
"{}{}{} {err}",
"Failed to write ".bold(),
fs::relativize_path(path).bold(),
":".bold()
)
} else {
write!(f, "{header} {err}", header = "Failed to write:".bold())
}
}
Self::Format(path, err) => {
if let Some(path) = path {
write!(
f,
"{}{}{} {err}",
"Failed to format ".bold(),
fs::relativize_path(path).bold(),
":".bold()
)
} else {
write!(f, "{header} {err}", header = "Failed to format:".bold())
}
}
Self::Diff(path, err) => {
if let Some(path) = path {
write!(
f,
"{}{}{} {err}",
"Failed to generate diff for ".bold(),
fs::relativize_path(path).bold(),
":".bold()
)
} else {
write!(
f,
"{header} {err}",
header = "Failed to generate diff:".bold(),
)
}
}
Self::RangeFormatNotebook(path) => {
if let Some(path) = path {
write!(
f,
"{header}{path}{colon} Range formatting isn't supported for notebooks.",
header = "Failed to format ".bold(),
path = fs::relativize_path(path).bold(),
colon = ":".bold()
)
} else {
write!(
f,
"{header} Range formatting isn't supported for notebooks",
header = "Failed to format:".bold()
)
}
}
Self::Panic(path, err) => {
let message = r"This indicates a bug in Ruff. If you could open an issue at:
https://github.com/astral-sh/ruff/issues/new?title=%5BFormatter%20panic%5D
...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
";
if let Some(path) = path {
write!(
f,
"{}{}{} {message}\n{err}",
"Panicked while formatting ".bold(),
fs::relativize_path(path).bold(),
":".bold()
)
} else {
write!(
f,
"{} {message}\n{err}",
"Panicked while formatting.".bold()
)
}
}
}
}
}
pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
// First, collect all rules that are incompatible regardless of the linter-specific settings.
let mut incompatible_rules = FxHashSet::default();
for setting in resolver.settings() {
for rule in [
// The formatter might collapse implicit string concatenation on a single line.
Rule::SingleLineImplicitStringConcatenation,
            // Flags missing trailing commas when each argument is on its own line:
// ```python
// def args(
// aaaaaaaa, bbbbbbbbb, cccccccccc, ddddddddd, eeeeeeee, ffffff, gggggggggggg, hhhh
// ):
// pass
// ```
Rule::MissingTrailingComma,
] {
if setting.linter.rules.enabled(rule) {
incompatible_rules.insert(rule);
}
}
}
if !incompatible_rules.is_empty() {
let mut rule_names: Vec<_> = incompatible_rules
.into_iter()
.map(|rule| format!("`{}`", rule.noqa_code()))
.collect();
rule_names.sort();
warn_user_once!("The following rules may cause conflicts when used with the formatter: {}. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `select` or `extend-select` configuration, or adding them to the `ignore` configuration.", rule_names.join(", "));
}
// Next, validate settings-specific incompatibilities.
for setting in resolver.settings() {
// Validate all rules that rely on tab styles.
if setting.linter.rules.enabled(Rule::TabIndentation)
&& setting.formatter.indent_style.is_tab()
{
warn_user_once!("The `format.indent-style=\"tab\"` option is incompatible with `W191`, which lints against all uses of tabs. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `\"space\"`.");
}
// Validate all rules that rely on tab styles.
if setting.linter.rules.enabled(Rule::IndentWithSpaces)
&& setting.formatter.indent_style.is_tab()
{
            warn_user_once!("The `format.indent-style=\"tab\"` option is incompatible with `D206`, which requires space-based indentation. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `\"space\"`.");
}
// Validate all rules that rely on custom indent widths.
if setting.linter.rules.any_enabled(&[
Rule::IndentationWithInvalidMultiple,
Rule::IndentationWithInvalidMultipleComment,
]) && setting.formatter.indent_width.value() != 4
{
warn_user_once!("The `format.indent-width` option with a value other than 4 is incompatible with `E111` and `E114`. We recommend disabling these rules when using the formatter, which enforces a consistent indentation width. Alternatively, set the `format.indent-width` option to `4`.");
}
// Validate all rules that rely on quote styles.
if setting
.linter
.rules
.any_enabled(&[Rule::BadQuotesInlineString, Rule::AvoidableEscapedQuote])
{
match (
setting.linter.flake8_quotes.inline_quotes,
setting.formatter.quote_style,
) {
(Quote::Double, QuoteStyle::Single) => {
warn_user_once!("The `flake8-quotes.inline-quotes=\"double\"` option is incompatible with the formatter's `format.quote-style=\"single\"`. We recommend disabling `Q000` and `Q003` when using the formatter, which enforces a consistent quote style. Alternatively, set both options to either `\"single\"` or `\"double\"`.");
}
(Quote::Single, QuoteStyle::Double) => {
warn_user_once!("The `flake8-quotes.inline-quotes=\"single\"` option is incompatible with the formatter's `format.quote-style=\"double\"`. We recommend disabling `Q000` and `Q003` when using the formatter, which enforces a consistent quote style. Alternatively, set both options to either `\"single\"` or `\"double\"`.");
}
_ => {}
}
}
if setting.linter.rules.enabled(Rule::BadQuotesMultilineString)
&& setting.linter.flake8_quotes.multiline_quotes == Quote::Single
{
            warn_user_once!("The `flake8-quotes.multiline-quotes=\"single\"` option is incompatible with the formatter. We recommend disabling `Q001` when using the formatter, which enforces double quotes for multiline strings. Alternatively, set the `flake8-quotes.multiline-quotes` option to `\"double\"`.");
}
if setting.linter.rules.enabled(Rule::BadQuotesDocstring)
&& setting.linter.flake8_quotes.docstring_quotes == Quote::Single
{
            warn_user_once!("The `flake8-quotes.docstring-quotes=\"single\"` option is incompatible with the formatter. We recommend disabling `Q002` when using the formatter, which enforces double quotes for docstrings. Alternatively, set the `flake8-quotes.docstring-quotes` option to `\"double\"`.");
}
// Validate all isort settings.
if setting.linter.rules.enabled(Rule::UnsortedImports) {
            // The formatter removes empty lines if the value is larger than 2 but always inserts an empty line after imports.
// Two empty lines are okay because `isort` only uses this setting for top-level imports (not in nested blocks).
if !matches!(setting.linter.isort.lines_after_imports, 1 | 2 | -1) {
warn_user_once!("The isort option `isort.lines-after-imports` with a value other than `-1`, `1` or `2` is incompatible with the formatter. To avoid unexpected behavior, we recommend setting the option to one of: `2`, `1`, or `-1` (default).");
}
// Values larger than two get reduced to one line by the formatter if the import is in a nested block.
if setting.linter.isort.lines_between_types > 1 {
warn_user_once!("The isort option `isort.lines-between-types` with a value greater than 1 is incompatible with the formatter. To avoid unexpected behavior, we recommend setting the option to one of: `1` or `0` (default).");
}
            // isort inserts a trailing comma which the formatter preserves, but only if `skip-magic-trailing-comma` is false.
// This isn't relevant when using `force-single-line`, since isort will never include a trailing comma in that case.
if setting.formatter.magic_trailing_comma.is_ignore()
&& !setting.linter.isort.force_single_line
{
if setting.linter.isort.force_wrap_aliases {
warn_user_once!("The isort option `isort.force-wrap-aliases` is incompatible with the formatter `format.skip-magic-trailing-comma=true` option. To avoid unexpected behavior, we recommend either setting `isort.force-wrap-aliases=false` or `format.skip-magic-trailing-comma=false`.");
}
if setting.linter.isort.split_on_trailing_comma {
warn_user_once!("The isort option `isort.split-on-trailing-comma` is incompatible with the formatter `format.skip-magic-trailing-comma=true` option. To avoid unexpected behavior, we recommend either setting `isort.split-on-trailing-comma=false` or `format.skip-magic-trailing-comma=false`.");
}
}
}
}
}

View File

@@ -1,142 +0,0 @@
use std::io::stdout;
use std::path::Path;
use anyhow::Result;
use log::error;
use ruff_linter::source_kind::SourceKind;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, Resolver};
use ruff_workspace::FormatterSettings;
use crate::args::{ConfigArguments, FormatArguments, FormatRange};
use crate::commands::format::{
format_source, warn_incompatible_formatter_settings, FormatCommandError, FormatMode,
FormatResult, FormattedSource,
};
use crate::resolve::resolve;
use crate::stdin::{parrot_stdin, read_from_stdin};
use crate::ExitStatus;
/// Run the formatter over a single file, read from `stdin`.
pub(crate) fn format_stdin(
cli: &FormatArguments,
config_arguments: &ConfigArguments,
) -> Result<ExitStatus> {
let pyproject_config = resolve(config_arguments, cli.stdin_filename.as_deref())?;
let mut resolver = Resolver::new(&pyproject_config);
warn_incompatible_formatter_settings(&resolver);
let mode = FormatMode::from_cli(cli);
if resolver.force_exclude() {
if let Some(filename) = cli.stdin_filename.as_deref() {
if !python_file_at_path(filename, &mut resolver, config_arguments)? {
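                // The file is excluded: echo stdin back to stdout so that
                // piping through `ruff format -` stays lossless.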
if mode.is_write() {
parrot_stdin()?;
}
return Ok(ExitStatus::Success);
}
if filename.file_name().is_some_and(|name| {
match_exclusion(filename, name, &resolver.base_settings().formatter.exclude)
}) {
if mode.is_write() {
parrot_stdin()?;
}
return Ok(ExitStatus::Success);
}
}
}
let path = cli.stdin_filename.as_deref();
let settings = &resolver.base_settings().formatter;
let source_type = match path.and_then(|path| settings.extension.get(path)) {
None => match path.map(SourceType::from).unwrap_or_default() {
SourceType::Python(source_type) => source_type,
SourceType::Toml(_) => {
if mode.is_write() {
parrot_stdin()?;
}
return Ok(ExitStatus::Success);
}
},
Some(language) => PySourceType::from(language),
};
// Format the file.
match format_source_code(path, cli.range, settings, source_type, mode) {
Ok(result) => match mode {
FormatMode::Write => Ok(ExitStatus::Success),
FormatMode::Check | FormatMode::Diff => {
if result.is_formatted() {
Ok(ExitStatus::Failure)
} else {
Ok(ExitStatus::Success)
}
}
},
Err(err) => {
error!("{err}");
Ok(ExitStatus::Error)
}
}
}
/// Format source code read from `stdin`.
fn format_source_code(
path: Option<&Path>,
range: Option<FormatRange>,
settings: &FormatterSettings,
source_type: PySourceType,
mode: FormatMode,
) -> Result<FormatResult, FormatCommandError> {
// Read the source from stdin.
let source_code = read_from_stdin()
.map_err(|err| FormatCommandError::Read(path.map(Path::to_path_buf), err.into()))?;
let source_kind = match SourceKind::from_source_code(source_code, source_type) {
Ok(Some(source_kind)) => source_kind,
Ok(None) => return Ok(FormatResult::Unchanged),
Err(err) => {
return Err(FormatCommandError::Read(path.map(Path::to_path_buf), err));
}
};
// Format the source.
let formatted = format_source(&source_kind, source_type, path, settings, range)?;
match &formatted {
FormattedSource::Formatted(formatted) => match mode {
FormatMode::Write => {
let mut writer = stdout().lock();
formatted
.write(&mut writer)
.map_err(|err| FormatCommandError::Write(path.map(Path::to_path_buf), err))?;
}
FormatMode::Check => {}
FormatMode::Diff => {
use std::io::Write;
write!(
&mut stdout().lock(),
"{}",
source_kind.diff(formatted, path).unwrap()
)
.map_err(|err| FormatCommandError::Diff(path.map(Path::to_path_buf), err))?;
}
},
FormattedSource::Unchanged => {
// Write to stdout regardless of whether the source was formatted
if mode.is_write() {
let mut writer = stdout().lock();
source_kind
.write(&mut writer)
.map_err(|err| FormatCommandError::Write(path.map(Path::to_path_buf), err))?;
}
}
}
Ok(FormatResult::from(formatted))
}

View File

@@ -1,79 +0,0 @@
use std::num::NonZeroUsize;
use crate::ExitStatus;
use anyhow::Result;
use ruff_linter::logging::LogLevel;
use ruff_server::Server;
use tracing::{level_filters::LevelFilter, metadata::Level, subscriber::Interest, Metadata};
use tracing_subscriber::{
layer::{Context, Filter, SubscriberExt},
Layer, Registry,
};
use tracing_tree::time::Uptime;
pub(crate) fn run_server(
preview: bool,
worker_threads: NonZeroUsize,
log_level: LogLevel,
) -> Result<ExitStatus> {
if !preview {
tracing::error!("--preview needs to be provided as a command line argument while the server is still unstable.\nFor example: `ruff server --preview`");
return Ok(ExitStatus::Error);
}
let trace_level = if log_level == LogLevel::Verbose {
Level::TRACE
} else {
Level::DEBUG
};
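    // Log to stderr: the server communicates over stdout, so log output must
    // not interleave with protocol messages.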
let subscriber = Registry::default().with(
tracing_tree::HierarchicalLayer::default()
.with_indent_lines(true)
.with_indent_amount(2)
.with_bracketed_fields(true)
.with_targets(true)
.with_writer(|| Box::new(std::io::stderr()))
.with_timer(Uptime::default())
.with_filter(LoggingFilter { trace_level }),
);
tracing::subscriber::set_global_default(subscriber)?;
let server = Server::new(worker_threads)?;
server.run().map(|()| ExitStatus::Success)
}
struct LoggingFilter {
trace_level: Level,
}
impl LoggingFilter {
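    /// Events from Ruff's own crates are filtered at the configured trace
    /// level, while events from third-party crates are capped at `INFO`.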
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
let filter = if meta.target().starts_with("ruff") {
self.trace_level
} else {
Level::INFO
};
meta.level() <= &filter
}
}
impl<S> Filter<S> for LoggingFilter {
fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
self.is_enabled(meta)
}
fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
if self.is_enabled(meta) {
Interest::always()
} else {
Interest::never()
}
}
fn max_level_hint(&self) -> Option<LevelFilter> {
Some(LevelFilter::from_level(self.trace_level))
}
}

View File

@@ -1,21 +0,0 @@
use std::io::{self, BufWriter, Write};
use anyhow::Result;
use crate::args::HelpFormat;
/// Display version information
pub(crate) fn version(output_format: HelpFormat) -> Result<()> {
let mut stdout = BufWriter::new(io::stdout().lock());
let version_info = crate::version::version();
match output_format {
HelpFormat::Text => {
writeln!(stdout, "ruff {}", &version_info)?;
}
HelpFormat::Json => {
serde_json::to_writer_pretty(stdout, &version_info)?;
}
};
Ok(())
}

View File

@@ -1,524 +0,0 @@
#![cfg_attr(target_family = "wasm", allow(dead_code))]
use std::borrow::Cow;
use std::fs::File;
use std::io;
use std::io::Write;
use std::ops::{Add, AddAssign};
use std::path::Path;
use anyhow::{Context, Result};
use colored::Colorize;
use log::{debug, error, warn};
use rustc_hash::FxHashMap;
use ruff_diagnostics::Diagnostic;
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult, ParseSource};
use ruff_linter::logging::DisplayParseError;
use ruff_linter::message::Message;
use ruff_linter::pyproject_toml::lint_pyproject_toml;
use ruff_linter::registry::AsRule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::{fs, IOError, SyntaxError};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::imports::ImportMap;
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::Settings;
use crate::cache::{Cache, FileCacheKey, LintCacheData};
#[derive(Debug, Default, PartialEq)]
pub(crate) struct Diagnostics {
pub(crate) messages: Vec<Message>,
pub(crate) fixed: FixMap,
pub(crate) imports: ImportMap,
pub(crate) notebook_indexes: FxHashMap<String, NotebookIndex>,
}
impl Diagnostics {
pub(crate) fn new(
messages: Vec<Message>,
imports: ImportMap,
notebook_indexes: FxHashMap<String, NotebookIndex>,
) -> Self {
Self {
messages,
fixed: FixMap::default(),
imports,
notebook_indexes,
}
}
/// Generate [`Diagnostics`] based on a [`SourceError`].
pub(crate) fn from_source_error(
err: &SourceError,
path: Option<&Path>,
settings: &LinterSettings,
) -> Self {
let diagnostic = match err {
// IO errors.
SourceError::Io(_)
| SourceError::Notebook(NotebookError::Io(_) | NotebookError::Json(_)) => {
Diagnostic::new(
IOError {
message: err.to_string(),
},
TextRange::default(),
)
}
// Syntax errors.
SourceError::Notebook(
NotebookError::InvalidJson(_)
| NotebookError::InvalidSchema(_)
| NotebookError::InvalidFormat(_),
) => Diagnostic::new(
SyntaxError {
message: err.to_string(),
},
TextRange::default(),
),
};
if settings.rules.enabled(diagnostic.kind.rule()) {
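            // Attach the diagnostic to a dummy (empty) source file so it flows
            // through the normal message-reporting pipeline.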
let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
let dummy = SourceFileBuilder::new(name, "").finish();
Self::new(
vec![Message::from_diagnostic(
diagnostic,
dummy,
TextSize::default(),
)],
ImportMap::default(),
FxHashMap::default(),
)
} else {
match path {
Some(path) => {
warn!(
"{}{}{} {err}",
"Failed to lint ".bold(),
fs::relativize_path(path).bold(),
":".bold()
);
}
None => {
warn!("{}{} {err}", "Failed to lint".bold(), ":".bold());
}
}
Self::default()
}
}
}
impl Add for Diagnostics {
type Output = Diagnostics;
fn add(mut self, other: Self) -> Self::Output {
self += other;
self
}
}
impl AddAssign for Diagnostics {
fn add_assign(&mut self, other: Self) {
self.messages.extend(other.messages);
self.imports.extend(other.imports);
self.fixed += other.fixed;
self.notebook_indexes.extend(other.notebook_indexes);
}
}
/// A collection of fixes indexed by file path.
#[derive(Debug, Default, PartialEq)]
pub(crate) struct FixMap(FxHashMap<String, FixTable>);
impl FixMap {
/// Returns `true` if there are no fixes in the map.
pub(crate) fn is_empty(&self) -> bool {
self.0.is_empty()
}
/// Returns an iterator over the fixes in the map, along with the file path.
pub(crate) fn iter(&self) -> impl Iterator<Item = (&String, &FixTable)> {
self.0.iter()
}
/// Returns an iterator over the fixes in the map.
pub(crate) fn values(&self) -> impl Iterator<Item = &FixTable> {
self.0.values()
}
}
impl FromIterator<(String, FixTable)> for FixMap {
fn from_iter<T: IntoIterator<Item = (String, FixTable)>>(iter: T) -> Self {
Self(
iter.into_iter()
.filter(|(_, fixes)| !fixes.is_empty())
.collect(),
)
}
}
impl AddAssign for FixMap {
fn add_assign(&mut self, rhs: Self) {
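        // Merge the per-file fix tables, accumulating counts per rule and
        // skipping entries that contributed no fixes.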
for (filename, fixed) in rhs.0 {
if fixed.is_empty() {
continue;
}
let fixed_in_file = self.0.entry(filename).or_default();
for (rule, count) in fixed {
if count > 0 {
*fixed_in_file.entry(rule).or_default() += count;
}
}
}
}
}
/// Lint the source code at the given `Path`.
pub(crate) fn lint_path(
path: &Path,
package: Option<&Path>,
settings: &LinterSettings,
cache: Option<&Cache>,
noqa: flags::Noqa,
fix_mode: flags::FixMode,
unsafe_fixes: UnsafeFixes,
) -> Result<Diagnostics> {
// Check the cache.
let caching = match cache {
Some(cache) if noqa.into() => {
let relative_path = cache
.relative_path(path)
.expect("wrong package cache for file");
let cache_key = FileCacheKey::from_path(path).context("Failed to create cache key")?;
let cached_diagnostics = cache
.get(relative_path, &cache_key)
.and_then(|entry| entry.to_diagnostics(path));
if let Some(diagnostics) = cached_diagnostics {
                // `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
// and writing the diff to stdout, respectively). If a file has diagnostics, we
// need to avoid reading from and writing to the cache in these modes.
if match fix_mode {
flags::FixMode::Generate => true,
flags::FixMode::Apply | flags::FixMode::Diff => {
diagnostics.messages.is_empty() && diagnostics.fixed.is_empty()
}
} {
return Ok(diagnostics);
}
}
            // Stash the file metadata for later, so that when we update the cache it
            // reflects the pre-run information.
Some((cache, relative_path, cache_key))
}
_ => None,
};
debug!("Checking: {}", path.display());
let source_type = match settings.extension.get(path).map(PySourceType::from) {
Some(source_type) => source_type,
None => match SourceType::from(path) {
SourceType::Toml(TomlSourceType::Pyproject) => {
let messages = if settings
.rules
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_pyproject_toml())
{
let contents = match std::fs::read_to_string(path).map_err(SourceError::from) {
Ok(contents) => contents,
Err(err) => {
return Ok(Diagnostics::from_source_error(&err, Some(path), settings));
}
};
let source_file =
SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
lint_pyproject_toml(source_file, settings)
} else {
vec![]
};
return Ok(Diagnostics {
messages,
..Diagnostics::default()
});
}
SourceType::Toml(_) => return Ok(Diagnostics::default()),
SourceType::Python(source_type) => source_type,
},
};
// Extract the sources from the file.
let source_kind = match SourceKind::from_path(path, source_type) {
Ok(Some(source_kind)) => source_kind,
Ok(None) => return Ok(Diagnostics::default()),
Err(err) => {
return Ok(Diagnostics::from_source_error(&err, Some(path), settings));
}
};
// Lint the file.
let (
LinterResult {
data: (messages, imports),
error: parse_error,
},
transformed,
fixed,
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
if let Ok(FixerResult {
result,
transformed,
fixed,
}) = lint_fix(
path,
package,
noqa,
unsafe_fixes,
settings,
&source_kind,
source_type,
) {
if !fixed.is_empty() {
match fix_mode {
flags::FixMode::Apply => transformed.write(&mut File::create(path)?)?,
flags::FixMode::Diff => {
write!(
&mut io::stdout().lock(),
"{}",
source_kind.diff(&transformed, Some(path)).unwrap()
)?;
}
flags::FixMode::Generate => {}
}
}
let transformed = if let Cow::Owned(transformed) = transformed {
transformed
} else {
source_kind
};
(result, transformed, fixed)
} else {
// If we fail to fix, lint the original source code.
let result = lint_only(
path,
package,
settings,
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let transformed = source_kind;
let fixed = FxHashMap::default();
(result, transformed, fixed)
}
} else {
let result = lint_only(
path,
package,
settings,
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let transformed = source_kind;
let fixed = FxHashMap::default();
(result, transformed, fixed)
};
let imports = imports.unwrap_or_default();
if let Some((cache, relative_path, key)) = caching {
// We don't cache parsing errors.
if parse_error.is_none() {
// `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
// and writing the diff to stdout, respectively). If a file has diagnostics, we
// need to avoid reading from and writing to the cache in these modes.
if match fix_mode {
flags::FixMode::Generate => true,
flags::FixMode::Apply | flags::FixMode::Diff => {
messages.is_empty() && fixed.is_empty()
}
} {
cache.update_lint(
relative_path.to_owned(),
&key,
LintCacheData::from_messages(
&messages,
imports.clone(),
transformed.as_ipy_notebook().map(Notebook::index).cloned(),
),
);
}
}
}
if let Some(error) = parse_error {
error!(
"{}",
DisplayParseError::from_source_kind(error, Some(path.to_path_buf()), &transformed)
);
}
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook.into_index())])
} else {
FxHashMap::default()
};
Ok(Diagnostics {
messages,
fixed: FixMap::from_iter([(fs::relativize_path(path), fixed)]),
imports,
notebook_indexes,
})
}
/// Generate `Diagnostic`s from source code content derived from
/// stdin.
pub(crate) fn lint_stdin(
path: Option<&Path>,
package: Option<&Path>,
contents: String,
settings: &Settings,
noqa: flags::Noqa,
fix_mode: flags::FixMode,
) -> Result<Diagnostics> {
// TODO(charlie): Support `pyproject.toml`.
let source_type = match path.and_then(|path| settings.linter.extension.get(path)) {
None => match path.map(SourceType::from).unwrap_or_default() {
SourceType::Python(source_type) => source_type,
SourceType::Toml(_) => {
return Ok(Diagnostics::default());
}
},
Some(language) => PySourceType::from(language),
};
// Extract the sources from the file.
let source_kind = match SourceKind::from_source_code(contents, source_type) {
Ok(Some(source_kind)) => source_kind,
Ok(None) => return Ok(Diagnostics::default()),
Err(err) => {
return Ok(Diagnostics::from_source_error(&err, path, &settings.linter));
}
};
// Lint the inputs.
let (
LinterResult {
data: (messages, imports),
error: parse_error,
},
transformed,
fixed,
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
if let Ok(FixerResult {
result,
transformed,
fixed,
}) = lint_fix(
path.unwrap_or_else(|| Path::new("-")),
package,
noqa,
settings.unsafe_fixes,
&settings.linter,
&source_kind,
source_type,
) {
match fix_mode {
flags::FixMode::Apply => {
// Write the contents to stdout, regardless of whether any errors were fixed.
transformed.write(&mut io::stdout().lock())?;
}
flags::FixMode::Diff => {
// But only write a diff if it's non-empty.
if !fixed.is_empty() {
write!(
&mut io::stdout().lock(),
"{}",
source_kind.diff(&transformed, path).unwrap()
)?;
}
}
flags::FixMode::Generate => {}
}
let transformed = if let Cow::Owned(transformed) = transformed {
transformed
} else {
source_kind
};
(result, transformed, fixed)
} else {
// If we fail to fix, lint the original source code.
let result = lint_only(
path.unwrap_or_else(|| Path::new("-")),
package,
&settings.linter,
noqa,
&source_kind,
source_type,
ParseSource::None,
);
// Write the contents to stdout anyway.
if fix_mode.is_apply() {
source_kind.write(&mut io::stdout().lock())?;
}
let transformed = source_kind;
let fixed = FxHashMap::default();
(result, transformed, fixed)
}
} else {
let result = lint_only(
path.unwrap_or_else(|| Path::new("-")),
package,
&settings.linter,
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let transformed = source_kind;
let fixed = FxHashMap::default();
(result, transformed, fixed)
};
let imports = imports.unwrap_or_default();
if let Some(error) = parse_error {
error!(
"{}",
DisplayParseError::from_source_kind(error, path.map(Path::to_path_buf), &transformed)
);
}
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
FxHashMap::from_iter([(
path.map_or_else(|| "-".into(), |path| path.to_string_lossy().to_string()),
notebook.into_index(),
)])
} else {
FxHashMap::default()
};
Ok(Diagnostics {
messages,
fixed: FixMap::from_iter([(
fs::relativize_path(path.unwrap_or_else(|| Path::new("-"))),
fixed,
)]),
imports,
notebook_indexes,
})
}

View File

@@ -1,5 +0,0 @@
---
source: crates/ruff/src/version.rs
expression: version
---
0.0.0

View File

@@ -1,5 +0,0 @@
---
source: crates/ruff/src/version.rs
expression: version
---
0.0.0 (53b0f5d92 2023-10-19)

View File

@@ -1,5 +0,0 @@
---
source: crates/ruff/src/version.rs
expression: version
---
0.0.0+24 (53b0f5d92 2023-10-19)

View File

@@ -1,14 +0,0 @@
---
source: crates/ruff/src/version.rs
expression: version
---
{
"version": "0.0.0",
"commit_info": {
"short_commit_hash": "53b0f5d92",
"commit_hash": "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7",
"commit_date": "2023-10-19",
"last_tag": "v0.0.1",
"commits_since_last_tag": 0
}
}

View File

@@ -1,17 +0,0 @@
use std::io;
use std::io::{Read, Write};
/// Read a string from `stdin`.
pub(crate) fn read_from_stdin() -> Result<String, io::Error> {
let mut buffer = String::new();
io::stdin().lock().read_to_string(&mut buffer)?;
Ok(buffer)
}
/// Read bytes from `stdin` and write them to `stdout`.
pub(crate) fn parrot_stdin() -> Result<(), io::Error> {
let mut buffer = String::new();
io::stdin().lock().read_to_string(&mut buffer)?;
io::stdout().write_all(buffer.as_bytes())?;
Ok(())
}

View File

@@ -1,130 +0,0 @@
//! Code for representing Ruff's release version number.
use serde::Serialize;
use std::fmt;
/// Information about the git repository where Ruff was built from.
#[derive(Serialize)]
pub(crate) struct CommitInfo {
short_commit_hash: String,
commit_hash: String,
commit_date: String,
last_tag: Option<String>,
commits_since_last_tag: u32,
}
/// Ruff's version.
#[derive(Serialize)]
pub(crate) struct VersionInfo {
/// Ruff's version, such as "0.5.1"
version: String,
/// Information about the git commit we may have been built from.
///
/// `None` if not built from a git repo or if retrieval failed.
commit_info: Option<CommitInfo>,
}
impl fmt::Display for VersionInfo {
/// Formatted version information: "<version>[+<commits>] (<commit> <date>)"
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.version)?;
if let Some(ref ci) = self.commit_info {
if ci.commits_since_last_tag > 0 {
write!(f, "+{}", ci.commits_since_last_tag)?;
}
write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
}
Ok(())
}
}
/// Returns information about Ruff's version.
pub(crate) fn version() -> VersionInfo {
// Environment variables are only read at compile-time
macro_rules! option_env_str {
($name:expr) => {
option_env!($name).map(|s| s.to_string())
};
}
// This version is pulled from Cargo.toml and set by Cargo
let version = option_env_str!("CARGO_PKG_VERSION").unwrap();
// Commit info is pulled from git and set by `build.rs`
let commit_info = option_env_str!("RUFF_COMMIT_HASH").map(|commit_hash| CommitInfo {
short_commit_hash: option_env_str!("RUFF_COMMIT_SHORT_HASH").unwrap(),
commit_hash,
commit_date: option_env_str!("RUFF_COMMIT_DATE").unwrap(),
last_tag: option_env_str!("RUFF_LAST_TAG"),
commits_since_last_tag: option_env_str!("RUFF_LAST_TAG_DISTANCE")
.as_deref()
.map_or(0, |value| value.parse::<u32>().unwrap_or(0)),
});
VersionInfo {
version,
commit_info,
}
}
#[cfg(test)]
mod tests {
use insta::{assert_json_snapshot, assert_snapshot};
use super::{CommitInfo, VersionInfo};
#[test]
fn version_formatting() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: None,
};
assert_snapshot!(version);
}
#[test]
fn version_formatting_with_commit_info() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: Some(CommitInfo {
short_commit_hash: "53b0f5d92".to_string(),
commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
last_tag: Some("v0.0.1".to_string()),
commit_date: "2023-10-19".to_string(),
commits_since_last_tag: 0,
}),
};
assert_snapshot!(version);
}
#[test]
fn version_formatting_with_commits_since_last_tag() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: Some(CommitInfo {
short_commit_hash: "53b0f5d92".to_string(),
commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
last_tag: Some("v0.0.1".to_string()),
commit_date: "2023-10-19".to_string(),
commits_since_last_tag: 24,
}),
};
assert_snapshot!(version);
}
#[test]
fn version_serializable() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: Some(CommitInfo {
short_commit_hash: "53b0f5d92".to_string(),
commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
last_tag: Some("v0.0.1".to_string()),
commit_date: "2023-10-19".to_string(),
commits_since_last_tag: 0,
}),
};
assert_json_snapshot!(version);
}
}

View File

@@ -1,150 +0,0 @@
//! A test suite that ensures deprecated command line options have appropriate warnings / behaviors
use ruff_linter::settings::types::SerializationFormat;
use std::process::Command;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
const BIN_NAME: &str = "ruff";
const STDIN: &str = "l = 1";
fn ruff_check(show_source: Option<bool>, output_format: Option<String>) -> Command {
let mut cmd = Command::new(get_cargo_bin(BIN_NAME));
let output_format = output_format.unwrap_or(format!("{}", SerializationFormat::default(false)));
cmd.arg("check")
.arg("--output-format")
.arg(output_format)
.arg("--no-cache");
match show_source {
Some(true) => {
cmd.arg("--show-source");
}
Some(false) => {
cmd.arg("--no-show-source");
}
None => {}
}
cmd.arg("-");
cmd
}
#[test]
fn ensure_show_source_is_deprecated() {
assert_cmd_snapshot!(ruff_check(Some(true), None).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: The `--show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
"###);
}
#[test]
fn ensure_no_show_source_is_deprecated() {
assert_cmd_snapshot!(ruff_check(Some(false), None).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
"###);
}
#[test]
fn ensure_output_format_is_deprecated() {
assert_cmd_snapshot!(ruff_check(None, Some("text".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: `--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.
"###);
}
#[test]
fn ensure_output_format_overrides_show_source() {
assert_cmd_snapshot!(ruff_check(Some(true), Some("concise".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: The `--show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
"###);
}
#[test]
fn ensure_full_output_format_overrides_no_show_source() {
assert_cmd_snapshot!(ruff_check(Some(false), Some("full".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
|
1 | l = 1
| ^ E741
|
Found 1 error.
----- stderr -----
warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=full`.
"###);
}
#[test]
fn ensure_output_format_uses_concise_over_no_show_source() {
assert_cmd_snapshot!(ruff_check(Some(false), Some("concise".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
"###);
}
#[test]
fn ensure_deprecated_output_format_overrides_show_source() {
assert_cmd_snapshot!(ruff_check(Some(true), Some("text".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: The `--show-source` argument is deprecated and has been ignored in favor of `--output-format=text`.
warning: `--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.
"###);
}
#[test]
fn ensure_deprecated_output_format_overrides_no_show_source() {
assert_cmd_snapshot!(ruff_check(Some(false), Some("text".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.
----- stderr -----
warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=text`.
warning: `--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.
"###);
}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,103 +0,0 @@
#![cfg(not(target_family = "wasm"))]
use std::path::Path;
use std::process::Command;
use std::str;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
const BIN_NAME: &str = "ruff";
#[cfg(not(target_os = "windows"))]
const TEST_FILTERS: &[(&str, &str)] = &[(".*/resources/test/fixtures/", "[BASEPATH]/")];
#[cfg(target_os = "windows")]
const TEST_FILTERS: &[(&str, &str)] = &[
(r".*\\resources\\test\\fixtures\\", "[BASEPATH]\\"),
(r"\\", "/"),
];
#[test]
fn check_project_include_defaults() {
// Defaults to checking the current working directory
//
// The test directory includes:
// - A pyproject.toml which specifies an include
// - A nested pyproject.toml which has a Ruff section
//
    // The nested project should be checked in full instead of respecting the parent's includes
insta::with_settings!({
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
success: true
exit_code: 0
----- stdout -----
[BASEPATH]/include-test/a.py
[BASEPATH]/include-test/nested-project/e.py
[BASEPATH]/include-test/nested-project/pyproject.toml
[BASEPATH]/include-test/subdirectory/c.py
----- stderr -----
warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `nested-project/pyproject.toml`:
- 'select' -> 'lint.select'
"###);
});
}
#[test]
fn check_project_respects_direct_paths() {
    // Given a direct path that is not covered by the project's `include`, it should still be checked
insta::with_settings!({
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files", "b.py"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
success: true
exit_code: 0
----- stdout -----
[BASEPATH]/include-test/b.py
----- stderr -----
"###);
});
}
#[test]
fn check_project_respects_subdirectory_includes() {
// Given a direct path to a subdirectory, the include should be respected
insta::with_settings!({
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files", "subdirectory"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
success: true
exit_code: 0
----- stdout -----
[BASEPATH]/include-test/subdirectory/c.py
----- stderr -----
"###);
});
}
#[test]
fn check_project_from_project_subdirectory_respects_includes() {
    // When run from a project subdirectory, the include specified in the parent directory should be respected
insta::with_settings!({
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test/subdirectory")), @r###"
success: true
exit_code: 0
----- stdout -----
[BASEPATH]/include-test/subdirectory/c.py
----- stderr -----
"###);
});
}

View File

@@ -1,33 +0,0 @@
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use std::path::Path;
use std::process::Command;
const BIN_NAME: &str = "ruff";
#[test]
fn display_default_settings() {
// Navigate from the crate directory to the workspace root.
let base_path = Path::new(env!("CARGO_MANIFEST_DIR"))
.parent()
.unwrap()
.parent()
.unwrap();
let base_path = base_path.to_string_lossy();
// Escape the backslashes for the regex.
let base_path = regex::escape(&base_path);
#[cfg(not(target_os = "windows"))]
let test_filters = &[(base_path.as_ref(), "[BASEPATH]")];
#[cfg(target_os = "windows")]
let test_filters = &[
(base_path.as_ref(), "[BASEPATH]"),
(r#"\\+(\w\w|\s|\.|")"#, "/$1"),
];
insta::with_settings!({ filters => test_filters.to_vec() }, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-settings", "unformatted.py"]).current_dir(Path::new("./resources/test/fixtures")));
});
}

View File

@@ -1,384 +0,0 @@
---
source: crates/ruff/tests/show_settings.rs
info:
program: ruff
args:
- check
- "--show-settings"
- unformatted.py
---
success: true
exit_code: 0
----- stdout -----
Resolved settings for: "[BASEPATH]/crates/ruff/resources/test/fixtures/unformatted.py"
Settings path: "[BASEPATH]/pyproject.toml"
# General Settings
cache_dir = "[BASEPATH]/.ruff_cache"
fix = false
fix_only = false
output_format = concise
show_fixes = false
unsafe_fixes = hint
# File Resolver Settings
file_resolver.exclude = [
".bzr",
".direnv",
".eggs",
".git",
".git-rewrite",
".hg",
".ipynb_checkpoints",
".mypy_cache",
".nox",
".pants.d",
".pyenv",
".pytest_cache",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
".vscode",
"__pypackages__",
"_build",
"buck-out",
"dist",
"node_modules",
"site-packages",
"venv",
]
file_resolver.extend_exclude = [
"crates/ruff/resources/",
"crates/ruff_linter/resources/",
"crates/ruff_python_formatter/resources/",
]
file_resolver.force_exclude = false
file_resolver.include = [
"*.py",
"*.pyi",
"**/pyproject.toml",
]
file_resolver.extend_include = []
file_resolver.respect_gitignore = true
file_resolver.project_root = "[BASEPATH]"
# Linter Settings
linter.exclude = []
linter.project_root = "[BASEPATH]"
linter.rules.enabled = [
MultipleImportsOnOneLine,
ModuleImportNotAtTopOfFile,
MultipleStatementsOnOneLineColon,
MultipleStatementsOnOneLineSemicolon,
UselessSemicolon,
NoneComparison,
TrueFalseComparison,
NotInTest,
NotIsTest,
TypeComparison,
BareExcept,
LambdaAssignment,
AmbiguousVariableName,
AmbiguousClassName,
AmbiguousFunctionName,
IOError,
SyntaxError,
UnusedImport,
ImportShadowedByLoopVar,
UndefinedLocalWithImportStar,
LateFutureImport,
UndefinedLocalWithImportStarUsage,
UndefinedLocalWithNestedImportStarUsage,
FutureFeatureNotDefined,
PercentFormatInvalidFormat,
PercentFormatExpectedMapping,
PercentFormatExpectedSequence,
PercentFormatExtraNamedArguments,
PercentFormatMissingArgument,
PercentFormatMixedPositionalAndNamed,
PercentFormatPositionalCountMismatch,
PercentFormatStarRequiresSequence,
PercentFormatUnsupportedFormatCharacter,
StringDotFormatInvalidFormat,
StringDotFormatExtraNamedArguments,
StringDotFormatExtraPositionalArguments,
StringDotFormatMissingArguments,
StringDotFormatMixingAutomatic,
FStringMissingPlaceholders,
MultiValueRepeatedKeyLiteral,
MultiValueRepeatedKeyVariable,
ExpressionsInStarAssignment,
MultipleStarredExpressions,
AssertTuple,
IsLiteral,
InvalidPrintSyntax,
IfTuple,
BreakOutsideLoop,
ContinueOutsideLoop,
YieldOutsideFunction,
ReturnOutsideFunction,
DefaultExceptNotLast,
ForwardAnnotationSyntaxError,
RedefinedWhileUnused,
UndefinedName,
UndefinedExport,
UndefinedLocal,
UnusedVariable,
UnusedAnnotation,
RaiseNotImplemented,
]
linter.rules.should_fix = [
MultipleImportsOnOneLine,
ModuleImportNotAtTopOfFile,
MultipleStatementsOnOneLineColon,
MultipleStatementsOnOneLineSemicolon,
UselessSemicolon,
NoneComparison,
TrueFalseComparison,
NotInTest,
NotIsTest,
TypeComparison,
BareExcept,
LambdaAssignment,
AmbiguousVariableName,
AmbiguousClassName,
AmbiguousFunctionName,
IOError,
SyntaxError,
UnusedImport,
ImportShadowedByLoopVar,
UndefinedLocalWithImportStar,
LateFutureImport,
UndefinedLocalWithImportStarUsage,
UndefinedLocalWithNestedImportStarUsage,
FutureFeatureNotDefined,
PercentFormatInvalidFormat,
PercentFormatExpectedMapping,
PercentFormatExpectedSequence,
PercentFormatExtraNamedArguments,
PercentFormatMissingArgument,
PercentFormatMixedPositionalAndNamed,
PercentFormatPositionalCountMismatch,
PercentFormatStarRequiresSequence,
PercentFormatUnsupportedFormatCharacter,
StringDotFormatInvalidFormat,
StringDotFormatExtraNamedArguments,
StringDotFormatExtraPositionalArguments,
StringDotFormatMissingArguments,
StringDotFormatMixingAutomatic,
FStringMissingPlaceholders,
MultiValueRepeatedKeyLiteral,
MultiValueRepeatedKeyVariable,
ExpressionsInStarAssignment,
MultipleStarredExpressions,
AssertTuple,
IsLiteral,
InvalidPrintSyntax,
IfTuple,
BreakOutsideLoop,
ContinueOutsideLoop,
YieldOutsideFunction,
ReturnOutsideFunction,
DefaultExceptNotLast,
ForwardAnnotationSyntaxError,
RedefinedWhileUnused,
UndefinedName,
UndefinedExport,
UndefinedLocal,
UnusedVariable,
UnusedAnnotation,
RaiseNotImplemented,
]
linter.per_file_ignores = {}
linter.safety_table.forced_safe = []
linter.safety_table.forced_unsafe = []
linter.target_version = Py37
linter.preview = disabled
linter.explicit_preview_rules = false
linter.extension.mapping = {}
linter.allowed_confusables = []
linter.builtins = []
linter.dummy_variable_rgx = ^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$
linter.external = []
linter.ignore_init_module_imports = true
linter.logger_objects = []
linter.namespace_packages = []
linter.src = [
"[BASEPATH]",
]
linter.tab_size = 4
linter.line_length = 88
linter.task_tags = [
TODO,
FIXME,
XXX,
]
linter.typing_modules = []
# Linter Plugins
linter.flake8_annotations.mypy_init_return = false
linter.flake8_annotations.suppress_dummy_args = false
linter.flake8_annotations.suppress_none_returning = false
linter.flake8_annotations.allow_star_arg_any = false
linter.flake8_annotations.ignore_fully_untyped = false
linter.flake8_bandit.hardcoded_tmp_directory = [
/tmp,
/var/tmp,
/dev/shm,
]
linter.flake8_bandit.check_typed_exception = false
linter.flake8_bugbear.extend_immutable_calls = []
linter.flake8_builtins.builtins_ignorelist = []
linter.flake8_comprehensions.allow_dict_calls_with_keyword_arguments = false
linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|,\s)\d{4})*
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
_,
gettext,
ngettext,
]
linter.flake8_implicit_str_concat.allow_multiline = true
linter.flake8_import_conventions.aliases = {
altair = alt,
holoviews = hv,
matplotlib = mpl,
matplotlib.pyplot = plt,
networkx = nx,
numpy = np,
pandas = pd,
panel = pn,
plotly.express = px,
polars = pl,
pyarrow = pa,
seaborn = sns,
tensorflow = tf,
tkinter = tk,
}
linter.flake8_import_conventions.banned_aliases = {}
linter.flake8_import_conventions.banned_from = []
linter.flake8_pytest_style.fixture_parentheses = true
linter.flake8_pytest_style.parametrize_names_type = tuple
linter.flake8_pytest_style.parametrize_values_type = list
linter.flake8_pytest_style.parametrize_values_row_type = tuple
linter.flake8_pytest_style.raises_require_match_for = [
BaseException,
Exception,
ValueError,
OSError,
IOError,
EnvironmentError,
socket.error,
]
linter.flake8_pytest_style.raises_extend_require_match_for = []
linter.flake8_pytest_style.mark_parentheses = true
linter.flake8_quotes.inline_quotes = double
linter.flake8_quotes.multiline_quotes = double
linter.flake8_quotes.docstring_quotes = double
linter.flake8_quotes.avoid_escape = true
linter.flake8_self.ignore_names = [
_make,
_asdict,
_replace,
_fields,
_field_defaults,
_name_,
_value_,
]
linter.flake8_tidy_imports.ban_relative_imports = "parents"
linter.flake8_tidy_imports.banned_api = {}
linter.flake8_tidy_imports.banned_module_level_imports = []
linter.flake8_type_checking.strict = false
linter.flake8_type_checking.exempt_modules = [
typing,
typing_extensions,
]
linter.flake8_type_checking.runtime_required_base_classes = []
linter.flake8_type_checking.runtime_required_decorators = []
linter.flake8_type_checking.quote_annotations = false
linter.flake8_unused_arguments.ignore_variadic_names = false
linter.isort.required_imports = []
linter.isort.combine_as_imports = false
linter.isort.force_single_line = false
linter.isort.force_sort_within_sections = false
linter.isort.detect_same_package = true
linter.isort.case_sensitive = false
linter.isort.force_wrap_aliases = false
linter.isort.force_to_top = []
linter.isort.known_modules = {}
linter.isort.order_by_type = true
linter.isort.relative_imports_order = furthest_to_closest
linter.isort.single_line_exclusions = []
linter.isort.split_on_trailing_comma = true
linter.isort.classes = []
linter.isort.constants = []
linter.isort.variables = []
linter.isort.no_lines_before = []
linter.isort.lines_after_imports = -1
linter.isort.lines_between_types = 0
linter.isort.forced_separate = []
linter.isort.section_order = [
known { type = future },
known { type = standard_library },
known { type = third_party },
known { type = first_party },
known { type = local_folder },
]
linter.isort.default_section = known { type = third_party }
linter.isort.no_sections = false
linter.isort.from_first = false
linter.isort.length_sort = false
linter.isort.length_sort_straight = false
linter.mccabe.max_complexity = 10
linter.pep8_naming.ignore_names = [
setUp,
tearDown,
setUpClass,
tearDownClass,
setUpModule,
tearDownModule,
asyncSetUp,
asyncTearDown,
setUpTestData,
failureException,
longMessage,
maxDiff,
]
linter.pep8_naming.classmethod_decorators = []
linter.pep8_naming.staticmethod_decorators = []
linter.pycodestyle.max_line_length = 88
linter.pycodestyle.max_doc_length = none
linter.pycodestyle.ignore_overlong_task_comments = false
linter.pyflakes.extend_generics = []
linter.pylint.allow_magic_value_types = [
str,
bytes,
]
linter.pylint.allow_dunder_method_names = []
linter.pylint.max_args = 5
linter.pylint.max_positional_args = 5
linter.pylint.max_returns = 6
linter.pylint.max_bool_expr = 5
linter.pylint.max_branches = 12
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pyupgrade.keep_runtime_typing = false
# Formatter Settings
formatter.exclude = []
formatter.target_version = Py37
formatter.preview = disabled
formatter.line_width = 88
formatter.line_ending = auto
formatter.indent_style = space
formatter.indent_width = 4
formatter.quote_style = double
formatter.magic_trailing_comma = respect
formatter.docstring_code_format = disabled
formatter.docstring_code_line_width = dynamic
----- stderr -----

View File

@@ -1,105 +0,0 @@
//! Tests for the --version command
use std::fs;
use std::process::Command;
use anyhow::Result;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use tempfile::TempDir;
const BIN_NAME: &str = "ruff";
const VERSION_FILTER: [(&str, &str); 1] = [(
r"\d+\.\d+\.\d+(\+\d+)?( \(\w{9} \d\d\d\d-\d\d-\d\d\))?",
"[VERSION]",
)];
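(For reference, this filter rewrites strings such as `0.1.0+19 (1234abcde 2023-09-21)`, i.e. a version with an optional local build number and an optional nine-character commit hash plus date, to the stable `[VERSION]` placeholder so the snapshots below don't churn on every release; the concrete string is illustrative.)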
#[test]
fn version_basics() {
insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME)).arg("version"), @r###"
success: true
exit_code: 0
----- stdout -----
ruff [VERSION]
----- stderr -----
"###
);
});
}
/// `--config` is a global option,
/// so it may be passed to subcommands such as `version`
/// -- the flag is simply ignored.
#[test]
fn config_option_allowed_but_ignored() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_dot_toml = tempdir.path().join("ruff.toml");
fs::File::create(&ruff_dot_toml)?;
insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.arg("version")
.arg("--config")
.arg(&ruff_dot_toml)
.args(["--config", "lint.isort.extra-standard-library = ['foo', 'bar']"]), @r###"
success: true
exit_code: 0
----- stdout -----
ruff [VERSION]
----- stderr -----
"###
);
});
Ok(())
}
#[test]
fn config_option_ignored_but_validated() {
insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.arg("version")
.args(["--config", "foo = bar"]), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value 'foo = bar' for '--config <CONFIG_OPTION>'
tip: A `--config` flag must either be a path to a `.toml` configuration file
or a TOML `<KEY> = <VALUE>` pair overriding a specific configuration
option
The supplied argument is not valid TOML:
TOML parse error at line 1, column 7
|
1 | foo = bar
| ^
invalid string
expected `"`, `'`
For more information, try '--help'.
"###
);
});
}
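For contrast with the parse failure above, a bare key/value pair that is valid TOML is accepted. A quick illustration using the `toml` crate (an editorial sketch, not part of the test suite):

    assert!(toml::from_str::<toml::Value>("foo = bar").is_err()); // bare word: not a TOML value
    assert!(toml::from_str::<toml::Value>(r#"foo = "bar""#).is_ok()); // quoted string: parses fine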
/// `--isolated` is also a global option, and is likewise accepted (and ignored) by subcommands such as `version`.
#[test]
fn isolated_option_allowed() {
insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME)).arg("version").arg("--isolated"), @r###"
success: true
exit_code: 0
----- stdout -----
ruff [VERSION]
----- stderr -----
"###
);
});
}

View File

@@ -13,7 +13,6 @@ license = { workspace = true }
[lib]
bench = false
doctest = false
[[bench]]
name = "linter"
@@ -32,29 +31,26 @@ name = "formatter"
harness = false
[dependencies]
once_cell = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
url = { workspace = true }
ureq = { workspace = true }
criterion = { workspace = true, default-features = false }
codspeed-criterion-compat = { workspace = true, default-features = false, optional = true}
once_cell.workspace = true
serde.workspace = true
serde_json.workspace = true
url = "2.3.1"
ureq = "2.6.2"
criterion = { version = "0.5.1", default-features = false }
codspeed-criterion-compat = { version="2.2.0", default-features = false, optional = true}
[dev-dependencies]
ruff_linter = { path = "../ruff_linter" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_linter.path = "../ruff_linter"
ruff_python_ast.path = "../ruff_python_ast"
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_python_index = { path = "../ruff_python_index" }
ruff_python_parser = { path = "../ruff_python_parser" }
[lints]
workspace = true
[features]
codspeed = ["codspeed-criterion-compat"]
[target.'cfg(target_os = "windows")'.dev-dependencies]
mimalloc = { workspace = true }
mimalloc = "0.1.39"
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dev-dependencies]
tikv-jemallocator = { workspace = true }
tikv-jemallocator = "0.5.0"

View File

@@ -1,16 +1,5 @@
# Ruff Benchmarks
The `ruff_benchmark` crate benchmarks the linter and the formatter on individual files:
```shell
# Run once on the "baseline".
cargo bench -p ruff_benchmark -- --save-baseline=main
# Compare against the "baseline".
cargo bench -p ruff_benchmark -- --baseline=main
# Run the lexer benchmarks.
cargo bench -p ruff_benchmark lexer -- --baseline=main
```
The `ruff_benchmark` crate benchmarks the linter and the formatter on individual files.
See [CONTRIBUTING.md](../../CONTRIBUTING.md) on how to use these benchmarks.

View File

@@ -4,10 +4,10 @@ use ruff_benchmark::criterion::{
criterion_group, criterion_main, BenchmarkId, Criterion, Throughput,
};
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_python_formatter::{format_module_ast, PreviewMode, PyFormatOptions};
use ruff_python_formatter::{format_module_ast, PyFormatOptions};
use ruff_python_index::CommentRangesBuilder;
use ruff_python_parser::lexer::lex;
use ruff_python_parser::{allocate_tokens_vec, parse_tokens, Mode};
use ruff_python_parser::{parse_tokens, Mode};
#[cfg(target_os = "windows")]
#[global_allocator]
@@ -52,7 +52,7 @@ fn benchmark_formatter(criterion: &mut Criterion) {
BenchmarkId::from_parameter(case.name()),
&case,
|b, case| {
let mut tokens = allocate_tokens_vec(case.code());
let mut tokens = Vec::new();
let mut comment_ranges = CommentRangesBuilder::default();
for result in lex(case.code(), Mode::Module) {
@@ -65,12 +65,11 @@ fn benchmark_formatter(criterion: &mut Criterion) {
let comment_ranges = comment_ranges.finish();
// Parse the AST.
let module = parse_tokens(tokens, case.code(), Mode::Module)
let module = parse_tokens(tokens, Mode::Module, "<filename>")
.expect("Input to be a valid python program");
b.iter(|| {
let options = PyFormatOptions::from_extension(Path::new(case.name()))
.with_preview(PreviewMode::Enabled);
let options = PyFormatOptions::from_extension(Path::new(case.name()));
let formatted =
format_module_ast(&module, &comment_ranges, case.code(), options)
.expect("Formatting to succeed");

View File

@@ -2,15 +2,12 @@ use ruff_benchmark::criterion::{
criterion_group, criterion_main, BenchmarkGroup, BenchmarkId, Criterion, Throughput,
};
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_linter::linter::{lint_only, ParseSource};
use ruff_linter::rule_selector::PreviewOptions;
use ruff_linter::linter::lint_only;
use ruff_linter::settings::rule_table::RuleTable;
use ruff_linter::settings::types::PreviewMode;
use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::SourceKind;
use ruff_linter::{registry::Rule, RuleSelector};
use ruff_python_ast::PySourceType;
use ruff_python_parser::{lexer, parse_program_tokens, Mode};
#[cfg(target_os = "windows")]
#[global_allocator]
@@ -54,12 +51,7 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
BenchmarkId::from_parameter(case.name()),
&case,
|b, case| {
// Tokenize the source.
let tokens: Vec<_> = lexer::lex(case.code(), Mode::Module).collect();
// Parse the source.
let ast = parse_program_tokens(tokens.clone(), case.code(), false).unwrap();
let kind = SourceKind::Python(case.code().to_string());
b.iter(|| {
let path = case.path();
let result = lint_only(
@@ -67,12 +59,8 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
None,
settings,
flags::Noqa::Enabled,
&SourceKind::Python(case.code().to_string()),
&kind,
PySourceType::from(path.as_path()),
ParseSource::Precomputed {
tokens: &tokens,
ast: &ast,
},
);
// Assert that file contains no parse errors
@@ -90,21 +78,12 @@ fn benchmark_default_rules(criterion: &mut Criterion) {
benchmark_linter(group, &LinterSettings::default());
}
/// Disables IO-based rules because they are a source of flakiness
fn disable_io_rules(rules: &mut RuleTable) {
fn benchmark_all_rules(criterion: &mut Criterion) {
let mut rules: RuleTable = RuleSelector::All.all_rules().collect();
// Disable IO-based rules because they are a source of flakiness
rules.disable(Rule::ShebangMissingExecutableFile);
rules.disable(Rule::ShebangNotExecutable);
}
fn benchmark_all_rules(criterion: &mut Criterion) {
let mut rules: RuleTable = RuleSelector::All
.rules(&PreviewOptions {
mode: PreviewMode::Disabled,
require_explicit: false,
})
.collect();
disable_io_rules(&mut rules);
let settings = LinterSettings {
rules,
@@ -115,22 +94,6 @@ fn benchmark_all_rules(criterion: &mut Criterion) {
benchmark_linter(group, &settings);
}
fn benchmark_preview_rules(criterion: &mut Criterion) {
let mut rules: RuleTable = RuleSelector::All.all_rules().collect();
disable_io_rules(&mut rules);
let settings = LinterSettings {
rules,
preview: PreviewMode::Enabled,
..LinterSettings::default()
};
let group = criterion.benchmark_group("linter/all-with-preview-rules");
benchmark_linter(group, &settings);
}
criterion_group!(default_rules, benchmark_default_rules);
criterion_group!(all_rules, benchmark_all_rules);
criterion_group!(preview_rules, benchmark_preview_rules);
criterion_main!(default_rules, all_rules, preview_rules);
criterion_main!(default_rules, all_rules);

View File

@@ -60,7 +60,7 @@ fn benchmark_parser(criterion: &mut Criterion<WallTime>) {
&case,
|b, case| {
b.iter(|| {
let parsed = parse_suite(case.code()).unwrap();
let parsed = parse_suite(case.code(), case.name()).unwrap();
let mut visitor = CountVisitor { count: 0 };
visitor.visit_body(&parsed);

View File

@@ -16,10 +16,7 @@ glob = { workspace = true }
globset = { workspace = true }
regex = { workspace = true }
filetime = { workspace = true }
seahash = { workspace = true }
seahash = "4.1.0"
[dev-dependencies]
ruff_macros = { path = "../ruff_macros" }
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.3.7"
name = "ruff_cli"
version = "0.0.290"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -10,70 +10,70 @@ documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }
readme = "../../README.md"
default-run = "ruff"
[[bin]]
name = "ruff"
[dependencies]
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
ruff_cache = { path = "../ruff_cache" }
ruff_diagnostics = { path = "../ruff_diagnostics" }
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
ruff_macros = { path = "../ruff_macros" }
ruff_formatter = { path = "../ruff_formatter" }
ruff_notebook = { path = "../ruff_notebook" }
ruff_macros = { path = "../ruff_macros" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_server = { path = "../ruff_server" }
ruff_source_file = { path = "../ruff_source_file" }
ruff_text_size = { path = "../ruff_text_size" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_workspace = { path = "../ruff_workspace" }
ruff_text_size = { path = "../ruff_text_size" }
annotate-snippets = { version = "0.9.1", features = ["color"] }
anyhow = { workspace = true }
argfile = { workspace = true }
bincode = { workspace = true }
argfile = { version = "0.1.6" }
bincode = { version = "1.3.3" }
bitflags = { workspace = true }
cachedir = { workspace = true }
cachedir = { version = "0.3.0" }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "env", "wrap_help"] }
clap_complete_command = { workspace = true }
clearscreen = { workspace = true }
clap = { workspace = true, features = ["derive", "env"] }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "2.0.0" }
colored = { workspace = true }
filetime = { workspace = true }
glob = { workspace = true }
ignore = { workspace = true }
is-macro = { workspace = true }
itertools = { workspace = true }
itoa = { version = "1.0.6" }
log = { workspace = true }
notify = { workspace = true }
num_cpus = { workspace = true }
notify = { version = "6.1.1" }
path-absolutize = { workspace = true, features = ["once_cell_cache"] }
rayon = { workspace = true }
rayon = { version = "1.8.0" }
regex = { workspace = true }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
shellexpand = { workspace = true }
similar = { workspace = true }
strum = { workspace = true, features = [] }
tempfile = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true, features = ["log"] }
tracing-subscriber = { workspace = true, features = ["registry"] }
tracing-tree = { workspace = true }
walkdir = { workspace = true }
wild = { workspace = true }
walkdir = { version = "2.3.2" }
wild = { version = "2" }
[dev-dependencies]
# Enable test rules during development
ruff_linter = { path = "../ruff_linter", features = ["clap", "test-rules"] }
assert_cmd = { version = "2.0.8" }
# Avoid writing colored snapshots when running tests from the terminal
colored = { workspace = true, features = ["no-color"] }
insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { workspace = true }
tempfile = { workspace = true }
colored = { workspace = true, features = ["no-color"]}
insta = { workspace = true, features = ["filters"] }
insta-cmd = { version = "0.4.0" }
tempfile = "3.6.0"
test-case = { workspace = true }
ureq = { version = "2.6.2", features = [] }
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true }
mimalloc = "0.1.39"
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
tikv-jemallocator = { workspace = true }
[lints]
workspace = true
tikv-jemallocator = "0.5.0"

View File

@@ -0,0 +1,3 @@
a = 1
__all__ = list(["a", "b"])

crates/ruff_cli/src/args.rs (new file, 705 lines)
View File

@@ -0,0 +1,705 @@
use std::path::PathBuf;
use clap::{command, Parser};
use regex::Regex;
use rustc_hash::FxHashMap;
use ruff_linter::line_width::LineLength;
use ruff_linter::logging::LogLevel;
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::{
FilePattern, PatternPrefixPair, PerFileIgnore, PreviewMode, PythonVersion, SerializationFormat,
};
use ruff_linter::{RuleParser, RuleSelector, RuleSelectorParser};
use ruff_python_formatter::LspRowColumn;
use ruff_workspace::configuration::{Configuration, RuleSelection};
use ruff_workspace::resolver::ConfigurationTransformer;
#[derive(Debug, Parser)]
#[command(
author,
name = "ruff",
about = "Ruff: An extremely fast Python linter.",
after_help = "For help with a specific command, see: `ruff help <command>`."
)]
#[command(version)]
pub struct Args {
#[command(subcommand)]
pub command: Command,
#[clap(flatten)]
pub log_level_args: LogLevelArgs,
}
#[allow(clippy::large_enum_variant)]
#[derive(Debug, clap::Subcommand)]
pub enum Command {
/// Run Ruff on the given files or directories (default).
Check(CheckCommand),
/// Explain a rule (or all rules).
#[clap(alias = "--explain")]
#[command(group = clap::ArgGroup::new("selector").multiple(false).required(true))]
Rule {
/// Rule to explain
#[arg(value_parser=RuleParser, group = "selector", hide_possible_values = true)]
rule: Option<Rule>,
/// Explain all rules
#[arg(long, conflicts_with = "rule", group = "selector")]
all: bool,
/// Output format
#[arg(long, value_enum, default_value = "text")]
format: HelpFormat,
},
/// List or describe the available configuration options.
Config { option: Option<String> },
/// List all supported upstream linters.
Linter {
/// Output format
#[arg(long, value_enum, default_value = "text")]
format: HelpFormat,
},
/// Clear any caches in the current directory and any subdirectories.
#[clap(alias = "--clean")]
Clean,
/// Generate shell completion.
#[clap(alias = "--generate-shell-completion", hide = true)]
GenerateShellCompletion { shell: clap_complete_command::Shell },
/// Run the Ruff formatter on the given files or directories.
#[doc(hidden)]
#[clap(hide = true)]
Format(FormatCommand),
}
// The `Parser` derive is for `ruff_dev`; for `ruff_cli`, `Args` alone would be sufficient.
#[derive(Clone, Debug, clap::Parser)]
#[allow(clippy::struct_excessive_bools)]
pub struct CheckCommand {
/// List of files or directories to check.
pub files: Vec<PathBuf>,
/// Attempt to automatically fix lint violations.
/// Use `--no-fix` to disable.
#[arg(long, overrides_with("no_fix"))]
fix: bool,
#[clap(long, overrides_with("fix"), hide = true)]
no_fix: bool,
/// Show violations with source code.
/// Use `--no-show-source` to disable.
#[arg(long, overrides_with("no_show_source"))]
show_source: bool,
#[clap(long, overrides_with("show_source"), hide = true)]
no_show_source: bool,
/// Show an enumeration of all autofixed lint violations.
/// Use `--no-show-fixes` to disable.
#[arg(long, overrides_with("no_show_fixes"))]
show_fixes: bool,
#[clap(long, overrides_with("show_fixes"), hide = true)]
no_show_fixes: bool,
/// Avoid writing any fixed files back; instead, output a diff for each changed file to stdout. Implies `--fix-only`.
#[arg(long, conflicts_with = "show_fixes")]
pub diff: bool,
/// Run in watch mode by re-running whenever files change.
#[arg(short, long)]
pub watch: bool,
/// Fix any fixable lint violations, but don't report on leftover violations. Implies `--fix`.
/// Use `--no-fix-only` to disable.
#[arg(long, overrides_with("no_fix_only"))]
fix_only: bool,
#[clap(long, overrides_with("fix_only"), hide = true)]
no_fix_only: bool,
/// Ignore any `# noqa` comments.
#[arg(long)]
ignore_noqa: bool,
/// Output serialization format for violations. (Deprecated: Use `--output-format` instead).
#[arg(
long,
value_enum,
env = "RUFF_FORMAT",
conflicts_with = "output_format",
hide = true
)]
pub format: Option<SerializationFormat>,
/// Output serialization format for violations.
#[arg(long, value_enum, env = "RUFF_OUTPUT_FORMAT")]
pub output_format: Option<SerializationFormat>,
/// Specify file to write the linter output to (default: stdout).
#[arg(short, long)]
pub output_file: Option<PathBuf>,
/// The minimum Python version that should be supported.
#[arg(long, value_enum)]
pub target_version: Option<PythonVersion>,
/// Enable preview mode; checks will include unstable rules and fixes.
/// Use `--no-preview` to disable.
#[arg(long, overrides_with("no_preview"))]
preview: bool,
#[clap(long, overrides_with("preview"), hide = true)]
no_preview: bool,
/// Path to the `pyproject.toml` or `ruff.toml` file to use for
/// configuration.
#[arg(long, conflicts_with = "isolated")]
pub config: Option<PathBuf>,
/// Comma-separated list of rule codes to enable (or ALL, to enable all rules).
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide_possible_values = true
)]
pub select: Option<Vec<RuleSelector>>,
/// Comma-separated list of rule codes to disable.
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide_possible_values = true
)]
pub ignore: Option<Vec<RuleSelector>>,
/// Like --select, but adds additional rule codes on top of those already specified.
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide_possible_values = true
)]
pub extend_select: Option<Vec<RuleSelector>>,
/// Like --ignore. (Deprecated: You can just use --ignore instead.)
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide = true
)]
pub extend_ignore: Option<Vec<RuleSelector>>,
/// List of mappings from file pattern to code to exclude.
#[arg(long, value_delimiter = ',', help_heading = "Rule selection")]
pub per_file_ignores: Option<Vec<PatternPrefixPair>>,
/// Like `--per-file-ignores`, but adds additional ignores on top of those already specified.
#[arg(long, value_delimiter = ',', help_heading = "Rule selection")]
pub extend_per_file_ignores: Option<Vec<PatternPrefixPair>>,
/// List of paths, used to omit files and/or directories from analysis.
#[arg(
long,
value_delimiter = ',',
value_name = "FILE_PATTERN",
help_heading = "File selection"
)]
pub exclude: Option<Vec<FilePattern>>,
/// Like --exclude, but adds additional files and directories on top of those already excluded.
#[arg(
long,
value_delimiter = ',',
value_name = "FILE_PATTERN",
help_heading = "File selection"
)]
pub extend_exclude: Option<Vec<FilePattern>>,
/// List of rule codes to treat as eligible for autofix. Only applicable when autofix itself is enabled (e.g., via `--fix`).
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide_possible_values = true
)]
pub fixable: Option<Vec<RuleSelector>>,
/// List of rule codes to treat as ineligible for autofix. Only applicable when autofix itself is enabled (e.g., via `--fix`).
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide_possible_values = true
)]
pub unfixable: Option<Vec<RuleSelector>>,
/// Like --fixable, but adds additional rule codes on top of those already specified.
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide_possible_values = true
)]
pub extend_fixable: Option<Vec<RuleSelector>>,
/// Like --unfixable. (Deprecated: You can just use --unfixable instead.)
#[arg(
long,
value_delimiter = ',',
value_name = "RULE_CODE",
value_parser = RuleSelectorParser,
help_heading = "Rule selection",
hide = true
)]
pub extend_unfixable: Option<Vec<RuleSelector>>,
/// Respect file exclusions via `.gitignore` and other standard ignore files.
/// Use `--no-respect-gitignore` to disable.
#[arg(
long,
overrides_with("no_respect_gitignore"),
help_heading = "File selection"
)]
respect_gitignore: bool,
#[clap(long, overrides_with("respect_gitignore"), hide = true)]
no_respect_gitignore: bool,
/// Enforce exclusions, even for paths passed to Ruff directly on the command-line.
/// Use `--no-force-exclude` to disable.
#[arg(
long,
overrides_with("no_force_exclude"),
help_heading = "File selection"
)]
force_exclude: bool,
#[clap(long, overrides_with("force_exclude"), hide = true)]
no_force_exclude: bool,
/// Set the line-length for length-associated rules and automatic formatting.
#[arg(long, help_heading = "Rule configuration", hide = true)]
pub line_length: Option<LineLength>,
/// Regular expression matching the name of dummy variables.
#[arg(long, help_heading = "Rule configuration", hide = true)]
pub dummy_variable_rgx: Option<Regex>,
/// Disable cache reads.
#[arg(short, long, help_heading = "Miscellaneous")]
pub no_cache: bool,
/// Ignore all configuration files.
#[arg(long, conflicts_with = "config", help_heading = "Miscellaneous")]
pub isolated: bool,
/// Path to the cache directory.
#[arg(long, env = "RUFF_CACHE_DIR", help_heading = "Miscellaneous")]
pub cache_dir: Option<PathBuf>,
/// The name of the file when passing it through stdin.
#[arg(long, help_heading = "Miscellaneous")]
pub stdin_filename: Option<PathBuf>,
/// Exit with status code "0", even upon detecting lint violations.
#[arg(
short,
long,
help_heading = "Miscellaneous",
conflicts_with = "exit_non_zero_on_fix"
)]
pub exit_zero: bool,
/// Exit with a non-zero status code if any files were modified via autofix, even if no lint violations remain.
#[arg(long, help_heading = "Miscellaneous", conflicts_with = "exit_zero")]
pub exit_non_zero_on_fix: bool,
/// Show counts for every rule with at least one violation.
#[arg(
long,
// Unsupported default-command arguments.
conflicts_with = "diff",
conflicts_with = "show_source",
conflicts_with = "watch",
)]
pub statistics: bool,
/// Enable automatic additions of `noqa` directives to failing lines.
#[arg(
long,
// conflicts_with = "add_noqa",
conflicts_with = "show_files",
conflicts_with = "show_settings",
// Unsupported default-command arguments.
conflicts_with = "ignore_noqa",
conflicts_with = "statistics",
conflicts_with = "stdin_filename",
conflicts_with = "watch",
conflicts_with = "fix",
)]
pub add_noqa: bool,
/// See the files Ruff will be run against with the current settings.
#[arg(
long,
// Fake subcommands.
conflicts_with = "add_noqa",
// conflicts_with = "show_files",
conflicts_with = "show_settings",
// Unsupported default-command arguments.
conflicts_with = "ignore_noqa",
conflicts_with = "statistics",
conflicts_with = "stdin_filename",
conflicts_with = "watch",
)]
pub show_files: bool,
/// See the settings Ruff will use to lint a given Python file.
#[arg(
long,
// Fake subcommands.
conflicts_with = "add_noqa",
conflicts_with = "show_files",
// conflicts_with = "show_settings",
// Unsupported default-command arguments.
conflicts_with = "ignore_noqa",
conflicts_with = "statistics",
conflicts_with = "stdin_filename",
conflicts_with = "watch",
)]
pub show_settings: bool,
/// Dev-only argument to show fixes
#[arg(long, hide = true)]
pub ecosystem_ci: bool,
}
#[derive(Clone, Debug, clap::Parser)]
#[allow(clippy::struct_excessive_bools)]
pub struct FormatCommand {
/// List of files or directories to format.
pub files: Vec<PathBuf>,
/// Avoid writing any formatted files back; instead, exit with a non-zero status code if any
/// files would have been modified, and zero otherwise.
#[arg(long)]
pub check: bool,
/// Path to the `pyproject.toml` or `ruff.toml` file to use for configuration.
#[arg(long, conflicts_with = "isolated")]
pub config: Option<PathBuf>,
/// Respect file exclusions via `.gitignore` and other standard ignore files.
/// Use `--no-respect-gitignore` to disable.
#[arg(
long,
overrides_with("no_respect_gitignore"),
help_heading = "File selection"
)]
respect_gitignore: bool,
#[clap(long, overrides_with("respect_gitignore"), hide = true)]
no_respect_gitignore: bool,
/// Enforce exclusions, even for paths passed to Ruff directly on the command-line.
/// Use `--no-force-exclude` to disable.
#[arg(
long,
overrides_with("no_force_exclude"),
help_heading = "File selection"
)]
force_exclude: bool,
#[clap(long, overrides_with("force_exclude"), hide = true)]
no_force_exclude: bool,
/// Set the line-length.
#[arg(long, help_heading = "Rule configuration", hide = true)]
pub line_length: Option<LineLength>,
/// Ignore all configuration files.
#[arg(long, conflicts_with = "config", help_heading = "Miscellaneous")]
pub isolated: bool,
/// The name of the file when passing it through stdin.
#[arg(long, help_heading = "Miscellaneous")]
pub stdin_filename: Option<PathBuf>,
/// Enable preview mode; checks will include unstable rules and fixes.
/// Use `--no-preview` to disable.
#[arg(long, overrides_with("no_preview"), hide = true)]
preview: bool,
#[clap(long, overrides_with("preview"), hide = true)]
no_preview: bool,
/// Range formatting start: Zero-indexed row and zero-indexed char-based column separated by
/// colon, e.g. `1:2`
#[clap(long)]
pub start: Option<LspRowColumn>,
/// Range formatting end: Zero-indexed row and zero-indexed char-based column separated by
/// colon, e.g. `3:4`
#[clap(long)]
pub end: Option<LspRowColumn>,
}
#[derive(Debug, Clone, Copy, clap::ValueEnum)]
pub enum HelpFormat {
Text,
Json,
}
#[allow(clippy::module_name_repetitions)]
#[derive(Debug, clap::Args)]
pub struct LogLevelArgs {
/// Enable verbose logging.
#[arg(
short,
long,
global = true,
group = "verbosity",
help_heading = "Log levels"
)]
pub verbose: bool,
/// Print diagnostics, but nothing else.
#[arg(
short,
long,
global = true,
group = "verbosity",
help_heading = "Log levels"
)]
pub quiet: bool,
/// Disable all logging (but still exit with status code "1" upon detecting diagnostics).
#[arg(
short,
long,
global = true,
group = "verbosity",
help_heading = "Log levels"
)]
pub silent: bool,
}
impl From<&LogLevelArgs> for LogLevel {
fn from(args: &LogLevelArgs) -> Self {
if args.silent {
Self::Silent
} else if args.quiet {
Self::Quiet
} else if args.verbose {
Self::Verbose
} else {
Self::Default
}
}
}
impl CheckCommand {
/// Partition the CLI into command-line arguments and configuration
/// overrides.
pub fn partition(self) -> (CheckArguments, CliOverrides) {
(
CheckArguments {
add_noqa: self.add_noqa,
config: self.config,
diff: self.diff,
ecosystem_ci: self.ecosystem_ci,
exit_non_zero_on_fix: self.exit_non_zero_on_fix,
exit_zero: self.exit_zero,
files: self.files,
ignore_noqa: self.ignore_noqa,
isolated: self.isolated,
no_cache: self.no_cache,
output_file: self.output_file,
show_files: self.show_files,
show_settings: self.show_settings,
statistics: self.statistics,
stdin_filename: self.stdin_filename,
watch: self.watch,
},
CliOverrides {
dummy_variable_rgx: self.dummy_variable_rgx,
exclude: self.exclude,
extend_exclude: self.extend_exclude,
extend_fixable: self.extend_fixable,
extend_ignore: self.extend_ignore,
extend_select: self.extend_select,
extend_unfixable: self.extend_unfixable,
fixable: self.fixable,
ignore: self.ignore,
line_length: self.line_length,
per_file_ignores: self.per_file_ignores,
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
respect_gitignore: resolve_bool_arg(
self.respect_gitignore,
self.no_respect_gitignore,
),
select: self.select,
show_source: resolve_bool_arg(self.show_source, self.no_show_source),
target_version: self.target_version,
unfixable: self.unfixable,
// TODO(charlie): Included in `pyproject.toml`, but not inherited.
cache_dir: self.cache_dir,
fix: resolve_bool_arg(self.fix, self.no_fix),
fix_only: resolve_bool_arg(self.fix_only, self.no_fix_only),
force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
output_format: self.output_format.or(self.format),
show_fixes: resolve_bool_arg(self.show_fixes, self.no_show_fixes),
},
)
}
}
impl FormatCommand {
/// Partition the CLI into command-line arguments and configuration
/// overrides.
pub fn partition(self) -> (FormatArguments, CliOverrides) {
(
FormatArguments {
check: self.check,
config: self.config,
files: self.files,
isolated: self.isolated,
stdin_filename: self.stdin_filename,
start: self.start,
end: self.end,
},
CliOverrides {
line_length: self.line_length,
respect_gitignore: resolve_bool_arg(
self.respect_gitignore,
self.no_respect_gitignore,
),
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
// Unsupported on the formatter CLI, but required on `Overrides`.
..CliOverrides::default()
},
)
}
}
fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
match (yes, no) {
(true, false) => Some(true),
(false, true) => Some(false),
(false, false) => None,
(..) => unreachable!("Clap should make this impossible"),
}
}
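The truth table this helper encodes, as a usage sketch (the assertions are illustrative; the `--flag --no-flag` case is unreachable because clap's `overrides_with` keeps only the later flag):

    assert_eq!(resolve_bool_arg(true, false), Some(true)); // e.g. `--fix` was passed
    assert_eq!(resolve_bool_arg(false, true), Some(false)); // e.g. `--no-fix` was passed
    assert_eq!(resolve_bool_arg(false, false), None); // neither flag: defer to configuration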
/// CLI settings that are distinct from configuration (commands, lists of files,
/// etc.).
#[allow(clippy::struct_excessive_bools)]
pub struct CheckArguments {
pub add_noqa: bool,
pub config: Option<PathBuf>,
pub diff: bool,
pub ecosystem_ci: bool,
pub exit_non_zero_on_fix: bool,
pub exit_zero: bool,
pub files: Vec<PathBuf>,
pub ignore_noqa: bool,
pub isolated: bool,
pub no_cache: bool,
pub output_file: Option<PathBuf>,
pub show_files: bool,
pub show_settings: bool,
pub statistics: bool,
pub stdin_filename: Option<PathBuf>,
pub watch: bool,
}
/// CLI settings that are distinct from configuration (commands, lists of files,
/// etc.).
#[allow(clippy::struct_excessive_bools)]
pub struct FormatArguments {
pub check: bool,
pub config: Option<PathBuf>,
pub files: Vec<PathBuf>,
pub isolated: bool,
pub stdin_filename: Option<PathBuf>,
pub start: Option<LspRowColumn>,
pub end: Option<LspRowColumn>,
}
/// CLI settings that function as configuration overrides.
#[derive(Clone, Default)]
#[allow(clippy::struct_excessive_bools)]
pub struct CliOverrides {
pub dummy_variable_rgx: Option<Regex>,
pub exclude: Option<Vec<FilePattern>>,
pub extend_exclude: Option<Vec<FilePattern>>,
pub extend_fixable: Option<Vec<RuleSelector>>,
pub extend_ignore: Option<Vec<RuleSelector>>,
pub extend_select: Option<Vec<RuleSelector>>,
pub extend_unfixable: Option<Vec<RuleSelector>>,
pub fixable: Option<Vec<RuleSelector>>,
pub ignore: Option<Vec<RuleSelector>>,
pub line_length: Option<LineLength>,
pub per_file_ignores: Option<Vec<PatternPrefixPair>>,
pub preview: Option<PreviewMode>,
pub respect_gitignore: Option<bool>,
pub select: Option<Vec<RuleSelector>>,
pub show_source: Option<bool>,
pub target_version: Option<PythonVersion>,
pub unfixable: Option<Vec<RuleSelector>>,
// TODO(charlie): Captured in pyproject.toml as a default, but not part of `Settings`.
pub cache_dir: Option<PathBuf>,
pub fix: Option<bool>,
pub fix_only: Option<bool>,
pub force_exclude: Option<bool>,
pub output_format: Option<SerializationFormat>,
pub show_fixes: Option<bool>,
}
impl ConfigurationTransformer for CliOverrides {
fn transform(&self, mut config: Configuration) -> Configuration {
if let Some(cache_dir) = &self.cache_dir {
config.cache_dir = Some(cache_dir.clone());
}
if let Some(dummy_variable_rgx) = &self.dummy_variable_rgx {
config.dummy_variable_rgx = Some(dummy_variable_rgx.clone());
}
if let Some(exclude) = &self.exclude {
config.exclude = Some(exclude.clone());
}
if let Some(extend_exclude) = &self.extend_exclude {
config.extend_exclude.extend(extend_exclude.clone());
}
if let Some(fix) = &self.fix {
config.fix = Some(*fix);
}
if let Some(fix_only) = &self.fix_only {
config.fix_only = Some(*fix_only);
}
config.rule_selections.push(RuleSelection {
select: self.select.clone(),
ignore: self
.ignore
.iter()
.cloned()
.chain(self.extend_ignore.iter().cloned())
.flatten()
.collect(),
extend_select: self.extend_select.clone().unwrap_or_default(),
fixable: self.fixable.clone(),
unfixable: self
.unfixable
.iter()
.cloned()
.chain(self.extend_unfixable.iter().cloned())
.flatten()
.collect(),
extend_fixable: self.extend_fixable.clone().unwrap_or_default(),
});
if let Some(output_format) = &self.output_format {
config.output_format = Some(*output_format);
}
if let Some(force_exclude) = &self.force_exclude {
config.force_exclude = Some(*force_exclude);
}
if let Some(line_length) = &self.line_length {
config.line_length = Some(*line_length);
}
if let Some(preview) = &self.preview {
config.preview = Some(*preview);
}
if let Some(per_file_ignores) = &self.per_file_ignores {
config.per_file_ignores = Some(collect_per_file_ignores(per_file_ignores.clone()));
}
if let Some(respect_gitignore) = &self.respect_gitignore {
config.respect_gitignore = Some(*respect_gitignore);
}
if let Some(show_source) = &self.show_source {
config.show_source = Some(*show_source);
}
if let Some(show_fixes) = &self.show_fixes {
config.show_fixes = Some(*show_fixes);
}
if let Some(target_version) = &self.target_version {
config.target_version = Some(*target_version);
}
config
}
}
/// Convert a list of `PatternPrefixPair` structs to `PerFileIgnore`.
pub fn collect_per_file_ignores(pairs: Vec<PatternPrefixPair>) -> Vec<PerFileIgnore> {
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
for pair in pairs {
per_file_ignores
.entry(pair.pattern)
.or_insert_with(Vec::new)
.push(pair.prefix);
}
per_file_ignores
.into_iter()
.map(|(pattern, prefixes)| PerFileIgnore::new(pattern, &prefixes, None))
.collect()
}
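A sketch of the grouping this performs, with hypothetical inputs as they might arrive from `--per-file-ignores`:

    // Input pairs: ("tests/*.py", S101), ("tests/*.py", T201), ("docs/*.py", E501)
    // Output: one `PerFileIgnore` per distinct pattern, carrying all of its selectors:
    //   tests/*.py -> [S101, T201]
    //   docs/*.py  -> [E501]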

View File

@@ -3,8 +3,8 @@ use std::process::ExitCode;
use clap::{Parser, Subcommand};
use colored::Colorize;
use ruff::args::{Args, Command};
use ruff::{run, ExitStatus};
use ruff_cli::args::{Args, Command};
use ruff_cli::{run, ExitStatus};
#[cfg(target_os = "windows")]
#[global_allocator]
@@ -27,42 +27,26 @@ pub fn main() -> ExitCode {
let mut args =
argfile::expand_args_from(args, argfile::parse_fromfile, argfile::PREFIX).unwrap();
// We can't use `warn_user` here because logging isn't set up at this point,
// and we also don't know whether the user is running ruff with `--quiet`.
// Keep the message and pass it to `run`, which is responsible for emitting the warning.
let deprecated_alias_warning = match args.get(1).and_then(|arg| arg.to_str()) {
// Deprecated aliases that are handled by clap
Some("--explain") => {
Some("`ruff --explain <RULE>` is deprecated. Use `ruff rule <RULE>` instead.")
}
Some("--clean") => {
Some("`ruff --clean` is deprecated. Use `ruff clean` instead.")
}
Some("--generate-shell-completion") => {
Some("`ruff --generate-shell-completion <SHELL>` is deprecated. Use `ruff generate-shell-completion <SHELL>` instead.")
}
// Deprecated `ruff` alias to `ruff check`
// Clap doesn't support default subcommands but we want to run `check` by
// default for convenience and backwards-compatibility, so we just
// preprocess the arguments accordingly before passing them to Clap.
Some(arg) if !Command::has_subcommand(arg)
// Clap doesn't support default subcommands but we want to run `check` by
// default for convenience and backwards-compatibility, so we just
// preprocess the arguments accordingly before passing them to Clap.
if let Some(arg) = args.get(1) {
if arg
.to_str()
.is_some_and(|arg| !Command::has_subcommand(rewrite_legacy_subcommand(arg)))
&& arg != "-h"
&& arg != "--help"
&& arg != "-V"
&& arg != "--version"
&& arg != "help" => {
{
args.insert(1, "check".into());
Some("`ruff <path>` is deprecated. Use `ruff check <path>` instead.")
}
},
_ => None
};
&& arg != "help"
{
args.insert(1, "check".into());
}
}
let args = Args::parse_from(args);
match run(args, deprecated_alias_warning) {
match run(args) {
Ok(code) => code.into(),
Err(err) => {
#[allow(clippy::print_stderr)]
@@ -81,3 +65,12 @@ pub fn main() -> ExitCode {
}
}
}
fn rewrite_legacy_subcommand(cmd: &str) -> &str {
match cmd {
"--explain" => "rule",
"--clean" => "clean",
"--generate-shell-completion" => "generate-shell-completion",
cmd => cmd,
}
}
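Both sides of this diff implement the same idea: clap has no default subcommand, so argv is rewritten before parsing. A minimal self-contained sketch of that preprocessing (simplified to `String` args and a hard-coded subcommand list; the real code consults `Command::has_subcommand`):

    fn preprocess_args(mut args: Vec<String>) -> Vec<String> {
        // `ruff path/` should behave like `ruff check path/`.
        let is_escape = |a: &str| matches!(a, "-h" | "--help" | "-V" | "--version" | "help");
        let is_subcommand =
            |a: &str| matches!(a, "check" | "rule" | "config" | "linter" | "clean" | "format");
        let needs_default = args
            .get(1)
            .map_or(false, |a| !is_subcommand(a.as_str()) && !is_escape(a.as_str()));
        if needs_default {
            // Insert the implicit subcommand right after the binary name.
            args.insert(1, "check".to_string());
        }
        args
    }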

View File

@@ -1,69 +1,36 @@
use std::fmt::Debug;
use std::collections::HashMap;
use std::fs::{self, File};
use std::hash::Hasher;
use std::io::{self, BufReader, Write};
use std::io::{self, BufReader, BufWriter, Write};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Mutex;
use std::time::{Duration, SystemTime};
use anyhow::{Context, Result};
use filetime::FileTime;
use itertools::Itertools;
use log::{debug, error};
use rayon::iter::ParallelIterator;
use rayon::iter::{IntoParallelIterator, ParallelBridge};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use tempfile::NamedTempFile;
use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_diagnostics::{DiagnosticKind, Fix};
use ruff_linter::message::Message;
use ruff_linter::{warn_user, VERSION};
use ruff_macros::CacheKey;
use ruff_linter::warn_user;
use ruff_notebook::NotebookIndex;
use ruff_python_ast::imports::ImportMap;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::resolver::Resolver;
use ruff_workspace::Settings;
use crate::diagnostics::Diagnostics;
/// Maximum duration for which we keep a file in cache that hasn't been seen.
const MAX_LAST_SEEN: Duration = Duration::from_secs(30 * 24 * 60 * 60); // 30 days.
/// [`Path`] that is relative to the package root in [`PackageCache`].
pub(crate) type RelativePath = Path;
/// [`PathBuf`] that is relative to the package root in [`PackageCache`].
pub(crate) type RelativePathBuf = PathBuf;
#[derive(CacheKey)]
pub(crate) struct FileCacheKey {
/// Timestamp when the file was last modified before the (cached) check.
file_last_modified: FileTime,
/// Permissions of the file before the (cached) check.
file_permissions_mode: u32,
}
impl FileCacheKey {
pub(crate) fn from_path(path: &Path) -> io::Result<FileCacheKey> {
// Construct a cache key for the file
let metadata = path.metadata()?;
#[cfg(unix)]
let permissions = {
use std::os::unix::fs::PermissionsExt;
metadata.permissions().mode()
};
#[cfg(windows)]
let permissions: u32 = metadata.permissions().readonly().into();
Ok(FileCacheKey {
file_last_modified: FileTime::from_last_modification_time(&metadata),
file_permissions_mode: permissions,
})
}
}
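(The permissions bits feed the key because some rules depend on the file mode, e.g. the shebang/executable checks, so a plain `chmod` must invalidate the cached result; this rationale is inferred from the rule set rather than stated in the diff.)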
/// Cache.
///
/// `Cache` holds everything required to display the diagnostics for a single
@@ -83,10 +50,9 @@ pub(crate) struct Cache {
/// Files that are linted, but are not in `package.files` or are in
/// `package.files` but are outdated. This gets merged with `package.files`
/// when the cache is written back to disk in [`Cache::store`].
changes: Mutex<Vec<Change>>,
new_files: Mutex<HashMap<RelativePathBuf, FileCache>>,
/// The "current" timestamp used as cache for the updates of
/// [`FileCache::last_seen`]
#[allow(clippy::struct_field_names)]
last_seen_cache: u64,
}
@@ -99,11 +65,12 @@ impl Cache {
///
/// Finally `settings` is used to ensure we don't open a cache for different
/// settings. It also defines the directory in which to store the cache.
pub(crate) fn open(package_root: PathBuf, settings: &Settings) -> Self {
pub(crate) fn open(package_root: PathBuf, settings: &Settings) -> Cache {
debug_assert!(package_root.is_absolute(), "package root not canonicalized");
let key = format!("{}", cache_key(&package_root, settings));
let path = PathBuf::from_iter([&settings.cache_dir, Path::new(VERSION), Path::new(&key)]);
let mut buf = itoa::Buffer::new();
let key = Path::new(buf.format(cache_key(&package_root, settings)));
let path = PathBuf::from_iter([&settings.cache_dir, Path::new("content"), key]);
let file = match File::open(&path) {
Ok(file) => file,
@@ -138,70 +105,36 @@ impl Cache {
}
/// Create an empty `Cache`.
fn empty(path: PathBuf, package_root: PathBuf) -> Self {
fn empty(path: PathBuf, package_root: PathBuf) -> Cache {
let package = PackageCache {
package_root,
files: FxHashMap::default(),
files: HashMap::new(),
};
Cache::new(path, package)
}
#[allow(clippy::cast_possible_truncation)]
fn new(path: PathBuf, package: PackageCache) -> Self {
fn new(path: PathBuf, package: PackageCache) -> Cache {
Cache {
path,
package,
changes: Mutex::new(Vec::new()),
new_files: Mutex::new(HashMap::new()),
// SAFETY: this will be truncated to the year ~2554 (so don't use
// this code after that!).
last_seen_cache: SystemTime::UNIX_EPOCH.elapsed().unwrap().as_millis() as u64,
}
}
/// Applies the pending changes and persists the cache to disk, if it has been changed.
pub(crate) fn persist(mut self) -> Result<()> {
if !self.save() {
/// Store the cache to disk, if it has been changed.
#[allow(clippy::cast_possible_truncation)]
pub(crate) fn store(mut self) -> Result<()> {
let new_files = self.new_files.into_inner().unwrap();
if new_files.is_empty() {
// No changes made, no need to write the same cache file back to
// disk.
return Ok(());
}
// Write the cache to a temporary file first and then rename it for an "atomic" write.
// This protects against data loss if the process is killed during the write, and against races
// between different ruff processes that could otherwise corrupt the cache file. https://github.com/astral-sh/ruff/issues/8147#issuecomment-1943345964
let mut temp_file =
NamedTempFile::new_in(self.path.parent().expect("Write path must have a parent"))
.context("Failed to create temporary file")?;
// Serialize to an in-memory buffer because a hyperfine benchmark showed that it's faster than
// using a `BufWriter`, and our cache files are small enough that streaming isn't necessary.
let serialized =
bincode::serialize(&self.package).context("Failed to serialize cache data")?;
temp_file
.write_all(&serialized)
.context("Failed to write serialized cache to temporary file.")?;
temp_file.persist(&self.path).with_context(|| {
format!(
"Failed to rename temporary cache file to {}",
self.path.display()
)
})?;
Ok(())
}
/// Applies the pending changes without storing the cache to disk.
#[allow(clippy::cast_possible_truncation)]
pub(crate) fn save(&mut self) -> bool {
/// Maximum duration for which we keep a file in cache that hasn't been seen.
const MAX_LAST_SEEN: Duration = Duration::from_secs(30 * 24 * 60 * 60); // 30 days.
let changes = std::mem::take(self.changes.get_mut().unwrap());
if changes.is_empty() {
return false;
}
// Remove cached files that we haven't seen in a while.
let now = self.last_seen_cache;
self.package.files.retain(|_, file| {
@@ -210,32 +143,17 @@ impl Cache {
});
// Apply any changes made and keep track of when we last saw files.
for change in changes {
let entry = self
.package
.files
.entry(change.path)
.and_modify(|existing| {
if existing.key != change.new_key {
// Reset the data if the key changed.
existing.data = FileCacheData::default();
}
self.package.files.extend(new_files);
existing.key = change.new_key;
existing
.last_seen
.store(self.last_seen_cache, Ordering::Relaxed);
})
.or_insert_with(|| FileCache {
key: change.new_key,
last_seen: AtomicU64::new(self.last_seen_cache),
data: FileCacheData::default(),
});
change.new_data.apply(&mut entry.data);
}
true
let file = File::create(&self.path)
.with_context(|| format!("Failed to create cache file '{}'", self.path.display()))?;
let writer = BufWriter::new(file);
bincode::serialize_into(writer, &self.package).with_context(|| {
format!(
"Failed to serialise cache to file '{}'",
self.path.display()
)
})
}
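The contrast between these two bodies is the substance of the change: one writes bincode output straight into the cache file, the other stages the bytes in a temporary file and renames it into place. A minimal sketch of that atomic-write pattern, using the same `tempfile` API (error handling simplified; the function name is ours):

    use std::io::Write;
    use std::path::Path;

    fn write_atomically(path: &Path, bytes: &[u8]) -> anyhow::Result<()> {
        // Stage in the destination directory so the final rename stays on one filesystem.
        let dir = path.parent().expect("write path must have a parent");
        let mut tmp = tempfile::NamedTempFile::new_in(dir)?;
        tmp.write_all(bytes)?;
        // `persist` renames the temp file over `path`: readers observe either the
        // old file or the complete new one, never a half-written cache.
        tmp.persist(path)?;
        Ok(())
    }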
/// Returns the relative path based on `path` and the package root.
@@ -251,7 +169,7 @@ impl Cache {
///
/// This returns `None` if `key` differs from the cached key or if the
/// cache doesn't contain results for the file.
pub(crate) fn get(&self, path: &RelativePath, key: &FileCacheKey) -> Option<&FileCache> {
pub(crate) fn get<T: CacheKey>(&self, path: &RelativePath, key: &T) -> Option<&FileCache> {
let file = self.package.files.get(path)?;
let mut hasher = CacheKeyHasher::new();
@@ -267,34 +185,50 @@ impl Cache {
Some(file)
}
pub(crate) fn is_formatted(&self, path: &RelativePath, key: &FileCacheKey) -> bool {
self.get(path, key)
.is_some_and(|entry| entry.data.formatted)
}
/// Add or update a file cache at `path` relative to the package root.
fn update(&self, path: RelativePathBuf, key: &FileCacheKey, data: ChangeData) {
pub(crate) fn update<T: CacheKey>(
&self,
path: RelativePathBuf,
key: T,
messages: &[Message],
imports: &ImportMap,
notebook_index: Option<&NotebookIndex>,
) {
let source = if let Some(msg) = messages.first() {
msg.file.source_text().to_owned()
} else {
String::new() // No messages, no need to keep the source!
};
let messages = messages
.iter()
.map(|msg| {
// Make sure that all messages use the same source file.
assert!(
msg.file == messages.first().unwrap().file,
"message uses a different source file"
);
CacheMessage {
kind: msg.kind.clone(),
range: msg.range,
fix: msg.fix.clone(),
noqa_offset: msg.noqa_offset,
}
})
.collect();
let mut hasher = CacheKeyHasher::new();
key.cache_key(&mut hasher);
self.changes.lock().unwrap().push(Change {
path,
new_key: hasher.finish(),
new_data: data,
});
}
pub(crate) fn update_lint(
&self,
path: RelativePathBuf,
key: &FileCacheKey,
data: LintCacheData,
) {
self.update(path, key, ChangeData::Lint(data));
}
pub(crate) fn set_formatted(&self, path: RelativePathBuf, key: &FileCacheKey) {
self.update(path, key, ChangeData::Formatted);
let file = FileCache {
key: hasher.finish(),
last_seen: AtomicU64::new(self.last_seen_cache),
imports: imports.clone(),
messages,
source,
notebook_index: notebook_index.cloned(),
};
self.new_files.lock().unwrap().insert(path, file);
}
}
@@ -307,7 +241,7 @@ struct PackageCache {
/// single file "packages", e.g. scripts.
package_root: PathBuf,
/// Mapping of source file path to its cached data.
files: FxHashMap<RelativePathBuf, FileCache>,
files: HashMap<RelativePathBuf, FileCache>,
}
/// On disk representation of the cache per source file.
@@ -320,135 +254,50 @@ pub(crate) struct FileCache {
/// Represented as the number of milliseconds since Unix epoch. This will
/// break in 1970 + ~584 years (~2554).
last_seen: AtomicU64,
data: FileCacheData,
}
impl FileCache {
/// Convert the file cache into `Diagnostics`, using `path` as file name.
pub(crate) fn to_diagnostics(&self, path: &Path) -> Option<Diagnostics> {
self.data.lint.as_ref().map(|lint| {
let messages = if lint.messages.is_empty() {
Vec::new()
} else {
let file = SourceFileBuilder::new(path.to_string_lossy(), &*lint.source).finish();
lint.messages
.iter()
.map(|msg| Message {
kind: msg.kind.clone(),
range: msg.range,
fix: msg.fix.clone(),
file: file.clone(),
noqa_offset: msg.noqa_offset,
})
.collect()
};
let notebook_indexes = if let Some(notebook_index) = lint.notebook_index.as_ref() {
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook_index.clone())])
} else {
FxHashMap::default()
};
Diagnostics::new(messages, lint.imports.clone(), notebook_indexes)
})
}
}
#[derive(Debug, Default, Deserialize, Serialize)]
struct FileCacheData {
lint: Option<LintCacheData>,
formatted: bool,
}
/// Returns a hash key based on the `package_root`, `settings` and the crate
/// version.
fn cache_key(package_root: &Path, settings: &Settings) -> u64 {
let mut hasher = CacheKeyHasher::new();
package_root.cache_key(&mut hasher);
settings.cache_key(&mut hasher);
hasher.finish()
}
/// Initialize the cache at the specified `Path`.
pub(crate) fn init(path: &Path) -> Result<()> {
// Create the cache directories.
fs::create_dir_all(path.join(VERSION))?;
// Add the CACHEDIR.TAG.
cachedir::ensure_tag(path)?;
// Add the .gitignore.
match fs::OpenOptions::new()
.write(true)
.create_new(true)
.open(path.join(".gitignore"))
{
Ok(mut file) => file.write_all(b"# Automatically created by ruff.\n*\n")?,
Err(err) if err.kind() == io::ErrorKind::AlreadyExists => (),
Err(err) => return Err(err.into()),
}
Ok(())
}
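The `create_new(true)` call above is the race-free "write once" idiom: creating the file fails with `AlreadyExists` rather than truncating what's already there, so concurrent `ruff` runs can't clobber each other's `.gitignore`. A standalone sketch of the same idiom (path and contents are illustrative only):

use std::fs;
use std::io::{self, Write};

fn write_once(path: &std::path::Path, contents: &[u8]) -> io::Result<()> {
    match fs::OpenOptions::new().write(true).create_new(true).open(path) {
        Ok(mut file) => file.write_all(contents),
        // Someone else created it first: that's fine, leave it alone.
        Err(err) if err.kind() == io::ErrorKind::AlreadyExists => Ok(()),
        Err(err) => Err(err),
    }
}

fn main() -> io::Result<()> {
    let dir = std::env::temp_dir().join("write_once_demo");
    fs::create_dir_all(&dir)?;
    let path = dir.join(".gitignore");
    let _ = fs::remove_file(&path);
    write_once(&path, b"*\n")?;
    // The second call is a no-op instead of an error or a truncation.
    write_once(&path, b"ignored\n")?;
    assert_eq!(fs::read(&path)?, b"*\n");
    Ok(())
}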
#[derive(Deserialize, Debug, Serialize, PartialEq)]
pub(crate) struct LintCacheData {
/// Imports made.
pub(super) imports: ImportMap,
imports: ImportMap,
/// Diagnostic messages.
pub(super) messages: Vec<CacheMessage>,
messages: Vec<CacheMessage>,
/// Source code of the file.
///
/// # Notes
///
/// This will be empty if `messages` is empty.
pub(super) source: String,
source: String,
/// Notebook index if this file is a Jupyter Notebook.
pub(super) notebook_index: Option<NotebookIndex>,
notebook_index: Option<NotebookIndex>,
}
impl LintCacheData {
pub(crate) fn from_messages(
messages: &[Message],
imports: ImportMap,
notebook_index: Option<NotebookIndex>,
) -> Self {
let source = if let Some(msg) = messages.first() {
msg.file.source_text().to_owned()
impl FileCache {
/// Convert the file cache into `Diagnostics`, using `path` as the file name.
pub(crate) fn as_diagnostics(&self, path: &Path) -> Diagnostics {
let messages = if self.messages.is_empty() {
Vec::new()
} else {
String::new() // No messages, no need to keep the source!
};
let messages = messages
.iter()
.map(|msg| {
// Make sure that all messages use the same source file.
assert_eq!(
msg.file,
messages.first().unwrap().file,
"message uses a different source file"
);
CacheMessage {
let file = SourceFileBuilder::new(path.to_string_lossy(), &*self.source).finish();
self.messages
.iter()
.map(|msg| Message {
kind: msg.kind.clone(),
range: msg.range,
fix: msg.fix.clone(),
file: file.clone(),
noqa_offset: msg.noqa_offset,
}
})
.collect();
Self {
imports,
messages,
source,
notebook_index,
}
})
.collect()
};
let notebook_indexes = if let Some(notebook_index) = self.notebook_index.as_ref() {
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook_index.clone())])
} else {
FxHashMap::default()
};
Diagnostics::new(messages, self.imports.clone(), notebook_indexes)
}
}
/// On disk representation of a diagnostic message.
#[derive(Deserialize, Debug, Serialize, PartialEq)]
pub(super) struct CacheMessage {
#[derive(Deserialize, Debug, Serialize)]
struct CacheMessage {
kind: DiagnosticKind,
/// Range into the message's [`FileCache::source`].
range: TextRange,
@@ -456,134 +305,62 @@ pub(super) struct CacheMessage {
noqa_offset: TextSize,
}
pub(crate) trait PackageCaches {
fn get(&self, package_root: &Path) -> Option<&Cache>;
fn persist(self) -> Result<()>;
/// Returns a hash key based on the `package_root`, `settings` and the crate
/// version.
fn cache_key(package_root: &Path, settings: &Settings) -> u64 {
let mut hasher = CacheKeyHasher::new();
env!("CARGO_PKG_VERSION").cache_key(&mut hasher);
package_root.cache_key(&mut hasher);
settings.cache_key(&mut hasher);
hasher.finish()
}
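Folding `env!("CARGO_PKG_VERSION")` into the key means every Ruff release invalidates existing caches wholesale, which is the cheapest way to survive changes in the cache format itself. The same shape with std's hasher standing in for `CacheKeyHasher` (names and inputs are illustrative):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn cache_key(package_root: &str, settings_fingerprint: u64) -> u64 {
    let mut hasher = DefaultHasher::new();
    // Baking the crate version in busts the cache on every upgrade.
    env!("CARGO_PKG_VERSION").hash(&mut hasher);
    package_root.hash(&mut hasher);
    settings_fingerprint.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    // Different settings (barring a hash collision) yield a different key.
    assert_ne!(cache_key("/repo/pkg", 42), cache_key("/repo/pkg", 43));
}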
impl<T> PackageCaches for Option<T>
where
T: PackageCaches,
{
fn get(&self, package_root: &Path) -> Option<&Cache> {
match self {
None => None,
Some(caches) => caches.get(package_root),
}
/// Initialize the cache at the specified `Path`.
pub(crate) fn init(path: &Path) -> Result<()> {
// Create the cache directories.
fs::create_dir_all(path.join("content"))?;
// Add the CACHEDIR.TAG.
if !cachedir::is_tagged(path)? {
cachedir::add_tag(path)?;
}
fn persist(self) -> Result<()> {
match self {
None => Ok(()),
Some(caches) => caches.persist(),
}
}
}
pub(crate) struct PackageCacheMap<'a>(FxHashMap<&'a Path, Cache>);
impl<'a> PackageCacheMap<'a> {
pub(crate) fn init(
package_roots: &FxHashMap<&'a Path, Option<&'a Path>>,
resolver: &Resolver,
) -> Self {
fn init_cache(path: &Path) {
if let Err(e) = init(path) {
error!("Failed to initialize cache at {}: {e:?}", path.display());
}
}
for settings in resolver.settings() {
init_cache(&settings.cache_dir);
}
Self(
package_roots
.iter()
.map(|(package, package_root)| package_root.unwrap_or(package))
.unique()
.par_bridge()
.map(|cache_root| {
let settings = resolver.resolve(cache_root);
let cache = Cache::open(cache_root.to_path_buf(), settings);
(cache_root, cache)
})
.collect(),
)
}
}
impl PackageCaches for PackageCacheMap<'_> {
fn get(&self, package_root: &Path) -> Option<&Cache> {
let cache = self.0.get(package_root);
if cache.is_none() {
debug!("No cache found for {}", package_root.display());
}
cache
// Add the .gitignore.
let gitignore_path = path.join(".gitignore");
if !gitignore_path.exists() {
let mut file = fs::File::create(gitignore_path)?;
file.write_all(b"*")?;
}
fn persist(self) -> Result<()> {
self.0
.into_par_iter()
.try_for_each(|(_, cache)| cache.persist())
}
}
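The blanket `impl PackageCaches for Option<T>` above lets call sites run the same code whether caching is enabled or not: `None` simply answers every lookup with `None` and persists nothing. A self-contained sketch of the pattern, with hypothetical stand-ins for the cache types:

use std::collections::HashMap;
use std::path::{Path, PathBuf};

struct Cache; // stand-in for the real per-package cache

trait PackageCaches {
    fn get(&self, package_root: &Path) -> Option<&Cache>;
    fn persist(self) -> std::io::Result<()>;
}

// Caching disabled (`None`) degrades to a no-op; enabled forwards through.
impl<T: PackageCaches> PackageCaches for Option<T> {
    fn get(&self, package_root: &Path) -> Option<&Cache> {
        self.as_ref().and_then(|caches| caches.get(package_root))
    }
    fn persist(self) -> std::io::Result<()> {
        match self {
            None => Ok(()),
            Some(caches) => caches.persist(),
        }
    }
}

struct Map(HashMap<PathBuf, Cache>); // stand-in for PackageCacheMap

impl PackageCaches for Map {
    fn get(&self, package_root: &Path) -> Option<&Cache> {
        self.0.get(package_root)
    }
    fn persist(self) -> std::io::Result<()> {
        Ok(())
    }
}

fn main() {
    let disabled: Option<Map> = None;
    assert!(disabled.get(Path::new("pkg")).is_none());
    disabled.persist().unwrap();

    let mut enabled = Map(HashMap::new());
    enabled.0.insert(PathBuf::from("pkg"), Cache);
    assert!(Some(enabled).get(Path::new("pkg")).is_some());
}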
#[derive(Debug)]
struct Change {
path: PathBuf,
new_key: u64,
new_data: ChangeData,
}
#[derive(Debug)]
enum ChangeData {
Lint(LintCacheData),
Formatted,
}
impl ChangeData {
fn apply(self, data: &mut FileCacheData) {
match self {
ChangeData::Lint(new_lint) => {
data.lint = Some(new_lint);
}
ChangeData::Formatted => {
data.formatted = true;
}
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use filetime::{set_file_mtime, FileTime};
use std::env::temp_dir;
use std::fs;
use std::io;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::sync::atomic::AtomicU64;
use std::time::SystemTime;
use anyhow::Result;
use filetime::{set_file_mtime, FileTime};
use itertools::Itertools;
use test_case::test_case;
use ruff_cache::CACHE_DIR_NAME;
use ruff_linter::settings::flags;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_python_ast::PySourceType;
use ruff_workspace::Settings;
use crate::cache::{self, FileCache, FileCacheData, FileCacheKey};
use crate::cache::{Cache, RelativePathBuf};
use crate::commands::format::{format_path, FormatCommandError, FormatMode, FormatResult};
use crate::cache::RelativePathBuf;
use crate::cache::{self, Cache, FileCache};
use crate::diagnostics::{lint_path, Diagnostics};
use std::sync::atomic::AtomicU64;
use anyhow::Result;
use ruff_python_ast::imports::ImportMap;
use ruff_workspace::Settings;
use test_case::test_case;
#[test_case("../ruff_linter/resources/test/fixtures", "ruff_tests/cache_same_results_ruff_linter"; "ruff_linter_fixtures")]
#[test_case("../ruff_notebook/resources/test/fixtures", "ruff_tests/cache_same_results_ruff_notebook"; "ruff_notebook_fixtures")]
fn same_results(package_root: &str, cache_dir_path: &str) {
@@ -599,7 +376,7 @@ mod tests {
let package_root = fs::canonicalize(package_root).unwrap();
let cache = Cache::open(package_root.clone(), &settings);
assert_eq!(cache.changes.lock().unwrap().len(), 0);
assert_eq!(cache.new_files.lock().unwrap().len(), 0);
let mut paths = Vec::new();
let mut parse_errors = Vec::new();
@@ -633,7 +410,6 @@ mod tests {
Some(&cache),
flags::Noqa::Enabled,
flags::FixMode::Generate,
UnsafeFixes::Enabled,
)
.unwrap();
if diagnostics
@@ -649,7 +425,7 @@ mod tests {
}
assert_ne!(paths, &[] as &[std::path::PathBuf], "no files checked");
cache.persist().unwrap();
cache.store().unwrap();
let cache = Cache::open(package_root.clone(), &settings);
assert_ne!(cache.package.files.len(), 0);
@@ -679,7 +455,6 @@ mod tests {
Some(&cache),
flags::Noqa::Enabled,
flags::FixMode::Generate,
UnsafeFixes::Enabled,
)
.unwrap();
}
@@ -694,21 +469,21 @@ mod tests {
let test_cache = TestCache::new("cache_adds_file_on_lint");
let cache = test_cache.open();
test_cache.write_source_file("source.py", source);
assert_eq!(cache.changes.lock().unwrap().len(), 0);
assert_eq!(cache.new_files.lock().unwrap().len(), 0);
cache.persist().unwrap();
cache.store().unwrap();
let cache = test_cache.open();
test_cache
.lint_file_with_cache("source.py", &cache)
.expect("Failed to lint test file");
assert_eq!(
cache.changes.lock().unwrap().len(),
cache.new_files.lock().unwrap().len(),
1,
"A single new file should be added to the cache"
);
cache.persist().unwrap();
cache.store().unwrap();
}
#[test]
@@ -719,9 +494,9 @@ mod tests {
let cache = test_cache.open();
test_cache.write_source_file("source_1.py", source);
test_cache.write_source_file("source_2.py", source);
assert_eq!(cache.changes.lock().unwrap().len(), 0);
assert_eq!(cache.new_files.lock().unwrap().len(), 0);
cache.persist().unwrap();
cache.store().unwrap();
let cache = test_cache.open();
test_cache
@@ -731,39 +506,12 @@ mod tests {
.lint_file_with_cache("source_2.py", &cache)
.expect("Failed to lint test file");
assert_eq!(
cache.changes.lock().unwrap().len(),
2,
"Both files should be added to the cache"
);
cache.persist().unwrap();
}
#[test]
fn cache_adds_files_on_format() {
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
let test_cache = TestCache::new("cache_adds_files_on_format");
let cache = test_cache.open();
test_cache.write_source_file("source_1.py", source);
test_cache.write_source_file("source_2.py", source);
assert_eq!(cache.changes.lock().unwrap().len(), 0);
cache.persist().unwrap();
let cache = test_cache.open();
test_cache
.format_file_with_cache("source_1.py", &cache)
.expect("Failed to format test file");
test_cache
.format_file_with_cache("source_2.py", &cache)
.expect("Failed to format test file");
assert_eq!(
cache.changes.lock().unwrap().len(),
cache.new_files.lock().unwrap().len(),
2,
"Both files should be added to the cache"
);
cache.persist().unwrap();
cache.store().unwrap();
}
#[test]
@@ -773,13 +521,13 @@ mod tests {
let test_cache = TestCache::new("cache_invalidated_on_file_modified_time");
let cache = test_cache.open();
let source_path = test_cache.write_source_file("source.py", source);
assert_eq!(cache.changes.lock().unwrap().len(), 0);
assert_eq!(cache.new_files.lock().unwrap().len(), 0);
let expected_diagnostics = test_cache
.lint_file_with_cache("source.py", &cache)
.expect("Failed to lint test file");
cache.persist().unwrap();
cache.store().unwrap();
let cache = test_cache.open();
// Update the modified time of the file to a time in the future
@@ -794,7 +542,7 @@ mod tests {
.expect("Failed to lint test file");
assert_eq!(
cache.changes.lock().unwrap().len(),
cache.new_files.lock().unwrap().len(),
1,
"Cache should not be used, the file should be treated as new and added to the cache"
);
@@ -832,13 +580,13 @@ mod tests {
let test_cache = TestCache::new("cache_invalidated_on_permission_change");
let cache = test_cache.open();
let path = test_cache.write_source_file("source.py", source);
assert_eq!(cache.changes.lock().unwrap().len(), 0);
assert_eq!(cache.new_files.lock().unwrap().len(), 0);
let expected_diagnostics = test_cache
.lint_file_with_cache("source.py", &cache)
.unwrap();
cache.persist().unwrap();
cache.store().unwrap();
let cache = test_cache.open();
// Flip the permissions on the file
@@ -852,7 +600,7 @@ mod tests {
.unwrap();
assert_eq!(
cache.changes.lock().unwrap().len(),
cache.new_files.lock().unwrap().len(),
1,
"Cache should not be used, the file should be treated as new and added to the cache"
);
@@ -864,8 +612,8 @@ mod tests {
}
#[test]
fn cache_removes_stale_files_on_persist() {
let test_cache = TestCache::new("cache_removes_stale_files_on_persist");
fn cache_removes_stale_files_on_store() {
let test_cache = TestCache::new("cache_removes_stale_files_on_store");
let mut cache = test_cache.open();
// Add a file to the cache that hasn't been linted or seen since the '70s!
@@ -875,7 +623,10 @@ mod tests {
FileCache {
key: 123,
last_seen: AtomicU64::new(123),
data: FileCacheData::default(),
imports: ImportMap::new(),
messages: Vec::new(),
source: String::new(),
notebook_index: None,
},
);
@@ -883,125 +634,34 @@ mod tests {
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
test_cache.write_source_file("new.py", source);
let new_path_key = RelativePathBuf::from("new.py");
assert_eq!(cache.changes.lock().unwrap().len(), 0);
assert_eq!(cache.new_files.lock().unwrap().len(), 0);
test_cache
.lint_file_with_cache("new.py", &cache)
.expect("Failed to lint test file");
// Storing the cache should remove the old (`old.py`) file.
cache.persist().unwrap();
cache.store().unwrap();
// So when we open the cache again, it shouldn't contain `old.py`.
let cache = test_cache.open();
assert_eq!(
cache.package.files.keys().collect_vec(),
vec![&new_path_key],
assert!(
cache.package.files.keys().collect_vec() == vec![&new_path_key],
"Only the new file should be present"
);
}
#[test]
fn format_updates_cache_entry() {
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
let test_cache = TestCache::new("format_updates_cache_entry");
let cache = test_cache.open();
test_cache.write_source_file("source.py", source);
assert_eq!(cache.changes.lock().unwrap().len(), 0);
cache.persist().unwrap();
let cache = test_cache.open();
// Cache the lint results
test_cache
.lint_file_with_cache("source.py", &cache)
.expect("Failed to lint test file");
cache.persist().unwrap();
let mut cache = test_cache.open();
// Now format the file
test_cache
.format_file_with_cache("source.py", &cache)
.expect("Failed to format test file");
cache.save();
assert_eq!(cache.package.files.len(), 1);
let Some(file_cache) = cache.get(
Path::new("source.py"),
&FileCacheKey::from_path(&test_cache.package_root.join("source.py")).unwrap(),
) else {
panic!("Cache entry for `source.py` is missing.");
};
assert!(file_cache.data.lint.is_some());
assert!(file_cache.data.formatted);
}
#[test]
fn file_changes_invalidate_file_cache() {
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
let test_cache = TestCache::new("file_changes_invalidate_file_cache");
let cache = test_cache.open();
let source_path = test_cache.write_source_file("source.py", source);
assert_eq!(cache.changes.lock().unwrap().len(), 0);
cache.persist().unwrap();
let cache = test_cache.open();
// Cache the format and lint results
test_cache
.lint_file_with_cache("source.py", &cache)
.expect("Failed to lint test file");
test_cache
.format_file_with_cache("source.py", &cache)
.expect("Failed to format test file");
cache.persist().unwrap();
let mut cache = test_cache.open();
assert_eq!(cache.package.files.len(), 1);
set_file_mtime(
&source_path,
FileTime::from_system_time(SystemTime::now() + std::time::Duration::from_secs(1)),
)
.unwrap();
test_cache
.format_file_with_cache("source.py", &cache)
.expect("Failed to format test file");
cache.save();
assert_eq!(cache.package.files.len(), 1);
let Some(file_cache) = cache.get(
Path::new("source.py"),
&FileCacheKey::from_path(&source_path).unwrap(),
) else {
panic!("Cache entry for `source.py` is missing.");
};
assert_eq!(file_cache.data.lint, None);
assert!(file_cache.data.formatted);
}
struct TestCache {
package_root: PathBuf,
settings: Settings,
}
impl TestCache {
fn new(test_case: &str) -> Self {
fn new(name: &str) -> Self {
// Build a new cache directory and clear it
let mut test_dir = temp_dir();
test_dir.push("ruff_tests/cache");
test_dir.push(test_case);
test_dir.push(name);
let _ = fs::remove_dir_all(&test_dir);
@@ -1052,23 +712,6 @@ mod tests {
Some(cache),
flags::Noqa::Enabled,
flags::FixMode::Generate,
UnsafeFixes::Enabled,
)
}
fn format_file_with_cache(
&self,
path: &str,
cache: &Cache,
) -> Result<FormatResult, FormatCommandError> {
let file_path = self.package_root.join(path);
format_path(
&file_path,
&self.settings.formatter,
PySourceType::Python,
FormatMode::Write,
None,
Some(cache),
)
}
}

View File

@@ -7,22 +7,22 @@ use log::{debug, error};
use rayon::prelude::*;
use ruff_linter::linter::add_noqa_to_path;
use ruff_linter::source_kind::SourceKind;
use ruff_linter::warn_user_once;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig};
use crate::args::ConfigArguments;
use crate::args::CliOverrides;
use crate::diagnostics::LintSource;
/// Add `noqa` directives to a collection of files.
pub(crate) fn add_noqa(
files: &[PathBuf],
pyproject_config: &PyprojectConfig,
config_arguments: &ConfigArguments,
overrides: &CliOverrides,
) -> Result<usize> {
// Collect all the files to check.
let start = Instant::now();
let (paths, resolver) = python_files_in_path(files, pyproject_config, config_arguments)?;
let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
let duration = start.elapsed();
debug!("Identified files to lint in: {:?}", duration);
@@ -36,29 +36,29 @@ pub(crate) fn add_noqa(
&paths
.iter()
.flatten()
.map(ResolvedFile::path)
.map(ignore::DirEntry::path)
.collect::<Vec<_>>(),
pyproject_config,
);
let start = Instant::now();
let modifications: usize = paths
.par_iter()
.flatten()
.filter_map(|resolved_file| {
.filter_map(|entry| {
let path = entry.path();
let SourceType::Python(source_type @ (PySourceType::Python | PySourceType::Stub)) =
SourceType::from(resolved_file.path())
SourceType::from(path)
else {
return None;
};
let path = resolved_file.path();
let package = resolved_file
.path()
let package = path
.parent()
.and_then(|parent| package_roots.get(parent))
.and_then(|package| *package);
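The chained `and_then` above flattens the nested option coming out of the package-roots map (`Option<&Option<&Path>>`) down to a plain `Option<&Path>`, where an entry of `None` means "known not to be inside a package". A standalone sketch with owned paths in place of the borrowed ones:

use std::collections::HashMap;
use std::path::{Path, PathBuf};

fn main() {
    let mut package_roots: HashMap<PathBuf, Option<PathBuf>> = HashMap::new();
    package_roots.insert(PathBuf::from("pkg"), Some(PathBuf::from("pkg")));
    package_roots.insert(PathBuf::from("scripts"), None); // not a package

    let path = Path::new("pkg/module.py");
    let package = path
        .parent()
        .and_then(|parent| package_roots.get(parent)) // Option<&Option<PathBuf>>
        .and_then(|package| package.as_ref()); // flattened to Option<&PathBuf>
    assert_eq!(package, Some(&PathBuf::from("pkg")));
}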
let settings = resolver.resolve(path);
let source_kind = match SourceKind::from_path(path, source_type) {
Ok(Some(source_kind)) => source_kind,
let settings = resolver.resolve(path, pyproject_config);
let LintSource(source_kind) = match LintSource::try_from_path(path, source_type) {
Ok(Some(source)) => source,
Ok(None) => return None,
Err(e) => {
error!("Failed to extract source from {}: {e}", path.display());

View File

@@ -1,3 +1,4 @@
use std::collections::HashMap;
use std::fmt::Write;
use std::io;
use std::path::{Path, PathBuf};
@@ -6,6 +7,7 @@ use std::time::Instant;
use anyhow::Result;
use colored::Colorize;
use ignore::Error;
use itertools::Itertools;
use log::{debug, error, warn};
#[cfg(not(target_family = "wasm"))]
use rayon::prelude::*;
@@ -14,195 +16,203 @@ use rustc_hash::FxHashMap;
use ruff_diagnostics::Diagnostic;
use ruff_linter::message::Message;
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::{fs, warn_user_once, IOError};
use ruff_python_ast::imports::ImportMap;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::resolver::{
match_exclusion, python_files_in_path, PyprojectConfig, ResolvedFile,
};
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, PyprojectDiscoveryStrategy};
use crate::args::ConfigArguments;
use crate::cache::{Cache, PackageCacheMap, PackageCaches};
use crate::args::CliOverrides;
use crate::cache::{self, Cache};
use crate::diagnostics::Diagnostics;
use crate::panic::catch_unwind;
/// Run the linter over a collection of files.
#[allow(clippy::too_many_arguments)]
pub(crate) fn check(
files: &[PathBuf],
pyproject_config: &PyprojectConfig,
config_arguments: &ConfigArguments,
overrides: &CliOverrides,
cache: flags::Cache,
noqa: flags::Noqa,
fix_mode: flags::FixMode,
unsafe_fixes: UnsafeFixes,
autofix: flags::FixMode,
) -> Result<Diagnostics> {
// Collect all the Python files to check.
let start = Instant::now();
let (paths, resolver) = python_files_in_path(files, pyproject_config, config_arguments)?;
debug!("Identified files to lint in: {:?}", start.elapsed());
let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
let duration = start.elapsed();
debug!("Identified files to lint in: {:?}", duration);
if paths.is_empty() {
warn_user_once!("No Python files found under the given path(s)");
return Ok(Diagnostics::default());
}
// Initialize the cache.
if cache.into() {
fn init_cache(path: &Path) {
if let Err(e) = cache::init(path) {
error!("Failed to initialize cache at {}: {e:?}", path.display());
}
}
match pyproject_config.strategy {
PyprojectDiscoveryStrategy::Fixed => {
init_cache(&pyproject_config.settings.cache_dir);
}
PyprojectDiscoveryStrategy::Hierarchical => {
for settings in
std::iter::once(&pyproject_config.settings).chain(resolver.settings())
{
init_cache(&settings.cache_dir);
}
}
}
};
// Discover the package root for each Python file.
let package_roots = resolver.package_roots(
&paths
.iter()
.flatten()
.map(ResolvedFile::path)
.map(ignore::DirEntry::path)
.collect::<Vec<_>>(),
pyproject_config,
);
// Load the caches.
let caches = if bool::from(cache) {
Some(PackageCacheMap::init(&package_roots, &resolver))
} else {
None
};
let start = Instant::now();
let diagnostics_per_file = paths.par_iter().filter_map(|resolved_file| {
let result = match resolved_file {
Ok(resolved_file) => {
let path = resolved_file.path();
let package = path
.parent()
.and_then(|parent| package_roots.get(parent))
.and_then(|package| *package);
let settings = resolver.resolve(path);
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
&& match_exclusion(
resolved_file.path(),
resolved_file.file_name(),
&settings.linter.exclude,
)
{
return None;
}
let cache_root = package.unwrap_or_else(|| path.parent().unwrap_or(path));
let cache = caches.get(cache_root);
lint_path(
path,
package,
&settings.linter,
cache,
noqa,
fix_mode,
unsafe_fixes,
)
.map_err(|e| {
(Some(path.to_path_buf()), {
let mut error = e.to_string();
for cause in e.chain() {
write!(&mut error, "\n Cause: {cause}").unwrap();
}
error
})
})
}
Err(e) => Err((
if let Error::WithPath { path, .. } = e {
Some(path.clone())
} else {
None
},
e.io_error()
.map_or_else(|| e.to_string(), io::Error::to_string),
)),
};
Some(result.unwrap_or_else(|(path, message)| {
if let Some(path) = &path {
let settings = resolver.resolve(path);
if settings.linter.rules.enabled(Rule::IOError) {
let dummy =
SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();
Diagnostics::new(
vec![Message::from_diagnostic(
Diagnostic::new(IOError { message }, TextRange::default()),
dummy,
TextSize::default(),
)],
ImportMap::default(),
FxHashMap::default(),
)
} else {
warn!(
"{}{}{} {message}",
"Failed to lint ".bold(),
fs::relativize_path(path).bold(),
":".bold()
);
Diagnostics::default()
}
} else {
warn!("{} {message}", "Encountered error:".bold());
Diagnostics::default()
}
}))
let caches = bool::from(cache).then(|| {
package_roots
.iter()
.map(|(package, package_root)| package_root.unwrap_or(package))
.unique()
.par_bridge()
.map(|cache_root| {
let settings = resolver.resolve(cache_root, pyproject_config);
let cache = Cache::open(cache_root.to_path_buf(), settings);
(cache_root, cache)
})
.collect::<HashMap<&Path, Cache>>()
});
// Aggregate the diagnostics of all checked files and count the checked files.
// This can't be a regular for loop because we use `par_iter`.
let (mut all_diagnostics, checked_files) = diagnostics_per_file
.fold(
|| (Diagnostics::default(), 0u64),
|(all_diagnostics, checked_files), file_diagnostics| {
(all_diagnostics + file_diagnostics, checked_files + 1)
},
)
.reduce(
|| (Diagnostics::default(), 0u64),
|a, b| (a.0 + b.0, a.1 + b.1),
);
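This fold/reduce pair is rayon's standard aggregation idiom: `fold` gives each worker thread a local accumulator for its chunk of files, and `reduce` merges the per-worker accumulators into one result. A minimal sketch with integers standing in for `Diagnostics` (requires the `rayon` crate):

use rayon::prelude::*;

fn main() {
    let per_file = vec![3usize, 1, 4, 1, 5];
    let (sum, checked) = per_file
        .par_iter()
        // Each worker folds its chunk into a local (sum, count) pair...
        .fold(|| (0usize, 0u64), |(s, n), &x| (s + x, n + 1))
        // ...and the local pairs are merged pairwise into the final one.
        .reduce(|| (0usize, 0u64), |a, b| (a.0 + b.0, a.1 + b.1));
    assert_eq!((sum, checked), (14, 5));
}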
let start = Instant::now();
let mut diagnostics: Diagnostics = paths
.par_iter()
.map(|entry| {
match entry {
Ok(entry) => {
let path = entry.path();
let package = path
.parent()
.and_then(|parent| package_roots.get(parent))
.and_then(|package| *package);
all_diagnostics.messages.sort();
let settings = resolver.resolve(path, pyproject_config);
let cache_root = package.unwrap_or_else(|| path.parent().unwrap_or(path));
let cache = caches.as_ref().and_then(|caches| {
if let Some(cache) = caches.get(&cache_root) {
Some(cache)
} else {
debug!("No cache found for {}", cache_root.display());
None
}
});
lint_path(path, package, &settings.linter, cache, noqa, autofix).map_err(|e| {
(Some(path.to_owned()), {
let mut error = e.to_string();
for cause in e.chain() {
write!(&mut error, "\n Cause: {cause}").unwrap();
}
error
})
})
}
Err(e) => Err((
if let Error::WithPath { path, .. } = e {
Some(path.clone())
} else {
None
},
e.io_error()
.map_or_else(|| e.to_string(), io::Error::to_string),
)),
}
.unwrap_or_else(|(path, message)| {
if let Some(path) = &path {
let settings = resolver.resolve(path, pyproject_config);
if settings.linter.rules.enabled(Rule::IOError) {
let dummy =
SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();
Diagnostics::new(
vec![Message::from_diagnostic(
Diagnostic::new(IOError { message }, TextRange::default()),
dummy,
TextSize::default(),
)],
ImportMap::default(),
FxHashMap::default(),
)
} else {
warn!(
"{}{}{} {message}",
"Failed to lint ".bold(),
fs::relativize_path(path).bold(),
":".bold()
);
Diagnostics::default()
}
} else {
warn!("{} {message}", "Encountered error:".bold());
Diagnostics::default()
}
})
})
.reduce(Diagnostics::default, |mut acc, item| {
acc += item;
acc
});
diagnostics.messages.sort();
// Store the caches.
caches.persist()?;
if let Some(caches) = caches {
caches
.into_par_iter()
.try_for_each(|(_, cache)| cache.store())?;
}
let duration = start.elapsed();
debug!("Checked {:?} files in: {:?}", checked_files, duration);
debug!("Checked {:?} files in: {:?}", paths.len(), duration);
Ok(all_diagnostics)
Ok(diagnostics)
}
/// Wraps [`lint_path`](crate::diagnostics::lint_path) in a [`catch_unwind`](std::panic::catch_unwind) and emits
/// a diagnostic if linting the file panics.
#[allow(clippy::too_many_arguments)]
fn lint_path(
path: &Path,
package: Option<&Path>,
settings: &LinterSettings,
cache: Option<&Cache>,
noqa: flags::Noqa,
fix_mode: flags::FixMode,
unsafe_fixes: UnsafeFixes,
autofix: flags::FixMode,
) -> Result<Diagnostics> {
let result = catch_unwind(|| {
crate::diagnostics::lint_path(path, package, settings, cache, noqa, fix_mode, unsafe_fixes)
crate::diagnostics::lint_path(path, package, settings, cache, noqa, autofix)
});
match result {
Ok(inner) => inner,
Err(error) => {
let message = r"This indicates a bug in Ruff. If you could open an issue at:
let message = r#"This indicates a bug in Ruff. If you could open an issue at:
https://github.com/astral-sh/ruff/issues/new?title=%5BLinter%20panic%5D
...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
";
"#;
error!(
"{}{}{} {message}\n{error}",
@@ -228,12 +238,11 @@ mod test {
use ruff_linter::message::{Emitter, EmitterContext, TextEmitter};
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{flags, LinterSettings};
use ruff_workspace::resolver::{PyprojectConfig, PyprojectDiscoveryStrategy};
use ruff_workspace::Settings;
use crate::args::ConfigArguments;
use crate::args::CliOverrides;
use super::check;
@@ -252,7 +261,6 @@ mod test {
for file in [&pyproject_toml, &python_file, &notebook] {
fs::OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.mode(0o000)
.open(file)?;
@@ -273,11 +281,10 @@ mod test {
// Notebooks are not included by default
&[tempdir.path().to_path_buf(), notebook],
&pyproject_config,
&ConfigArguments::default(),
&CliOverrides::default(),
flags::Cache::Disabled,
flags::Noqa::Disabled,
flags::FixMode::Generate,
UnsafeFixes::Enabled,
)
.unwrap();
let mut output = Vec::new();

View File

@@ -0,0 +1,40 @@
use std::path::Path;
use anyhow::Result;
use ruff_linter::packaging;
use ruff_linter::settings::flags;
use ruff_workspace::resolver::{python_file_at_path, PyprojectConfig};
use crate::args::CliOverrides;
use crate::diagnostics::{lint_stdin, Diagnostics};
use crate::stdin::read_from_stdin;
/// Run the linter over a single file, read from `stdin`.
pub(crate) fn check_stdin(
filename: Option<&Path>,
pyproject_config: &PyprojectConfig,
overrides: &CliOverrides,
noqa: flags::Noqa,
autofix: flags::FixMode,
) -> Result<Diagnostics> {
if let Some(filename) = filename {
if !python_file_at_path(filename, pyproject_config, overrides)? {
return Ok(Diagnostics::default());
}
}
let package_root = filename.and_then(Path::parent).and_then(|path| {
packaging::detect_package_root(path, &pyproject_config.settings.linter.namespace_packages)
});
let stdin = read_from_stdin()?;
let mut diagnostics = lint_stdin(
filename,
package_root,
stdin,
&pyproject_config.settings,
noqa,
autofix,
)?;
diagnostics.messages.sort_unstable();
Ok(diagnostics)
}

View File

@@ -1,13 +1,12 @@
use anyhow::{anyhow, Result};
use ruff_workspace::options::Options;
use ruff_workspace::options_base::OptionsMetadata;
#[allow(clippy::print_stdout)]
pub(crate) fn config(key: Option<&str>) -> Result<()> {
match key {
None => print!("{}", Options::metadata()),
Some(key) => match Options::metadata().find(key) {
Some(key) => match Options::metadata().get(key) {
None => {
return Err(anyhow!("Unknown option: {key}"));
}

View File

@@ -0,0 +1,343 @@
use std::fmt::{Display, Formatter};
use std::io;
use std::path::{Path, PathBuf};
use std::time::Instant;
use anyhow::Result;
use colored::Colorize;
use log::error;
use rayon::iter::Either::{Left, Right};
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use thiserror::Error;
use tracing::debug;
use ruff_linter::fs;
use ruff_linter::logging::LogLevel;
use ruff_linter::warn_user_once;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_python_formatter::{format_module_source, FormatModuleError, PyFormatOptions};
use ruff_source_file::{find_newline, LineEnding};
use ruff_workspace::resolver::python_files_in_path;
use crate::args::{CliOverrides, FormatArguments};
use crate::panic::{catch_unwind, PanicError};
use crate::resolve::resolve;
use crate::ExitStatus;
#[derive(Debug, Copy, Clone, is_macro::Is)]
pub(crate) enum FormatMode {
/// Write the formatted contents back to the file.
Write,
/// Check if the file is formatted, but do not write the formatted contents back.
Check,
}
/// Format a set of files, and return the exit status.
pub(crate) fn format(
cli: &FormatArguments,
overrides: &CliOverrides,
log_level: LogLevel,
) -> Result<ExitStatus> {
let pyproject_config = resolve(
cli.isolated,
cli.config.as_deref(),
overrides,
cli.stdin_filename.as_deref(),
)?;
let mode = if cli.check {
FormatMode::Check
} else {
FormatMode::Write
};
let (paths, resolver) = python_files_in_path(&cli.files, &pyproject_config, overrides)?;
if paths.is_empty() {
warn_user_once!("No Python files found under the given path(s)");
return Ok(ExitStatus::Success);
}
let start = Instant::now();
let (results, errors): (Vec<_>, Vec<_>) = paths
.into_par_iter()
.filter_map(|entry| {
match entry {
Ok(entry) => {
let path = entry.path();
let SourceType::Python(
source_type @ (PySourceType::Python | PySourceType::Stub),
) = SourceType::from(path)
else {
// Ignore any non-Python files.
return None;
};
let resolved_settings = resolver.resolve(path, &pyproject_config);
let options = resolved_settings.formatter.to_format_options(source_type);
debug!("Formatting {} with {:?}", path.display(), options);
Some(match catch_unwind(|| format_path(path, options, mode)) {
Ok(inner) => inner,
Err(error) => {
Err(FormatCommandError::Panic(Some(path.to_path_buf()), error))
}
})
}
Err(err) => Some(Err(FormatCommandError::Ignore(err))),
}
})
.partition_map(|result| match result {
Ok(diagnostic) => Left(diagnostic),
Err(err) => Right(err),
});
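`partition_map` splits the parallel results into successes and failures in a single pass, steered by the `Either` each closure call returns. A minimal sketch of the same call (requires the `rayon` crate):

use rayon::iter::Either::{Left, Right};
use rayon::prelude::*;

fn main() {
    let results: Vec<Result<u32, String>> = vec![Ok(1), Err("io error".into()), Ok(2)];
    let (oks, errs): (Vec<u32>, Vec<String>) = results
        .into_par_iter()
        .partition_map(|result| match result {
            Ok(value) => Left(value),
            Err(err) => Right(err),
        });
    assert_eq!(oks, vec![1, 2]);
    assert_eq!(errs, vec!["io error".to_string()]);
}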
let duration = start.elapsed();
debug!(
"Formatted {} files in {:.2?}",
results.len() + errors.len(),
duration
);
// Report on any errors.
for error in &errors {
error!("{error}");
}
let summary = FormatResultSummary::new(results, mode);
// Report on the formatting changes.
if log_level >= LogLevel::Default {
#[allow(clippy::print_stdout)]
{
println!("{summary}");
}
}
match mode {
FormatMode::Write => {
if errors.is_empty() {
Ok(ExitStatus::Success)
} else {
Ok(ExitStatus::Error)
}
}
FormatMode::Check => {
if errors.is_empty() {
if summary.formatted > 0 {
Ok(ExitStatus::Failure)
} else {
Ok(ExitStatus::Success)
}
} else {
Ok(ExitStatus::Error)
}
}
}
}
/// Format the file at the given [`Path`].
#[tracing::instrument(skip_all, fields(path = %path.display()))]
fn format_path(
path: &Path,
options: PyFormatOptions,
mode: FormatMode,
) -> Result<FormatCommandResult, FormatCommandError> {
let unformatted = std::fs::read_to_string(path)
.map_err(|err| FormatCommandError::Read(Some(path.to_path_buf()), err))?;
let line_ending = match find_newline(&unformatted) {
Some((_, LineEnding::Lf)) | None => ruff_formatter::printer::LineEnding::LineFeed,
Some((_, LineEnding::Cr)) => ruff_formatter::printer::LineEnding::CarriageReturn,
Some((_, LineEnding::CrLf)) => ruff_formatter::printer::LineEnding::CarriageReturnLineFeed,
};
let options = options.with_line_ending(line_ending);
let formatted = format_module_source(&unformatted, options)
.map_err(|err| FormatCommandError::FormatModule(Some(path.to_path_buf()), err))?;
let formatted = formatted.as_code();
if formatted.len() == unformatted.len() && formatted == unformatted {
Ok(FormatCommandResult::Unchanged)
} else {
if mode.is_write() {
std::fs::write(path, formatted.as_bytes())
.map_err(|err| FormatCommandError::Write(Some(path.to_path_buf()), err))?;
}
Ok(FormatCommandResult::Formatted)
}
}
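The newline sniffing in `format_path` above keeps the output's line-ending style identical to the input's: the first terminator found decides whether the printer emits `\n`, `\r`, or `\r\n`, defaulting to `\n` when there is none. A dependency-free sketch of the same decision (the real code uses `ruff_source_file::find_newline` and the printer's `LineEnding`):

fn detect_line_ending(source: &str) -> &'static str {
    match source.bytes().position(|b| b == b'\r' || b == b'\n') {
        Some(i) if source.as_bytes()[i] == b'\r' => {
            // '\r\n' is a Windows ending; a lone '\r' is classic Mac style.
            if source.as_bytes().get(i + 1) == Some(&b'\n') {
                "\r\n"
            } else {
                "\r"
            }
        }
        // First terminator is '\n', or there is none: default to '\n'.
        _ => "\n",
    }
}

fn main() {
    assert_eq!(detect_line_ending("a = 1\r\nb = 2\r\n"), "\r\n");
    assert_eq!(detect_line_ending("a = 1\n"), "\n");
    assert_eq!(detect_line_ending("a = 1"), "\n");
}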
#[derive(Debug, Clone, Copy, is_macro::Is)]
pub(crate) enum FormatCommandResult {
/// The file was formatted.
Formatted,
/// The file was unchanged, as the formatted contents matched the existing contents.
Unchanged,
}
#[derive(Debug)]
struct FormatResultSummary {
/// The format mode that was used.
mode: FormatMode,
/// The number of files that were formatted.
formatted: usize,
/// The number of files that were unchanged.
unchanged: usize,
}
impl FormatResultSummary {
fn new(diagnostics: Vec<FormatCommandResult>, mode: FormatMode) -> Self {
let mut summary = Self {
mode,
formatted: 0,
unchanged: 0,
};
for diagnostic in diagnostics {
match diagnostic {
FormatCommandResult::Formatted => summary.formatted += 1,
FormatCommandResult::Unchanged => summary.unchanged += 1,
}
}
summary
}
}
impl Display for FormatResultSummary {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
if self.formatted > 0 && self.unchanged > 0 {
write!(
f,
"{} file{} {}, {} file{} left unchanged",
self.formatted,
if self.formatted == 1 { "" } else { "s" },
match self.mode {
FormatMode::Write => "reformatted",
FormatMode::Check => "would be reformatted",
},
self.unchanged,
if self.unchanged == 1 { "" } else { "s" },
)
} else if self.formatted > 0 {
write!(
f,
"{} file{} {}",
self.formatted,
if self.formatted == 1 { "" } else { "s" },
match self.mode {
FormatMode::Write => "reformatted",
FormatMode::Check => "would be reformatted",
}
)
} else if self.unchanged > 0 {
write!(
f,
"{} file{} left unchanged",
self.unchanged,
if self.unchanged == 1 { "" } else { "s" },
)
} else {
Ok(())
}
}
}
/// An error that can occur while formatting a set of files.
#[derive(Error, Debug)]
pub(crate) enum FormatCommandError {
Ignore(#[from] ignore::Error),
Read(Option<PathBuf>, io::Error),
Write(Option<PathBuf>, io::Error),
FormatModule(Option<PathBuf>, FormatModuleError),
Panic(Option<PathBuf>, PanicError),
}
impl Display for FormatCommandError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
Self::Ignore(err) => {
if let ignore::Error::WithPath { path, .. } = err {
write!(
f,
"{}{}{} {}",
"Failed to format ".bold(),
fs::relativize_path(path).bold(),
":".bold(),
err.io_error()
.map_or_else(|| err.to_string(), std::string::ToString::to_string)
)
} else {
write!(
f,
"{} {}",
"Encountered error:".bold(),
err.io_error()
.map_or_else(|| err.to_string(), std::string::ToString::to_string)
)
}
}
Self::Read(path, err) => {
if let Some(path) = path {
write!(
f,
"{}{}{} {err}",
"Failed to read ".bold(),
fs::relativize_path(path).bold(),
":".bold()
)
} else {
write!(f, "{}{} {err}", "Failed to read".bold(), ":".bold())
}
}
Self::Write(path, err) => {
if let Some(path) = path {
write!(
f,
"{}{}{} {err}",
"Failed to write ".bold(),
fs::relativize_path(path).bold(),
":".bold()
)
} else {
write!(f, "{}{} {err}", "Failed to write".bold(), ":".bold())
}
}
Self::FormatModule(path, err) => {
if let Some(path) = path {
write!(
f,
"{}{}{} {err}",
"Failed to format ".bold(),
fs::relativize_path(path).bold(),
":".bold()
)
} else {
write!(f, "{}{} {err}", "Failed to format".bold(), ":".bold())
}
}
Self::Panic(path, err) => {
let message = r#"This indicates a bug in Ruff. If you could open an issue at:
https://github.com/astral-sh/ruff/issues/new?title=%5BFormatter%20panic%5D
...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
"#;
if let Some(path) = path {
write!(
f,
"{}{}{} {message}\n{err}",
"Panicked while formatting ".bold(),
fs::relativize_path(path).bold(),
":".bold()
)
} else {
write!(
f,
"{} {message}\n{err}",
"Panicked while formatting.".bold()
)
}
}
}
}
}

View File

@@ -0,0 +1,96 @@
use std::io::{stdout, Write};
use std::path::Path;
use anyhow::Result;
use log::warn;
use ruff_python_ast::PySourceType;
use ruff_python_formatter::{
format_module_source, format_module_source_range, LspRowColumn, PyFormatOptions,
};
use ruff_workspace::resolver::python_file_at_path;
use crate::args::{CliOverrides, FormatArguments};
use crate::commands::format::{FormatCommandError, FormatCommandResult, FormatMode};
use crate::resolve::resolve;
use crate::stdin::read_from_stdin;
use crate::ExitStatus;
/// Run the formatter over a single file, read from `stdin`.
pub(crate) fn format_stdin(cli: &FormatArguments, overrides: &CliOverrides) -> Result<ExitStatus> {
let pyproject_config = resolve(
cli.isolated,
cli.config.as_deref(),
overrides,
cli.stdin_filename.as_deref(),
)?;
let mode = if cli.check {
FormatMode::Check
} else {
FormatMode::Write
};
if let Some(filename) = cli.stdin_filename.as_deref() {
if !python_file_at_path(filename, &pyproject_config, overrides)? {
return Ok(ExitStatus::Success);
}
}
// Format the file.
let path = cli.stdin_filename.as_deref();
let options = pyproject_config
.settings
.formatter
.to_format_options(path.map(PySourceType::from).unwrap_or_default());
match format_source(path, options, mode, cli.start, cli.end) {
Ok(result) => match mode {
FormatMode::Write => Ok(ExitStatus::Success),
FormatMode::Check => {
if result.is_formatted() {
Ok(ExitStatus::Failure)
} else {
Ok(ExitStatus::Success)
}
}
},
Err(err) => {
warn!("{err}");
Ok(ExitStatus::Error)
}
}
}
/// Format source code read from `stdin`.
fn format_source(
path: Option<&Path>,
options: PyFormatOptions,
mode: FormatMode,
start: Option<LspRowColumn>,
end: Option<LspRowColumn>,
) -> Result<FormatCommandResult, FormatCommandError> {
let unformatted = read_from_stdin()
.map_err(|err| FormatCommandError::Read(path.map(Path::to_path_buf), err))?;
let formatted = if start.is_some() || end.is_some() {
let formatted = format_module_source_range(&unformatted, options, start, end)
.map_err(|err| FormatCommandError::FormatModule(path.map(Path::to_path_buf), err))?;
formatted
} else {
let formatted = format_module_source(&unformatted, options)
.map_err(|err| FormatCommandError::FormatModule(path.map(Path::to_path_buf), err))?;
let formatted = formatted.as_code();
formatted.to_string()
};
if formatted.len() == unformatted.len() && formatted == unformatted {
Ok(FormatCommandResult::Unchanged)
} else {
if mode.is_write() {
stdout()
.lock()
.write_all(formatted.as_bytes())
.map_err(|err| FormatCommandError::Write(path.map(Path::to_path_buf), err))?;
}
Ok(FormatCommandResult::Formatted)
}
}

Some files were not shown because too many files have changed in this diff.