Compare commits
2 Commits
simplify-S
...
indent-lam
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d5a18a697c | ||
|
|
3e218fa2ec |
@@ -1,3 +1,37 @@
|
|||||||
[alias]
|
[alias]
|
||||||
dev = "run --package ruff_dev --bin ruff_dev"
|
dev = "run --package ruff_dev --bin ruff_dev"
|
||||||
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
|
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
|
||||||
|
|
||||||
|
[target.'cfg(all())']
|
||||||
|
rustflags = [
|
||||||
|
# CLIPPY LINT SETTINGS
|
||||||
|
# This is a workaround to configure lints for the entire workspace, pending the ability to configure this via TOML.
|
||||||
|
# See: `https://github.com/rust-lang/cargo/issues/5034`
|
||||||
|
# `https://github.com/EmbarkStudios/rust-ecosystem/issues/22#issuecomment-947011395`
|
||||||
|
"-Dunsafe_code",
|
||||||
|
"-Wclippy::pedantic",
|
||||||
|
# Allowed pedantic lints
|
||||||
|
"-Wclippy::char_lit_as_u8",
|
||||||
|
"-Aclippy::collapsible_else_if",
|
||||||
|
"-Aclippy::collapsible_if",
|
||||||
|
"-Aclippy::implicit_hasher",
|
||||||
|
"-Aclippy::match_same_arms",
|
||||||
|
"-Aclippy::missing_errors_doc",
|
||||||
|
"-Aclippy::missing_panics_doc",
|
||||||
|
"-Aclippy::module_name_repetitions",
|
||||||
|
"-Aclippy::must_use_candidate",
|
||||||
|
"-Aclippy::similar_names",
|
||||||
|
"-Aclippy::too_many_lines",
|
||||||
|
# Disallowed restriction lints
|
||||||
|
"-Wclippy::print_stdout",
|
||||||
|
"-Wclippy::print_stderr",
|
||||||
|
"-Wclippy::dbg_macro",
|
||||||
|
"-Wclippy::empty_drop",
|
||||||
|
"-Wclippy::empty_structs_with_brackets",
|
||||||
|
"-Wclippy::exit",
|
||||||
|
"-Wclippy::get_unwrap",
|
||||||
|
"-Wclippy::rc_buffer",
|
||||||
|
"-Wclippy::rc_mutex",
|
||||||
|
"-Wclippy::rest_pat_in_fully_bound_structs",
|
||||||
|
"-Wunreachable_pub"
|
||||||
|
]
|
||||||
|
|||||||
3
.gitattributes
vendored
3
.gitattributes
vendored
@@ -3,8 +3,5 @@
|
|||||||
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
|
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
|
||||||
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
|
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
|
||||||
|
|
||||||
crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
|
|
||||||
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
|
|
||||||
|
|
||||||
ruff.schema.json linguist-generated=true text=auto eol=lf
|
ruff.schema.json linguist-generated=true text=auto eol=lf
|
||||||
*.md.snap linguist-language=Markdown
|
*.md.snap linguist-language=Markdown
|
||||||
|
|||||||
4
.github/dependabot.yml
vendored
4
.github/dependabot.yml
vendored
@@ -5,10 +5,6 @@ updates:
|
|||||||
schedule:
|
schedule:
|
||||||
interval: "weekly"
|
interval: "weekly"
|
||||||
labels: ["internal"]
|
labels: ["internal"]
|
||||||
groups:
|
|
||||||
actions:
|
|
||||||
patterns:
|
|
||||||
- "*"
|
|
||||||
|
|
||||||
- package-ecosystem: "cargo"
|
- package-ecosystem: "cargo"
|
||||||
directory: "/"
|
directory: "/"
|
||||||
|
|||||||
29
.github/release.yml
vendored
Normal file
29
.github/release.yml
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
# https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes#configuring-automatically-generated-release-notes
|
||||||
|
changelog:
|
||||||
|
exclude:
|
||||||
|
labels:
|
||||||
|
- internal
|
||||||
|
- documentation
|
||||||
|
categories:
|
||||||
|
- title: Breaking Changes
|
||||||
|
labels:
|
||||||
|
- breaking
|
||||||
|
- title: Rules
|
||||||
|
labels:
|
||||||
|
- rule
|
||||||
|
- title: Settings
|
||||||
|
labels:
|
||||||
|
- configuration
|
||||||
|
- cli
|
||||||
|
- title: Bug Fixes
|
||||||
|
labels:
|
||||||
|
- bug
|
||||||
|
- title: Formatter
|
||||||
|
labels:
|
||||||
|
- formatter
|
||||||
|
- title: Preview
|
||||||
|
labels:
|
||||||
|
- preview
|
||||||
|
- title: Other Changes
|
||||||
|
labels:
|
||||||
|
- "*"
|
||||||
61
.github/workflows/ci.yaml
vendored
61
.github/workflows/ci.yaml
vendored
@@ -23,13 +23,8 @@ jobs:
|
|||||||
name: "Determine changes"
|
name: "Determine changes"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
outputs:
|
outputs:
|
||||||
# Flag that is raised when any code that affects linter is changed
|
|
||||||
linter: ${{ steps.changed.outputs.linter_any_changed }}
|
linter: ${{ steps.changed.outputs.linter_any_changed }}
|
||||||
# Flag that is raised when any code that affects formatter is changed
|
|
||||||
formatter: ${{ steps.changed.outputs.formatter_any_changed }}
|
formatter: ${{ steps.changed.outputs.formatter_any_changed }}
|
||||||
# Flag that is raised when any code is changed
|
|
||||||
# This is superset of the linter and formatter
|
|
||||||
code: ${{ steps.changed.outputs.code_any_changed }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
@@ -48,7 +43,6 @@ jobs:
|
|||||||
- "!crates/ruff_dev/**"
|
- "!crates/ruff_dev/**"
|
||||||
- "!crates/ruff_shrinking/**"
|
- "!crates/ruff_shrinking/**"
|
||||||
- scripts/*
|
- scripts/*
|
||||||
- python/**
|
|
||||||
- .github/workflows/ci.yaml
|
- .github/workflows/ci.yaml
|
||||||
|
|
||||||
formatter:
|
formatter:
|
||||||
@@ -64,15 +58,8 @@ jobs:
|
|||||||
- crates/ruff_python_parser/**
|
- crates/ruff_python_parser/**
|
||||||
- crates/ruff_dev/**
|
- crates/ruff_dev/**
|
||||||
- scripts/*
|
- scripts/*
|
||||||
- python/**
|
|
||||||
- .github/workflows/ci.yaml
|
- .github/workflows/ci.yaml
|
||||||
|
|
||||||
code:
|
|
||||||
- "**/*"
|
|
||||||
- "!**/*.md"
|
|
||||||
- "!docs/**"
|
|
||||||
- "!assets/**"
|
|
||||||
|
|
||||||
cargo-fmt:
|
cargo-fmt:
|
||||||
name: "cargo fmt"
|
name: "cargo fmt"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -85,8 +72,6 @@ jobs:
|
|||||||
cargo-clippy:
|
cargo-clippy:
|
||||||
name: "cargo clippy"
|
name: "cargo clippy"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -95,14 +80,12 @@ jobs:
|
|||||||
rustup target add wasm32-unknown-unknown
|
rustup target add wasm32-unknown-unknown
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Clippy"
|
- name: "Clippy"
|
||||||
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
|
run: cargo clippy --workspace --all-targets --all-features -- -D warnings
|
||||||
- name: "Clippy (wasm)"
|
- name: "Clippy (wasm)"
|
||||||
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
|
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings
|
||||||
|
|
||||||
cargo-test-linux:
|
cargo-test-linux:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
name: "cargo test (linux)"
|
name: "cargo test (linux)"
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
@@ -127,8 +110,6 @@ jobs:
|
|||||||
|
|
||||||
cargo-test-windows:
|
cargo-test-windows:
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
name: "cargo test (windows)"
|
name: "cargo test (windows)"
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
@@ -146,8 +127,6 @@ jobs:
|
|||||||
|
|
||||||
cargo-test-wasm:
|
cargo-test-wasm:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
name: "cargo test (wasm)"
|
name: "cargo test (wasm)"
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
@@ -167,8 +146,6 @@ jobs:
|
|||||||
|
|
||||||
cargo-fuzz:
|
cargo-fuzz:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
name: "cargo fuzz"
|
name: "cargo fuzz"
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
@@ -180,14 +157,12 @@ jobs:
|
|||||||
- name: "Install cargo-fuzz"
|
- name: "Install cargo-fuzz"
|
||||||
uses: taiki-e/install-action@v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
tool: cargo-fuzz@0.11.2
|
tool: cargo-fuzz@0.11
|
||||||
- run: cargo fuzz build -s none
|
- run: cargo fuzz build -s none
|
||||||
|
|
||||||
scripts:
|
scripts:
|
||||||
name: "test scripts"
|
name: "test scripts"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -209,13 +184,10 @@ jobs:
|
|||||||
- cargo-test-linux
|
- cargo-test-linux
|
||||||
- determine_changes
|
- determine_changes
|
||||||
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
|
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
|
||||||
# Ecosystem check needs linter and/or formatter changes.
|
if: github.event_name == 'pull_request'
|
||||||
if: github.event_name == 'pull_request' && ${{
|
|
||||||
needs.determine_changes.outputs.code == 'true'
|
|
||||||
}}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
@@ -226,7 +198,7 @@ jobs:
|
|||||||
name: ruff
|
name: ruff
|
||||||
path: target/debug
|
path: target/debug
|
||||||
|
|
||||||
- uses: dawidd6/action-download-artifact@v3
|
- uses: dawidd6/action-download-artifact@v2
|
||||||
name: Download baseline Ruff binary
|
name: Download baseline Ruff binary
|
||||||
with:
|
with:
|
||||||
name: ruff
|
name: ruff
|
||||||
@@ -320,8 +292,6 @@ jobs:
|
|||||||
cargo-udeps:
|
cargo-udeps:
|
||||||
name: "cargo udeps"
|
name: "cargo udeps"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install nightly Rust toolchain"
|
- name: "Install nightly Rust toolchain"
|
||||||
@@ -338,7 +308,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -362,7 +332,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -392,7 +362,7 @@ jobs:
|
|||||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
- name: "Add SSH key"
|
- name: "Add SSH key"
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||||
uses: webfactory/ssh-agent@v0.8.0
|
uses: webfactory/ssh-agent@v0.8.0
|
||||||
@@ -418,7 +388,7 @@ jobs:
|
|||||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||||
- name: "Build docs"
|
- name: "Build docs"
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||||
run: mkdocs build --strict -f mkdocs.public.yml
|
run: mkdocs build --strict -f mkdocs.generated.yml
|
||||||
|
|
||||||
check-formatter-instability-and-black-similarity:
|
check-formatter-instability-and-black-similarity:
|
||||||
name: "formatter instabilities and black similarity"
|
name: "formatter instabilities and black similarity"
|
||||||
@@ -441,10 +411,7 @@ jobs:
|
|||||||
check-ruff-lsp:
|
check-ruff-lsp:
|
||||||
name: "test ruff-lsp"
|
name: "test ruff-lsp"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs:
|
needs: cargo-test-linux
|
||||||
- cargo-test-linux
|
|
||||||
- determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: extractions/setup-just@v1
|
- uses: extractions/setup-just@v1
|
||||||
env:
|
env:
|
||||||
@@ -455,7 +422,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
repository: "astral-sh/ruff-lsp"
|
repository: "astral-sh/ruff-lsp"
|
||||||
|
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
@@ -482,8 +449,6 @@ jobs:
|
|||||||
|
|
||||||
benchmarks:
|
benchmarks:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
steps:
|
steps:
|
||||||
- name: "Checkout Branch"
|
- name: "Checkout Branch"
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -502,7 +467,7 @@ jobs:
|
|||||||
run: cargo codspeed build --features codspeed -p ruff_benchmark
|
run: cargo codspeed build --features codspeed -p ruff_benchmark
|
||||||
|
|
||||||
- name: "Run benchmarks"
|
- name: "Run benchmarks"
|
||||||
uses: CodSpeedHQ/action@v2
|
uses: CodSpeedHQ/action@v1
|
||||||
with:
|
with:
|
||||||
run: cargo codspeed run
|
run: cargo codspeed run
|
||||||
token: ${{ secrets.CODSPEED_TOKEN }}
|
token: ${{ secrets.CODSPEED_TOKEN }}
|
||||||
|
|||||||
4
.github/workflows/docs.yaml
vendored
4
.github/workflows/docs.yaml
vendored
@@ -20,7 +20,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.ref }}
|
ref: ${{ inputs.ref }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
- name: "Add SSH key"
|
- name: "Add SSH key"
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||||
uses: webfactory/ssh-agent@v0.8.0
|
uses: webfactory/ssh-agent@v0.8.0
|
||||||
@@ -44,7 +44,7 @@ jobs:
|
|||||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||||
- name: "Build docs"
|
- name: "Build docs"
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||||
run: mkdocs build --strict -f mkdocs.public.yml
|
run: mkdocs build --strict -f mkdocs.generated.yml
|
||||||
- name: "Deploy to Cloudflare Pages"
|
- name: "Deploy to Cloudflare Pages"
|
||||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||||
uses: cloudflare/wrangler-action@v3.3.2
|
uses: cloudflare/wrangler-action@v3.3.2
|
||||||
|
|||||||
247
.github/workflows/flake8-to-ruff.yaml
vendored
Normal file
247
.github/workflows/flake8-to-ruff.yaml
vendored
Normal file
@@ -0,0 +1,247 @@
|
|||||||
|
name: "[flake8-to-ruff] Release"
|
||||||
|
|
||||||
|
on: workflow_dispatch
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
env:
|
||||||
|
PACKAGE_NAME: flake8-to-ruff
|
||||||
|
CRATE_NAME: flake8_to_ruff
|
||||||
|
PYTHON_VERSION: "3.11"
|
||||||
|
CARGO_INCREMENTAL: 0
|
||||||
|
CARGO_NET_RETRY: 10
|
||||||
|
CARGO_TERM_COLOR: always
|
||||||
|
RUSTUP_MAX_RETRIES: 10
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
macos-x86_64:
|
||||||
|
runs-on: macos-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Install Rust toolchain"
|
||||||
|
run: rustup show
|
||||||
|
- name: "Build wheels - x86_64"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: x86_64
|
||||||
|
args: --release --out dist --sdist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel - x86_64"
|
||||||
|
run: |
|
||||||
|
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
macos-universal:
|
||||||
|
runs-on: macos-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Install Rust toolchain"
|
||||||
|
run: rustup show
|
||||||
|
- name: "Build wheels - universal2"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
args: --release --target universal2-apple-darwin --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel - universal2"
|
||||||
|
run: |
|
||||||
|
pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
windows:
|
||||||
|
runs-on: windows-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target: [x64, x86]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: ${{ matrix.target }}
|
||||||
|
- name: "Install Rust toolchain"
|
||||||
|
run: rustup show
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel"
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
python -m pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
linux:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target: [x86_64, i686]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
manylinux: auto
|
||||||
|
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel"
|
||||||
|
if: matrix.target == 'x86_64'
|
||||||
|
run: |
|
||||||
|
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
linux-cross:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target: [aarch64, armv7, s390x, ppc64le, ppc64]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
manylinux: auto
|
||||||
|
args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
|
if: matrix.target != 'ppc64'
|
||||||
|
name: Install built wheel
|
||||||
|
with:
|
||||||
|
arch: ${{ matrix.target }}
|
||||||
|
distro: ubuntu20.04
|
||||||
|
githubToken: ${{ github.token }}
|
||||||
|
install: |
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y --no-install-recommends python3 python3-pip
|
||||||
|
pip3 install -U pip
|
||||||
|
run: |
|
||||||
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
musllinux:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target:
|
||||||
|
- x86_64-unknown-linux-musl
|
||||||
|
- i686-unknown-linux-musl
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
manylinux: musllinux_1_2
|
||||||
|
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel"
|
||||||
|
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||||
|
uses: addnab/docker-run-action@v3
|
||||||
|
with:
|
||||||
|
image: alpine:latest
|
||||||
|
options: -v ${{ github.workspace }}:/io -w /io
|
||||||
|
run: |
|
||||||
|
apk add py3-pip
|
||||||
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
musllinux-cross:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
platform:
|
||||||
|
- target: aarch64-unknown-linux-musl
|
||||||
|
arch: aarch64
|
||||||
|
- target: armv7-unknown-linux-musleabihf
|
||||||
|
arch: armv7
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.platform.target }}
|
||||||
|
manylinux: musllinux_1_2
|
||||||
|
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
|
name: Install built wheel
|
||||||
|
with:
|
||||||
|
arch: ${{ matrix.platform.arch }}
|
||||||
|
distro: alpine_latest
|
||||||
|
githubToken: ${{ github.token }}
|
||||||
|
install: |
|
||||||
|
apk add py3-pip
|
||||||
|
run: |
|
||||||
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
release:
|
||||||
|
name: Release
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- macos-universal
|
||||||
|
- macos-x86_64
|
||||||
|
- windows
|
||||||
|
- linux
|
||||||
|
- linux-cross
|
||||||
|
- musllinux
|
||||||
|
- musllinux-cross
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
- name: "Publish to PyPi"
|
||||||
|
env:
|
||||||
|
TWINE_USERNAME: __token__
|
||||||
|
TWINE_PASSWORD: ${{ secrets.FLAKE8_TO_RUFF_TOKEN }}
|
||||||
|
run: |
|
||||||
|
pip install --upgrade twine
|
||||||
|
twine upload --skip-existing *
|
||||||
4
.github/workflows/pr-comment.yaml
vendored
4
.github/workflows/pr-comment.yaml
vendored
@@ -17,7 +17,7 @@ jobs:
|
|||||||
comment:
|
comment:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: dawidd6/action-download-artifact@v3
|
- uses: dawidd6/action-download-artifact@v2
|
||||||
name: Download pull request number
|
name: Download pull request number
|
||||||
with:
|
with:
|
||||||
name: pr-number
|
name: pr-number
|
||||||
@@ -32,7 +32,7 @@ jobs:
|
|||||||
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
|
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- uses: dawidd6/action-download-artifact@v3
|
- uses: dawidd6/action-download-artifact@v2
|
||||||
name: "Download ecosystem results"
|
name: "Download ecosystem results"
|
||||||
id: download-ecosystem-result
|
id: download-ecosystem-result
|
||||||
if: steps.pr-number.outputs.pr-number
|
if: steps.pr-number.outputs.pr-number
|
||||||
|
|||||||
117
.github/workflows/release.yaml
vendored
117
.github/workflows/release.yaml
vendored
@@ -36,7 +36,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
@@ -63,7 +63,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -73,7 +73,7 @@ jobs:
|
|||||||
uses: PyO3/maturin-action@v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: x86_64
|
target: x86_64
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- name: "Test wheel - x86_64"
|
- name: "Test wheel - x86_64"
|
||||||
run: |
|
run: |
|
||||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||||
@@ -86,7 +86,7 @@ jobs:
|
|||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz
|
ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz
|
||||||
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
|
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
@@ -103,7 +103,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -112,7 +112,7 @@ jobs:
|
|||||||
- name: "Build wheels - universal2"
|
- name: "Build wheels - universal2"
|
||||||
uses: PyO3/maturin-action@v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
args: --release --locked --target universal2-apple-darwin --out dist
|
args: --release --target universal2-apple-darwin --out dist
|
||||||
- name: "Test wheel - universal2"
|
- name: "Test wheel - universal2"
|
||||||
run: |
|
run: |
|
||||||
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
|
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
|
||||||
@@ -125,7 +125,7 @@ jobs:
|
|||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz
|
ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz
|
||||||
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
|
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
@@ -151,7 +151,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: ${{ matrix.platform.arch }}
|
architecture: ${{ matrix.platform.arch }}
|
||||||
@@ -161,7 +161,7 @@ jobs:
|
|||||||
uses: PyO3/maturin-action@v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: ${{ matrix.platform.target }}
|
target: ${{ matrix.platform.target }}
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- name: "Test wheel"
|
- name: "Test wheel"
|
||||||
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
||||||
shell: bash
|
shell: bash
|
||||||
@@ -177,7 +177,7 @@ jobs:
|
|||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip
|
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
|
||||||
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
||||||
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
@@ -199,7 +199,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -210,7 +210,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
target: ${{ matrix.target }}
|
target: ${{ matrix.target }}
|
||||||
manylinux: auto
|
manylinux: auto
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- name: "Test wheel"
|
- name: "Test wheel"
|
||||||
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
||||||
run: |
|
run: |
|
||||||
@@ -224,7 +224,7 @@ jobs:
|
|||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
|
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
|
||||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
@@ -258,7 +258,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
@@ -269,7 +269,7 @@ jobs:
|
|||||||
target: ${{ matrix.platform.target }}
|
target: ${{ matrix.platform.target }}
|
||||||
manylinux: auto
|
manylinux: auto
|
||||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- uses: uraimo/run-on-arch-action@v2
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
if: matrix.platform.arch != 'ppc64'
|
if: matrix.platform.arch != 'ppc64'
|
||||||
name: Test wheel
|
name: Test wheel
|
||||||
@@ -291,7 +291,7 @@ jobs:
|
|||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
|
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
|
||||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
@@ -313,7 +313,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -324,7 +324,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
target: ${{ matrix.target }}
|
target: ${{ matrix.target }}
|
||||||
manylinux: musllinux_1_2
|
manylinux: musllinux_1_2
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- name: "Test wheel"
|
- name: "Test wheel"
|
||||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||||
uses: addnab/docker-run-action@v3
|
uses: addnab/docker-run-action@v3
|
||||||
@@ -332,10 +332,10 @@ jobs:
|
|||||||
image: alpine:latest
|
image: alpine:latest
|
||||||
options: -v ${{ github.workspace }}:/io -w /io
|
options: -v ${{ github.workspace }}:/io -w /io
|
||||||
run: |
|
run: |
|
||||||
apk add python3
|
apk add py3-pip
|
||||||
python -m venv .venv
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
|
||||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
ruff --help
|
||||||
.venv/bin/ruff check --help
|
python -m ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
@@ -343,7 +343,7 @@ jobs:
|
|||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
|
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
|
||||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
@@ -369,7 +369,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
@@ -379,7 +379,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
target: ${{ matrix.platform.target }}
|
target: ${{ matrix.platform.target }}
|
||||||
manylinux: musllinux_1_2
|
manylinux: musllinux_1_2
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||||
- uses: uraimo/run-on-arch-action@v2
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
name: Test wheel
|
name: Test wheel
|
||||||
@@ -388,11 +388,10 @@ jobs:
|
|||||||
distro: alpine_latest
|
distro: alpine_latest
|
||||||
githubToken: ${{ github.token }}
|
githubToken: ${{ github.token }}
|
||||||
install: |
|
install: |
|
||||||
apk add python3
|
apk add py3-pip
|
||||||
run: |
|
run: |
|
||||||
python -m venv .venv
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
ruff check --help
|
||||||
.venv/bin/ruff check --help
|
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
@@ -400,7 +399,7 @@ jobs:
|
|||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
|
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
|
||||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
@@ -517,62 +516,6 @@ jobs:
|
|||||||
files: binaries/*
|
files: binaries/*
|
||||||
tag_name: v${{ inputs.tag }}
|
tag_name: v${{ inputs.tag }}
|
||||||
|
|
||||||
docker-publish:
|
|
||||||
# This action doesn't need to wait on any other task, it's easy to re-tag if something failed and we're validating
|
|
||||||
# the tag here also
|
|
||||||
name: Push Docker image ghcr.io/astral-sh/ruff
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
environment:
|
|
||||||
name: release
|
|
||||||
permissions:
|
|
||||||
# For the docker push
|
|
||||||
packages: write
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ inputs.sha }}
|
|
||||||
|
|
||||||
- uses: docker/setup-buildx-action@v3
|
|
||||||
|
|
||||||
- uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.repository_owner }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Extract metadata (tags, labels) for Docker
|
|
||||||
id: meta
|
|
||||||
uses: docker/metadata-action@v5
|
|
||||||
with:
|
|
||||||
images: ghcr.io/astral-sh/ruff
|
|
||||||
|
|
||||||
- name: Check tag consistency
|
|
||||||
# Unlike validate-tag we don't check if the commit is on the main branch, but it seems good enough since we can
|
|
||||||
# change docker tags
|
|
||||||
if: ${{ inputs.tag }}
|
|
||||||
run: |
|
|
||||||
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
|
||||||
if [ "${{ inputs.tag }}" != "${version}" ]; then
|
|
||||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
|
||||||
echo "${{ inputs.tag }}" >&2
|
|
||||||
echo "${version}" >&2
|
|
||||||
exit 1
|
|
||||||
else
|
|
||||||
echo "Releasing ${version}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: "Build and push Docker image"
|
|
||||||
uses: docker/build-push-action@v5
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
# Reuse the builder
|
|
||||||
cache-from: type=gha
|
|
||||||
cache-to: type=gha,mode=max
|
|
||||||
push: ${{ inputs.tag != '' }}
|
|
||||||
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ inputs.tag || 'dry-run' }}
|
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
|
||||||
|
|
||||||
# After the release has been published, we update downstream repositories
|
# After the release has been published, we update downstream repositories
|
||||||
# This is separate because if this fails the release is still fine, we just need to do some manual workflow triggers
|
# This is separate because if this fails the release is still fine, we just need to do some manual workflow triggers
|
||||||
update-dependents:
|
update-dependents:
|
||||||
@@ -581,7 +524,7 @@ jobs:
|
|||||||
needs: publish-release
|
needs: publish-release
|
||||||
steps:
|
steps:
|
||||||
- name: "Update pre-commit mirror"
|
- name: "Update pre-commit mirror"
|
||||||
uses: actions/github-script@v7
|
uses: actions/github-script@v6
|
||||||
with:
|
with:
|
||||||
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
|
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
|
||||||
script: |
|
script: |
|
||||||
|
|||||||
@@ -13,12 +13,12 @@ exclude: |
|
|||||||
|
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/abravalheri/validate-pyproject
|
- repo: https://github.com/abravalheri/validate-pyproject
|
||||||
rev: v0.15
|
rev: v0.12.1
|
||||||
hooks:
|
hooks:
|
||||||
- id: validate-pyproject
|
- id: validate-pyproject
|
||||||
|
|
||||||
- repo: https://github.com/executablebooks/mdformat
|
- repo: https://github.com/executablebooks/mdformat
|
||||||
rev: 0.7.17
|
rev: 0.7.16
|
||||||
hooks:
|
hooks:
|
||||||
- id: mdformat
|
- id: mdformat
|
||||||
additional_dependencies:
|
additional_dependencies:
|
||||||
@@ -26,22 +26,16 @@ repos:
|
|||||||
- mdformat-admon
|
- mdformat-admon
|
||||||
exclude: |
|
exclude: |
|
||||||
(?x)^(
|
(?x)^(
|
||||||
docs/formatter/black\.md
|
docs/formatter/black.md
|
||||||
| docs/\w+\.md
|
|
||||||
)$
|
)$
|
||||||
|
|
||||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||||
rev: v0.37.0
|
rev: v0.33.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: markdownlint-fix
|
- id: markdownlint-fix
|
||||||
exclude: |
|
|
||||||
(?x)^(
|
|
||||||
docs/formatter/black\.md
|
|
||||||
| docs/\w+\.md
|
|
||||||
)$
|
|
||||||
|
|
||||||
- repo: https://github.com/crate-ci/typos
|
- repo: https://github.com/crate-ci/typos
|
||||||
rev: v1.16.22
|
rev: v1.14.12
|
||||||
hooks:
|
hooks:
|
||||||
- id: typos
|
- id: typos
|
||||||
|
|
||||||
@@ -55,7 +49,7 @@ repos:
|
|||||||
pass_filenames: false # This makes it a lot faster
|
pass_filenames: false # This makes it a lot faster
|
||||||
|
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
rev: v0.1.4
|
rev: v0.1.3
|
||||||
hooks:
|
hooks:
|
||||||
- id: ruff-format
|
- id: ruff-format
|
||||||
- id: ruff
|
- id: ruff
|
||||||
@@ -70,7 +64,7 @@ repos:
|
|||||||
|
|
||||||
# Prettier
|
# Prettier
|
||||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||||
rev: v3.0.3
|
rev: v3.0.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: prettier
|
- id: prettier
|
||||||
types: [yaml]
|
types: [yaml]
|
||||||
|
|||||||
@@ -1,42 +1,5 @@
|
|||||||
# Breaking Changes
|
# Breaking Changes
|
||||||
|
|
||||||
## 0.1.9
|
|
||||||
|
|
||||||
### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))
|
|
||||||
|
|
||||||
Ruff maintains a list of default exclusions, which now consists of the following patterns:
|
|
||||||
|
|
||||||
- `.bzr`
|
|
||||||
- `.direnv`
|
|
||||||
- `.eggs`
|
|
||||||
- `.git-rewrite`
|
|
||||||
- `.git`
|
|
||||||
- `.hg`
|
|
||||||
- `.ipynb_checkpoints`
|
|
||||||
- `.mypy_cache`
|
|
||||||
- `.nox`
|
|
||||||
- `.pants.d`
|
|
||||||
- `.pyenv`
|
|
||||||
- `.pytest_cache`
|
|
||||||
- `.pytype`
|
|
||||||
- `.ruff_cache`
|
|
||||||
- `.svn`
|
|
||||||
- `.tox`
|
|
||||||
- `.venv`
|
|
||||||
- `.vscode`
|
|
||||||
- `__pypackages__`
|
|
||||||
- `_build`
|
|
||||||
- `buck-out`
|
|
||||||
- `build`
|
|
||||||
- `dist`
|
|
||||||
- `node_modules`
|
|
||||||
- `site-packages`
|
|
||||||
- `venv`
|
|
||||||
|
|
||||||
Previously, the `site-packages` directory was not excluded by default. While `site-packages` tends
|
|
||||||
to be excluded anyway by virtue of the `.venv` exclusion, this may not be the case when using Ruff
|
|
||||||
from VS Code outside a virtual environment.
|
|
||||||
|
|
||||||
## 0.1.0
|
## 0.1.0
|
||||||
|
|
||||||
### The deprecated `format` setting has been removed
|
### The deprecated `format` setting has been removed
|
||||||
|
|||||||
457
CHANGELOG.md
457
CHANGELOG.md
@@ -1,462 +1,5 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
## 0.1.11
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- \[`pylint`\] Implement `super-without-brackets` (`W0245`) ([#9257](https://github.com/astral-sh/ruff/pull/9257))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- Check path string properly in `python -m ruff` invocations ([#9367](https://github.com/astral-sh/ruff/pull/9367))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Tweak `relative-imports` message ([#9365](https://github.com/astral-sh/ruff/pull/9365))
|
|
||||||
- Add fix safety note for `yield-in-for-loop` ([#9364](https://github.com/astral-sh/ruff/pull/9364))
|
|
||||||
|
|
||||||
## 0.1.10
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- Improve `dummy_implementations` preview style formatting ([#9240](https://github.com/astral-sh/ruff/pull/9240))
|
|
||||||
- Normalise Hex and unicode escape sequences in strings ([#9280](https://github.com/astral-sh/ruff/pull/9280))
|
|
||||||
- Parenthesize long type annotations in annotated assignments ([#9210](https://github.com/astral-sh/ruff/pull/9210))
|
|
||||||
- Parenthesize multi-context managers in `with` statements ([#9222](https://github.com/astral-sh/ruff/pull/9222))
|
|
||||||
- \[`flake8-pyi`\] Implement `generator-return-from-iter-method` (`PYI058`) ([#9313](https://github.com/astral-sh/ruff/pull/9313))
|
|
||||||
- \[`pylint`\] Implement `empty-comment` (`PLR2044`) ([#9174](https://github.com/astral-sh/ruff/pull/9174))
|
|
||||||
- \[`refurb`\] Implement `bit-count` (`FURB161`) ([#9265](https://github.com/astral-sh/ruff/pull/9265))
|
|
||||||
- \[`ruff`\] Add `never-union` rule to detect redundant `typing.NoReturn` and `typing.Never` ([#9217](https://github.com/astral-sh/ruff/pull/9217))
|
|
||||||
|
|
||||||
### CLI
|
|
||||||
|
|
||||||
- Add paths to TOML parse errors ([#9358](https://github.com/astral-sh/ruff/pull/9358))
|
|
||||||
- Add row and column numbers to formatter parse errors ([#9321](https://github.com/astral-sh/ruff/pull/9321))
|
|
||||||
- Improve responsiveness when invoked via Python ([#9315](https://github.com/astral-sh/ruff/pull/9315))
|
|
||||||
- Short rule messages should not end with a period ([#9345](https://github.com/astral-sh/ruff/pull/9345))
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
- Respect runtime-required decorators on functions ([#9317](https://github.com/astral-sh/ruff/pull/9317))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- Avoid `asyncio-dangling-task` for nonlocal and global bindings ([#9263](https://github.com/astral-sh/ruff/pull/9263))
|
|
||||||
- Escape trailing placeholders in rule documentation ([#9301](https://github.com/astral-sh/ruff/pull/9301))
|
|
||||||
- Fix continuation detection following multi-line strings ([#9332](https://github.com/astral-sh/ruff/pull/9332))
|
|
||||||
- Fix scoping for generators in named expressions in classes ([#9248](https://github.com/astral-sh/ruff/pull/9248))
|
|
||||||
- Port from obsolete wsl crate to is-wsl ([#9356](https://github.com/astral-sh/ruff/pull/9356))
|
|
||||||
- Remove special pre-visit for module docstrings ([#9261](https://github.com/astral-sh/ruff/pull/9261))
|
|
||||||
- Respect `__str__` definitions from super classes ([#9338](https://github.com/astral-sh/ruff/pull/9338))
|
|
||||||
- Respect `unused-noqa` via `per-file-ignores` ([#9300](https://github.com/astral-sh/ruff/pull/9300))
|
|
||||||
- Respect attribute chains when resolving builtin call paths ([#9309](https://github.com/astral-sh/ruff/pull/9309))
|
|
||||||
- Treat all `typing_extensions` members as typing aliases ([#9335](https://github.com/astral-sh/ruff/pull/9335))
|
|
||||||
- Use `Display` for formatter parse errors ([#9316](https://github.com/astral-sh/ruff/pull/9316))
|
|
||||||
- Wrap subscripted dicts in parens for f-string conversion ([#9238](https://github.com/astral-sh/ruff/pull/9238))
|
|
||||||
- \[`flake8-annotations`\] Avoid adding return types to stub methods ([#9277](https://github.com/astral-sh/ruff/pull/9277))
|
|
||||||
- \[`flake8-annotations`\] Respect mixed `return` and `raise` cases in return-type analysis ([#9310](https://github.com/astral-sh/ruff/pull/9310))
|
|
||||||
- \[`flake8-bandit`\] Don't report violations when `SafeLoader` is imported from `yaml.loader` (`S506`) ([#9299](https://github.com/astral-sh/ruff/pull/9299))
|
|
||||||
- \[`pylint`\] Avoid panic when comment is preceded by Unicode ([#9331](https://github.com/astral-sh/ruff/pull/9331))
|
|
||||||
- \[`pylint`\] Change `PLR0917` error message to match other `PLR09XX` messages ([#9308](https://github.com/astral-sh/ruff/pull/9308))
|
|
||||||
- \[`refurb`\] Avoid false positives for `math-constant` (`FURB152`) ([#9290](https://github.com/astral-sh/ruff/pull/9290))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Expand target name for better rule documentation ([#9302](https://github.com/astral-sh/ruff/pull/9302))
|
|
||||||
- Fix typos found by codespell ([#9346](https://github.com/astral-sh/ruff/pull/9346))
|
|
||||||
- \[`perflint`\] Document `PERF102` fix un-safety ([#9351](https://github.com/astral-sh/ruff/pull/9351))
|
|
||||||
- \[`pyupgrade`\] Document `UP007` fix un-safety ([#9306](https://github.com/astral-sh/ruff/pull/9306))
|
|
||||||
|
|
||||||
## 0.1.9
|
|
||||||
|
|
||||||
### Breaking changes
|
|
||||||
|
|
||||||
- Add site-packages to default exclusions ([#9188](https://github.com/astral-sh/ruff/pull/9188))
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- Fix: Avoid parenthesizing subscript targets and values ([#9209](https://github.com/astral-sh/ruff/pull/9209))
|
|
||||||
- \[`pylint`\] Implement `too-many-locals` (`PLR0914`) ([#9163](https://github.com/astral-sh/ruff/pull/9163))
|
|
||||||
- Implement `reimplemented_operator` (FURB118) ([#9171](https://github.com/astral-sh/ruff/pull/9171))
|
|
||||||
- Add a rule to detect string members in runtime-evaluated unions ([#9143](https://github.com/astral-sh/ruff/pull/9143))
|
|
||||||
- Implement `no_blank_line_before_class_docstring` preview style ([#9154](https://github.com/astral-sh/ruff/pull/9154))
|
|
||||||
|
|
||||||
### Rule changes
|
|
||||||
|
|
||||||
- `CONSTANT_CASE` variables are improperly flagged for yoda violation (`SIM300`) ([#9164](https://github.com/astral-sh/ruff/pull/9164))
|
|
||||||
- \[`flake8-pyi`\] Cover ParamSpecs and TypeVarTuples (`PYI018`) ([#9198](https://github.com/astral-sh/ruff/pull/9198))
|
|
||||||
- \[`flake8-bugbear`\] Add fix for `zip-without-explicit-strict` (`B905`) ([#9176](https://github.com/astral-sh/ruff/pull/9176))
|
|
||||||
- Add fix to automatically remove `print` and `pprint` statements (`T201`, `T203`) ([#9208](https://github.com/astral-sh/ruff/pull/9208))
|
|
||||||
- Prefer `Never` to `NoReturn` in auto-typing in Python >= 3.11 (`ANN201`) ([#9213](https://github.com/astral-sh/ruff/pull/9213))
|
|
||||||
|
|
||||||
### Formatter
|
|
||||||
|
|
||||||
- `can_omit_optional_parentheses`: Exit early for unparenthesized expressions ([#9125](https://github.com/astral-sh/ruff/pull/9125))
|
|
||||||
- Fix `dynamic` mode with doctests so that it doesn't exceed configured line width ([#9129](https://github.com/astral-sh/ruff/pull/9129))
|
|
||||||
- Fix `can_omit_optional_parentheses` for expressions with a right most fstring ([#9124](https://github.com/astral-sh/ruff/pull/9124))
|
|
||||||
- Add `target_version` to formatter options ([#9220](https://github.com/astral-sh/ruff/pull/9220))
|
|
||||||
|
|
||||||
### CLI
|
|
||||||
|
|
||||||
- Update `ruff format --check` to display message for already formatted files ([#9153](https://github.com/astral-sh/ruff/pull/9153))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- Reverse order of arguments for `operator.contains` ([#9192](https://github.com/astral-sh/ruff/pull/9192))
|
|
||||||
- Iterate over lambdas in deferred type annotations ([#9175](https://github.com/astral-sh/ruff/pull/9175))
|
|
||||||
- Fix panic in `D208` with multibyte indent ([#9147](https://github.com/astral-sh/ruff/pull/9147))
|
|
||||||
- Add support for `NoReturn` in auto-return-typing ([#9206](https://github.com/astral-sh/ruff/pull/9206))
|
|
||||||
- Allow removal of `typing` from `exempt-modules` ([#9214](https://github.com/astral-sh/ruff/pull/9214))
|
|
||||||
- Avoid `mutable-class-default` violations for Pydantic subclasses ([#9187](https://github.com/astral-sh/ruff/pull/9187))
|
|
||||||
- Fix dropped union expressions for piped non-types in `PYI055` autofix ([#9161](https://github.com/astral-sh/ruff/pull/9161))
|
|
||||||
- Enable annotation quoting for multi-line expressions ([#9142](https://github.com/astral-sh/ruff/pull/9142))
|
|
||||||
- Deduplicate edits when quoting annotations ([#9140](https://github.com/astral-sh/ruff/pull/9140))
|
|
||||||
- Prevent invalid utf8 indexing in cell magic detection ([#9146](https://github.com/astral-sh/ruff/pull/9146))
|
|
||||||
- Avoid nested quotations in auto-quoting fix ([#9168](https://github.com/astral-sh/ruff/pull/9168))
|
|
||||||
- Add base-class inheritance detection to flake8-django rules ([#9151](https://github.com/astral-sh/ruff/pull/9151))
|
|
||||||
- Avoid `asyncio-dangling-task` violations on shadowed bindings ([#9215](https://github.com/astral-sh/ruff/pull/9215))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Fix blog post URL in changelog ([#9119](https://github.com/astral-sh/ruff/pull/9119))
|
|
||||||
- Add error suppression hint for multi-line strings ([#9205](https://github.com/astral-sh/ruff/pull/9205))
|
|
||||||
- Fix typo in SemanticModel.parent_expression docstring ([#9167](https://github.com/astral-sh/ruff/pull/9167))
|
|
||||||
- Document link between import sorting and formatter ([#9117](https://github.com/astral-sh/ruff/pull/9117))
|
|
||||||
|
|
||||||
## 0.1.8
|
|
||||||
|
|
||||||
This release includes opt-in support for formatting Python snippets within
|
|
||||||
docstrings via the `docstring-code-format` setting.
|
|
||||||
[Check out the blog post](https://astral.sh/blog/ruff-v0.1.8) for more details!
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- Add `"preserve"` quote-style to mimic Black's skip-string-normalization ([#8822](https://github.com/astral-sh/ruff/pull/8822))
|
|
||||||
- Implement `prefer_splitting_right_hand_side_of_assignments` preview style ([#8943](https://github.com/astral-sh/ruff/pull/8943))
|
|
||||||
- \[`pycodestyle`\] Add fix for `unexpected-spaces-around-keyword-parameter-equals` ([#9072](https://github.com/astral-sh/ruff/pull/9072))
|
|
||||||
- \[`pycodestyle`\] Add fix for comment-related whitespace rules ([#9075](https://github.com/astral-sh/ruff/pull/9075))
|
|
||||||
- \[`pycodestyle`\] Allow `sys.path` modifications between imports ([#9047](https://github.com/astral-sh/ruff/pull/9047))
|
|
||||||
- \[`refurb`\] Implement `hashlib-digest-hex` (`FURB181`) ([#9077](https://github.com/astral-sh/ruff/pull/9077))
|
|
||||||
|
|
||||||
### Rule changes
|
|
||||||
|
|
||||||
- Allow `flake8-type-checking` rules to automatically quote runtime-evaluated references ([#6001](https://github.com/astral-sh/ruff/pull/6001))
|
|
||||||
- Allow transparent cell magics in Jupyter Notebooks ([#8911](https://github.com/astral-sh/ruff/pull/8911))
|
|
||||||
- \[`flake8-annotations`\] Avoid `ANN2xx` fixes for abstract methods with empty bodies ([#9034](https://github.com/astral-sh/ruff/pull/9034))
|
|
||||||
- \[`flake8-self`\] Ignore underscore references in type annotations ([#9036](https://github.com/astral-sh/ruff/pull/9036))
|
|
||||||
- \[`pep8-naming`\] Allow class names when `apps.get_model` is a non-string ([#9065](https://github.com/astral-sh/ruff/pull/9065))
|
|
||||||
- \[`pycodestyle`\] Allow `matplotlib.use` calls to intersperse imports ([#9094](https://github.com/astral-sh/ruff/pull/9094))
|
|
||||||
- \[`pyflakes`\] Support fixing unused assignments in tuples by renaming variables (`F841`) ([#9107](https://github.com/astral-sh/ruff/pull/9107))
|
|
||||||
- \[`pylint`\] Add fix for `subprocess-run-without-check` (`PLW1510`) ([#6708](https://github.com/astral-sh/ruff/pull/6708))
|
|
||||||
|
|
||||||
### Formatter
|
|
||||||
|
|
||||||
- Add `docstring-code-format` knob to enable docstring snippet formatting ([#8854](https://github.com/astral-sh/ruff/pull/8854))
|
|
||||||
- Use double quotes for all docstrings, including single-quoted docstrings ([#9020](https://github.com/astral-sh/ruff/pull/9020))
|
|
||||||
- Implement "dynamic" line width mode for docstring code formatting ([#9098](https://github.com/astral-sh/ruff/pull/9098))
|
|
||||||
- Support reformatting Markdown code blocks ([#9030](https://github.com/astral-sh/ruff/pull/9030))
|
|
||||||
- add support for formatting reStructuredText code snippets ([#9003](https://github.com/astral-sh/ruff/pull/9003))
|
|
||||||
- Avoid trailing comma for single-argument with positional separator ([#9076](https://github.com/astral-sh/ruff/pull/9076))
|
|
||||||
- Fix handling of trailing target comment ([#9051](https://github.com/astral-sh/ruff/pull/9051))
|
|
||||||
|
|
||||||
### CLI
|
|
||||||
|
|
||||||
- Hide unsafe fix suggestions when explicitly disabled ([#9095](https://github.com/astral-sh/ruff/pull/9095))
|
|
||||||
- Add SARIF support to `--output-format` ([#9078](https://github.com/astral-sh/ruff/pull/9078))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- Apply unnecessary index rule prior to enumerate rewrite ([#9012](https://github.com/astral-sh/ruff/pull/9012))
|
|
||||||
- \[`flake8-err-msg`\] Allow `EM` fixes even if `msg` variable is defined ([#9059](https://github.com/astral-sh/ruff/pull/9059))
|
|
||||||
- \[`flake8-pie`\] Prevent keyword arguments duplication ([#8450](https://github.com/astral-sh/ruff/pull/8450))
|
|
||||||
- \[`flake8-pie`\] Respect trailing comma in `unnecessary-dict-kwargs` (`PIE804`) ([#9015](https://github.com/astral-sh/ruff/pull/9015))
|
|
||||||
- \[`flake8-raise`\] Avoid removing parentheses on ctypes.WinError ([#9027](https://github.com/astral-sh/ruff/pull/9027))
|
|
||||||
- \[`isort`\] Avoid invalid combination of `force-sort-within-types` and `lines-between-types` ([#9041](https://github.com/astral-sh/ruff/pull/9041))
|
|
||||||
- \[`isort`\] Ensure that from-style imports are always ordered first in `__future__` ([#9039](https://github.com/astral-sh/ruff/pull/9039))
|
|
||||||
- \[`pycodestyle`\] Allow tab indentation before keyword ([#9099](https://github.com/astral-sh/ruff/pull/9099))
|
|
||||||
- \[`pylint`\] Ignore `@overrides` and `@overloads` for `too-many-positional` ([#9000](https://github.com/astral-sh/ruff/pull/9000))
|
|
||||||
- \[`pyupgrade`\] Enable `printf-string-formatting` fix with comments on right-hand side ([#9037](https://github.com/astral-sh/ruff/pull/9037))
|
|
||||||
- \[`refurb`\] Make `math-constant` (`FURB152`) rule more targeted ([#9054](https://github.com/astral-sh/ruff/pull/9054))
|
|
||||||
- \[`refurb`\] Support floating-point base in `redundant-log-base` (`FURB163`) ([#9100](https://github.com/astral-sh/ruff/pull/9100))
|
|
||||||
- \[`ruff`\] Detect `unused-asyncio-dangling-task` (`RUF006`) on unused assignments ([#9060](https://github.com/astral-sh/ruff/pull/9060))
|
|
||||||
|
|
||||||
## 0.1.7
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- Implement multiline dictionary and list hugging for preview style ([#8293](https://github.com/astral-sh/ruff/pull/8293))
|
|
||||||
- Implement the `fix_power_op_line_length` preview style ([#8947](https://github.com/astral-sh/ruff/pull/8947))
|
|
||||||
- Use Python version to determine typing rewrite safety ([#8919](https://github.com/astral-sh/ruff/pull/8919))
|
|
||||||
- \[`flake8-annotations`\] Enable auto-return-type involving `Optional` and `Union` annotations ([#8885](https://github.com/astral-sh/ruff/pull/8885))
|
|
||||||
- \[`flake8-bandit`\] Implement `django-raw-sql` (`S611`) ([#8651](https://github.com/astral-sh/ruff/pull/8651))
|
|
||||||
- \[`flake8-bandit`\] Implement `tarfile-unsafe-members` (`S202`) ([#8829](https://github.com/astral-sh/ruff/pull/8829))
|
|
||||||
- \[`flake8-pyi`\] Implement fix for `unnecessary-literal-union` (`PYI030`) ([#7934](https://github.com/astral-sh/ruff/pull/7934))
|
|
||||||
- \[`flake8-simplify`\] Extend `dict-get-with-none-default` (`SIM910`) to non-literals ([#8762](https://github.com/astral-sh/ruff/pull/8762))
|
|
||||||
- \[`pylint`\] - add `unnecessary-list-index-lookup` (`PLR1736`) + autofix ([#7999](https://github.com/astral-sh/ruff/pull/7999))
|
|
||||||
- \[`pylint`\] - implement R0202 and R0203 with autofixes ([#8335](https://github.com/astral-sh/ruff/pull/8335))
|
|
||||||
- \[`pylint`\] Implement `repeated-keyword` (`PLe1132`) ([#8706](https://github.com/astral-sh/ruff/pull/8706))
|
|
||||||
- \[`pylint`\] Implement `too-many-positional` (`PLR0917`) ([#8995](https://github.com/astral-sh/ruff/pull/8995))
|
|
||||||
- \[`pylint`\] Implement `unnecessary-dict-index-lookup` (`PLR1733`) ([#8036](https://github.com/astral-sh/ruff/pull/8036))
|
|
||||||
- \[`refurb`\] Implement `redundant-log-base` (`FURB163`) ([#8842](https://github.com/astral-sh/ruff/pull/8842))
|
|
||||||
|
|
||||||
### Rule changes
|
|
||||||
|
|
||||||
- \[`flake8-boolean-trap`\] Allow booleans in `@override` methods ([#8882](https://github.com/astral-sh/ruff/pull/8882))
|
|
||||||
- \[`flake8-bugbear`\] Avoid `B015`,`B018` for last expression in a cell ([#8815](https://github.com/astral-sh/ruff/pull/8815))
|
|
||||||
- \[`flake8-pie`\] Allow ellipses for enum values in stub files ([#8825](https://github.com/astral-sh/ruff/pull/8825))
|
|
||||||
- \[`flake8-pyi`\] Check PEP 695 type aliases for `snake-case-type-alias` and `t-suffixed-type-alias` ([#8966](https://github.com/astral-sh/ruff/pull/8966))
|
|
||||||
- \[`flake8-pyi`\] Check for kwarg and vararg `NoReturn` type annotations ([#8948](https://github.com/astral-sh/ruff/pull/8948))
|
|
||||||
- \[`flake8-simplify`\] Omit select context managers from `SIM117` ([#8801](https://github.com/astral-sh/ruff/pull/8801))
|
|
||||||
- \[`pep8-naming`\] Allow Django model loads in `non-lowercase-variable-in-function` (`N806`) ([#8917](https://github.com/astral-sh/ruff/pull/8917))
|
|
||||||
- \[`pycodestyle`\] Avoid `E703` for last expression in a cell ([#8821](https://github.com/astral-sh/ruff/pull/8821))
|
|
||||||
- \[`pycodestyle`\] Update `E402` to work at cell level for notebooks ([#8872](https://github.com/astral-sh/ruff/pull/8872))
|
|
||||||
- \[`pydocstyle`\] Avoid `D100` for Jupyter Notebooks ([#8816](https://github.com/astral-sh/ruff/pull/8816))
|
|
||||||
- \[`pylint`\] Implement fix for `unspecified-encoding` (`PLW1514`) ([#8928](https://github.com/astral-sh/ruff/pull/8928))
|
|
||||||
|
|
||||||
### Formatter
|
|
||||||
|
|
||||||
- Avoid unstable formatting in ellipsis-only body with trailing comment ([#8984](https://github.com/astral-sh/ruff/pull/8984))
|
|
||||||
- Inline trailing comments for type alias similar to assignments ([#8941](https://github.com/astral-sh/ruff/pull/8941))
|
|
||||||
- Insert trailing comma when function breaks with single argument ([#8921](https://github.com/astral-sh/ruff/pull/8921))
|
|
||||||
|
|
||||||
### CLI
|
|
||||||
|
|
||||||
- Update `ruff check` and `ruff format` to default to the current directory ([#8791](https://github.com/astral-sh/ruff/pull/8791))
|
|
||||||
- Stop at the first resolved parent configuration ([#8864](https://github.com/astral-sh/ruff/pull/8864))
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
- \[`pylint`\] Default `max-positional-args` to `max-args` ([#8998](https://github.com/astral-sh/ruff/pull/8998))
|
|
||||||
- \[`pylint`\] Add `allow-dunder-method-names` setting for `bad-dunder-method-name` (`PLW3201`) ([#8812](https://github.com/astral-sh/ruff/pull/8812))
|
|
||||||
- \[`isort`\] Add support for `from-first` setting ([#8663](https://github.com/astral-sh/ruff/pull/8663))
|
|
||||||
- \[`isort`\] Add support for `length-sort` settings ([#8841](https://github.com/astral-sh/ruff/pull/8841))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- Add support for `@functools.singledispatch` ([#8934](https://github.com/astral-sh/ruff/pull/8934))
|
|
||||||
- Avoid off-by-one error in stripping noqa following multi-byte char ([#8979](https://github.com/astral-sh/ruff/pull/8979))
|
|
||||||
- Avoid off-by-one error in with-item named expressions ([#8915](https://github.com/astral-sh/ruff/pull/8915))
|
|
||||||
- Avoid syntax error via invalid ur string prefix ([#8971](https://github.com/astral-sh/ruff/pull/8971))
|
|
||||||
- Avoid underflow in `get_model` matching ([#8965](https://github.com/astral-sh/ruff/pull/8965))
|
|
||||||
- Avoid unnecessary index diagnostics when value is modified ([#8970](https://github.com/astral-sh/ruff/pull/8970))
|
|
||||||
- Convert over-indentation rule to use number of characters ([#8983](https://github.com/astral-sh/ruff/pull/8983))
|
|
||||||
- Detect implicit returns in auto-return-types ([#8952](https://github.com/astral-sh/ruff/pull/8952))
|
|
||||||
- Fix start >= end error in over-indentation ([#8982](https://github.com/astral-sh/ruff/pull/8982))
|
|
||||||
- Ignore `@overload` and `@override` methods for too-many-arguments checks ([#8954](https://github.com/astral-sh/ruff/pull/8954))
|
|
||||||
- Lexer start of line is false only for `Mode::Expression` ([#8880](https://github.com/astral-sh/ruff/pull/8880))
|
|
||||||
- Mark `pydantic_settings.BaseSettings` as having default copy semantics ([#8793](https://github.com/astral-sh/ruff/pull/8793))
|
|
||||||
- Respect dictionary unpacking in `NamedTuple` assignments ([#8810](https://github.com/astral-sh/ruff/pull/8810))
|
|
||||||
- Respect local subclasses in `flake8-type-checking` ([#8768](https://github.com/astral-sh/ruff/pull/8768))
|
|
||||||
- Support type alias statements in simple statement positions ([#8916](https://github.com/astral-sh/ruff/pull/8916))
|
|
||||||
- \[`flake8-annotations`\] Avoid filtering out un-representable types in return annotation ([#8881](https://github.com/astral-sh/ruff/pull/8881))
|
|
||||||
- \[`flake8-pie`\] Retain extra ellipses in protocols and abstract methods ([#8769](https://github.com/astral-sh/ruff/pull/8769))
|
|
||||||
- \[`flake8-pyi`\] Respect local enum subclasses in `simple-defaults` (`PYI052`) ([#8767](https://github.com/astral-sh/ruff/pull/8767))
|
|
||||||
- \[`flake8-trio`\] Use correct range for `TRIO115` fix ([#8933](https://github.com/astral-sh/ruff/pull/8933))
|
|
||||||
- \[`flake8-trio`\] Use full arguments range for zero-sleep-call ([#8936](https://github.com/astral-sh/ruff/pull/8936))
|
|
||||||
- \[`isort`\] fix: mark `__main__` as first-party import ([#8805](https://github.com/astral-sh/ruff/pull/8805))
|
|
||||||
- \[`pep8-naming`\] Avoid `N806` errors for type alias statements ([#8785](https://github.com/astral-sh/ruff/pull/8785))
|
|
||||||
- \[`perflint`\] Avoid `PERF101` if there's an append in loop body ([#8809](https://github.com/astral-sh/ruff/pull/8809))
|
|
||||||
- \[`pycodestyle`\] Allow space-before-colon after end-of-slice ([#8838](https://github.com/astral-sh/ruff/pull/8838))
|
|
||||||
- \[`pydocstyle`\] Avoid non-character breaks in `over-indentation` (`D208`) ([#8866](https://github.com/astral-sh/ruff/pull/8866))
|
|
||||||
- \[`pydocstyle`\] Ignore underlines when determining docstring logical lines ([#8929](https://github.com/astral-sh/ruff/pull/8929))
|
|
||||||
- \[`pylint`\] Extend `self-assigning-variable` to multi-target assignments ([#8839](https://github.com/astral-sh/ruff/pull/8839))
|
|
||||||
- \[`tryceratops`\] Avoid repeated triggers in nested `tryceratops` diagnostics ([#8772](https://github.com/astral-sh/ruff/pull/8772))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Add advice for fixing RUF008 when mutability is not desired ([#8853](https://github.com/astral-sh/ruff/pull/8853))
|
|
||||||
- Added the command to run ruff using pkgx to the installation.md ([#8955](https://github.com/astral-sh/ruff/pull/8955))
|
|
||||||
- Document fix safety for flake8-comprehensions and some pyupgrade rules ([#8918](https://github.com/astral-sh/ruff/pull/8918))
|
|
||||||
- Fix doc formatting for zero-sleep-call ([#8937](https://github.com/astral-sh/ruff/pull/8937))
|
|
||||||
- Remove duplicate imports from os-stat documentation ([#8930](https://github.com/astral-sh/ruff/pull/8930))
|
|
||||||
- Replace generated reference to MkDocs ([#8806](https://github.com/astral-sh/ruff/pull/8806))
|
|
||||||
- Update Arch Linux package URL in installation.md ([#8802](https://github.com/astral-sh/ruff/pull/8802))
|
|
||||||
- \[`flake8-pyi`\] Fix error in `t-suffixed-type-alias` (`PYI043`) example ([#8963](https://github.com/astral-sh/ruff/pull/8963))
|
|
||||||
- \[`flake8-pyi`\] Improve motivation for `custom-type-var-return-type` (`PYI019`) ([#8766](https://github.com/astral-sh/ruff/pull/8766))
|
|
||||||
|
|
||||||
## 0.1.6
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- \[`flake8-boolean-trap`\] Extend `boolean-type-hint-positional-argument` (`FBT001`) to include booleans in unions ([#7501](https://github.com/astral-sh/ruff/pull/7501))
|
|
||||||
- \[`flake8-pie`\] Extend `reimplemented-list-builtin` (`PIE807`) to `dict` reimplementations ([#8608](https://github.com/astral-sh/ruff/pull/8608))
|
|
||||||
- \[`flake8-pie`\] Extend `unnecessary-pass` (`PIE790`) to include ellipses (`...`) ([#8641](https://github.com/astral-sh/ruff/pull/8641))
|
|
||||||
- \[`flake8-pie`\] Implement fix for `unnecessary-spread` (`PIE800`) ([#8668](https://github.com/astral-sh/ruff/pull/8668))
|
|
||||||
- \[`flake8-quotes`\] Implement `unnecessary-escaped-quote` (`Q004`) ([#8630](https://github.com/astral-sh/ruff/pull/8630))
|
|
||||||
- \[`pycodestyle`\] Implement fix for `multiple-spaces-after-keyword` (`E271`) and `multiple-spaces-before-keyword` (`E272`) ([#8622](https://github.com/astral-sh/ruff/pull/8622))
|
|
||||||
- \[`pycodestyle`\] Implement fix for `multiple-spaces-after-operator` (`E222`) and `multiple-spaces-before-operator` (`E221`) ([#8623](https://github.com/astral-sh/ruff/pull/8623))
|
|
||||||
- \[`pyflakes`\] Extend `is-literal` (`F632`) to include comparisons against mutable initializers ([#8607](https://github.com/astral-sh/ruff/pull/8607))
|
|
||||||
- \[`pylint`\] Implement `redefined-argument-from-local` (`PLR1704`) ([#8159](https://github.com/astral-sh/ruff/pull/8159))
|
|
||||||
- \[`pylint`\] Implement fix for `unnecessary-lambda` (`PLW0108`) ([#8621](https://github.com/astral-sh/ruff/pull/8621))
|
|
||||||
- \[`refurb`\] Implement `if-expr-min-max` (`FURB136`) ([#8664](https://github.com/astral-sh/ruff/pull/8664))
|
|
||||||
- \[`refurb`\] Implement `math-constant` (`FURB152`) ([#8727](https://github.com/astral-sh/ruff/pull/8727))
|
|
||||||
|
|
||||||
### Rule changes
|
|
||||||
|
|
||||||
- \[`flake8-annotations`\] Add autotyping-like return type inference for annotation rules ([#8643](https://github.com/astral-sh/ruff/pull/8643))
|
|
||||||
- \[`flake8-future-annotations`\] Implement fix for `future-required-type-annotation` (`FA102`) ([#8711](https://github.com/astral-sh/ruff/pull/8711))
|
|
||||||
- \[`flake8-implicit-namespace-package`\] Avoid missing namespace violations in scripts with shebangs ([#8710](https://github.com/astral-sh/ruff/pull/8710))
|
|
||||||
- \[`pydocstyle`\] Update `over-indentation` (`D208`) to preserve indentation offsets when fixing overindented lines ([#8699](https://github.com/astral-sh/ruff/pull/8699))
|
|
||||||
- \[`pyupgrade`\] Refine `timeout-error-alias` (`UP041`) to remove false positives ([#8587](https://github.com/astral-sh/ruff/pull/8587))
|
|
||||||
|
|
||||||
### Formatter
|
|
||||||
|
|
||||||
- Fix instability in `await` formatting with fluent style ([#8676](https://github.com/astral-sh/ruff/pull/8676))
|
|
||||||
- Compare formatted and unformatted ASTs during formatter tests ([#8624](https://github.com/astral-sh/ruff/pull/8624))
|
|
||||||
- Preserve trailing semicolon for Notebooks ([#8590](https://github.com/astral-sh/ruff/pull/8590))
|
|
||||||
|
|
||||||
### CLI
|
|
||||||
|
|
||||||
- Improve debug printing for resolving origin of config settings ([#8729](https://github.com/astral-sh/ruff/pull/8729))
|
|
||||||
- Write unchanged, excluded files to stdout when read via stdin ([#8596](https://github.com/astral-sh/ruff/pull/8596))
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
- \[`isort`\] Support disabling sections with `no-sections = true` ([#8657](https://github.com/astral-sh/ruff/pull/8657))
|
|
||||||
- \[`pep8-naming`\] Support local and dynamic class- and static-method decorators ([#8592](https://github.com/astral-sh/ruff/pull/8592))
|
|
||||||
- \[`pydocstyle`\] Allow overriding pydocstyle convention rules ([#8586](https://github.com/astral-sh/ruff/pull/8586))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- Avoid syntax error via importing `trio.lowlevel` ([#8730](https://github.com/astral-sh/ruff/pull/8730))
|
|
||||||
- Omit unrolled augmented assignments in `PIE794` ([#8634](https://github.com/astral-sh/ruff/pull/8634))
|
|
||||||
- Slice source code instead of generating it for `EM` fixes ([#7746](https://github.com/astral-sh/ruff/pull/7746))
|
|
||||||
- Allow whitespace around colon in slices for `whitespace-before-punctuation` (`E203`) ([#8654](https://github.com/astral-sh/ruff/pull/8654))
|
|
||||||
- Use function range for `no-self-use` ([#8637](https://github.com/astral-sh/ruff/pull/8637))
|
|
||||||
- F-strings don't contain bytes literals for `PLW0129` ([#8675](https://github.com/astral-sh/ruff/pull/8675))
|
|
||||||
- Improve detection of `TYPE_CHECKING` blocks imported from `typing_extensions` or `_typeshed` ([#8429](https://github.com/astral-sh/ruff/pull/8429))
|
|
||||||
- Treat display as a builtin in IPython ([#8707](https://github.com/astral-sh/ruff/pull/8707))
|
|
||||||
- Avoid `FURB113` autofix if comments are present ([#8494](https://github.com/astral-sh/ruff/pull/8494))
|
|
||||||
- Consider the new f-string tokens for `flake8-commas` ([#8582](https://github.com/astral-sh/ruff/pull/8582))
|
|
||||||
- Remove erroneous bad-dunder-name reference ([#8742](https://github.com/astral-sh/ruff/pull/8742))
|
|
||||||
- Avoid recommending Self usages in metaclasses ([#8639](https://github.com/astral-sh/ruff/pull/8639))
|
|
||||||
- Detect runtime-evaluated base classes defined in the current file ([#8572](https://github.com/astral-sh/ruff/pull/8572))
|
|
||||||
- Avoid inserting trailing commas within f-strings ([#8574](https://github.com/astral-sh/ruff/pull/8574))
|
|
||||||
- Remove incorrect deprecation label for stdout and stderr ([#8743](https://github.com/astral-sh/ruff/pull/8743))
|
|
||||||
- Fix unnecessary parentheses in UP007 fix ([#8610](https://github.com/astral-sh/ruff/pull/8610))
|
|
||||||
- Remove repeated and erroneous scoped settings headers in docs ([#8670](https://github.com/astral-sh/ruff/pull/8670))
|
|
||||||
- Trim trailing empty strings when converting to f-strings ([#8712](https://github.com/astral-sh/ruff/pull/8712))
|
|
||||||
- Fix ordering for `force-sort-within-sections` ([#8665](https://github.com/astral-sh/ruff/pull/8665))
|
|
||||||
- Run unicode prefix rule over tokens ([#8709](https://github.com/astral-sh/ruff/pull/8709))
|
|
||||||
- Update UP032 to unescape curly braces in literal parts of converted strings ([#8697](https://github.com/astral-sh/ruff/pull/8697))
|
|
||||||
- List all ipython builtins ([#8719](https://github.com/astral-sh/ruff/pull/8719))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Document conventions in the FAQ ([#8638](https://github.com/astral-sh/ruff/pull/8638))
|
|
||||||
- Redirect from rule codes to rule pages in docs ([#8636](https://github.com/astral-sh/ruff/pull/8636))
|
|
||||||
- Fix permalink to convention setting ([#8575](https://github.com/astral-sh/ruff/pull/8575))
|
|
||||||
|
|
||||||
## 0.1.5
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- \[`flake8-bandit`\] Implement `mako-templates` (`S702`) ([#8533](https://github.com/astral-sh/ruff/pull/8533))
|
|
||||||
- \[`flake8-trio`\] Implement `TRIO105` ([#8490](https://github.com/astral-sh/ruff/pull/8490))
|
|
||||||
- \[`flake8-trio`\] Implement `TRIO109` ([#8534](https://github.com/astral-sh/ruff/pull/8534))
|
|
||||||
- \[`flake8-trio`\] Implement `TRIO110` ([#8537](https://github.com/astral-sh/ruff/pull/8537))
|
|
||||||
- \[`flake8-trio`\] Implement `TRIO115` ([#8486](https://github.com/astral-sh/ruff/pull/8486))
|
|
||||||
- \[`refurb`\] Implement `type-none-comparison` (`FURB169`) ([#8487](https://github.com/astral-sh/ruff/pull/8487))
|
|
||||||
- Flag all comparisons against builtin types in `E721` ([#8491](https://github.com/astral-sh/ruff/pull/8491))
|
|
||||||
- Make `SIM118` fix as safe when the expression is a known dictionary ([#8525](https://github.com/astral-sh/ruff/pull/8525))
|
|
||||||
|
|
||||||
### Formatter
|
|
||||||
|
|
||||||
- Fix multiline lambda expression statement formatting ([#8466](https://github.com/astral-sh/ruff/pull/8466))
|
|
||||||
|
|
||||||
### CLI
|
|
||||||
|
|
||||||
- Add hidden `--extension` to override inference of source type from file extension ([#8373](https://github.com/astral-sh/ruff/pull/8373))
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
- Account for selector specificity when merging `extend_unsafe_fixes` and `override extend_safe_fixes` ([#8444](https://github.com/astral-sh/ruff/pull/8444))
|
|
||||||
- Add support for disabling cache with `RUFF_NO_CACHE` environment variable ([#8538](https://github.com/astral-sh/ruff/pull/8538))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- \[`E721`\] Flag comparisons to `memoryview` ([#8485](https://github.com/astral-sh/ruff/pull/8485))
|
|
||||||
- Allow collapsed-ellipsis bodies in other statements ([#8499](https://github.com/astral-sh/ruff/pull/8499))
|
|
||||||
- Avoid `D301` autofix for `u` prefixed strings ([#8495](https://github.com/astral-sh/ruff/pull/8495))
|
|
||||||
- Only flag `flake8-trio` rules when `trio` import is present ([#8550](https://github.com/astral-sh/ruff/pull/8550))
|
|
||||||
- Reject more syntactically invalid Python programs ([#8524](https://github.com/astral-sh/ruff/pull/8524))
|
|
||||||
- Avoid raising `TRIO115` violations for `trio.sleep(...)` calls with non-number values ([#8532](https://github.com/astral-sh/ruff/pull/8532))
|
|
||||||
- Fix `F841` false negative on assignment to multiple variables ([#8489](https://github.com/astral-sh/ruff/pull/8489))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Fix link to isort `known-first-party` ([#8562](https://github.com/astral-sh/ruff/pull/8562))
|
|
||||||
- Add notes on fix safety to a few rules ([#8500](https://github.com/astral-sh/ruff/pull/8500))
|
|
||||||
- Add missing toml config tabs ([#8512](https://github.com/astral-sh/ruff/pull/8512))
|
|
||||||
- Add instructions for configuration of Emacs ([#8488](https://github.com/astral-sh/ruff/pull/8488))
|
|
||||||
- Improve detail link contrast in dark mode ([#8548](https://github.com/astral-sh/ruff/pull/8548))
|
|
||||||
- Fix typo in example ([#8506](https://github.com/astral-sh/ruff/pull/8506))
|
|
||||||
- Added tabs for configuration files in the documentation ([#8480](https://github.com/astral-sh/ruff/pull/8480))
|
|
||||||
- Recommend `project.requires-python` over `target-version` ([#8513](https://github.com/astral-sh/ruff/pull/8513))
|
|
||||||
- Add singleton escape hatch to `B008` documentation ([#8501](https://github.com/astral-sh/ruff/pull/8501))
|
|
||||||
- Fix tab configuration docs ([#8502](https://github.com/astral-sh/ruff/pull/8502))
|
|
||||||
|
|
||||||
## 0.1.4
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- \[`flake8-trio`\] Implement `timeout-without-await` (`TRIO001`) ([#8439](https://github.com/astral-sh/ruff/pull/8439))
|
|
||||||
- \[`numpy`\] Implement NumPy 2.0 migration rule (`NPY200`) ([#7702](https://github.com/astral-sh/ruff/pull/7702))
|
|
||||||
- \[`pylint`\] Implement `bad-open-mode` (`W1501`) ([#8294](https://github.com/astral-sh/ruff/pull/8294))
|
|
||||||
- \[`pylint`\] Implement `import-outside-toplevel` (`C0415`) rule ([#5180](https://github.com/astral-sh/ruff/pull/5180))
|
|
||||||
- \[`pylint`\] Implement `useless-with-lock` (`W2101`) ([#8321](https://github.com/astral-sh/ruff/pull/8321))
|
|
||||||
- \[`pyupgrade`\] Implement `timeout-error-alias` (`UP041`) ([#8476](https://github.com/astral-sh/ruff/pull/8476))
|
|
||||||
- \[`refurb`\] Implement `isinstance-type-none` (`FURB168`) ([#8308](https://github.com/astral-sh/ruff/pull/8308))
|
|
||||||
- Detect confusable Unicode-to-Unicode units in `RUF001`, `RUF002`, and `RUF003` ([#4430](https://github.com/astral-sh/ruff/pull/4430))
|
|
||||||
- Add newline after module docstrings in preview style ([#8283](https://github.com/astral-sh/ruff/pull/8283))
|
|
||||||
|
|
||||||
### Formatter
|
|
||||||
|
|
||||||
- Add a note on line-too-long to the formatter docs ([#8314](https://github.com/astral-sh/ruff/pull/8314))
|
|
||||||
- Preserve trailing statement semicolons when using `fmt: skip` ([#8273](https://github.com/astral-sh/ruff/pull/8273))
|
|
||||||
- Preserve trailing semicolons when using `fmt: off` ([#8275](https://github.com/astral-sh/ruff/pull/8275))
|
|
||||||
- Avoid duplicating linter-formatter compatibility warnings ([#8292](https://github.com/astral-sh/ruff/pull/8292))
|
|
||||||
- Avoid inserting a newline after function docstrings ([#8375](https://github.com/astral-sh/ruff/pull/8375))
|
|
||||||
- Insert newline between docstring and following own line comment ([#8216](https://github.com/astral-sh/ruff/pull/8216))
|
|
||||||
- Split tuples in return positions by comma first ([#8280](https://github.com/astral-sh/ruff/pull/8280))
|
|
||||||
- Avoid treating byte strings as docstrings ([#8350](https://github.com/astral-sh/ruff/pull/8350))
|
|
||||||
- Add `--line-length` option to `format` command ([#8363](https://github.com/astral-sh/ruff/pull/8363))
|
|
||||||
- Avoid parenthesizing unsplittable because of comments ([#8431](https://github.com/astral-sh/ruff/pull/8431))
|
|
||||||
|
|
||||||
### CLI
|
|
||||||
|
|
||||||
- Add `--output-format` to `ruff rule` and `ruff linter` ([#8203](https://github.com/astral-sh/ruff/pull/8203))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- Respect `--force-exclude` in `lint.exclude` and `format.exclude` ([#8393](https://github.com/astral-sh/ruff/pull/8393))
|
|
||||||
- Respect `--extend-per-file-ignores` on the CLI ([#8329](https://github.com/astral-sh/ruff/pull/8329))
|
|
||||||
- Extend `bad-dunder-method-name` to permit `__index__` ([#8300](https://github.com/astral-sh/ruff/pull/8300))
|
|
||||||
- Fix panic with 8 in octal escape ([#8356](https://github.com/astral-sh/ruff/pull/8356))
|
|
||||||
- Avoid raising `D300` when both triple quote styles are present ([#8462](https://github.com/astral-sh/ruff/pull/8462))
|
|
||||||
- Consider unterminated f-strings in `FStringRanges` ([#8154](https://github.com/astral-sh/ruff/pull/8154))
|
|
||||||
- Avoid including literal `shell=True` for truthy, non-`True` diagnostics ([#8359](https://github.com/astral-sh/ruff/pull/8359))
|
|
||||||
- Avoid triggering single-element test for starred expressions ([#8433](https://github.com/astral-sh/ruff/pull/8433))
|
|
||||||
- Detect and ignore Jupyter automagics ([#8398](https://github.com/astral-sh/ruff/pull/8398))
|
|
||||||
- Fix invalid E231 error with f-strings ([#8369](https://github.com/astral-sh/ruff/pull/8369))
|
|
||||||
- Avoid triggering `NamedTuple` rewrite with starred annotation ([#8434](https://github.com/astral-sh/ruff/pull/8434))
|
|
||||||
- Avoid un-setting bracket flag in logical lines ([#8380](https://github.com/astral-sh/ruff/pull/8380))
|
|
||||||
- Place 'r' prefix before 'f' for raw format strings ([#8464](https://github.com/astral-sh/ruff/pull/8464))
|
|
||||||
- Remove trailing periods from NumPy 2.0 code actions ([#8475](https://github.com/astral-sh/ruff/pull/8475))
|
|
||||||
- Fix bug where `PLE1307` was raised when formatting `%c` with characters ([#8407](https://github.com/astral-sh/ruff/pull/8407))
|
|
||||||
- Remove unicode flag from comparable ([#8440](https://github.com/astral-sh/ruff/pull/8440))
|
|
||||||
- Improve B015 message ([#8295](https://github.com/astral-sh/ruff/pull/8295))
|
|
||||||
- Use `fixedOverflowWidgets` for playground popover ([#8458](https://github.com/astral-sh/ruff/pull/8458))
|
|
||||||
- Mark `byte_bounds` as a non-backwards-compatible NumPy 2.0 change ([#8474](https://github.com/astral-sh/ruff/pull/8474))
|
|
||||||
|
|
||||||
### Internals
|
|
||||||
|
|
||||||
- Add a dedicated cache directory per Ruff version ([#8333](https://github.com/astral-sh/ruff/pull/8333))
|
|
||||||
- Allow selective caching for `--fix` and `--diff` ([#8316](https://github.com/astral-sh/ruff/pull/8316))
|
|
||||||
- Improve performance of comment parsing ([#8193](https://github.com/astral-sh/ruff/pull/8193))
|
|
||||||
- Improve performance of string parsing ([#8227](https://github.com/astral-sh/ruff/pull/8227))
|
|
||||||
- Use a dedicated sort key for isort import sorting ([#7963](https://github.com/astral-sh/ruff/pull/7963))
|
|
||||||
|
|
||||||
## 0.1.3
|
## 0.1.3
|
||||||
|
|
||||||
This release includes a variety of improvements to the Ruff formatter, removing several known and
|
This release includes a variety of improvements to the Ruff formatter, removing several known and
|
||||||
|
|||||||
@@ -72,7 +72,7 @@ representative at an online or offline event.
|
|||||||
|
|
||||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
reported to the community leaders responsible for enforcement at
|
reported to the community leaders responsible for enforcement at
|
||||||
<charlie.r.marsh@gmail.com>.
|
charlie.r.marsh@gmail.com.
|
||||||
All complaints will be reviewed and investigated promptly and fairly.
|
All complaints will be reviewed and investigated promptly and fairly.
|
||||||
|
|
||||||
All community leaders are obligated to respect the privacy and security of the
|
All community leaders are obligated to respect the privacy and security of the
|
||||||
|
|||||||
@@ -295,7 +295,7 @@ To preview any changes to the documentation locally:
|
|||||||
|
|
||||||
```shell
|
```shell
|
||||||
# For contributors.
|
# For contributors.
|
||||||
mkdocs serve -f mkdocs.public.yml
|
mkdocs serve -f mkdocs.generated.yml
|
||||||
|
|
||||||
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
|
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
|
||||||
mkdocs serve -f mkdocs.insiders.yml
|
mkdocs serve -f mkdocs.insiders.yml
|
||||||
@@ -315,41 +315,23 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
|||||||
|
|
||||||
### Creating a new release
|
### Creating a new release
|
||||||
|
|
||||||
We use an experimental in-house tool for managing releases.
|
1. Update the version with `rg 0.0.269 --files-with-matches | xargs sed -i 's/0.0.269/0.0.270/g'`
|
||||||
|
1. Update `BREAKING_CHANGES.md`
|
||||||
1. Install `rooster`: `pip install git+https://github.com/zanieb/rooster@main`
|
1. Create a PR with the version and `BREAKING_CHANGES.md` updated
|
||||||
1. Run `rooster release`; this command will:
|
|
||||||
- Generate a changelog entry in `CHANGELOG.md`
|
|
||||||
- Update versions in `pyproject.toml` and `Cargo.toml`
|
|
||||||
- Update references to versions in the `README.md` and documentation
|
|
||||||
1. The changelog should then be editorialized for consistency
|
|
||||||
- Often labels will be missing from pull requests they will need to be manually organized into the proper section
|
|
||||||
- Changes should be edited to be user-facing descriptions, avoiding internal details
|
|
||||||
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
|
|
||||||
1. Run `cargo check`. This should update the lock file with new versions.
|
|
||||||
1. Create a pull request with the changelog and version updates
|
|
||||||
1. Merge the PR
|
1. Merge the PR
|
||||||
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yaml) with:
|
1. Run the release workflow with the version number (without starting `v`) as input. Make sure
|
||||||
- The new version number (without starting `v`)
|
main has your merged PR as last commit
|
||||||
- The commit hash of the merged release pull request on `main`
|
|
||||||
1. The release workflow will do the following:
|
1. The release workflow will do the following:
|
||||||
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
|
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
|
||||||
uploaded anything, you can restart after pushing a fix.
|
uploaded anything, you can restart after pushing a fix.
|
||||||
1. Upload to PyPI.
|
1. Upload to PyPI.
|
||||||
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
|
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
|
||||||
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
|
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/charliermarsh/ruff/issues/4468)).
|
||||||
1. Attach artifacts to draft GitHub release
|
1. Attach artifacts to draft GitHub release
|
||||||
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
|
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
|
||||||
downstream jobs manually if needed.
|
downstream jobs manually if needed.
|
||||||
1. Publish the GitHub release
|
1. Create release notes in GitHub UI and promote from draft.
|
||||||
1. Open the draft release in the GitHub release section
|
1. If needed, [update the schemastore](https://github.com/charliermarsh/ruff/blob/main/scripts/update_schemastore.py)
|
||||||
1. Copy the changelog for the release into the GitHub release
|
|
||||||
- See previous releases for formatting of section headers
|
|
||||||
1. Generate the contributor list with `rooster contributors` and add to the release notes
|
|
||||||
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
|
|
||||||
1. One can determine if an update is needed when
|
|
||||||
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
|
|
||||||
1. Once run successfully, you should follow the link in the output to create a PR.
|
|
||||||
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
|
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
|
||||||
|
|
||||||
## Ecosystem CI
|
## Ecosystem CI
|
||||||
@@ -370,7 +352,7 @@ See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/pyt
|
|||||||
We have several ways of benchmarking and profiling Ruff:
|
We have several ways of benchmarking and profiling Ruff:
|
||||||
|
|
||||||
- Our main performance benchmark comparing Ruff with other tools on the CPython codebase
|
- Our main performance benchmark comparing Ruff with other tools on the CPython codebase
|
||||||
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
|
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
|
||||||
- Profiling the linter on either the microbenchmarks or entire projects
|
- Profiling the linter on either the microbenchmarks or entire projects
|
||||||
|
|
||||||
### CPython Benchmark
|
### CPython Benchmark
|
||||||
@@ -561,10 +543,10 @@ examples.
|
|||||||
|
|
||||||
#### Linux
|
#### Linux
|
||||||
|
|
||||||
Install `perf` and build `ruff_benchmark` with the `profiling` profile and then run it with perf
|
Install `perf` and build `ruff_benchmark` with the `release-debug` profile and then run it with perf
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo bench -p ruff_benchmark --no-run --profile=profiling && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=profiling -- --profile-time=1
|
cargo bench -p ruff_benchmark --no-run --profile=release-debug && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=release-debug -- --profile-time=1
|
||||||
```
|
```
|
||||||
|
|
||||||
You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to
|
You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to
|
||||||
@@ -572,8 +554,8 @@ gather enough samples for a good flamegraph (change the 999, the sample rate, an
|
|||||||
of checks, to your liking)
|
of checks, to your liking)
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo build --bin ruff_dev --profile=profiling
|
cargo build --bin ruff_dev --profile=release-debug
|
||||||
perf record -g -F 999 target/profiling/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
|
perf record -g -F 999 target/release-debug/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
|
||||||
```
|
```
|
||||||
|
|
||||||
Then convert the recorded profile
|
Then convert the recorded profile
|
||||||
@@ -603,7 +585,7 @@ cargo install cargo-instruments
|
|||||||
Then run the profiler with
|
Then run the profiler with
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo instruments -t time --bench linter --profile profiling -p ruff_benchmark -- --profile-time=1
|
cargo instruments -t time --bench linter --profile release-debug -p ruff_benchmark -- --profile-time=1
|
||||||
```
|
```
|
||||||
|
|
||||||
- `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc`
|
- `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc`
|
||||||
|
|||||||
614
Cargo.lock
generated
614
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
156
Cargo.toml
156
Cargo.toml
@@ -12,151 +12,51 @@ authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
|||||||
license = "MIT"
|
license = "MIT"
|
||||||
|
|
||||||
[workspace.dependencies]
|
[workspace.dependencies]
|
||||||
aho-corasick = { version = "1.1.2" }
|
anyhow = { version = "1.0.69" }
|
||||||
annotate-snippets = { version = "0.9.2", features = ["color"] }
|
bitflags = { version = "2.3.1" }
|
||||||
anyhow = { version = "1.0.79" }
|
|
||||||
argfile = { version = "0.1.6" }
|
|
||||||
assert_cmd = { version = "2.0.8" }
|
|
||||||
bincode = { version = "1.3.3" }
|
|
||||||
bitflags = { version = "2.4.1" }
|
|
||||||
cachedir = { version = "0.3.1" }
|
|
||||||
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
|
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
|
||||||
clap = { version = "4.4.13", features = ["derive"] }
|
clap = { version = "4.4.7", features = ["derive"] }
|
||||||
clap_complete_command = { version = "0.5.1" }
|
colored = { version = "2.0.0" }
|
||||||
clearscreen = { version = "2.0.0" }
|
filetime = { version = "0.2.20" }
|
||||||
codspeed-criterion-compat = { version = "2.3.3", default-features = false }
|
|
||||||
colored = { version = "2.1.0" }
|
|
||||||
configparser = { version = "3.0.3" }
|
|
||||||
console_error_panic_hook = { version = "0.1.7" }
|
|
||||||
console_log = { version = "1.0.0" }
|
|
||||||
countme = { version ="3.0.1"}
|
|
||||||
criterion = { version = "0.5.1", default-features = false }
|
|
||||||
dirs = { version = "5.0.0" }
|
|
||||||
drop_bomb = { version = "0.1.5" }
|
|
||||||
env_logger = { version ="0.10.1"}
|
|
||||||
fern = { version = "0.6.1" }
|
|
||||||
filetime = { version = "0.2.23" }
|
|
||||||
fs-err = { version ="2.11.0"}
|
|
||||||
glob = { version = "0.3.1" }
|
glob = { version = "0.3.1" }
|
||||||
globset = { version = "0.4.14" }
|
globset = { version = "0.4.10" }
|
||||||
hexf-parse = { version ="0.2.1"}
|
ignore = { version = "0.4.20" }
|
||||||
ignore = { version = "0.4.21" }
|
|
||||||
imara-diff ={ version = "0.1.5"}
|
|
||||||
imperative = { version = "1.0.4" }
|
|
||||||
indicatif ={ version = "0.17.7"}
|
|
||||||
indoc ={ version = "2.0.4"}
|
|
||||||
insta = { version = "1.34.0", feature = ["filters", "glob"] }
|
insta = { version = "1.34.0", feature = ["filters", "glob"] }
|
||||||
insta-cmd = { version = "0.4.0" }
|
is-macro = { version = "0.3.0" }
|
||||||
is-macro = { version = "0.3.4" }
|
itertools = { version = "0.11.0" }
|
||||||
is-wsl = { version = "0.4.0" }
|
|
||||||
itertools = { version = "0.12.0" }
|
|
||||||
js-sys = { version = "0.3.66" }
|
|
||||||
lalrpop-util = { version = "0.20.0", default-features = false }
|
|
||||||
lexical-parse-float = { version = "0.8.0", features = ["format"] }
|
|
||||||
libcst = { version = "1.1.0", default-features = false }
|
libcst = { version = "1.1.0", default-features = false }
|
||||||
log = { version = "0.4.17" }
|
log = { version = "0.4.17" }
|
||||||
memchr = { version = "2.6.4" }
|
memchr = { version = "2.6.4" }
|
||||||
mimalloc = { version ="0.1.39"}
|
once_cell = { version = "1.17.1" }
|
||||||
natord = { version = "1.0.9" }
|
|
||||||
notify = { version = "6.1.1" }
|
|
||||||
once_cell = { version = "1.19.0" }
|
|
||||||
path-absolutize = { version = "3.1.1" }
|
path-absolutize = { version = "3.1.1" }
|
||||||
pathdiff = { version = "0.2.1" }
|
proc-macro2 = { version = "1.0.69" }
|
||||||
pep440_rs = { version = "0.4.0", features = ["serde"] }
|
|
||||||
pretty_assertions = "1.3.0"
|
|
||||||
proc-macro2 = { version = "1.0.73" }
|
|
||||||
pyproject-toml = { version = "0.8.1" }
|
|
||||||
quick-junit = { version = "0.3.5" }
|
|
||||||
quote = { version = "1.0.23" }
|
quote = { version = "1.0.23" }
|
||||||
rand = { version = "0.8.5" }
|
|
||||||
rayon = { version = "1.8.0" }
|
|
||||||
regex = { version = "1.10.2" }
|
regex = { version = "1.10.2" }
|
||||||
result-like = { version = "0.5.0" }
|
|
||||||
rustc-hash = { version = "1.1.0" }
|
rustc-hash = { version = "1.1.0" }
|
||||||
schemars = { version = "0.8.16" }
|
schemars = { version = "0.8.15" }
|
||||||
seahash = { version ="4.1.0"}
|
serde = { version = "1.0.190", features = ["derive"] }
|
||||||
semver = { version = "1.0.20" }
|
serde_json = { version = "1.0.107" }
|
||||||
serde = { version = "1.0.195", features = ["derive"] }
|
|
||||||
serde-wasm-bindgen = { version = "0.6.3" }
|
|
||||||
serde_json = { version = "1.0.109" }
|
|
||||||
serde_test = { version = "1.0.152" }
|
|
||||||
serde_with = { version = "3.4.0", default-features = false, features = ["macros"] }
|
|
||||||
shellexpand = { version = "3.0.0" }
|
shellexpand = { version = "3.0.0" }
|
||||||
shlex = { version ="1.2.0"}
|
|
||||||
similar = { version = "2.3.0", features = ["inline"] }
|
similar = { version = "2.3.0", features = ["inline"] }
|
||||||
smallvec = { version = "1.11.2" }
|
smallvec = { version = "1.11.1" }
|
||||||
static_assertions = "1.1.0"
|
static_assertions = "1.1.0"
|
||||||
strum = { version = "0.25.0", features = ["strum_macros"] }
|
strum = { version = "0.25.0", features = ["strum_macros"] }
|
||||||
strum_macros = { version = "0.25.3" }
|
strum_macros = { version = "0.25.3" }
|
||||||
syn = { version = "2.0.40" }
|
syn = { version = "2.0.38" }
|
||||||
tempfile = { version ="3.9.0"}
|
test-case = { version = "3.2.1" }
|
||||||
test-case = { version = "3.3.1" }
|
thiserror = { version = "1.0.50" }
|
||||||
thiserror = { version = "1.0.51" }
|
toml = { version = "0.7.8" }
|
||||||
tikv-jemallocator = { version ="0.5.0"}
|
|
||||||
toml = { version = "0.8.8" }
|
|
||||||
tracing = { version = "0.1.40" }
|
tracing = { version = "0.1.40" }
|
||||||
tracing-indicatif = { version = "0.3.6" }
|
tracing-indicatif = { version = "0.3.4" }
|
||||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
|
||||||
typed-arena = { version = "2.0.2" }
|
|
||||||
unic-ucd-category = { version ="0.9"}
|
|
||||||
unicode-ident = { version = "1.0.12" }
|
unicode-ident = { version = "1.0.12" }
|
||||||
|
unicode_names2 = { version = "1.2.0" }
|
||||||
unicode-width = { version = "0.1.11" }
|
unicode-width = { version = "0.1.11" }
|
||||||
unicode_names2 = { version = "1.2.1" }
|
uuid = { version = "1.5.0", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
|
||||||
ureq = { version = "2.9.1" }
|
wsl = { version = "0.1.0" }
|
||||||
url = { version = "2.5.0" }
|
|
||||||
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
|
|
||||||
walkdir = { version = "2.3.2" }
|
|
||||||
wasm-bindgen = { version = "0.2.84" }
|
|
||||||
wasm-bindgen-test = { version = "0.3.39" }
|
|
||||||
wild = { version = "2" }
|
|
||||||
|
|
||||||
[workspace.lints.rust]
|
|
||||||
unsafe_code = "warn"
|
|
||||||
unreachable_pub = "warn"
|
|
||||||
|
|
||||||
[workspace.lints.clippy]
|
|
||||||
pedantic = { level = "warn", priority = -2 }
|
|
||||||
# Allowed pedantic lints
|
|
||||||
char_lit_as_u8 = "allow"
|
|
||||||
collapsible_else_if = "allow"
|
|
||||||
collapsible_if = "allow"
|
|
||||||
implicit_hasher = "allow"
|
|
||||||
match_same_arms = "allow"
|
|
||||||
missing_errors_doc = "allow"
|
|
||||||
missing_panics_doc = "allow"
|
|
||||||
module_name_repetitions = "allow"
|
|
||||||
must_use_candidate = "allow"
|
|
||||||
similar_names = "allow"
|
|
||||||
too_many_lines = "allow"
|
|
||||||
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
|
|
||||||
needless_raw_string_hashes = "allow"
|
|
||||||
# Disallowed restriction lints
|
|
||||||
print_stdout = "warn"
|
|
||||||
print_stderr = "warn"
|
|
||||||
dbg_macro = "warn"
|
|
||||||
empty_drop = "warn"
|
|
||||||
empty_structs_with_brackets = "warn"
|
|
||||||
exit = "warn"
|
|
||||||
get_unwrap = "warn"
|
|
||||||
rc_buffer = "warn"
|
|
||||||
rc_mutex = "warn"
|
|
||||||
rest_pat_in_fully_bound_structs = "warn"
|
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
# Note that we set these explicitly, and these values
|
lto = "fat"
|
||||||
# were chosen based on a trade-off between compile times
|
|
||||||
# and runtime performance[1].
|
|
||||||
#
|
|
||||||
# [1]: https://github.com/astral-sh/ruff/pull/9031
|
|
||||||
lto = "thin"
|
|
||||||
codegen-units = 16
|
|
||||||
|
|
||||||
# Some crates don't change as much but benefit more from
|
|
||||||
# more expensive optimization passes, so we selectively
|
|
||||||
# decrease codegen-units in some cases.
|
|
||||||
[profile.release.package.ruff_python_parser]
|
|
||||||
codegen-units = 1
|
|
||||||
[profile.release.package.ruff_python_ast]
|
|
||||||
codegen-units = 1
|
codegen-units = 1
|
||||||
|
|
||||||
[profile.dev.package.insta]
|
[profile.dev.package.insta]
|
||||||
@@ -170,8 +70,8 @@ opt-level = 3
|
|||||||
[profile.dev.package.ruff_python_parser]
|
[profile.dev.package.ruff_python_parser]
|
||||||
opt-level = 1
|
opt-level = 1
|
||||||
|
|
||||||
# Use the `--profile profiling` flag to show symbols in release mode.
|
# Use the `--profile release-debug` flag to show symbols in release mode.
|
||||||
# e.g. `cargo build --profile profiling`
|
# e.g. `cargo build --profile release-debug`
|
||||||
[profile.profiling]
|
[profile.release-debug]
|
||||||
inherits = "release"
|
inherits = "release"
|
||||||
debug = 1
|
debug = 1
|
||||||
|
|||||||
38
Dockerfile
38
Dockerfile
@@ -1,38 +0,0 @@
|
|||||||
FROM --platform=$BUILDPLATFORM ubuntu as build
|
|
||||||
ENV HOME="/root"
|
|
||||||
WORKDIR $HOME
|
|
||||||
|
|
||||||
RUN apt update && apt install -y build-essential curl python3-venv
|
|
||||||
|
|
||||||
# Setup zig as cross compiling linker
|
|
||||||
RUN python3 -m venv $HOME/.venv
|
|
||||||
RUN .venv/bin/pip install cargo-zigbuild
|
|
||||||
ENV PATH="$HOME/.venv/bin:$PATH"
|
|
||||||
|
|
||||||
# Install rust
|
|
||||||
ARG TARGETPLATFORM
|
|
||||||
RUN case "$TARGETPLATFORM" in \
|
|
||||||
"linux/arm64") echo "aarch64-unknown-linux-musl" > rust_target.txt ;; \
|
|
||||||
"linux/amd64") echo "x86_64-unknown-linux-musl" > rust_target.txt ;; \
|
|
||||||
*) exit 1 ;; \
|
|
||||||
esac
|
|
||||||
# Update rustup whenever we bump the rust version
|
|
||||||
COPY rust-toolchain.toml rust-toolchain.toml
|
|
||||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
|
|
||||||
ENV PATH="$HOME/.cargo/bin:$PATH"
|
|
||||||
# Installs the correct toolchain version from rust-toolchain.toml and then the musl target
|
|
||||||
RUN rustup target add $(cat rust_target.txt)
|
|
||||||
|
|
||||||
# Build
|
|
||||||
COPY crates crates
|
|
||||||
COPY Cargo.toml Cargo.toml
|
|
||||||
COPY Cargo.lock Cargo.lock
|
|
||||||
RUN cargo zigbuild --bin ruff --target $(cat rust_target.txt) --release
|
|
||||||
RUN cp target/$(cat rust_target.txt)/release/ruff /ruff
|
|
||||||
# TODO: Optimize binary size, with a version that also works when cross compiling
|
|
||||||
# RUN strip --strip-all /ruff
|
|
||||||
|
|
||||||
FROM scratch
|
|
||||||
COPY --from=build /ruff /ruff
|
|
||||||
WORKDIR /io
|
|
||||||
ENTRYPOINT ["/ruff"]
|
|
||||||
35
README.md
35
README.md
@@ -54,7 +54,7 @@ Ruff is extremely actively developed and used in major open-source projects like
|
|||||||
- [Pandas](https://github.com/pandas-dev/pandas)
|
- [Pandas](https://github.com/pandas-dev/pandas)
|
||||||
- [SciPy](https://github.com/scipy/scipy)
|
- [SciPy](https://github.com/scipy/scipy)
|
||||||
|
|
||||||
...and [many more](#whos-using-ruff).
|
...and many more.
|
||||||
|
|
||||||
Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
|
Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
|
||||||
or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
|
or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
|
||||||
@@ -148,14 +148,14 @@ ruff format @arguments.txt # Format using an input file, treating its
|
|||||||
Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff-pre-commit`](https://github.com/astral-sh/ruff-pre-commit):
|
Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff-pre-commit`](https://github.com/astral-sh/ruff-pre-commit):
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
|
# Run the Ruff linter.
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
# Ruff version.
|
# Ruff version.
|
||||||
rev: v0.1.11
|
rev: v0.1.3
|
||||||
hooks:
|
hooks:
|
||||||
# Run the linter.
|
# Run the Ruff linter.
|
||||||
- id: ruff
|
- id: ruff
|
||||||
args: [ --fix ]
|
# Run the Ruff formatter.
|
||||||
# Run the formatter.
|
|
||||||
- id: ruff-format
|
- id: ruff-format
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -194,25 +194,20 @@ exclude = [
|
|||||||
".git",
|
".git",
|
||||||
".git-rewrite",
|
".git-rewrite",
|
||||||
".hg",
|
".hg",
|
||||||
".ipynb_checkpoints",
|
|
||||||
".mypy_cache",
|
".mypy_cache",
|
||||||
".nox",
|
".nox",
|
||||||
".pants.d",
|
".pants.d",
|
||||||
".pyenv",
|
|
||||||
".pytest_cache",
|
|
||||||
".pytype",
|
".pytype",
|
||||||
".ruff_cache",
|
".ruff_cache",
|
||||||
".svn",
|
".svn",
|
||||||
".tox",
|
".tox",
|
||||||
".venv",
|
".venv",
|
||||||
".vscode",
|
|
||||||
"__pypackages__",
|
"__pypackages__",
|
||||||
"_build",
|
"_build",
|
||||||
"buck-out",
|
"buck-out",
|
||||||
"build",
|
"build",
|
||||||
"dist",
|
"dist",
|
||||||
"node_modules",
|
"node_modules",
|
||||||
"site-packages",
|
|
||||||
"venv",
|
"venv",
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -382,11 +377,10 @@ Ruff is used by a number of major open-source projects and companies, including:
|
|||||||
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
|
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
|
||||||
- [Apache Airflow](https://github.com/apache/airflow)
|
- [Apache Airflow](https://github.com/apache/airflow)
|
||||||
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
|
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
|
||||||
- [Babel](https://github.com/python-babel/babel)
|
|
||||||
- Benchling ([Refac](https://github.com/benchling/refac))
|
- Benchling ([Refac](https://github.com/benchling/refac))
|
||||||
|
- [Babel](https://github.com/python-babel/babel)
|
||||||
- [Bokeh](https://github.com/bokeh/bokeh)
|
- [Bokeh](https://github.com/bokeh/bokeh)
|
||||||
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
||||||
- CERN ([Indico](https://getindico.io/))
|
|
||||||
- [DVC](https://github.com/iterative/dvc)
|
- [DVC](https://github.com/iterative/dvc)
|
||||||
- [Dagger](https://github.com/dagger/dagger)
|
- [Dagger](https://github.com/dagger/dagger)
|
||||||
- [Dagster](https://github.com/dagster-io/dagster)
|
- [Dagster](https://github.com/dagster-io/dagster)
|
||||||
@@ -395,16 +389,15 @@ Ruff is used by a number of major open-source projects and companies, including:
|
|||||||
- [Gradio](https://github.com/gradio-app/gradio)
|
- [Gradio](https://github.com/gradio-app/gradio)
|
||||||
- [Great Expectations](https://github.com/great-expectations/great_expectations)
|
- [Great Expectations](https://github.com/great-expectations/great_expectations)
|
||||||
- [HTTPX](https://github.com/encode/httpx)
|
- [HTTPX](https://github.com/encode/httpx)
|
||||||
- [Hatch](https://github.com/pypa/hatch)
|
|
||||||
- [Home Assistant](https://github.com/home-assistant/core)
|
|
||||||
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
|
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
|
||||||
[Datasets](https://github.com/huggingface/datasets),
|
[Datasets](https://github.com/huggingface/datasets),
|
||||||
[Diffusers](https://github.com/huggingface/diffusers))
|
[Diffusers](https://github.com/huggingface/diffusers))
|
||||||
|
- [Hatch](https://github.com/pypa/hatch)
|
||||||
|
- [Home Assistant](https://github.com/home-assistant/core)
|
||||||
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
||||||
- [Ibis](https://github.com/ibis-project/ibis)
|
- [Ibis](https://github.com/ibis-project/ibis)
|
||||||
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||||
- [LangChain](https://github.com/hwchase17/langchain)
|
- [LangChain](https://github.com/hwchase17/langchain)
|
||||||
- [Litestar](https://litestar.dev/)
|
|
||||||
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
|
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
|
||||||
- Matrix ([Synapse](https://github.com/matrix-org/synapse))
|
- Matrix ([Synapse](https://github.com/matrix-org/synapse))
|
||||||
- [MegaLinter](https://github.com/oxsecurity/megalinter)
|
- [MegaLinter](https://github.com/oxsecurity/megalinter)
|
||||||
@@ -418,35 +411,29 @@ Ruff is used by a number of major open-source projects and companies, including:
|
|||||||
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
|
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
|
||||||
- [Neon](https://github.com/neondatabase/neon)
|
- [Neon](https://github.com/neondatabase/neon)
|
||||||
- [NoneBot](https://github.com/nonebot/nonebot2)
|
- [NoneBot](https://github.com/nonebot/nonebot2)
|
||||||
- [NumPyro](https://github.com/pyro-ppl/numpyro)
|
|
||||||
- [ONNX](https://github.com/onnx/onnx)
|
- [ONNX](https://github.com/onnx/onnx)
|
||||||
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
||||||
- [PDM](https://github.com/pdm-project/pdm)
|
- [PDM](https://github.com/pdm-project/pdm)
|
||||||
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
|
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
|
||||||
- [Pandas](https://github.com/pandas-dev/pandas)
|
- [Pandas](https://github.com/pandas-dev/pandas)
|
||||||
- [Pillow](https://github.com/python-pillow/Pillow)
|
|
||||||
- [Poetry](https://github.com/python-poetry/poetry)
|
- [Poetry](https://github.com/python-poetry/poetry)
|
||||||
- [Polars](https://github.com/pola-rs/polars)
|
- [Polars](https://github.com/pola-rs/polars)
|
||||||
- [PostHog](https://github.com/PostHog/posthog)
|
- [PostHog](https://github.com/PostHog/posthog)
|
||||||
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
|
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
|
||||||
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
|
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
|
||||||
- [PyMC](https://github.com/pymc-devs/pymc/)
|
|
||||||
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
|
|
||||||
- [PyTorch](https://github.com/pytorch/pytorch)
|
- [PyTorch](https://github.com/pytorch/pytorch)
|
||||||
- [Pydantic](https://github.com/pydantic/pydantic)
|
- [Pydantic](https://github.com/pydantic/pydantic)
|
||||||
- [Pylint](https://github.com/PyCQA/pylint)
|
- [Pylint](https://github.com/PyCQA/pylint)
|
||||||
- [PyVista](https://github.com/pyvista/pyvista)
|
|
||||||
- [Reflex](https://github.com/reflex-dev/reflex)
|
- [Reflex](https://github.com/reflex-dev/reflex)
|
||||||
- [River](https://github.com/online-ml/river)
|
|
||||||
- [Rippling](https://rippling.com)
|
- [Rippling](https://rippling.com)
|
||||||
- [Robyn](https://github.com/sansyrox/robyn)
|
- [Robyn](https://github.com/sansyrox/robyn)
|
||||||
- [Saleor](https://github.com/saleor/saleor)
|
|
||||||
- Scale AI ([Launch SDK](https://github.com/scaleapi/launch-python-client))
|
- Scale AI ([Launch SDK](https://github.com/scaleapi/launch-python-client))
|
||||||
- [SciPy](https://github.com/scipy/scipy)
|
|
||||||
- Snowflake ([SnowCLI](https://github.com/Snowflake-Labs/snowcli))
|
- Snowflake ([SnowCLI](https://github.com/Snowflake-Labs/snowcli))
|
||||||
|
- [Saleor](https://github.com/saleor/saleor)
|
||||||
|
- [SciPy](https://github.com/scipy/scipy)
|
||||||
- [Sphinx](https://github.com/sphinx-doc/sphinx)
|
- [Sphinx](https://github.com/sphinx-doc/sphinx)
|
||||||
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
|
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
|
||||||
- [Starlette](https://github.com/encode/starlette)
|
- [Litestar](https://litestar.dev/)
|
||||||
- [The Algorithms](https://github.com/TheAlgorithms/Python)
|
- [The Algorithms](https://github.com/TheAlgorithms/Python)
|
||||||
- [Vega-Altair](https://github.com/altair-viz/altair)
|
- [Vega-Altair](https://github.com/altair-viz/altair)
|
||||||
- WordPress ([Openverse](https://github.com/WordPress/openverse))
|
- WordPress ([Openverse](https://github.com/WordPress/openverse))
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
[files]
|
[files]
|
||||||
# https://github.com/crate-ci/typos/issues/868
|
extend-exclude = ["resources", "snapshots"]
|
||||||
extend-exclude = ["**/resources/**/*", "**/snapshots/**/*"]
|
|
||||||
|
|
||||||
[default.extend-words]
|
[default.extend-words]
|
||||||
hel = "hel"
|
hel = "hel"
|
||||||
|
|||||||
36
crates/flake8_to_ruff/Cargo.toml
Normal file
36
crates/flake8_to_ruff/Cargo.toml
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
[package]
|
||||||
|
name = "flake8-to-ruff"
|
||||||
|
version = "0.1.3"
|
||||||
|
description = """
|
||||||
|
Convert Flake8 configuration files to Ruff configuration files.
|
||||||
|
"""
|
||||||
|
authors = { workspace = true }
|
||||||
|
edition = { workspace = true }
|
||||||
|
rust-version = { workspace = true }
|
||||||
|
homepage = { workspace = true }
|
||||||
|
documentation = { workspace = true }
|
||||||
|
repository = { workspace = true }
|
||||||
|
license = { workspace = true }
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
ruff_linter = { path = "../ruff_linter", default-features = false }
|
||||||
|
ruff_workspace = { path = "../ruff_workspace" }
|
||||||
|
|
||||||
|
anyhow = { workspace = true }
|
||||||
|
clap = { workspace = true }
|
||||||
|
colored = { workspace = true }
|
||||||
|
configparser = { version = "3.0.2" }
|
||||||
|
itertools = { workspace = true }
|
||||||
|
log = { workspace = true }
|
||||||
|
once_cell = { workspace = true }
|
||||||
|
pep440_rs = { version = "0.3.12", features = ["serde"] }
|
||||||
|
regex = { workspace = true }
|
||||||
|
rustc-hash = { workspace = true }
|
||||||
|
serde = { workspace = true }
|
||||||
|
serde_json = { workspace = true }
|
||||||
|
strum = { workspace = true }
|
||||||
|
strum_macros = { workspace = true }
|
||||||
|
toml = { workspace = true }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
pretty_assertions = "1.3.0"
|
||||||
99
crates/flake8_to_ruff/README.md
Normal file
99
crates/flake8_to_ruff/README.md
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
# flake8-to-ruff
|
||||||
|
|
||||||
|
Convert existing Flake8 configuration files (`setup.cfg`, `tox.ini`, or `.flake8`) for use with
|
||||||
|
[Ruff](https://github.com/astral-sh/ruff).
|
||||||
|
|
||||||
|
Generates a Ruff-compatible `pyproject.toml` section.
|
||||||
|
|
||||||
|
## Installation and Usage
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
|
||||||
|
Available as [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) on PyPI:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pip install flake8-to-ruff
|
||||||
|
```
|
||||||
|
|
||||||
|
### Usage
|
||||||
|
|
||||||
|
To run `flake8-to-ruff`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
flake8-to-ruff path/to/setup.cfg
|
||||||
|
flake8-to-ruff path/to/tox.ini
|
||||||
|
flake8-to-ruff path/to/.flake8
|
||||||
|
```
|
||||||
|
|
||||||
|
`flake8-to-ruff` will print the relevant `pyproject.toml` sections to standard output, like so:
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[tool.ruff]
|
||||||
|
exclude = [
|
||||||
|
'.svn',
|
||||||
|
'CVS',
|
||||||
|
'.bzr',
|
||||||
|
'.hg',
|
||||||
|
'.git',
|
||||||
|
'__pycache__',
|
||||||
|
'.tox',
|
||||||
|
'.idea',
|
||||||
|
'.mypy_cache',
|
||||||
|
'.venv',
|
||||||
|
'node_modules',
|
||||||
|
'_state_machine.py',
|
||||||
|
'test_fstring.py',
|
||||||
|
'bad_coding2.py',
|
||||||
|
'badsyntax_*.py',
|
||||||
|
]
|
||||||
|
select = [
|
||||||
|
'A',
|
||||||
|
'E',
|
||||||
|
'F',
|
||||||
|
'Q',
|
||||||
|
]
|
||||||
|
ignore = []
|
||||||
|
|
||||||
|
[tool.ruff.flake8-quotes]
|
||||||
|
inline-quotes = 'single'
|
||||||
|
|
||||||
|
[tool.ruff.pep8-naming]
|
||||||
|
ignore-names = [
|
||||||
|
'foo',
|
||||||
|
'bar',
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Plugins
|
||||||
|
|
||||||
|
`flake8-to-ruff` will attempt to infer any activated plugins based on the settings provided in your
|
||||||
|
configuration file.
|
||||||
|
|
||||||
|
For example, if your `.flake8` file includes a `docstring-convention` property, `flake8-to-ruff`
|
||||||
|
will enable the appropriate [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
|
||||||
|
checks.
|
||||||
|
|
||||||
|
Alternatively, you can manually specify plugins on the command-line:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
|
||||||
|
```
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
1. Ruff only supports a subset of the Flake configuration options. `flake8-to-ruff` will warn on and
|
||||||
|
ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
|
||||||
|
configuration options that don't exist in Flake8.)
|
||||||
|
1. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
|
||||||
|
codes from unsupported plugins. (See the
|
||||||
|
[documentation](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8) for the complete
|
||||||
|
list of supported plugins.)
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
Contributions are welcome and hugely appreciated. To get started, check out the
|
||||||
|
[contributing guidelines](https://github.com/astral-sh/ruff/blob/main/CONTRIBUTING.md).
|
||||||
65
crates/flake8_to_ruff/examples/cryptography/pyproject.toml
Normal file
65
crates/flake8_to_ruff/examples/cryptography/pyproject.toml
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
[build-system]
|
||||||
|
requires = [
|
||||||
|
# The minimum setuptools version is specific to the PEP 517 backend,
|
||||||
|
# and may be stricter than the version required in `setup.cfg`
|
||||||
|
"setuptools>=40.6.0,!=60.9.0",
|
||||||
|
"wheel",
|
||||||
|
# Must be kept in sync with the `install_requirements` in `setup.cfg`
|
||||||
|
"cffi>=1.12; platform_python_implementation != 'PyPy'",
|
||||||
|
"setuptools-rust>=0.11.4",
|
||||||
|
]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[tool.black]
|
||||||
|
line-length = 79
|
||||||
|
target-version = ["py36"]
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
addopts = "-r s --capture=no --strict-markers --benchmark-disable"
|
||||||
|
markers = [
|
||||||
|
"skip_fips: this test is not executed in FIPS mode",
|
||||||
|
"supported: parametrized test requiring only_if and skip_message",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
show_error_codes = true
|
||||||
|
check_untyped_defs = true
|
||||||
|
no_implicit_reexport = true
|
||||||
|
warn_redundant_casts = true
|
||||||
|
warn_unused_ignores = true
|
||||||
|
warn_unused_configs = true
|
||||||
|
strict_equality = true
|
||||||
|
|
||||||
|
[[tool.mypy.overrides]]
|
||||||
|
module = [
|
||||||
|
"pretend"
|
||||||
|
]
|
||||||
|
ignore_missing_imports = true
|
||||||
|
|
||||||
|
[tool.coverage.run]
|
||||||
|
branch = true
|
||||||
|
relative_files = true
|
||||||
|
source = [
|
||||||
|
"cryptography",
|
||||||
|
"tests/",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.coverage.paths]
|
||||||
|
source = [
|
||||||
|
"src/cryptography",
|
||||||
|
"*.tox/*/lib*/python*/site-packages/cryptography",
|
||||||
|
"*.tox\\*\\Lib\\site-packages\\cryptography",
|
||||||
|
"*.tox/pypy/site-packages/cryptography",
|
||||||
|
]
|
||||||
|
tests =[
|
||||||
|
"tests/",
|
||||||
|
"*tests\\",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.coverage.report]
|
||||||
|
exclude_lines = [
|
||||||
|
"@abc.abstractmethod",
|
||||||
|
"@abc.abstractproperty",
|
||||||
|
"@typing.overload",
|
||||||
|
"if typing.TYPE_CHECKING",
|
||||||
|
]
|
||||||
91
crates/flake8_to_ruff/examples/cryptography/setup.cfg
Normal file
91
crates/flake8_to_ruff/examples/cryptography/setup.cfg
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
[metadata]
|
||||||
|
name = cryptography
|
||||||
|
version = attr: cryptography.__version__
|
||||||
|
description = cryptography is a package which provides cryptographic recipes and primitives to Python developers.
|
||||||
|
long_description = file: README.rst
|
||||||
|
long_description_content_type = text/x-rst
|
||||||
|
license = BSD-3-Clause OR Apache-2.0
|
||||||
|
url = https://github.com/pyca/cryptography
|
||||||
|
author = The Python Cryptographic Authority and individual contributors
|
||||||
|
author_email = cryptography-dev@python.org
|
||||||
|
project_urls =
|
||||||
|
Documentation=https://cryptography.io/
|
||||||
|
Source=https://github.com/pyca/cryptography/
|
||||||
|
Issues=https://github.com/pyca/cryptography/issues
|
||||||
|
Changelog=https://cryptography.io/en/latest/changelog/
|
||||||
|
classifiers =
|
||||||
|
Development Status :: 5 - Production/Stable
|
||||||
|
Intended Audience :: Developers
|
||||||
|
License :: OSI Approved :: Apache Software License
|
||||||
|
License :: OSI Approved :: BSD License
|
||||||
|
Natural Language :: English
|
||||||
|
Operating System :: MacOS :: MacOS X
|
||||||
|
Operating System :: POSIX
|
||||||
|
Operating System :: POSIX :: BSD
|
||||||
|
Operating System :: POSIX :: Linux
|
||||||
|
Operating System :: Microsoft :: Windows
|
||||||
|
Programming Language :: Python
|
||||||
|
Programming Language :: Python :: 3
|
||||||
|
Programming Language :: Python :: 3 :: Only
|
||||||
|
Programming Language :: Python :: 3.6
|
||||||
|
Programming Language :: Python :: 3.7
|
||||||
|
Programming Language :: Python :: 3.8
|
||||||
|
Programming Language :: Python :: 3.9
|
||||||
|
Programming Language :: Python :: 3.10
|
||||||
|
Programming Language :: Python :: 3.11
|
||||||
|
Programming Language :: Python :: Implementation :: CPython
|
||||||
|
Programming Language :: Python :: Implementation :: PyPy
|
||||||
|
Topic :: Security :: Cryptography
|
||||||
|
|
||||||
|
[options]
|
||||||
|
python_requires = >=3.6
|
||||||
|
include_package_data = True
|
||||||
|
zip_safe = False
|
||||||
|
package_dir =
|
||||||
|
=src
|
||||||
|
packages = find:
|
||||||
|
# `install_requires` must be kept in sync with `pyproject.toml`
|
||||||
|
install_requires =
|
||||||
|
cffi >=1.12
|
||||||
|
|
||||||
|
[options.packages.find]
|
||||||
|
where = src
|
||||||
|
exclude =
|
||||||
|
_cffi_src
|
||||||
|
_cffi_src.*
|
||||||
|
|
||||||
|
[options.extras_require]
|
||||||
|
test =
|
||||||
|
pytest>=6.2.0
|
||||||
|
pytest-benchmark
|
||||||
|
pytest-cov
|
||||||
|
pytest-subtests
|
||||||
|
pytest-xdist
|
||||||
|
pretend
|
||||||
|
iso8601
|
||||||
|
pytz
|
||||||
|
hypothesis>=1.11.4,!=3.79.2
|
||||||
|
docs =
|
||||||
|
sphinx >= 1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0
|
||||||
|
sphinx_rtd_theme
|
||||||
|
docstest =
|
||||||
|
pyenchant >= 1.6.11
|
||||||
|
twine >= 1.12.0
|
||||||
|
sphinxcontrib-spelling >= 4.0.1
|
||||||
|
sdist =
|
||||||
|
setuptools_rust >= 0.11.4
|
||||||
|
pep8test =
|
||||||
|
black
|
||||||
|
flake8
|
||||||
|
flake8-import-order
|
||||||
|
pep8-naming
|
||||||
|
# This extra is for OpenSSH private keys that use bcrypt KDF
|
||||||
|
# Versions: v3.1.3 - ignore_few_rounds, v3.1.5 - abi3
|
||||||
|
ssh =
|
||||||
|
bcrypt >= 3.1.5
|
||||||
|
|
||||||
|
[flake8]
|
||||||
|
ignore = E203,E211,W503,W504,N818
|
||||||
|
exclude = .tox,*.egg,.git,_build,.hypothesis
|
||||||
|
select = E,W,F,N,I
|
||||||
|
application-import-names = cryptography,cryptography_vectors,tests
|
||||||
19
crates/flake8_to_ruff/examples/jupyterhub.ini
Normal file
19
crates/flake8_to_ruff/examples/jupyterhub.ini
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
[flake8]
|
||||||
|
# Ignore style and complexity
|
||||||
|
# E: style errors
|
||||||
|
# W: style warnings
|
||||||
|
# C: complexity
|
||||||
|
# D: docstring warnings (unused pydocstyle extension)
|
||||||
|
# F841: local variable assigned but never used
|
||||||
|
ignore = E, C, W, D, F841
|
||||||
|
builtins = c, get_config
|
||||||
|
exclude =
|
||||||
|
.cache,
|
||||||
|
.github,
|
||||||
|
docs,
|
||||||
|
jupyterhub/alembic*,
|
||||||
|
onbuild,
|
||||||
|
scripts,
|
||||||
|
share,
|
||||||
|
tools,
|
||||||
|
setup.py
|
||||||
43
crates/flake8_to_ruff/examples/manim.ini
Normal file
43
crates/flake8_to_ruff/examples/manim.ini
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
[flake8]
|
||||||
|
# Exclude the grpc generated code
|
||||||
|
exclude = ./manim/grpc/gen/*
|
||||||
|
max-complexity = 15
|
||||||
|
max-line-length = 88
|
||||||
|
statistics = True
|
||||||
|
# Prevents some flake8-rst-docstrings errors
|
||||||
|
rst-roles = attr,class,func,meth,mod,obj,ref,doc,exc
|
||||||
|
rst-directives = manim, SEEALSO, seealso
|
||||||
|
docstring-convention=numpy
|
||||||
|
|
||||||
|
select = A,A00,B,B9,C4,C90,D,E,F,F,PT,RST,SIM,W
|
||||||
|
|
||||||
|
# General Compatibility
|
||||||
|
extend-ignore = E203, W503, D202, D212, D213, D404
|
||||||
|
|
||||||
|
# Misc
|
||||||
|
F401, F403, F405, F841, E501, E731, E402, F811, F821,
|
||||||
|
|
||||||
|
# Plug-in: flake8-builtins
|
||||||
|
A001, A002, A003,
|
||||||
|
|
||||||
|
# Plug-in: flake8-bugbear
|
||||||
|
B006, B007, B008, B009, B010, B903, B950,
|
||||||
|
|
||||||
|
# Plug-in: flake8-simplify
|
||||||
|
SIM105, SIM106, SIM119,
|
||||||
|
|
||||||
|
# Plug-in: flake8-comprehensions
|
||||||
|
C901
|
||||||
|
|
||||||
|
# Plug-in: flake8-pytest-style
|
||||||
|
PT001, PT004, PT006, PT011, PT018, PT022, PT023,
|
||||||
|
|
||||||
|
# Plug-in: flake8-docstrings
|
||||||
|
D100, D101, D102, D103, D104, D105, D106, D107,
|
||||||
|
D200, D202, D204, D205, D209,
|
||||||
|
D301,
|
||||||
|
D400, D401, D402, D403, D405, D406, D407, D409, D411, D412, D414,
|
||||||
|
|
||||||
|
# Plug-in: flake8-rst-docstrings
|
||||||
|
RST201, RST203, RST210, RST212, RST213, RST215,
|
||||||
|
RST301, RST303,
|
||||||
36
crates/flake8_to_ruff/examples/poetry.ini
Normal file
36
crates/flake8_to_ruff/examples/poetry.ini
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
[flake8]
|
||||||
|
min_python_version = 3.7.0
|
||||||
|
max-line-length = 88
|
||||||
|
ban-relative-imports = true
|
||||||
|
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
|
||||||
|
format-greedy = 1
|
||||||
|
inline-quotes = double
|
||||||
|
enable-extensions = TC, TC1
|
||||||
|
type-checking-strict = true
|
||||||
|
eradicate-whitelist-extend = ^-.*;
|
||||||
|
extend-ignore =
|
||||||
|
# E203: Whitespace before ':' (pycqa/pycodestyle#373)
|
||||||
|
E203,
|
||||||
|
# SIM106: Handle error-cases first
|
||||||
|
SIM106,
|
||||||
|
# ANN101: Missing type annotation for self in method
|
||||||
|
ANN101,
|
||||||
|
# ANN102: Missing type annotation for cls in classmethod
|
||||||
|
ANN102,
|
||||||
|
# PIE781: assign-and-return
|
||||||
|
PIE781,
|
||||||
|
# PIE798 no-unnecessary-class: Consider using a module for namespacing instead
|
||||||
|
PIE798,
|
||||||
|
per-file-ignores =
|
||||||
|
# TC002: Move third-party import '...' into a type-checking block
|
||||||
|
__init__.py:TC002,
|
||||||
|
# ANN201: Missing return type annotation for public function
|
||||||
|
tests/test_*:ANN201
|
||||||
|
tests/**/test_*:ANN201
|
||||||
|
extend-exclude =
|
||||||
|
# Frozen and not subject to change in this repo:
|
||||||
|
get-poetry.py,
|
||||||
|
install-poetry.py,
|
||||||
|
# External to the project's coding standards:
|
||||||
|
tests/fixtures/*,
|
||||||
|
tests/**/fixtures/*,
|
||||||
19
crates/flake8_to_ruff/examples/python-discord.ini
Normal file
19
crates/flake8_to_ruff/examples/python-discord.ini
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
[flake8]
|
||||||
|
max-line-length=120
|
||||||
|
docstring-convention=all
|
||||||
|
import-order-style=pycharm
|
||||||
|
application_import_names=bot,tests
|
||||||
|
exclude=.cache,.venv,.git,constants.py
|
||||||
|
extend-ignore=
|
||||||
|
B311,W503,E226,S311,T000,E731
|
||||||
|
# Missing Docstrings
|
||||||
|
D100,D104,D105,D107,
|
||||||
|
# Docstring Whitespace
|
||||||
|
D203,D212,D214,D215,
|
||||||
|
# Docstring Quotes
|
||||||
|
D301,D302,
|
||||||
|
# Docstring Content
|
||||||
|
D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
|
||||||
|
# Type Annotations
|
||||||
|
ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
|
||||||
|
per-file-ignores=tests/*:D,ANN
|
||||||
6
crates/flake8_to_ruff/examples/requests.ini
Normal file
6
crates/flake8_to_ruff/examples/requests.ini
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
[flake8]
|
||||||
|
ignore = E203, E501, W503
|
||||||
|
per-file-ignores =
|
||||||
|
requests/__init__.py:E402, F401
|
||||||
|
requests/compat.py:E402, F401
|
||||||
|
tests/compat.py:F401
|
||||||
34
crates/flake8_to_ruff/pyproject.toml
Normal file
34
crates/flake8_to_ruff/pyproject.toml
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
[project]
|
||||||
|
name = "flake8-to-ruff"
|
||||||
|
keywords = ["automation", "flake8", "pycodestyle", "pyflakes", "pylint", "clippy"]
|
||||||
|
classifiers = [
|
||||||
|
"Development Status :: 3 - Alpha",
|
||||||
|
"Environment :: Console",
|
||||||
|
"Intended Audience :: Developers",
|
||||||
|
"License :: OSI Approved :: MIT License",
|
||||||
|
"Operating System :: OS Independent",
|
||||||
|
"Programming Language :: Python",
|
||||||
|
"Programming Language :: Python :: 3.7",
|
||||||
|
"Programming Language :: Python :: 3.8",
|
||||||
|
"Programming Language :: Python :: 3.9",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
|
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||||
|
"Topic :: Software Development :: Quality Assurance",
|
||||||
|
]
|
||||||
|
author = "Charlie Marsh"
|
||||||
|
author_email = "charlie.r.marsh@gmail.com"
|
||||||
|
description = "Convert existing Flake8 configuration to Ruff."
|
||||||
|
requires-python = ">=3.7"
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
repository = "https://github.com/astral-sh/ruff#subdirectory=crates/flake8_to_ruff"
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["maturin>=1.0,<2.0"]
|
||||||
|
build-backend = "maturin"
|
||||||
|
|
||||||
|
[tool.maturin]
|
||||||
|
bindings = "bin"
|
||||||
|
strip = true
|
||||||
13
crates/flake8_to_ruff/src/black.rs
Normal file
13
crates/flake8_to_ruff/src/black.rs
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
//! Extract Black configuration settings from a pyproject.toml.
|
||||||
|
|
||||||
|
use ruff_linter::line_width::LineLength;
|
||||||
|
use ruff_linter::settings::types::PythonVersion;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
|
||||||
|
pub(crate) struct Black {
|
||||||
|
#[serde(alias = "line-length", alias = "line_length")]
|
||||||
|
pub(crate) line_length: Option<LineLength>,
|
||||||
|
#[serde(alias = "target-version", alias = "target_version")]
|
||||||
|
pub(crate) target_version: Option<Vec<PythonVersion>>,
|
||||||
|
}
|
||||||
687
crates/flake8_to_ruff/src/converter.rs
Normal file
687
crates/flake8_to_ruff/src/converter.rs
Normal file
@@ -0,0 +1,687 @@
|
|||||||
|
use std::collections::{HashMap, HashSet};
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use itertools::Itertools;
|
||||||
|
|
||||||
|
use ruff_linter::line_width::LineLength;
|
||||||
|
use ruff_linter::registry::Linter;
|
||||||
|
use ruff_linter::rule_selector::RuleSelector;
|
||||||
|
use ruff_linter::rules::flake8_pytest_style::types::{
|
||||||
|
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
|
||||||
|
};
|
||||||
|
use ruff_linter::rules::flake8_quotes::settings::Quote;
|
||||||
|
use ruff_linter::rules::flake8_tidy_imports::settings::Strictness;
|
||||||
|
use ruff_linter::rules::pydocstyle::settings::Convention;
|
||||||
|
use ruff_linter::settings::types::PythonVersion;
|
||||||
|
use ruff_linter::settings::DEFAULT_SELECTORS;
|
||||||
|
use ruff_linter::warn_user;
|
||||||
|
use ruff_workspace::options::{
|
||||||
|
Flake8AnnotationsOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, Flake8ErrMsgOptions,
|
||||||
|
Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, LintCommonOptions,
|
||||||
|
LintOptions, McCabeOptions, Options, Pep8NamingOptions, PydocstyleOptions,
|
||||||
|
};
|
||||||
|
use ruff_workspace::pyproject::Pyproject;
|
||||||
|
|
||||||
|
use super::external_config::ExternalConfig;
|
||||||
|
use super::plugin::Plugin;
|
||||||
|
use super::{parser, plugin};
|
||||||
|
|
||||||
|
pub(crate) fn convert(
|
||||||
|
config: &HashMap<String, HashMap<String, Option<String>>>,
|
||||||
|
external_config: &ExternalConfig,
|
||||||
|
plugins: Option<Vec<Plugin>>,
|
||||||
|
) -> Pyproject {
|
||||||
|
// Extract the Flake8 section.
|
||||||
|
let flake8 = config
|
||||||
|
.get("flake8")
|
||||||
|
.expect("Unable to find flake8 section in INI file");
|
||||||
|
|
||||||
|
// Extract all referenced rule code prefixes, to power plugin inference.
|
||||||
|
let mut referenced_codes: HashSet<RuleSelector> = HashSet::default();
|
||||||
|
for (key, value) in flake8 {
|
||||||
|
if let Some(value) = value {
|
||||||
|
match key.as_str() {
|
||||||
|
"select" | "ignore" | "extend-select" | "extend_select" | "extend-ignore"
|
||||||
|
| "extend_ignore" => {
|
||||||
|
referenced_codes.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||||
|
}
|
||||||
|
"per-file-ignores" | "per_file_ignores" => {
|
||||||
|
if let Ok(per_file_ignores) =
|
||||||
|
parser::parse_files_to_codes_mapping(value.as_ref())
|
||||||
|
{
|
||||||
|
for (_, codes) in parser::collect_per_file_ignores(per_file_ignores) {
|
||||||
|
referenced_codes.extend(codes);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Infer plugins, if not provided.
|
||||||
|
let plugins = plugins.unwrap_or_else(|| {
|
||||||
|
let from_options = plugin::infer_plugins_from_options(flake8);
|
||||||
|
if !from_options.is_empty() {
|
||||||
|
#[allow(clippy::print_stderr)]
|
||||||
|
{
|
||||||
|
eprintln!("Inferred plugins from settings: {from_options:#?}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let from_codes = plugin::infer_plugins_from_codes(&referenced_codes);
|
||||||
|
if !from_codes.is_empty() {
|
||||||
|
#[allow(clippy::print_stderr)]
|
||||||
|
{
|
||||||
|
eprintln!("Inferred plugins from referenced codes: {from_codes:#?}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
from_options.into_iter().chain(from_codes).collect()
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check if the user has specified a `select`. If not, we'll add our own
|
||||||
|
// default `select`, and populate it based on user plugins.
|
||||||
|
let mut select = flake8
|
||||||
|
.get("select")
|
||||||
|
.and_then(|value| {
|
||||||
|
value
|
||||||
|
.as_ref()
|
||||||
|
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
|
||||||
|
})
|
||||||
|
.unwrap_or_else(|| resolve_select(&plugins));
|
||||||
|
let mut ignore: HashSet<RuleSelector> = flake8
|
||||||
|
.get("ignore")
|
||||||
|
.and_then(|value| {
|
||||||
|
value
|
||||||
|
.as_ref()
|
||||||
|
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
|
||||||
|
})
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
// Parse each supported option.
|
||||||
|
let mut options = Options::default();
|
||||||
|
let mut lint_options = LintCommonOptions::default();
|
||||||
|
let mut flake8_annotations = Flake8AnnotationsOptions::default();
|
||||||
|
let mut flake8_bugbear = Flake8BugbearOptions::default();
|
||||||
|
let mut flake8_builtins = Flake8BuiltinsOptions::default();
|
||||||
|
let mut flake8_errmsg = Flake8ErrMsgOptions::default();
|
||||||
|
let mut flake8_pytest_style = Flake8PytestStyleOptions::default();
|
||||||
|
let mut flake8_quotes = Flake8QuotesOptions::default();
|
||||||
|
let mut flake8_tidy_imports = Flake8TidyImportsOptions::default();
|
||||||
|
let mut mccabe = McCabeOptions::default();
|
||||||
|
let mut pep8_naming = Pep8NamingOptions::default();
|
||||||
|
let mut pydocstyle = PydocstyleOptions::default();
|
||||||
|
for (key, value) in flake8 {
|
||||||
|
if let Some(value) = value {
|
||||||
|
match key.as_str() {
|
||||||
|
// flake8
|
||||||
|
"builtins" => {
|
||||||
|
options.builtins = Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"max-line-length" | "max_line_length" => match LineLength::from_str(value) {
|
||||||
|
Ok(line_length) => options.line_length = Some(line_length),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"select" => {
|
||||||
|
// No-op (handled above).
|
||||||
|
select.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||||
|
}
|
||||||
|
"ignore" => {
|
||||||
|
// No-op (handled above).
|
||||||
|
}
|
||||||
|
"extend-select" | "extend_select" => {
|
||||||
|
// Unlike Flake8, use a single explicit `select`.
|
||||||
|
select.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||||
|
}
|
||||||
|
"extend-ignore" | "extend_ignore" => {
|
||||||
|
// Unlike Flake8, use a single explicit `ignore`.
|
||||||
|
ignore.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||||
|
}
|
||||||
|
"exclude" => {
|
||||||
|
options.exclude = Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"extend-exclude" | "extend_exclude" => {
|
||||||
|
options.extend_exclude = Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"per-file-ignores" | "per_file_ignores" => {
|
||||||
|
match parser::parse_files_to_codes_mapping(value.as_ref()) {
|
||||||
|
Ok(per_file_ignores) => {
|
||||||
|
lint_options.per_file_ignores =
|
||||||
|
Some(parser::collect_per_file_ignores(per_file_ignores));
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// flake8-bugbear
|
||||||
|
"extend-immutable-calls" | "extend_immutable_calls" => {
|
||||||
|
flake8_bugbear.extend_immutable_calls =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
// flake8-builtins
|
||||||
|
"builtins-ignorelist" | "builtins_ignorelist" => {
|
||||||
|
flake8_builtins.builtins_ignorelist =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
// flake8-annotations
|
||||||
|
"suppress-none-returning" | "suppress_none_returning" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_annotations.suppress_none_returning = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"suppress-dummy-args" | "suppress_dummy_args" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_annotations.suppress_dummy_args = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"mypy-init-return" | "mypy_init_return" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_annotations.mypy_init_return = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"allow-star-arg-any" | "allow_star_arg_any" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_annotations.allow_star_arg_any = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// flake8-quotes
|
||||||
|
"quotes" | "inline-quotes" | "inline_quotes" => match value.trim() {
|
||||||
|
"'" | "single" => flake8_quotes.inline_quotes = Some(Quote::Single),
|
||||||
|
"\"" | "double" => flake8_quotes.inline_quotes = Some(Quote::Double),
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"multiline-quotes" | "multiline_quotes" => match value.trim() {
|
||||||
|
"'" | "single" => flake8_quotes.multiline_quotes = Some(Quote::Single),
|
||||||
|
"\"" | "double" => flake8_quotes.multiline_quotes = Some(Quote::Double),
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"docstring-quotes" | "docstring_quotes" => match value.trim() {
|
||||||
|
"'" | "single" => flake8_quotes.docstring_quotes = Some(Quote::Single),
|
||||||
|
"\"" | "double" => flake8_quotes.docstring_quotes = Some(Quote::Double),
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"avoid-escape" | "avoid_escape" => match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_quotes.avoid_escape = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// pep8-naming
|
||||||
|
"ignore-names" | "ignore_names" => {
|
||||||
|
pep8_naming.ignore_names = Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"classmethod-decorators" | "classmethod_decorators" => {
|
||||||
|
pep8_naming.classmethod_decorators =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"staticmethod-decorators" | "staticmethod_decorators" => {
|
||||||
|
pep8_naming.staticmethod_decorators =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
// flake8-tidy-imports
|
||||||
|
"ban-relative-imports" | "ban_relative_imports" => match value.trim() {
|
||||||
|
"true" => flake8_tidy_imports.ban_relative_imports = Some(Strictness::All),
|
||||||
|
"parents" => {
|
||||||
|
flake8_tidy_imports.ban_relative_imports = Some(Strictness::Parents);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// flake8-docstrings
|
||||||
|
"docstring-convention" => match value.trim() {
|
||||||
|
"google" => pydocstyle.convention = Some(Convention::Google),
|
||||||
|
"numpy" => pydocstyle.convention = Some(Convention::Numpy),
|
||||||
|
"pep257" => pydocstyle.convention = Some(Convention::Pep257),
|
||||||
|
"all" => pydocstyle.convention = None,
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// mccabe
|
||||||
|
"max-complexity" | "max_complexity" => match value.parse::<usize>() {
|
||||||
|
Ok(max_complexity) => mccabe.max_complexity = Some(max_complexity),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// flake8-errmsg
|
||||||
|
"errmsg-max-string-length" | "errmsg_max_string_length" => {
|
||||||
|
match value.parse::<usize>() {
|
||||||
|
Ok(max_string_length) => {
|
||||||
|
flake8_errmsg.max_string_length = Some(max_string_length);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// flake8-pytest-style
|
||||||
|
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_pytest_style.fixture_parentheses = Some(!bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
|
||||||
|
match value.trim() {
|
||||||
|
"csv" => {
|
||||||
|
flake8_pytest_style.parametrize_names_type =
|
||||||
|
Some(ParametrizeNameType::Csv);
|
||||||
|
}
|
||||||
|
"tuple" => {
|
||||||
|
flake8_pytest_style.parametrize_names_type =
|
||||||
|
Some(ParametrizeNameType::Tuple);
|
||||||
|
}
|
||||||
|
"list" => {
|
||||||
|
flake8_pytest_style.parametrize_names_type =
|
||||||
|
Some(ParametrizeNameType::List);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
|
||||||
|
match value.trim() {
|
||||||
|
"tuple" => {
|
||||||
|
flake8_pytest_style.parametrize_values_type =
|
||||||
|
Some(ParametrizeValuesType::Tuple);
|
||||||
|
}
|
||||||
|
"list" => {
|
||||||
|
flake8_pytest_style.parametrize_values_type =
|
||||||
|
Some(ParametrizeValuesType::List);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
|
||||||
|
match value.trim() {
|
||||||
|
"tuple" => {
|
||||||
|
flake8_pytest_style.parametrize_values_row_type =
|
||||||
|
Some(ParametrizeValuesRowType::Tuple);
|
||||||
|
}
|
||||||
|
"list" => {
|
||||||
|
flake8_pytest_style.parametrize_values_row_type =
|
||||||
|
Some(ParametrizeValuesRowType::List);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
|
||||||
|
flake8_pytest_style.raises_require_match_for =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_pytest_style.mark_parentheses = Some(!bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Unknown
|
||||||
|
_ => {
|
||||||
|
warn_user!("Skipping unsupported property: {}", key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deduplicate and sort.
|
||||||
|
lint_options.select = Some(
|
||||||
|
select
|
||||||
|
.into_iter()
|
||||||
|
.sorted_by_key(RuleSelector::prefix_and_code)
|
||||||
|
.collect(),
|
||||||
|
);
|
||||||
|
lint_options.ignore = Some(
|
||||||
|
ignore
|
||||||
|
.into_iter()
|
||||||
|
.sorted_by_key(RuleSelector::prefix_and_code)
|
||||||
|
.collect(),
|
||||||
|
);
|
||||||
|
if flake8_annotations != Flake8AnnotationsOptions::default() {
|
||||||
|
lint_options.flake8_annotations = Some(flake8_annotations);
|
||||||
|
}
|
||||||
|
if flake8_bugbear != Flake8BugbearOptions::default() {
|
||||||
|
lint_options.flake8_bugbear = Some(flake8_bugbear);
|
||||||
|
}
|
||||||
|
if flake8_builtins != Flake8BuiltinsOptions::default() {
|
||||||
|
lint_options.flake8_builtins = Some(flake8_builtins);
|
||||||
|
}
|
||||||
|
if flake8_errmsg != Flake8ErrMsgOptions::default() {
|
||||||
|
lint_options.flake8_errmsg = Some(flake8_errmsg);
|
||||||
|
}
|
||||||
|
if flake8_pytest_style != Flake8PytestStyleOptions::default() {
|
||||||
|
lint_options.flake8_pytest_style = Some(flake8_pytest_style);
|
||||||
|
}
|
||||||
|
if flake8_quotes != Flake8QuotesOptions::default() {
|
||||||
|
lint_options.flake8_quotes = Some(flake8_quotes);
|
||||||
|
}
|
||||||
|
if flake8_tidy_imports != Flake8TidyImportsOptions::default() {
|
||||||
|
lint_options.flake8_tidy_imports = Some(flake8_tidy_imports);
|
||||||
|
}
|
||||||
|
if mccabe != McCabeOptions::default() {
|
||||||
|
lint_options.mccabe = Some(mccabe);
|
||||||
|
}
|
||||||
|
if pep8_naming != Pep8NamingOptions::default() {
|
||||||
|
lint_options.pep8_naming = Some(pep8_naming);
|
||||||
|
}
|
||||||
|
if pydocstyle != PydocstyleOptions::default() {
|
||||||
|
lint_options.pydocstyle = Some(pydocstyle);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract any settings from the existing `pyproject.toml`.
|
||||||
|
if let Some(black) = &external_config.black {
|
||||||
|
if let Some(line_length) = &black.line_length {
|
||||||
|
options.line_length = Some(*line_length);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(target_version) = &black.target_version {
|
||||||
|
if let Some(target_version) = target_version.iter().min() {
|
||||||
|
options.target_version = Some(*target_version);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(isort) = &external_config.isort {
|
||||||
|
if let Some(src_paths) = &isort.src_paths {
|
||||||
|
match options.src.as_mut() {
|
||||||
|
Some(src) => {
|
||||||
|
src.extend_from_slice(src_paths);
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
options.src = Some(src_paths.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(project) = &external_config.project {
|
||||||
|
if let Some(requires_python) = &project.requires_python {
|
||||||
|
if options.target_version.is_none() {
|
||||||
|
options.target_version =
|
||||||
|
PythonVersion::get_minimum_supported_version(requires_python);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if lint_options != LintCommonOptions::default() {
|
||||||
|
options.lint = Some(LintOptions {
|
||||||
|
common: lint_options,
|
||||||
|
..LintOptions::default()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the pyproject.toml.
|
||||||
|
Pyproject::new(options)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Resolve the set of enabled `RuleSelector` values for the given
|
||||||
|
/// plugins.
|
||||||
|
fn resolve_select(plugins: &[Plugin]) -> HashSet<RuleSelector> {
|
||||||
|
let mut select: HashSet<_> = DEFAULT_SELECTORS.iter().cloned().collect();
|
||||||
|
select.extend(plugins.iter().map(|p| Linter::from(p).into()));
|
||||||
|
select
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use itertools::Itertools;
|
||||||
|
use pep440_rs::VersionSpecifiers;
|
||||||
|
|
||||||
|
use pretty_assertions::assert_eq;
|
||||||
|
use ruff_linter::line_width::LineLength;
|
||||||
|
use ruff_linter::registry::Linter;
|
||||||
|
use ruff_linter::rule_selector::RuleSelector;
|
||||||
|
use ruff_linter::rules::flake8_quotes;
|
||||||
|
use ruff_linter::rules::pydocstyle::settings::Convention;
|
||||||
|
use ruff_linter::settings::types::PythonVersion;
|
||||||
|
use ruff_workspace::options::{
|
||||||
|
Flake8QuotesOptions, LintCommonOptions, LintOptions, Options, PydocstyleOptions,
|
||||||
|
};
|
||||||
|
use ruff_workspace::pyproject::Pyproject;
|
||||||
|
|
||||||
|
use crate::converter::DEFAULT_SELECTORS;
|
||||||
|
use crate::pep621::Project;
|
||||||
|
use crate::ExternalConfig;
|
||||||
|
|
||||||
|
use super::super::plugin::Plugin;
|
||||||
|
use super::convert;
|
||||||
|
|
||||||
|
fn lint_default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> LintCommonOptions {
|
||||||
|
LintCommonOptions {
|
||||||
|
ignore: Some(vec![]),
|
||||||
|
select: Some(
|
||||||
|
DEFAULT_SELECTORS
|
||||||
|
.iter()
|
||||||
|
.cloned()
|
||||||
|
.chain(plugins)
|
||||||
|
.sorted_by_key(RuleSelector::prefix_and_code)
|
||||||
|
.collect(),
|
||||||
|
),
|
||||||
|
..LintCommonOptions::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn it_converts_empty() {
|
||||||
|
let actual = convert(
|
||||||
|
&HashMap::from([("flake8".to_string(), HashMap::default())]),
|
||||||
|
&ExternalConfig::default(),
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
let expected = Pyproject::new(Options {
|
||||||
|
lint: Some(LintOptions {
|
||||||
|
common: lint_default_options([]),
|
||||||
|
..LintOptions::default()
|
||||||
|
}),
|
||||||
|
..Options::default()
|
||||||
|
});
|
||||||
|
assert_eq!(actual, expected);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Dashed option names (e.g., `max-line-length`) are recognized.
#[test]
fn it_converts_dashes() {
    let actual = convert(
        &HashMap::from([(
            "flake8".to_string(),
            HashMap::from([("max-line-length".to_string(), Some("100".to_string()))]),
        )]),
        &ExternalConfig::default(),
        Some(vec![]),
    );
    let expected = Pyproject::new(Options {
        line_length: Some(LineLength::try_from(100).unwrap()),
        lint: Some(LintOptions {
            common: lint_default_options([]),
            ..LintOptions::default()
        }),
        ..Options::default()
    });
    assert_eq!(actual, expected);
}
|
||||||
|
|
||||||
|
// Underscored option names (e.g., `max_line_length`) are recognized too.
#[test]
fn it_converts_underscores() {
    let actual = convert(
        &HashMap::from([(
            "flake8".to_string(),
            HashMap::from([("max_line_length".to_string(), Some("100".to_string()))]),
        )]),
        &ExternalConfig::default(),
        Some(vec![]),
    );
    let expected = Pyproject::new(Options {
        line_length: Some(LineLength::try_from(100).unwrap()),
        lint: Some(LintOptions {
            common: lint_default_options([]),
            ..LintOptions::default()
        }),
        ..Options::default()
    });
    assert_eq!(actual, expected);
}
|
||||||
|
|
||||||
|
// Unparseable values (here a non-numeric line length) are dropped
// silently rather than failing the whole conversion.
#[test]
fn it_ignores_parse_errors() {
    let actual = convert(
        &HashMap::from([(
            "flake8".to_string(),
            HashMap::from([("max_line_length".to_string(), Some("abc".to_string()))]),
        )]),
        &ExternalConfig::default(),
        Some(vec![]),
    );
    let expected = Pyproject::new(Options {
        lint: Some(LintOptions {
            common: lint_default_options([]),
            ..LintOptions::default()
        }),
        ..Options::default()
    });
    assert_eq!(actual, expected);
}
|
||||||
|
|
||||||
|
// Plugin-specific options (flake8-quotes' `inline-quotes`) map into the
// corresponding Ruff plugin settings block.
#[test]
fn it_converts_plugin_options() {
    let actual = convert(
        &HashMap::from([(
            "flake8".to_string(),
            HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
        )]),
        &ExternalConfig::default(),
        Some(vec![]),
    );
    let expected = Pyproject::new(Options {
        lint: Some(LintOptions {
            common: LintCommonOptions {
                flake8_quotes: Some(Flake8QuotesOptions {
                    inline_quotes: Some(flake8_quotes::settings::Quote::Single),
                    multiline_quotes: None,
                    docstring_quotes: None,
                    avoid_escape: None,
                }),
                ..lint_default_options([])
            },
            ..LintOptions::default()
        }),
        ..Options::default()
    });
    assert_eq!(actual, expected);
}
|
||||||
|
|
||||||
|
// With the flake8-docstrings plugin enabled, `docstring-convention`
// converts to a pydocstyle convention, and Pydocstyle is selected.
#[test]
fn it_converts_docstring_conventions() {
    let actual = convert(
        &HashMap::from([(
            "flake8".to_string(),
            HashMap::from([(
                "docstring-convention".to_string(),
                Some("numpy".to_string()),
            )]),
        )]),
        &ExternalConfig::default(),
        Some(vec![Plugin::Flake8Docstrings]),
    );
    let expected = Pyproject::new(Options {
        lint: Some(LintOptions {
            common: LintCommonOptions {
                pydocstyle: Some(PydocstyleOptions {
                    convention: Some(Convention::Numpy),
                    ignore_decorators: None,
                    property_decorators: None,
                }),
                ..lint_default_options([Linter::Pydocstyle.into()])
            },
            ..LintOptions::default()
        }),
        ..Options::default()
    });
    assert_eq!(actual, expected);
}
|
||||||
|
|
||||||
|
// When no plugin list is supplied (`None`), plugins are inferred from the
// options present: `inline-quotes` implies flake8-quotes.
#[test]
fn it_infers_plugins_if_omitted() {
    let actual = convert(
        &HashMap::from([(
            "flake8".to_string(),
            HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
        )]),
        &ExternalConfig::default(),
        None,
    );
    let expected = Pyproject::new(Options {
        lint: Some(LintOptions {
            common: LintCommonOptions {
                flake8_quotes: Some(Flake8QuotesOptions {
                    inline_quotes: Some(flake8_quotes::settings::Quote::Single),
                    multiline_quotes: None,
                    docstring_quotes: None,
                    avoid_escape: None,
                }),
                ..lint_default_options([Linter::Flake8Quotes.into()])
            },
            ..LintOptions::default()
        }),
        ..Options::default()
    });
    assert_eq!(actual, expected);
}
|
||||||
|
|
||||||
|
// A PEP 621 `requires-python` lower bound maps to Ruff's `target-version`
// (>=3.8.16 resolves to Py38).
#[test]
fn it_converts_project_requires_python() -> Result<()> {
    let actual = convert(
        &HashMap::from([("flake8".to_string(), HashMap::default())]),
        &ExternalConfig {
            project: Some(&Project {
                requires_python: Some(VersionSpecifiers::from_str(">=3.8.16, <3.11")?),
            }),
            ..ExternalConfig::default()
        },
        Some(vec![]),
    );
    let expected = Pyproject::new(Options {
        target_version: Some(PythonVersion::Py38),
        lint: Some(LintOptions {
            common: lint_default_options([]),
            ..LintOptions::default()
        }),
        ..Options::default()
    });
    assert_eq!(actual, expected);

    Ok(())
}
|
||||||
|
}
|
||||||
10
crates/flake8_to_ruff/src/external_config.rs
Normal file
10
crates/flake8_to_ruff/src/external_config.rs
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
use super::black::Black;
|
||||||
|
use super::isort::Isort;
|
||||||
|
use super::pep621::Project;
|
||||||
|
|
||||||
|
/// Non-Flake8 configuration sources that can inform the conversion:
/// Black and isort settings, and the PEP 621 `project` table. All fields
/// are optional borrows into an already-parsed `pyproject.toml`.
#[derive(Default)]
pub(crate) struct ExternalConfig<'a> {
    // Black settings, if a `[tool.black]` table was present.
    pub(crate) black: Option<&'a Black>,
    // isort settings, if a `[tool.isort]` table was present.
    pub(crate) isort: Option<&'a Isort>,
    // The PEP 621 `[project]` table, if present.
    pub(crate) project: Option<&'a Project>,
}
|
||||||
10
crates/flake8_to_ruff/src/isort.rs
Normal file
10
crates/flake8_to_ruff/src/isort.rs
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
//! Extract isort configuration settings from a pyproject.toml.
|
||||||
|
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// The [isort configuration](https://pycqa.github.io/isort/docs/configuration/config_files.html).
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Isort {
    // The `src-paths` setting; both dashed and underscored spellings are
    // accepted when deserializing.
    #[serde(alias = "src-paths", alias = "src_paths")]
    pub(crate) src_paths: Option<Vec<String>>,
}
|
||||||
80
crates/flake8_to_ruff/src/main.rs
Normal file
80
crates/flake8_to_ruff/src/main.rs
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.
|
||||||
|
|
||||||
|
mod black;
|
||||||
|
mod converter;
|
||||||
|
mod external_config;
|
||||||
|
mod isort;
|
||||||
|
mod parser;
|
||||||
|
mod pep621;
|
||||||
|
mod plugin;
|
||||||
|
mod pyproject;
|
||||||
|
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use clap::Parser;
|
||||||
|
use configparser::ini::Ini;
|
||||||
|
|
||||||
|
use crate::converter::convert;
|
||||||
|
use crate::external_config::ExternalConfig;
|
||||||
|
use crate::plugin::Plugin;
|
||||||
|
use crate::pyproject::parse;
|
||||||
|
use ruff_linter::logging::{set_up_logging, LogLevel};
|
||||||
|
|
||||||
|
// Command-line arguments for the Flake8-to-Ruff converter, parsed via
// clap's derive API. (Plain comment rather than `///` so that clap's
// `about`/`long_about` help text, set explicitly below, is unaffected.)
#[derive(Parser)]
#[command(
    about = "Convert existing Flake8 configuration to Ruff.",
    long_about = None
)]
struct Args {
    /// Path to the Flake8 configuration file (e.g., `setup.cfg`, `tox.ini`, or
    /// `.flake8`).
    #[arg(required = true)]
    file: PathBuf,
    /// Optional path to a `pyproject.toml` file, used to ensure compatibility
    /// with Black.
    #[arg(long)]
    pyproject: Option<PathBuf>,
    /// List of plugins to enable.
    #[arg(long, value_delimiter = ',')]
    plugin: Option<Vec<Plugin>>,
}
|
||||||
|
|
||||||
|
fn main() -> Result<()> {
|
||||||
|
set_up_logging(&LogLevel::Default)?;
|
||||||
|
|
||||||
|
let args = Args::parse();
|
||||||
|
|
||||||
|
// Read the INI file.
|
||||||
|
let mut ini = Ini::new_cs();
|
||||||
|
ini.set_multiline(true);
|
||||||
|
let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;
|
||||||
|
|
||||||
|
// Read the pyproject.toml file.
|
||||||
|
let pyproject = args.pyproject.map(parse).transpose()?;
|
||||||
|
let external_config = pyproject
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|pyproject| pyproject.tool.as_ref())
|
||||||
|
.map(|tool| ExternalConfig {
|
||||||
|
black: tool.black.as_ref(),
|
||||||
|
isort: tool.isort.as_ref(),
|
||||||
|
..Default::default()
|
||||||
|
})
|
||||||
|
.unwrap_or_default();
|
||||||
|
let external_config = ExternalConfig {
|
||||||
|
project: pyproject
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|pyproject| pyproject.project.as_ref()),
|
||||||
|
..external_config
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create Ruff's pyproject.toml section.
|
||||||
|
let pyproject = convert(&config, &external_config, args.plugin);
|
||||||
|
|
||||||
|
#[allow(clippy::print_stdout)]
|
||||||
|
{
|
||||||
|
println!("{}", toml::to_string_pretty(&pyproject)?);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
391
crates/flake8_to_ruff/src/parser.rs
Normal file
391
crates/flake8_to_ruff/src/parser.rs
Normal file
@@ -0,0 +1,391 @@
|
|||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use anyhow::{bail, Result};
|
||||||
|
use once_cell::sync::Lazy;
|
||||||
|
use regex::Regex;
|
||||||
|
use rustc_hash::FxHashMap;
|
||||||
|
|
||||||
|
use ruff_linter::settings::types::PatternPrefixPair;
|
||||||
|
use ruff_linter::{warn_user, RuleSelector};
|
||||||
|
|
||||||
|
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
|
||||||
|
|
||||||
|
/// Parse a comma-separated list of `RuleSelector` values (e.g.,
|
||||||
|
/// "F401,E501").
|
||||||
|
pub(crate) fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
|
||||||
|
let mut codes: Vec<RuleSelector> = vec![];
|
||||||
|
for code in COMMA_SEPARATED_LIST_RE.split(value) {
|
||||||
|
let code = code.trim();
|
||||||
|
if code.is_empty() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if let Ok(code) = RuleSelector::from_str(code) {
|
||||||
|
codes.push(code);
|
||||||
|
} else {
|
||||||
|
warn_user!("Unsupported prefix code: {code}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
codes
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a comma-separated list of strings (e.g., "__init__.py,__main__.py").
|
||||||
|
pub(crate) fn parse_strings(value: &str) -> Vec<String> {
|
||||||
|
COMMA_SEPARATED_LIST_RE
|
||||||
|
.split(value)
|
||||||
|
.map(str::trim)
|
||||||
|
.filter(|part| !part.is_empty())
|
||||||
|
.map(String::from)
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a boolean.
|
||||||
|
pub(crate) fn parse_bool(value: &str) -> Result<bool> {
|
||||||
|
match value.trim() {
|
||||||
|
"true" => Ok(true),
|
||||||
|
"false" => Ok(false),
|
||||||
|
_ => bail!("Unexpected boolean value: {value}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A single token scanned from a raw `files-to-codes` mapping.
#[derive(Debug)]
struct Token {
    // The kind of token.
    token_name: TokenType,
    // The trimmed source text of the token (empty for `Eof`).
    src: String,
}
|
||||||
|
|
||||||
|
/// The kinds of tokens that can appear in a `files-to-codes` mapping.
#[derive(Debug, Copy, Clone)]
enum TokenType {
    // A rule code (e.g., `E501`).
    Code,
    // A filename or glob pattern.
    File,
    // The colon separating filenames from codes.
    Colon,
    // A comma separator.
    Comma,
    // A run of whitespace.
    Ws,
    // End of input (synthesized after the last real token).
    Eof,
}
|
||||||
|
|
||||||
|
/// Accumulator state for one `filenames: codes` entry while parsing a
/// `files-to-codes` mapping.
struct State {
    // Whether a separator (comma/whitespace/colon) was just seen, so the
    // next filename or code token is acceptable.
    seen_sep: bool,
    // Whether the colon dividing filenames from codes has been seen.
    seen_colon: bool,
    // Filenames accumulated for the current entry.
    filenames: Vec<String>,
    // Codes accumulated for the current entry.
    codes: Vec<String>,
}
|
||||||
|
|
||||||
|
impl State {
    /// Create a fresh state. `seen_sep` starts `true` so that the very
    /// first filename token is accepted without a preceding separator.
    const fn new() -> Self {
        Self {
            seen_sep: true,
            seen_colon: false,
            filenames: vec![],
            codes: vec![],
        }
    }

    /// Generate the list of `StrRuleCodePair` pairs for the current
    /// state: every parseable code is paired with every accumulated
    /// filename; unsupported codes are skipped with a warning.
    fn parse(&self) -> Vec<PatternPrefixPair> {
        let mut codes: Vec<PatternPrefixPair> = vec![];
        for code in &self.codes {
            if let Ok(code) = RuleSelector::from_str(code) {
                for filename in &self.filenames {
                    codes.push(PatternPrefixPair {
                        pattern: filename.clone(),
                        prefix: code.clone(),
                    });
                }
            } else {
                warn_user!("Unsupported prefix code: {code}");
            }
        }
        codes
    }
}
|
||||||
|
|
||||||
|
/// Tokenize the raw 'files-to-codes' mapping.
|
||||||
|
fn tokenize_files_to_codes_mapping(value: &str) -> Vec<Token> {
|
||||||
|
let mut tokens = vec![];
|
||||||
|
let mut i = 0;
|
||||||
|
while i < value.len() {
|
||||||
|
for (token_re, token_name) in [
|
||||||
|
(
|
||||||
|
Regex::new(r"([A-Z]+[0-9]*)(?:$|\s|,)").unwrap(),
|
||||||
|
TokenType::Code,
|
||||||
|
),
|
||||||
|
(Regex::new(r"([^\s:,]+)").unwrap(), TokenType::File),
|
||||||
|
(Regex::new(r"(\s*:\s*)").unwrap(), TokenType::Colon),
|
||||||
|
(Regex::new(r"(\s*,\s*)").unwrap(), TokenType::Comma),
|
||||||
|
(Regex::new(r"(\s+)").unwrap(), TokenType::Ws),
|
||||||
|
] {
|
||||||
|
if let Some(cap) = token_re.captures(&value[i..]) {
|
||||||
|
let mat = cap.get(1).unwrap();
|
||||||
|
if mat.start() == 0 {
|
||||||
|
tokens.push(Token {
|
||||||
|
token_name,
|
||||||
|
src: mat.as_str().trim().to_string(),
|
||||||
|
});
|
||||||
|
i += mat.end();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tokens.push(Token {
|
||||||
|
token_name: TokenType::Eof,
|
||||||
|
src: String::new(),
|
||||||
|
});
|
||||||
|
tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a 'files-to-codes' mapping, mimicking Flake8's internal logic.
/// See: <https://github.com/PyCQA/flake8/blob/7dfe99616fc2f07c0017df2ba5fa884158f3ea8a/src/flake8/utils.py#L45>
pub(crate) fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
    if value.trim().is_empty() {
        return Ok(vec![]);
    }
    let mut codes: Vec<PatternPrefixPair> = vec![];
    let mut state = State::new();
    for token in tokenize_files_to_codes_mapping(value) {
        if matches!(token.token_name, TokenType::Comma | TokenType::Ws) {
            // Separators only mark a boundary between tokens.
            state.seen_sep = true;
        } else if !state.seen_colon {
            // Before the colon: accumulate filenames.
            if matches!(token.token_name, TokenType::Colon) {
                state.seen_colon = true;
                state.seen_sep = true;
            } else if state.seen_sep && matches!(token.token_name, TokenType::File) {
                state.filenames.push(token.src);
                state.seen_sep = false;
            } else {
                bail!("Unexpected token: {:?}", token.token_name);
            }
        } else {
            // After the colon: accumulate codes. A `File` token here
            // begins the next `filenames: codes` entry (flushing the
            // current one); `Eof` flushes the last entry.
            if matches!(token.token_name, TokenType::Eof) {
                codes.extend(state.parse());
                state = State::new();
            } else if state.seen_sep && matches!(token.token_name, TokenType::Code) {
                state.codes.push(token.src);
                state.seen_sep = false;
            } else if state.seen_sep && matches!(token.token_name, TokenType::File) {
                codes.extend(state.parse());
                state = State::new();
                state.filenames.push(token.src);
                state.seen_sep = false;
            } else {
                bail!("Unexpected token: {:?}", token.token_name);
            }
        }
    }
    Ok(codes)
}
|
||||||
|
|
||||||
|
/// Collect a list of `PatternPrefixPair` structs as a `BTreeMap`.
|
||||||
|
pub(crate) fn collect_per_file_ignores(
|
||||||
|
pairs: Vec<PatternPrefixPair>,
|
||||||
|
) -> FxHashMap<String, Vec<RuleSelector>> {
|
||||||
|
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
|
||||||
|
for pair in pairs {
|
||||||
|
per_file_ignores
|
||||||
|
.entry(pair.pattern)
|
||||||
|
.or_default()
|
||||||
|
.push(pair.prefix);
|
||||||
|
}
|
||||||
|
per_file_ignores
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use anyhow::Result;

    use ruff_linter::codes;
    use ruff_linter::registry::Linter;
    use ruff_linter::settings::types::PatternPrefixPair;
    use ruff_linter::RuleSelector;

    use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};

    // Empty/whitespace input, single codes, trailing commas, and
    // comma+space separation all parse as expected.
    #[test]
    fn it_parses_prefix_codes() {
        let actual = parse_prefix_codes("");
        let expected: Vec<RuleSelector> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes(" ");
        let expected: Vec<RuleSelector> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401");
        let expected = vec![codes::Pyflakes::_401.into()];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401,");
        let expected = vec![codes::Pyflakes::_401.into()];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401,E501");
        let expected = vec![
            codes::Pyflakes::_401.into(),
            codes::Pycodestyle::E501.into(),
        ];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401, E501");
        let expected = vec![
            codes::Pyflakes::_401.into(),
            codes::Pycodestyle::E501.into(),
        ];
        assert_eq!(actual, expected);
    }

    // String lists tolerate empty input, trailing commas, and spaces.
    #[test]
    fn it_parses_strings() {
        let actual = parse_strings("");
        let expected: Vec<String> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_strings(" ");
        let expected: Vec<String> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py");
        let expected = vec!["__init__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py,");
        let expected = vec!["__init__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py,__main__.py");
        let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py, __main__.py");
        let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
        assert_eq!(actual, expected);
    }

    // Real-world `per-file-ignores` values from locust, celery, and
    // scrapy parse into the expected pattern/selector pairs.
    #[test]
    fn it_parse_files_to_codes_mapping() -> Result<()> {
        let actual = parse_files_to_codes_mapping("")?;
        let expected: Vec<PatternPrefixPair> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_files_to_codes_mapping(" ")?;
        let expected: Vec<PatternPrefixPair> = vec![];
        assert_eq!(actual, expected);

        // Ex) locust
        // (The `*.pyi` codes E302/E704 are unsupported selectors, so they
        // are skipped with a warning and absent from `expected`.)
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
            locust/test/*: F841
            examples/*: F841
            *.pyi: E302,E704"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "locust/test/*".to_string(),
                prefix: codes::Pyflakes::_841.into(),
            },
            PatternPrefixPair {
                pattern: "examples/*".to_string(),
                prefix: codes::Pyflakes::_841.into(),
            },
        ];
        assert_eq!(actual, expected);

        // Ex) celery
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
            t/*,setup.py,examples/*,docs/*,extra/*:
            D,"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "t/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "setup.py".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "examples/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "docs/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "extra/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
        ];
        assert_eq!(actual, expected);

        // Ex) scrapy
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
            scrapy/__init__.py:E402
            scrapy/core/downloader/handlers/http.py:F401
            scrapy/http/__init__.py:F401
            scrapy/linkextractors/__init__.py:E402,F401
            scrapy/selector/__init__.py:F401
            scrapy/spiders/__init__.py:E402,F401
            scrapy/utils/url.py:F403,F405
            tests/test_loader.py:E741"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "scrapy/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/http/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/linkextractors/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/linkextractors/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/selector/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/spiders/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/spiders/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/utils/url.py".to_string(),
                prefix: codes::Pyflakes::_403.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/utils/url.py".to_string(),
                prefix: codes::Pyflakes::_405.into(),
            },
            PatternPrefixPair {
                pattern: "tests/test_loader.py".to_string(),
                prefix: codes::Pycodestyle::E741.into(),
            },
        ];
        assert_eq!(actual, expected);

        Ok(())
    }
}
|
||||||
10
crates/flake8_to_ruff/src/pep621.rs
Normal file
10
crates/flake8_to_ruff/src/pep621.rs
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
//! Extract PEP 621 configuration settings from a pyproject.toml.
|
||||||
|
|
||||||
|
use pep440_rs::VersionSpecifiers;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// The PEP 621 `[project]` table, reduced to the fields used by the
/// converter.
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Project {
    // The `requires-python` version specifiers; both dashed and
    // underscored spellings are accepted when deserializing.
    #[serde(alias = "requires-python", alias = "requires_python")]
    pub(crate) requires_python: Option<VersionSpecifiers>,
}
|
||||||
368
crates/flake8_to_ruff/src/plugin.rs
Normal file
368
crates/flake8_to_ruff/src/plugin.rs
Normal file
@@ -0,0 +1,368 @@
|
|||||||
|
use std::collections::{BTreeSet, HashMap, HashSet};
|
||||||
|
use std::fmt;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use anyhow::anyhow;
|
||||||
|
use ruff_linter::registry::Linter;
|
||||||
|
use ruff_linter::rule_selector::PreviewOptions;
|
||||||
|
use ruff_linter::RuleSelector;
|
||||||
|
|
||||||
|
/// A Flake8 plugin with a Ruff-native counterpart. Variants are ordered
/// (Ord/PartialOrd) so plugin sets sort deterministically.
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Plugin {
    Flake82020,
    Flake8Annotations,
    Flake8Bandit,
    Flake8BlindExcept,
    Flake8BooleanTrap,
    Flake8Bugbear,
    Flake8Builtins,
    Flake8Commas,
    Flake8Comprehensions,
    Flake8Datetimez,
    Flake8Debugger,
    Flake8Docstrings,
    Flake8Eradicate,
    Flake8ErrMsg,
    Flake8Executable,
    Flake8ImplicitStrConcat,
    Flake8ImportConventions,
    Flake8NoPep420,
    Flake8Pie,
    Flake8Print,
    Flake8PytestStyle,
    Flake8Quotes,
    Flake8Return,
    Flake8Simplify,
    Flake8TidyImports,
    Flake8TypeChecking,
    Flake8UnusedArguments,
    Flake8UsePathlib,
    McCabe,
    PEP8Naming,
    PandasVet,
    Pyupgrade,
    Tryceratops,
}
|
||||||
|
|
||||||
|
impl FromStr for Plugin {
    type Err = anyhow::Error;

    /// Parse a plugin from its PyPI-style package name
    /// (e.g., `"flake8-bugbear"`). This is the inverse of the `Debug`
    /// formatting below; keep both mappings in sync when adding variants.
    fn from_str(string: &str) -> Result<Self, Self::Err> {
        match string {
            "flake8-2020" => Ok(Plugin::Flake82020),
            "flake8-annotations" => Ok(Plugin::Flake8Annotations),
            "flake8-bandit" => Ok(Plugin::Flake8Bandit),
            "flake8-blind-except" => Ok(Plugin::Flake8BlindExcept),
            "flake8-boolean-trap" => Ok(Plugin::Flake8BooleanTrap),
            "flake8-bugbear" => Ok(Plugin::Flake8Bugbear),
            "flake8-builtins" => Ok(Plugin::Flake8Builtins),
            "flake8-commas" => Ok(Plugin::Flake8Commas),
            "flake8-comprehensions" => Ok(Plugin::Flake8Comprehensions),
            "flake8-datetimez" => Ok(Plugin::Flake8Datetimez),
            "flake8-debugger" => Ok(Plugin::Flake8Debugger),
            "flake8-docstrings" => Ok(Plugin::Flake8Docstrings),
            "flake8-eradicate" => Ok(Plugin::Flake8Eradicate),
            "flake8-errmsg" => Ok(Plugin::Flake8ErrMsg),
            "flake8-executable" => Ok(Plugin::Flake8Executable),
            "flake8-implicit-str-concat" => Ok(Plugin::Flake8ImplicitStrConcat),
            "flake8-import-conventions" => Ok(Plugin::Flake8ImportConventions),
            "flake8-no-pep420" => Ok(Plugin::Flake8NoPep420),
            "flake8-pie" => Ok(Plugin::Flake8Pie),
            "flake8-print" => Ok(Plugin::Flake8Print),
            "flake8-pytest-style" => Ok(Plugin::Flake8PytestStyle),
            "flake8-quotes" => Ok(Plugin::Flake8Quotes),
            "flake8-return" => Ok(Plugin::Flake8Return),
            "flake8-simplify" => Ok(Plugin::Flake8Simplify),
            "flake8-tidy-imports" => Ok(Plugin::Flake8TidyImports),
            "flake8-type-checking" => Ok(Plugin::Flake8TypeChecking),
            "flake8-unused-arguments" => Ok(Plugin::Flake8UnusedArguments),
            "flake8-use-pathlib" => Ok(Plugin::Flake8UsePathlib),
            "mccabe" => Ok(Plugin::McCabe),
            "pep8-naming" => Ok(Plugin::PEP8Naming),
            "pandas-vet" => Ok(Plugin::PandasVet),
            "pyupgrade" => Ok(Plugin::Pyupgrade),
            "tryceratops" => Ok(Plugin::Tryceratops),
            _ => Err(anyhow!("Unknown plugin: {string}")),
        }
    }
}
|
||||||
|
|
||||||
|
impl fmt::Debug for Plugin {
    /// Render the plugin as its PyPI-style package name — the inverse of
    /// `FromStr` above; keep both mappings in sync when adding variants.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "{}",
            match self {
                Plugin::Flake82020 => "flake8-2020",
                Plugin::Flake8Annotations => "flake8-annotations",
                Plugin::Flake8Bandit => "flake8-bandit",
                Plugin::Flake8BlindExcept => "flake8-blind-except",
                Plugin::Flake8BooleanTrap => "flake8-boolean-trap",
                Plugin::Flake8Bugbear => "flake8-bugbear",
                Plugin::Flake8Builtins => "flake8-builtins",
                Plugin::Flake8Commas => "flake8-commas",
                Plugin::Flake8Comprehensions => "flake8-comprehensions",
                Plugin::Flake8Datetimez => "flake8-datetimez",
                Plugin::Flake8Debugger => "flake8-debugger",
                Plugin::Flake8Docstrings => "flake8-docstrings",
                Plugin::Flake8Eradicate => "flake8-eradicate",
                Plugin::Flake8ErrMsg => "flake8-errmsg",
                Plugin::Flake8Executable => "flake8-executable",
                Plugin::Flake8ImplicitStrConcat => "flake8-implicit-str-concat",
                Plugin::Flake8ImportConventions => "flake8-import-conventions",
                Plugin::Flake8NoPep420 => "flake8-no-pep420",
                Plugin::Flake8Pie => "flake8-pie",
                Plugin::Flake8Print => "flake8-print",
                Plugin::Flake8PytestStyle => "flake8-pytest-style",
                Plugin::Flake8Quotes => "flake8-quotes",
                Plugin::Flake8Return => "flake8-return",
                Plugin::Flake8Simplify => "flake8-simplify",
                Plugin::Flake8TidyImports => "flake8-tidy-imports",
                Plugin::Flake8TypeChecking => "flake8-type-checking",
                Plugin::Flake8UnusedArguments => "flake8-unused-arguments",
                Plugin::Flake8UsePathlib => "flake8-use-pathlib",
                Plugin::McCabe => "mccabe",
                Plugin::PEP8Naming => "pep8-naming",
                Plugin::PandasVet => "pandas-vet",
                Plugin::Pyupgrade => "pyupgrade",
                Plugin::Tryceratops => "tryceratops",
            }
        )
    }
}
|
||||||
|
|
||||||
|
impl From<&Plugin> for Linter {
    /// Map each Flake8 plugin to the Ruff linter implementing its rules.
    /// Mostly one-to-one by name; note `Flake8Docstrings` maps to
    /// `Pydocstyle` and `Flake8Eradicate` maps to `Eradicate`.
    fn from(plugin: &Plugin) -> Self {
        match plugin {
            Plugin::Flake82020 => Linter::Flake82020,
            Plugin::Flake8Annotations => Linter::Flake8Annotations,
            Plugin::Flake8Bandit => Linter::Flake8Bandit,
            Plugin::Flake8BlindExcept => Linter::Flake8BlindExcept,
            Plugin::Flake8BooleanTrap => Linter::Flake8BooleanTrap,
            Plugin::Flake8Bugbear => Linter::Flake8Bugbear,
            Plugin::Flake8Builtins => Linter::Flake8Builtins,
            Plugin::Flake8Commas => Linter::Flake8Commas,
            Plugin::Flake8Comprehensions => Linter::Flake8Comprehensions,
            Plugin::Flake8Datetimez => Linter::Flake8Datetimez,
            Plugin::Flake8Debugger => Linter::Flake8Debugger,
            Plugin::Flake8Docstrings => Linter::Pydocstyle,
            Plugin::Flake8Eradicate => Linter::Eradicate,
            Plugin::Flake8ErrMsg => Linter::Flake8ErrMsg,
            Plugin::Flake8Executable => Linter::Flake8Executable,
            Plugin::Flake8ImplicitStrConcat => Linter::Flake8ImplicitStrConcat,
            Plugin::Flake8ImportConventions => Linter::Flake8ImportConventions,
            Plugin::Flake8NoPep420 => Linter::Flake8NoPep420,
            Plugin::Flake8Pie => Linter::Flake8Pie,
            Plugin::Flake8Print => Linter::Flake8Print,
            Plugin::Flake8PytestStyle => Linter::Flake8PytestStyle,
            Plugin::Flake8Quotes => Linter::Flake8Quotes,
            Plugin::Flake8Return => Linter::Flake8Return,
            Plugin::Flake8Simplify => Linter::Flake8Simplify,
            Plugin::Flake8TidyImports => Linter::Flake8TidyImports,
            Plugin::Flake8TypeChecking => Linter::Flake8TypeChecking,
            Plugin::Flake8UnusedArguments => Linter::Flake8UnusedArguments,
            Plugin::Flake8UsePathlib => Linter::Flake8UsePathlib,
            Plugin::McCabe => Linter::McCabe,
            Plugin::PEP8Naming => Linter::PEP8Naming,
            Plugin::PandasVet => Linter::PandasVet,
            Plugin::Pyupgrade => Linter::Pyupgrade,
            Plugin::Tryceratops => Linter::Tryceratops,
        }
    }
}
|
||||||
|
|
||||||
|
/// Infer the enabled plugins based on user-provided options.
|
||||||
|
///
|
||||||
|
/// For example, if the user specified a `mypy-init-return` setting, we should
|
||||||
|
/// infer that `flake8-annotations` is active.
|
||||||
|
pub(crate) fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> Vec<Plugin> {
|
||||||
|
let mut plugins = BTreeSet::new();
|
||||||
|
for key in flake8.keys() {
|
||||||
|
match key.as_str() {
|
||||||
|
// flake8-annotations
|
||||||
|
"suppress-none-returning" | "suppress_none_returning" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"suppress-dummy-args" | "suppress_dummy_args" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"allow-untyped-defs" | "allow_untyped_defs" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"allow-untyped-nested" | "allow_untyped_nested" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"mypy-init-return" | "mypy_init_return" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"dispatch-decorators" | "dispatch_decorators" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"overload-decorators" | "overload_decorators" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"allow-star-arg-any" | "allow_star_arg_any" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
// flake8-bugbear
|
||||||
|
"extend-immutable-calls" | "extend_immutable_calls" => {
|
||||||
|
plugins.insert(Plugin::Flake8Bugbear);
|
||||||
|
}
|
||||||
|
// flake8-builtins
|
||||||
|
"builtins-ignorelist" | "builtins_ignorelist" => {
|
||||||
|
plugins.insert(Plugin::Flake8Builtins);
|
||||||
|
}
|
||||||
|
// flake8-docstrings
|
||||||
|
"docstring-convention" | "docstring_convention" => {
|
||||||
|
plugins.insert(Plugin::Flake8Docstrings);
|
||||||
|
}
|
||||||
|
// flake8-eradicate
|
||||||
|
"eradicate-aggressive" | "eradicate_aggressive" => {
|
||||||
|
plugins.insert(Plugin::Flake8Eradicate);
|
||||||
|
}
|
||||||
|
"eradicate-whitelist" | "eradicate_whitelist" => {
|
||||||
|
plugins.insert(Plugin::Flake8Eradicate);
|
||||||
|
}
|
||||||
|
"eradicate-whitelist-extend" | "eradicate_whitelist_extend" => {
|
||||||
|
plugins.insert(Plugin::Flake8Eradicate);
|
||||||
|
}
|
||||||
|
// flake8-pytest-style
|
||||||
|
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
// flake8-quotes
|
||||||
|
"quotes" | "inline-quotes" | "inline_quotes" => {
|
||||||
|
plugins.insert(Plugin::Flake8Quotes);
|
||||||
|
}
|
||||||
|
"multiline-quotes" | "multiline_quotes" => {
|
||||||
|
plugins.insert(Plugin::Flake8Quotes);
|
||||||
|
}
|
||||||
|
"docstring-quotes" | "docstring_quotes" => {
|
||||||
|
plugins.insert(Plugin::Flake8Quotes);
|
||||||
|
}
|
||||||
|
"avoid-escape" | "avoid_escape" => {
|
||||||
|
plugins.insert(Plugin::Flake8Quotes);
|
||||||
|
}
|
||||||
|
// flake8-tidy-imports
|
||||||
|
"ban-relative-imports" | "ban_relative_imports" => {
|
||||||
|
plugins.insert(Plugin::Flake8TidyImports);
|
||||||
|
}
|
||||||
|
"banned-modules" | "banned_modules" => {
|
||||||
|
plugins.insert(Plugin::Flake8TidyImports);
|
||||||
|
}
|
||||||
|
// mccabe
|
||||||
|
"max-complexity" | "max_complexity" => {
|
||||||
|
plugins.insert(Plugin::McCabe);
|
||||||
|
}
|
||||||
|
// pep8-naming
|
||||||
|
"ignore-names" | "ignore_names" => {
|
||||||
|
plugins.insert(Plugin::PEP8Naming);
|
||||||
|
}
|
||||||
|
"classmethod-decorators" | "classmethod_decorators" => {
|
||||||
|
plugins.insert(Plugin::PEP8Naming);
|
||||||
|
}
|
||||||
|
"staticmethod-decorators" | "staticmethod_decorators" => {
|
||||||
|
plugins.insert(Plugin::PEP8Naming);
|
||||||
|
}
|
||||||
|
"max-string-length" | "max_string_length" => {
|
||||||
|
plugins.insert(Plugin::Flake8ErrMsg);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Vec::from_iter(plugins)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Infer the enabled plugins based on the referenced prefixes.
|
||||||
|
///
|
||||||
|
/// For example, if the user ignores `ANN101`, we should infer that
|
||||||
|
/// `flake8-annotations` is active.
|
||||||
|
pub(crate) fn infer_plugins_from_codes(selectors: &HashSet<RuleSelector>) -> Vec<Plugin> {
|
||||||
|
// Ignore cases in which we've knowingly changed rule prefixes.
|
||||||
|
[
|
||||||
|
Plugin::Flake82020,
|
||||||
|
Plugin::Flake8Annotations,
|
||||||
|
Plugin::Flake8Bandit,
|
||||||
|
// Plugin::Flake8BlindExcept,
|
||||||
|
Plugin::Flake8BooleanTrap,
|
||||||
|
Plugin::Flake8Bugbear,
|
||||||
|
Plugin::Flake8Builtins,
|
||||||
|
// Plugin::Flake8Commas,
|
||||||
|
Plugin::Flake8Comprehensions,
|
||||||
|
Plugin::Flake8Datetimez,
|
||||||
|
Plugin::Flake8Debugger,
|
||||||
|
Plugin::Flake8Docstrings,
|
||||||
|
// Plugin::Flake8Eradicate,
|
||||||
|
Plugin::Flake8ErrMsg,
|
||||||
|
Plugin::Flake8Executable,
|
||||||
|
Plugin::Flake8ImplicitStrConcat,
|
||||||
|
// Plugin::Flake8ImportConventions,
|
||||||
|
Plugin::Flake8NoPep420,
|
||||||
|
Plugin::Flake8Pie,
|
||||||
|
Plugin::Flake8Print,
|
||||||
|
Plugin::Flake8PytestStyle,
|
||||||
|
Plugin::Flake8Quotes,
|
||||||
|
Plugin::Flake8Return,
|
||||||
|
Plugin::Flake8Simplify,
|
||||||
|
// Plugin::Flake8TidyImports,
|
||||||
|
// Plugin::Flake8TypeChecking,
|
||||||
|
Plugin::Flake8UnusedArguments,
|
||||||
|
// Plugin::Flake8UsePathlib,
|
||||||
|
Plugin::McCabe,
|
||||||
|
Plugin::PEP8Naming,
|
||||||
|
Plugin::PandasVet,
|
||||||
|
Plugin::Tryceratops,
|
||||||
|
]
|
||||||
|
.into_iter()
|
||||||
|
.filter(|plugin| {
|
||||||
|
for selector in selectors {
|
||||||
|
if selector
|
||||||
|
.rules(&PreviewOptions::default())
|
||||||
|
.any(|rule| Linter::from(plugin).rules().any(|r| r == rule))
|
||||||
|
{
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
false
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use super::{infer_plugins_from_options, Plugin};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn it_infers_plugins() {
|
||||||
|
let actual = infer_plugins_from_options(&HashMap::from([(
|
||||||
|
"inline-quotes".to_string(),
|
||||||
|
Some("single".to_string()),
|
||||||
|
)]));
|
||||||
|
let expected = vec![Plugin::Flake8Quotes];
|
||||||
|
assert_eq!(actual, expected);
|
||||||
|
|
||||||
|
let actual = infer_plugins_from_options(&HashMap::from([(
|
||||||
|
"staticmethod-decorators".to_string(),
|
||||||
|
Some("[]".to_string()),
|
||||||
|
)]));
|
||||||
|
let expected = vec![Plugin::PEP8Naming];
|
||||||
|
assert_eq!(actual, expected);
|
||||||
|
}
|
||||||
|
}
|
||||||
26
crates/flake8_to_ruff/src/pyproject.rs
Normal file
26
crates/flake8_to_ruff/src/pyproject.rs
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use super::black::Black;
|
||||||
|
use super::isort::Isort;
|
||||||
|
use super::pep621::Project;
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub(crate) struct Tools {
|
||||||
|
pub(crate) black: Option<Black>,
|
||||||
|
pub(crate) isort: Option<Isort>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub(crate) struct Pyproject {
|
||||||
|
pub(crate) tool: Option<Tools>,
|
||||||
|
pub(crate) project: Option<Project>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
|
||||||
|
let contents = std::fs::read_to_string(path)?;
|
||||||
|
let pyproject = toml::from_str::<Pyproject>(&contents)?;
|
||||||
|
Ok(pyproject)
|
||||||
|
}
|
||||||
@@ -31,29 +31,26 @@ name = "formatter"
|
|||||||
harness = false
|
harness = false
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
once_cell = { workspace = true }
|
once_cell.workspace = true
|
||||||
serde = { workspace = true }
|
serde.workspace = true
|
||||||
serde_json = { workspace = true }
|
serde_json.workspace = true
|
||||||
url = { workspace = true }
|
url = "2.3.1"
|
||||||
ureq = { workspace = true }
|
ureq = "2.8.0"
|
||||||
criterion = { workspace = true, default-features = false }
|
criterion = { version = "0.5.1", default-features = false }
|
||||||
codspeed-criterion-compat = { workspace = true, default-features = false, optional = true}
|
codspeed-criterion-compat = { version="2.3.0", default-features = false, optional = true}
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
ruff_linter = { path = "../ruff_linter" }
|
ruff_linter.path = "../ruff_linter"
|
||||||
ruff_python_ast = { path = "../ruff_python_ast" }
|
ruff_python_ast.path = "../ruff_python_ast"
|
||||||
ruff_python_formatter = { path = "../ruff_python_formatter" }
|
ruff_python_formatter = { path = "../ruff_python_formatter" }
|
||||||
ruff_python_index = { path = "../ruff_python_index" }
|
ruff_python_index = { path = "../ruff_python_index" }
|
||||||
ruff_python_parser = { path = "../ruff_python_parser" }
|
ruff_python_parser = { path = "../ruff_python_parser" }
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
codspeed = ["codspeed-criterion-compat"]
|
codspeed = ["codspeed-criterion-compat"]
|
||||||
|
|
||||||
[target.'cfg(target_os = "windows")'.dev-dependencies]
|
[target.'cfg(target_os = "windows")'.dev-dependencies]
|
||||||
mimalloc = { workspace = true }
|
mimalloc = "0.1.39"
|
||||||
|
|
||||||
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dev-dependencies]
|
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dev-dependencies]
|
||||||
tikv-jemallocator = { workspace = true }
|
tikv-jemallocator = "0.5.0"
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ use ruff_benchmark::criterion::{
|
|||||||
criterion_group, criterion_main, BenchmarkId, Criterion, Throughput,
|
criterion_group, criterion_main, BenchmarkId, Criterion, Throughput,
|
||||||
};
|
};
|
||||||
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
|
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
|
||||||
use ruff_python_formatter::{format_module_ast, PreviewMode, PyFormatOptions};
|
use ruff_python_formatter::{format_module_ast, PyFormatOptions};
|
||||||
use ruff_python_index::CommentRangesBuilder;
|
use ruff_python_index::CommentRangesBuilder;
|
||||||
use ruff_python_parser::lexer::lex;
|
use ruff_python_parser::lexer::lex;
|
||||||
use ruff_python_parser::{parse_tokens, Mode};
|
use ruff_python_parser::{parse_tokens, Mode};
|
||||||
@@ -65,12 +65,11 @@ fn benchmark_formatter(criterion: &mut Criterion) {
|
|||||||
let comment_ranges = comment_ranges.finish();
|
let comment_ranges = comment_ranges.finish();
|
||||||
|
|
||||||
// Parse the AST.
|
// Parse the AST.
|
||||||
let module = parse_tokens(tokens, case.code(), Mode::Module)
|
let module = parse_tokens(tokens, case.code(), Mode::Module, "<filename>")
|
||||||
.expect("Input to be a valid python program");
|
.expect("Input to be a valid python program");
|
||||||
|
|
||||||
b.iter(|| {
|
b.iter(|| {
|
||||||
let options = PyFormatOptions::from_extension(Path::new(case.name()))
|
let options = PyFormatOptions::from_extension(Path::new(case.name()));
|
||||||
.with_preview(PreviewMode::Enabled);
|
|
||||||
let formatted =
|
let formatted =
|
||||||
format_module_ast(&module, &comment_ranges, case.code(), options)
|
format_module_ast(&module, &comment_ranges, case.code(), options)
|
||||||
.expect("Formatting to succeed");
|
.expect("Formatting to succeed");
|
||||||
|
|||||||
@@ -2,15 +2,12 @@ use ruff_benchmark::criterion::{
|
|||||||
criterion_group, criterion_main, BenchmarkGroup, BenchmarkId, Criterion, Throughput,
|
criterion_group, criterion_main, BenchmarkGroup, BenchmarkId, Criterion, Throughput,
|
||||||
};
|
};
|
||||||
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
|
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
|
||||||
use ruff_linter::linter::{lint_only, ParseSource};
|
use ruff_linter::linter::lint_only;
|
||||||
use ruff_linter::rule_selector::PreviewOptions;
|
|
||||||
use ruff_linter::settings::rule_table::RuleTable;
|
use ruff_linter::settings::rule_table::RuleTable;
|
||||||
use ruff_linter::settings::types::PreviewMode;
|
|
||||||
use ruff_linter::settings::{flags, LinterSettings};
|
use ruff_linter::settings::{flags, LinterSettings};
|
||||||
use ruff_linter::source_kind::SourceKind;
|
use ruff_linter::source_kind::SourceKind;
|
||||||
use ruff_linter::{registry::Rule, RuleSelector};
|
use ruff_linter::{registry::Rule, RuleSelector};
|
||||||
use ruff_python_ast::PySourceType;
|
use ruff_python_ast::PySourceType;
|
||||||
use ruff_python_parser::{lexer, parse_program_tokens, Mode};
|
|
||||||
|
|
||||||
#[cfg(target_os = "windows")]
|
#[cfg(target_os = "windows")]
|
||||||
#[global_allocator]
|
#[global_allocator]
|
||||||
@@ -54,12 +51,7 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
|
|||||||
BenchmarkId::from_parameter(case.name()),
|
BenchmarkId::from_parameter(case.name()),
|
||||||
&case,
|
&case,
|
||||||
|b, case| {
|
|b, case| {
|
||||||
// Tokenize the source.
|
let kind = SourceKind::Python(case.code().to_string());
|
||||||
let tokens: Vec<_> = lexer::lex(case.code(), Mode::Module).collect();
|
|
||||||
|
|
||||||
// Parse the source.
|
|
||||||
let ast = parse_program_tokens(tokens.clone(), case.code(), false).unwrap();
|
|
||||||
|
|
||||||
b.iter(|| {
|
b.iter(|| {
|
||||||
let path = case.path();
|
let path = case.path();
|
||||||
let result = lint_only(
|
let result = lint_only(
|
||||||
@@ -67,12 +59,8 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
|
|||||||
None,
|
None,
|
||||||
settings,
|
settings,
|
||||||
flags::Noqa::Enabled,
|
flags::Noqa::Enabled,
|
||||||
&SourceKind::Python(case.code().to_string()),
|
&kind,
|
||||||
PySourceType::from(path.as_path()),
|
PySourceType::from(path.as_path()),
|
||||||
ParseSource::Precomputed {
|
|
||||||
tokens: &tokens,
|
|
||||||
ast: &ast,
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
|
|
||||||
// Assert that file contains no parse errors
|
// Assert that file contains no parse errors
|
||||||
@@ -90,21 +78,12 @@ fn benchmark_default_rules(criterion: &mut Criterion) {
|
|||||||
benchmark_linter(group, &LinterSettings::default());
|
benchmark_linter(group, &LinterSettings::default());
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Disables IO based rules because they are a source of flakiness
|
fn benchmark_all_rules(criterion: &mut Criterion) {
|
||||||
fn disable_io_rules(rules: &mut RuleTable) {
|
let mut rules: RuleTable = RuleSelector::All.all_rules().collect();
|
||||||
|
|
||||||
|
// Disable IO based rules because it is a source of flakiness
|
||||||
rules.disable(Rule::ShebangMissingExecutableFile);
|
rules.disable(Rule::ShebangMissingExecutableFile);
|
||||||
rules.disable(Rule::ShebangNotExecutable);
|
rules.disable(Rule::ShebangNotExecutable);
|
||||||
}
|
|
||||||
|
|
||||||
fn benchmark_all_rules(criterion: &mut Criterion) {
|
|
||||||
let mut rules: RuleTable = RuleSelector::All
|
|
||||||
.rules(&PreviewOptions {
|
|
||||||
mode: PreviewMode::Disabled,
|
|
||||||
require_explicit: false,
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
disable_io_rules(&mut rules);
|
|
||||||
|
|
||||||
let settings = LinterSettings {
|
let settings = LinterSettings {
|
||||||
rules,
|
rules,
|
||||||
@@ -115,22 +94,6 @@ fn benchmark_all_rules(criterion: &mut Criterion) {
|
|||||||
benchmark_linter(group, &settings);
|
benchmark_linter(group, &settings);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn benchmark_preview_rules(criterion: &mut Criterion) {
|
|
||||||
let mut rules: RuleTable = RuleSelector::All.all_rules().collect();
|
|
||||||
|
|
||||||
disable_io_rules(&mut rules);
|
|
||||||
|
|
||||||
let settings = LinterSettings {
|
|
||||||
rules,
|
|
||||||
preview: PreviewMode::Enabled,
|
|
||||||
..LinterSettings::default()
|
|
||||||
};
|
|
||||||
|
|
||||||
let group = criterion.benchmark_group("linter/all-with-preview-rules");
|
|
||||||
benchmark_linter(group, &settings);
|
|
||||||
}
|
|
||||||
|
|
||||||
criterion_group!(default_rules, benchmark_default_rules);
|
criterion_group!(default_rules, benchmark_default_rules);
|
||||||
criterion_group!(all_rules, benchmark_all_rules);
|
criterion_group!(all_rules, benchmark_all_rules);
|
||||||
criterion_group!(preview_rules, benchmark_preview_rules);
|
criterion_main!(default_rules, all_rules);
|
||||||
criterion_main!(default_rules, all_rules, preview_rules);
|
|
||||||
|
|||||||
@@ -60,7 +60,7 @@ fn benchmark_parser(criterion: &mut Criterion<WallTime>) {
|
|||||||
&case,
|
&case,
|
||||||
|b, case| {
|
|b, case| {
|
||||||
b.iter(|| {
|
b.iter(|| {
|
||||||
let parsed = parse_suite(case.code()).unwrap();
|
let parsed = parse_suite(case.code(), case.name()).unwrap();
|
||||||
|
|
||||||
let mut visitor = CountVisitor { count: 0 };
|
let mut visitor = CountVisitor { count: 0 };
|
||||||
visitor.visit_body(&parsed);
|
visitor.visit_body(&parsed);
|
||||||
|
|||||||
@@ -16,10 +16,7 @@ glob = { workspace = true }
|
|||||||
globset = { workspace = true }
|
globset = { workspace = true }
|
||||||
regex = { workspace = true }
|
regex = { workspace = true }
|
||||||
filetime = { workspace = true }
|
filetime = { workspace = true }
|
||||||
seahash = { workspace = true }
|
seahash = "4.1.0"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
ruff_macros = { path = "../ruff_macros" }
|
ruff_macros = { path = "../ruff_macros" }
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "ruff_cli"
|
name = "ruff_cli"
|
||||||
version = "0.1.11"
|
version = "0.1.3"
|
||||||
publish = false
|
publish = false
|
||||||
authors = { workspace = true }
|
authors = { workspace = true }
|
||||||
edition = { workspace = true }
|
edition = { workspace = true }
|
||||||
@@ -15,60 +15,64 @@ readme = "../../README.md"
|
|||||||
name = "ruff"
|
name = "ruff"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
|
||||||
ruff_cache = { path = "../ruff_cache" }
|
ruff_cache = { path = "../ruff_cache" }
|
||||||
ruff_diagnostics = { path = "../ruff_diagnostics" }
|
ruff_diagnostics = { path = "../ruff_diagnostics" }
|
||||||
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
|
ruff_formatter = { path = "../ruff_formatter" }
|
||||||
ruff_macros = { path = "../ruff_macros" }
|
|
||||||
ruff_notebook = { path = "../ruff_notebook" }
|
ruff_notebook = { path = "../ruff_notebook" }
|
||||||
|
ruff_macros = { path = "../ruff_macros" }
|
||||||
ruff_python_ast = { path = "../ruff_python_ast" }
|
ruff_python_ast = { path = "../ruff_python_ast" }
|
||||||
ruff_python_formatter = { path = "../ruff_python_formatter" }
|
ruff_python_formatter = { path = "../ruff_python_formatter" }
|
||||||
ruff_source_file = { path = "../ruff_source_file" }
|
ruff_source_file = { path = "../ruff_source_file" }
|
||||||
ruff_text_size = { path = "../ruff_text_size" }
|
ruff_python_trivia = { path = "../ruff_python_trivia" }
|
||||||
ruff_workspace = { path = "../ruff_workspace" }
|
ruff_workspace = { path = "../ruff_workspace" }
|
||||||
|
ruff_text_size = { path = "../ruff_text_size" }
|
||||||
|
|
||||||
|
annotate-snippets = { version = "0.9.1", features = ["color"] }
|
||||||
anyhow = { workspace = true }
|
anyhow = { workspace = true }
|
||||||
argfile = { workspace = true }
|
argfile = { version = "0.1.6" }
|
||||||
bincode = { workspace = true }
|
bincode = { version = "1.3.3" }
|
||||||
bitflags = { workspace = true }
|
bitflags = { workspace = true }
|
||||||
cachedir = { workspace = true }
|
cachedir = { version = "0.3.0" }
|
||||||
chrono = { workspace = true }
|
chrono = { workspace = true }
|
||||||
clap = { workspace = true, features = ["derive", "env"] }
|
clap = { workspace = true, features = ["derive", "env"] }
|
||||||
clap_complete_command = { workspace = true }
|
clap_complete_command = { version = "0.5.1" }
|
||||||
clearscreen = { workspace = true }
|
clearscreen = { version = "2.0.0" }
|
||||||
colored = { workspace = true }
|
colored = { workspace = true }
|
||||||
filetime = { workspace = true }
|
filetime = { workspace = true }
|
||||||
|
glob = { workspace = true }
|
||||||
ignore = { workspace = true }
|
ignore = { workspace = true }
|
||||||
is-macro = { workspace = true }
|
is-macro = { workspace = true }
|
||||||
itertools = { workspace = true }
|
itertools = { workspace = true }
|
||||||
log = { workspace = true }
|
log = { workspace = true }
|
||||||
notify = { workspace = true }
|
notify = { version = "6.1.1" }
|
||||||
path-absolutize = { workspace = true, features = ["once_cell_cache"] }
|
path-absolutize = { workspace = true, features = ["once_cell_cache"] }
|
||||||
rayon = { workspace = true }
|
rayon = { version = "1.8.0" }
|
||||||
regex = { workspace = true }
|
regex = { workspace = true }
|
||||||
|
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
|
||||||
rustc-hash = { workspace = true }
|
rustc-hash = { workspace = true }
|
||||||
serde = { workspace = true }
|
serde = { workspace = true }
|
||||||
serde_json = { workspace = true }
|
serde_json = { workspace = true }
|
||||||
shellexpand = { workspace = true }
|
shellexpand = { workspace = true }
|
||||||
|
similar = { workspace = true }
|
||||||
strum = { workspace = true, features = [] }
|
strum = { workspace = true, features = [] }
|
||||||
thiserror = { workspace = true }
|
thiserror = { workspace = true }
|
||||||
tracing = { workspace = true, features = ["log"] }
|
tracing = { workspace = true, features = ["log"] }
|
||||||
walkdir = { workspace = true }
|
walkdir = { version = "2.3.2" }
|
||||||
wild = { workspace = true }
|
wild = { version = "2" }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
assert_cmd = { workspace = true }
|
assert_cmd = { version = "2.0.8" }
|
||||||
# Avoid writing colored snapshots when running tests from the terminal
|
# Avoid writing colored snapshots when running tests from the terminal
|
||||||
colored = { workspace = true, features = ["no-color"]}
|
colored = { workspace = true, features = ["no-color"]}
|
||||||
insta = { workspace = true, features = ["filters", "json"] }
|
insta = { workspace = true, features = ["filters", "json"] }
|
||||||
insta-cmd = { workspace = true }
|
insta-cmd = { version = "0.4.0" }
|
||||||
tempfile = { workspace = true }
|
tempfile = "3.8.1"
|
||||||
test-case = { workspace = true }
|
test-case = { workspace = true }
|
||||||
|
ureq = { version = "2.8.0", features = [] }
|
||||||
|
|
||||||
[target.'cfg(target_os = "windows")'.dependencies]
|
[target.'cfg(target_os = "windows")'.dependencies]
|
||||||
mimalloc = { workspace = true }
|
mimalloc = "0.1.39"
|
||||||
|
|
||||||
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
|
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
|
||||||
tikv-jemallocator = { workspace = true }
|
tikv-jemallocator = "0.5.0"
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|||||||
@@ -1,2 +0,0 @@
|
|||||||
[tool.ruff]
|
|
||||||
select = []
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
[tool.ruff]
|
|
||||||
include = ["a.py", "subdirectory/c.py"]
|
|
||||||
@@ -1,413 +0,0 @@
|
|||||||
{
|
|
||||||
"cells": [
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 1,
|
|
||||||
"id": "4f8ce941-1492-4d4e-8ab5-70d733fe891a",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"%config ZMQInteractiveShell.ast_node_interactivity=\"last_expr_or_assign\""
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 2,
|
|
||||||
"id": "721ec705-0c65-4bfb-9809-7ed8bc534186",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 2,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"# Assignment statement without a semicolon\n",
|
|
||||||
"x = 1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 3,
|
|
||||||
"id": "de50e495-17e5-41cc-94bd-565757555d7e",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Assignment statement with a semicolon\n",
|
|
||||||
"x = 1;\n",
|
|
||||||
"x = 1;"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 4,
|
|
||||||
"id": "39e31201-23da-44eb-8684-41bba3663991",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"2"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 4,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"# Augmented assignment without a semicolon\n",
|
|
||||||
"x += 1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 5,
|
|
||||||
"id": "6b73d3dd-c73a-4697-9e97-e109a6c1fbab",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Augmented assignment without a semicolon\n",
|
|
||||||
"x += 1;\n",
|
|
||||||
"x += 1; # comment\n",
|
|
||||||
"# comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 6,
|
|
||||||
"id": "2a3e5b86-aa5b-46ba-b9c6-0386d876f58c",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Multiple assignment without a semicolon\n",
|
|
||||||
"x = y = 1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 7,
|
|
||||||
"id": "07f89e51-9357-4cfb-8fc5-76fb75e35949",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Multiple assignment with a semicolon\n",
|
|
||||||
"x = y = 1;\n",
|
|
||||||
"x = y = 1;"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 8,
|
|
||||||
"id": "c22b539d-473e-48f8-a236-625e58c47a00",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Tuple unpacking without a semicolon\n",
|
|
||||||
"x, y = 1, 2"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 9,
|
|
||||||
"id": "12c87940-a0d5-403b-a81c-7507eb06dc7e",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Tuple unpacking with a semicolon (irrelevant)\n",
|
|
||||||
"x, y = 1, 2;\n",
|
|
||||||
"x, y = 1, 2; # comment\n",
|
|
||||||
"# comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 10,
|
|
||||||
"id": "5a768c76-6bc4-470c-b37e-8cc14bc6caf4",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 10,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"# Annotated assignment statement without a semicolon\n",
|
|
||||||
"x: int = 1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 11,
|
|
||||||
"id": "21bfda82-1a9a-4ba1-9078-74ac480804b5",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Annotated assignment statement without a semicolon\n",
|
|
||||||
"x: int = 1;\n",
|
|
||||||
"x: int = 1; # comment\n",
|
|
||||||
"# comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 12,
|
|
||||||
"id": "09929999-ff29-4d10-ad2b-e665af15812d",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 12,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"# Assignment expression without a semicolon\n",
|
|
||||||
"(x := 1)"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 13,
|
|
||||||
"id": "32a83217-1bad-4f61-855e-ffcdb119c763",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Assignment expression with a semicolon\n",
|
|
||||||
"(x := 1);\n",
|
|
||||||
"(x := 1); # comment\n",
|
|
||||||
"# comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 14,
|
|
||||||
"id": "61b81865-277e-4964-b03e-eb78f1f318eb",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 14,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"x = 1\n",
|
|
||||||
"# Expression without a semicolon\n",
|
|
||||||
"x"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 15,
|
|
||||||
"id": "974c29be-67e1-4000-95fa-6ca118a63bad",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"x = 1\n",
|
|
||||||
"# Expression with a semicolon\n",
|
|
||||||
"x;"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 16,
|
|
||||||
"id": "cfeb1757-46d6-4f13-969f-a283b6d0304f",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"class Point:\n",
|
|
||||||
" def __init__(self, x, y):\n",
|
|
||||||
" self.x = x\n",
|
|
||||||
" self.y = y\n",
|
|
||||||
"\n",
|
|
||||||
"\n",
|
|
||||||
"p = Point(0, 0);"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 17,
|
|
||||||
"id": "2ee7f1a5-ccfe-4004-bfa4-ef834a58da97",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Assignment statement where the left is an attribute access doesn't\n",
|
|
||||||
"# print the value.\n",
|
|
||||||
"p.x = 1;"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 18,
|
|
||||||
"id": "3e49370a-048b-474d-aa0a-3d1d4a73ad37",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"data = {}\n",
|
|
||||||
"\n",
|
|
||||||
"# Neither does the subscript node\n",
|
|
||||||
"data[\"foo\"] = 1;"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 19,
|
|
||||||
"id": "d594bdd3-eaa9-41ef-8cda-cf01bc273b2d",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"if (x := 1):\n",
|
|
||||||
" # It should be the top level statement\n",
|
|
||||||
" x"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 20,
|
|
||||||
"id": "e532f0cf-80c7-42b7-8226-6002fcf74fb6",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 20,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"# Parentheses with comments\n",
|
|
||||||
"(\n",
|
|
||||||
" x := 1 # comment\n",
|
|
||||||
") # comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 21,
|
|
||||||
"id": "473c5d62-871b-46ed-8a34-27095243f462",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Parentheses with comments\n",
|
|
||||||
"(\n",
|
|
||||||
" x := 1 # comment\n",
|
|
||||||
"); # comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 22,
|
|
||||||
"id": "8c3c2361-f49f-45fe-bbe3-7e27410a8a86",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"'Hello world!'"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 22,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"\"\"\"Hello world!\"\"\""
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 23,
|
|
||||||
"id": "23dbe9b5-3f68-4890-ab2d-ab0dbfd0712a",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"\"\"\"Hello world!\"\"\"; # comment\n",
|
|
||||||
"# comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 24,
|
|
||||||
"id": "3ce33108-d95d-4c70-83d1-0d4fd36a2951",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"'x = 1'"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 24,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"x = 1\n",
|
|
||||||
"f\"x = {x}\""
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 25,
|
|
||||||
"id": "654a4a67-de43-4684-824a-9451c67db48f",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"x = 1\n",
|
|
||||||
"f\"x = {x}\";\n",
|
|
||||||
"f\"x = {x}\"; # comment\n",
|
|
||||||
"# comment"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"metadata": {
|
|
||||||
"kernelspec": {
|
|
||||||
"display_name": "Python (ruff-playground)",
|
|
||||||
"language": "python",
|
|
||||||
"name": "ruff-playground"
|
|
||||||
},
|
|
||||||
"language_info": {
|
|
||||||
"codemirror_mode": {
|
|
||||||
"name": "ipython",
|
|
||||||
"version": 3
|
|
||||||
},
|
|
||||||
"file_extension": ".py",
|
|
||||||
"mimetype": "text/x-python",
|
|
||||||
"name": "python",
|
|
||||||
"nbconvert_exporter": "python",
|
|
||||||
"pygments_lexer": "ipython3",
|
|
||||||
"version": "3.11.3"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"nbformat": 4,
|
|
||||||
"nbformat_minor": 5
|
|
||||||
}
|
|
||||||
@@ -8,8 +8,8 @@ use ruff_linter::line_width::LineLength;
|
|||||||
use ruff_linter::logging::LogLevel;
|
use ruff_linter::logging::LogLevel;
|
||||||
use ruff_linter::registry::Rule;
|
use ruff_linter::registry::Rule;
|
||||||
use ruff_linter::settings::types::{
|
use ruff_linter::settings::types::{
|
||||||
ExtensionPair, FilePattern, PatternPrefixPair, PerFileIgnore, PreviewMode, PythonVersion,
|
FilePattern, PatternPrefixPair, PerFileIgnore, PreviewMode, PythonVersion, SerializationFormat,
|
||||||
SerializationFormat, UnsafeFixes,
|
UnsafeFixes,
|
||||||
};
|
};
|
||||||
use ruff_linter::{RuleParser, RuleSelector, RuleSelectorParser};
|
use ruff_linter::{RuleParser, RuleSelector, RuleSelectorParser};
|
||||||
use ruff_workspace::configuration::{Configuration, RuleSelection};
|
use ruff_workspace::configuration::{Configuration, RuleSelection};
|
||||||
@@ -88,7 +88,6 @@ pub enum Command {
|
|||||||
#[allow(clippy::struct_excessive_bools)]
|
#[allow(clippy::struct_excessive_bools)]
|
||||||
pub struct CheckCommand {
|
pub struct CheckCommand {
|
||||||
/// List of files or directories to check.
|
/// List of files or directories to check.
|
||||||
#[clap(help = "List of files or directories to check [default: .]")]
|
|
||||||
pub files: Vec<PathBuf>,
|
pub files: Vec<PathBuf>,
|
||||||
/// Apply fixes to resolve lint violations.
|
/// Apply fixes to resolve lint violations.
|
||||||
/// Use `--no-fix` to disable or `--unsafe-fixes` to include unsafe fixes.
|
/// Use `--no-fix` to disable or `--unsafe-fixes` to include unsafe fixes.
|
||||||
@@ -279,7 +278,7 @@ pub struct CheckCommand {
|
|||||||
#[arg(long, help_heading = "Rule configuration", hide = true)]
|
#[arg(long, help_heading = "Rule configuration", hide = true)]
|
||||||
pub dummy_variable_rgx: Option<Regex>,
|
pub dummy_variable_rgx: Option<Regex>,
|
||||||
/// Disable cache reads.
|
/// Disable cache reads.
|
||||||
#[arg(short, long, env = "RUFF_NO_CACHE", help_heading = "Miscellaneous")]
|
#[arg(short, long, help_heading = "Miscellaneous")]
|
||||||
pub no_cache: bool,
|
pub no_cache: bool,
|
||||||
/// Ignore all configuration files.
|
/// Ignore all configuration files.
|
||||||
#[arg(long, conflicts_with = "config", help_heading = "Miscellaneous")]
|
#[arg(long, conflicts_with = "config", help_heading = "Miscellaneous")]
|
||||||
@@ -352,9 +351,6 @@ pub struct CheckCommand {
|
|||||||
conflicts_with = "watch",
|
conflicts_with = "watch",
|
||||||
)]
|
)]
|
||||||
pub show_settings: bool,
|
pub show_settings: bool,
|
||||||
/// List of mappings from file extension to language (one of ["python", "ipynb", "pyi"]).
|
|
||||||
#[arg(long, value_delimiter = ',', hide = true)]
|
|
||||||
pub extension: Option<Vec<ExtensionPair>>,
|
|
||||||
/// Dev-only argument to show fixes
|
/// Dev-only argument to show fixes
|
||||||
#[arg(long, hide = true)]
|
#[arg(long, hide = true)]
|
||||||
pub ecosystem_ci: bool,
|
pub ecosystem_ci: bool,
|
||||||
@@ -364,7 +360,6 @@ pub struct CheckCommand {
|
|||||||
#[allow(clippy::struct_excessive_bools)]
|
#[allow(clippy::struct_excessive_bools)]
|
||||||
pub struct FormatCommand {
|
pub struct FormatCommand {
|
||||||
/// List of files or directories to format.
|
/// List of files or directories to format.
|
||||||
#[clap(help = "List of files or directories to format [default: .]")]
|
|
||||||
pub files: Vec<PathBuf>,
|
pub files: Vec<PathBuf>,
|
||||||
/// Avoid writing any formatted files back; instead, exit with a non-zero status code if any
|
/// Avoid writing any formatted files back; instead, exit with a non-zero status code if any
|
||||||
/// files would have been modified, and zero otherwise.
|
/// files would have been modified, and zero otherwise.
|
||||||
@@ -379,7 +374,7 @@ pub struct FormatCommand {
|
|||||||
pub config: Option<PathBuf>,
|
pub config: Option<PathBuf>,
|
||||||
|
|
||||||
/// Disable cache reads.
|
/// Disable cache reads.
|
||||||
#[arg(short, long, env = "RUFF_NO_CACHE", help_heading = "Miscellaneous")]
|
#[arg(short, long, help_heading = "Miscellaneous")]
|
||||||
pub no_cache: bool,
|
pub no_cache: bool,
|
||||||
/// Path to the cache directory.
|
/// Path to the cache directory.
|
||||||
#[arg(long, env = "RUFF_CACHE_DIR", help_heading = "Miscellaneous")]
|
#[arg(long, env = "RUFF_CACHE_DIR", help_heading = "Miscellaneous")]
|
||||||
@@ -540,7 +535,6 @@ impl CheckCommand {
|
|||||||
force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
|
force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
|
||||||
output_format: self.output_format,
|
output_format: self.output_format,
|
||||||
show_fixes: resolve_bool_arg(self.show_fixes, self.no_show_fixes),
|
show_fixes: resolve_bool_arg(self.show_fixes, self.no_show_fixes),
|
||||||
extension: self.extension,
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -653,7 +647,6 @@ pub struct CliOverrides {
|
|||||||
pub force_exclude: Option<bool>,
|
pub force_exclude: Option<bool>,
|
||||||
pub output_format: Option<SerializationFormat>,
|
pub output_format: Option<SerializationFormat>,
|
||||||
pub show_fixes: Option<bool>,
|
pub show_fixes: Option<bool>,
|
||||||
pub extension: Option<Vec<ExtensionPair>>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ConfigurationTransformer for CliOverrides {
|
impl ConfigurationTransformer for CliOverrides {
|
||||||
@@ -738,9 +731,6 @@ impl ConfigurationTransformer for CliOverrides {
|
|||||||
if let Some(target_version) = &self.target_version {
|
if let Some(target_version) = &self.target_version {
|
||||||
config.target_version = Some(*target_version);
|
config.target_version = Some(*target_version);
|
||||||
}
|
}
|
||||||
if let Some(extension) = &self.extension {
|
|
||||||
config.lint.extension = Some(extension.clone().into_iter().collect());
|
|
||||||
}
|
|
||||||
|
|
||||||
config
|
config
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -25,9 +25,10 @@ use ruff_notebook::NotebookIndex;
|
|||||||
use ruff_python_ast::imports::ImportMap;
|
use ruff_python_ast::imports::ImportMap;
|
||||||
use ruff_source_file::SourceFileBuilder;
|
use ruff_source_file::SourceFileBuilder;
|
||||||
use ruff_text_size::{TextRange, TextSize};
|
use ruff_text_size::{TextRange, TextSize};
|
||||||
use ruff_workspace::resolver::Resolver;
|
use ruff_workspace::resolver::{PyprojectConfig, PyprojectDiscoveryStrategy, Resolver};
|
||||||
use ruff_workspace::Settings;
|
use ruff_workspace::Settings;
|
||||||
|
|
||||||
|
use crate::cache;
|
||||||
use crate::diagnostics::Diagnostics;
|
use crate::diagnostics::Diagnostics;
|
||||||
|
|
||||||
/// [`Path`] that is relative to the package root in [`PackageCache`].
|
/// [`Path`] that is relative to the package root in [`PackageCache`].
|
||||||
@@ -85,7 +86,6 @@ pub(crate) struct Cache {
|
|||||||
changes: Mutex<Vec<Change>>,
|
changes: Mutex<Vec<Change>>,
|
||||||
/// The "current" timestamp used as cache for the updates of
|
/// The "current" timestamp used as cache for the updates of
|
||||||
/// [`FileCache::last_seen`]
|
/// [`FileCache::last_seen`]
|
||||||
#[allow(clippy::struct_field_names)]
|
|
||||||
last_seen_cache: u64,
|
last_seen_cache: u64,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -442,7 +442,7 @@ pub(super) struct CacheMessage {
|
|||||||
pub(crate) trait PackageCaches {
|
pub(crate) trait PackageCaches {
|
||||||
fn get(&self, package_root: &Path) -> Option<&Cache>;
|
fn get(&self, package_root: &Path) -> Option<&Cache>;
|
||||||
|
|
||||||
fn persist(self) -> Result<()>;
|
fn persist(self) -> anyhow::Result<()>;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T> PackageCaches for Option<T>
|
impl<T> PackageCaches for Option<T>
|
||||||
@@ -468,17 +468,27 @@ pub(crate) struct PackageCacheMap<'a>(FxHashMap<&'a Path, Cache>);
|
|||||||
|
|
||||||
impl<'a> PackageCacheMap<'a> {
|
impl<'a> PackageCacheMap<'a> {
|
||||||
pub(crate) fn init(
|
pub(crate) fn init(
|
||||||
|
pyproject_config: &PyprojectConfig,
|
||||||
package_roots: &FxHashMap<&'a Path, Option<&'a Path>>,
|
package_roots: &FxHashMap<&'a Path, Option<&'a Path>>,
|
||||||
resolver: &Resolver,
|
resolver: &Resolver,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
fn init_cache(path: &Path) {
|
fn init_cache(path: &Path) {
|
||||||
if let Err(e) = init(path) {
|
if let Err(e) = cache::init(path) {
|
||||||
error!("Failed to initialize cache at {}: {e:?}", path.display());
|
error!("Failed to initialize cache at {}: {e:?}", path.display());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for settings in resolver.settings() {
|
match pyproject_config.strategy {
|
||||||
init_cache(&settings.cache_dir);
|
PyprojectDiscoveryStrategy::Fixed => {
|
||||||
|
init_cache(&pyproject_config.settings.cache_dir);
|
||||||
|
}
|
||||||
|
PyprojectDiscoveryStrategy::Hierarchical => {
|
||||||
|
for settings in
|
||||||
|
std::iter::once(&pyproject_config.settings).chain(resolver.settings())
|
||||||
|
{
|
||||||
|
init_cache(&settings.cache_dir);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Self(
|
Self(
|
||||||
@@ -488,7 +498,7 @@ impl<'a> PackageCacheMap<'a> {
|
|||||||
.unique()
|
.unique()
|
||||||
.par_bridge()
|
.par_bridge()
|
||||||
.map(|cache_root| {
|
.map(|cache_root| {
|
||||||
let settings = resolver.resolve(cache_root);
|
let settings = resolver.resolve(cache_root, pyproject_config);
|
||||||
let cache = Cache::open(cache_root.to_path_buf(), settings);
|
let cache = Cache::open(cache_root.to_path_buf(), settings);
|
||||||
(cache_root, cache)
|
(cache_root, cache)
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -38,6 +38,7 @@ pub(crate) fn add_noqa(
|
|||||||
.flatten()
|
.flatten()
|
||||||
.map(ResolvedFile::path)
|
.map(ResolvedFile::path)
|
||||||
.collect::<Vec<_>>(),
|
.collect::<Vec<_>>(),
|
||||||
|
pyproject_config,
|
||||||
);
|
);
|
||||||
|
|
||||||
let start = Instant::now();
|
let start = Instant::now();
|
||||||
@@ -56,7 +57,7 @@ pub(crate) fn add_noqa(
|
|||||||
.parent()
|
.parent()
|
||||||
.and_then(|parent| package_roots.get(parent))
|
.and_then(|parent| package_roots.get(parent))
|
||||||
.and_then(|package| *package);
|
.and_then(|package| *package);
|
||||||
let settings = resolver.resolve(path);
|
let settings = resolver.resolve(path, pyproject_config);
|
||||||
let source_kind = match SourceKind::from_path(path, source_type) {
|
let source_kind = match SourceKind::from_path(path, source_type) {
|
||||||
Ok(Some(source_kind)) => source_kind,
|
Ok(Some(source_kind)) => source_kind,
|
||||||
Ok(None) => return None,
|
Ok(None) => return None,
|
||||||
|
|||||||
@@ -30,7 +30,6 @@ use crate::diagnostics::Diagnostics;
|
|||||||
use crate::panic::catch_unwind;
|
use crate::panic::catch_unwind;
|
||||||
|
|
||||||
/// Run the linter over a collection of files.
|
/// Run the linter over a collection of files.
|
||||||
#[allow(clippy::too_many_arguments)]
|
|
||||||
pub(crate) fn check(
|
pub(crate) fn check(
|
||||||
files: &[PathBuf],
|
files: &[PathBuf],
|
||||||
pyproject_config: &PyprojectConfig,
|
pyproject_config: &PyprojectConfig,
|
||||||
@@ -57,11 +56,16 @@ pub(crate) fn check(
|
|||||||
.flatten()
|
.flatten()
|
||||||
.map(ResolvedFile::path)
|
.map(ResolvedFile::path)
|
||||||
.collect::<Vec<_>>(),
|
.collect::<Vec<_>>(),
|
||||||
|
pyproject_config,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Load the caches.
|
// Load the caches.
|
||||||
let caches = if bool::from(cache) {
|
let caches = if bool::from(cache) {
|
||||||
Some(PackageCacheMap::init(&package_roots, &resolver))
|
Some(PackageCacheMap::init(
|
||||||
|
pyproject_config,
|
||||||
|
&package_roots,
|
||||||
|
&resolver,
|
||||||
|
))
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
};
|
};
|
||||||
@@ -76,7 +80,7 @@ pub(crate) fn check(
|
|||||||
.and_then(|parent| package_roots.get(parent))
|
.and_then(|parent| package_roots.get(parent))
|
||||||
.and_then(|package| *package);
|
.and_then(|package| *package);
|
||||||
|
|
||||||
let settings = resolver.resolve(path);
|
let settings = resolver.resolve(path, pyproject_config);
|
||||||
|
|
||||||
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
|
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
|
||||||
&& match_exclusion(
|
&& match_exclusion(
|
||||||
@@ -123,7 +127,7 @@ pub(crate) fn check(
|
|||||||
|
|
||||||
Some(result.unwrap_or_else(|(path, message)| {
|
Some(result.unwrap_or_else(|(path, message)| {
|
||||||
if let Some(path) = &path {
|
if let Some(path) = &path {
|
||||||
let settings = resolver.resolve(path);
|
let settings = resolver.resolve(path, pyproject_config);
|
||||||
if settings.linter.rules.enabled(Rule::IOError) {
|
if settings.linter.rules.enabled(Rule::IOError) {
|
||||||
let dummy =
|
let dummy =
|
||||||
SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();
|
SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();
|
||||||
@@ -180,7 +184,6 @@ pub(crate) fn check(
|
|||||||
|
|
||||||
/// Wraps [`lint_path`](crate::diagnostics::lint_path) in a [`catch_unwind`](std::panic::catch_unwind) and emits
|
/// Wraps [`lint_path`](crate::diagnostics::lint_path) in a [`catch_unwind`](std::panic::catch_unwind) and emits
|
||||||
/// a diagnostic if the linting the file panics.
|
/// a diagnostic if the linting the file panics.
|
||||||
#[allow(clippy::too_many_arguments)]
|
|
||||||
fn lint_path(
|
fn lint_path(
|
||||||
path: &Path,
|
path: &Path,
|
||||||
package: Option<&Path>,
|
package: Option<&Path>,
|
||||||
@@ -197,12 +200,12 @@ fn lint_path(
|
|||||||
match result {
|
match result {
|
||||||
Ok(inner) => inner,
|
Ok(inner) => inner,
|
||||||
Err(error) => {
|
Err(error) => {
|
||||||
let message = r"This indicates a bug in Ruff. If you could open an issue at:
|
let message = r#"This indicates a bug in Ruff. If you could open an issue at:
|
||||||
|
|
||||||
https://github.com/astral-sh/ruff/issues/new?title=%5BLinter%20panic%5D
|
https://github.com/astral-sh/ruff/issues/new?title=%5BLinter%20panic%5D
|
||||||
|
|
||||||
...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
|
...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
|
||||||
";
|
"#;
|
||||||
|
|
||||||
error!(
|
error!(
|
||||||
"{}{}{} {message}\n{error}",
|
"{}{}{} {message}\n{error}",
|
||||||
|
|||||||
@@ -4,11 +4,11 @@ use anyhow::Result;
|
|||||||
|
|
||||||
use ruff_linter::packaging;
|
use ruff_linter::packaging;
|
||||||
use ruff_linter::settings::flags;
|
use ruff_linter::settings::flags;
|
||||||
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, PyprojectConfig, Resolver};
|
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, PyprojectConfig};
|
||||||
|
|
||||||
use crate::args::CliOverrides;
|
use crate::args::CliOverrides;
|
||||||
use crate::diagnostics::{lint_stdin, Diagnostics};
|
use crate::diagnostics::{lint_stdin, Diagnostics};
|
||||||
use crate::stdin::{parrot_stdin, read_from_stdin};
|
use crate::stdin::read_from_stdin;
|
||||||
|
|
||||||
/// Run the linter over a single file, read from `stdin`.
|
/// Run the linter over a single file, read from `stdin`.
|
||||||
pub(crate) fn check_stdin(
|
pub(crate) fn check_stdin(
|
||||||
@@ -18,36 +18,30 @@ pub(crate) fn check_stdin(
|
|||||||
noqa: flags::Noqa,
|
noqa: flags::Noqa,
|
||||||
fix_mode: flags::FixMode,
|
fix_mode: flags::FixMode,
|
||||||
) -> Result<Diagnostics> {
|
) -> Result<Diagnostics> {
|
||||||
let mut resolver = Resolver::new(pyproject_config);
|
if pyproject_config.settings.file_resolver.force_exclude {
|
||||||
|
|
||||||
if resolver.force_exclude() {
|
|
||||||
if let Some(filename) = filename {
|
if let Some(filename) = filename {
|
||||||
if !python_file_at_path(filename, &mut resolver, overrides)? {
|
if !python_file_at_path(filename, pyproject_config, overrides)? {
|
||||||
if fix_mode.is_apply() {
|
|
||||||
parrot_stdin()?;
|
|
||||||
}
|
|
||||||
return Ok(Diagnostics::default());
|
return Ok(Diagnostics::default());
|
||||||
}
|
}
|
||||||
|
|
||||||
if filename.file_name().is_some_and(|name| {
|
let lint_settings = &pyproject_config.settings.linter;
|
||||||
match_exclusion(filename, name, &resolver.base_settings().linter.exclude)
|
if filename
|
||||||
}) {
|
.file_name()
|
||||||
if fix_mode.is_apply() {
|
.is_some_and(|name| match_exclusion(filename, name, &lint_settings.exclude))
|
||||||
parrot_stdin()?;
|
{
|
||||||
}
|
|
||||||
return Ok(Diagnostics::default());
|
return Ok(Diagnostics::default());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let stdin = read_from_stdin()?;
|
|
||||||
let package_root = filename.and_then(Path::parent).and_then(|path| {
|
let package_root = filename.and_then(Path::parent).and_then(|path| {
|
||||||
packaging::detect_package_root(path, &resolver.base_settings().linter.namespace_packages)
|
packaging::detect_package_root(path, &pyproject_config.settings.linter.namespace_packages)
|
||||||
});
|
});
|
||||||
|
let stdin = read_from_stdin()?;
|
||||||
let mut diagnostics = lint_stdin(
|
let mut diagnostics = lint_stdin(
|
||||||
filename,
|
filename,
|
||||||
package_root,
|
package_root,
|
||||||
stdin,
|
stdin,
|
||||||
resolver.base_settings(),
|
&pyproject_config.settings,
|
||||||
noqa,
|
noqa,
|
||||||
fix_mode,
|
fix_mode,
|
||||||
)?;
|
)?;
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ use tracing::debug;
|
|||||||
|
|
||||||
use ruff_diagnostics::SourceMap;
|
use ruff_diagnostics::SourceMap;
|
||||||
use ruff_linter::fs;
|
use ruff_linter::fs;
|
||||||
use ruff_linter::logging::{DisplayParseError, LogLevel};
|
use ruff_linter::logging::LogLevel;
|
||||||
use ruff_linter::registry::Rule;
|
use ruff_linter::registry::Rule;
|
||||||
use ruff_linter::rules::flake8_quotes::settings::Quote;
|
use ruff_linter::rules::flake8_quotes::settings::Quote;
|
||||||
use ruff_linter::source_kind::{SourceError, SourceKind};
|
use ruff_linter::source_kind::{SourceError, SourceKind};
|
||||||
@@ -25,14 +25,16 @@ use ruff_linter::warn_user_once;
|
|||||||
use ruff_python_ast::{PySourceType, SourceType};
|
use ruff_python_ast::{PySourceType, SourceType};
|
||||||
use ruff_python_formatter::{format_module_source, FormatModuleError, QuoteStyle};
|
use ruff_python_formatter::{format_module_source, FormatModuleError, QuoteStyle};
|
||||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||||
use ruff_workspace::resolver::{match_exclusion, python_files_in_path, ResolvedFile, Resolver};
|
use ruff_workspace::resolver::{
|
||||||
|
match_exclusion, python_files_in_path, PyprojectConfig, ResolvedFile, Resolver,
|
||||||
|
};
|
||||||
use ruff_workspace::FormatterSettings;
|
use ruff_workspace::FormatterSettings;
|
||||||
|
|
||||||
use crate::args::{CliOverrides, FormatArguments};
|
use crate::args::{CliOverrides, FormatArguments};
|
||||||
use crate::cache::{Cache, FileCacheKey, PackageCacheMap, PackageCaches};
|
use crate::cache::{Cache, FileCacheKey, PackageCacheMap, PackageCaches};
|
||||||
use crate::panic::{catch_unwind, PanicError};
|
use crate::panic::{catch_unwind, PanicError};
|
||||||
use crate::resolve::resolve;
|
use crate::resolve::resolve;
|
||||||
use crate::{resolve_default_files, ExitStatus};
|
use crate::ExitStatus;
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone, is_macro::Is)]
|
#[derive(Debug, Copy, Clone, is_macro::Is)]
|
||||||
pub(crate) enum FormatMode {
|
pub(crate) enum FormatMode {
|
||||||
@@ -58,7 +60,7 @@ impl FormatMode {
|
|||||||
|
|
||||||
/// Format a set of files, and return the exit status.
|
/// Format a set of files, and return the exit status.
|
||||||
pub(crate) fn format(
|
pub(crate) fn format(
|
||||||
cli: FormatArguments,
|
cli: &FormatArguments,
|
||||||
overrides: &CliOverrides,
|
overrides: &CliOverrides,
|
||||||
log_level: LogLevel,
|
log_level: LogLevel,
|
||||||
) -> Result<ExitStatus> {
|
) -> Result<ExitStatus> {
|
||||||
@@ -68,16 +70,15 @@ pub(crate) fn format(
|
|||||||
overrides,
|
overrides,
|
||||||
cli.stdin_filename.as_deref(),
|
cli.stdin_filename.as_deref(),
|
||||||
)?;
|
)?;
|
||||||
let mode = FormatMode::from_cli(&cli);
|
let mode = FormatMode::from_cli(cli);
|
||||||
let files = resolve_default_files(cli.files, false);
|
let (paths, resolver) = python_files_in_path(&cli.files, &pyproject_config, overrides)?;
|
||||||
let (paths, resolver) = python_files_in_path(&files, &pyproject_config, overrides)?;
|
|
||||||
|
|
||||||
if paths.is_empty() {
|
if paths.is_empty() {
|
||||||
warn_user_once!("No Python files found under the given path(s)");
|
warn_user_once!("No Python files found under the given path(s)");
|
||||||
return Ok(ExitStatus::Success);
|
return Ok(ExitStatus::Success);
|
||||||
}
|
}
|
||||||
|
|
||||||
warn_incompatible_formatter_settings(&resolver);
|
warn_incompatible_formatter_settings(&pyproject_config, Some(&resolver));
|
||||||
|
|
||||||
// Discover the package root for each Python file.
|
// Discover the package root for each Python file.
|
||||||
let package_roots = resolver.package_roots(
|
let package_roots = resolver.package_roots(
|
||||||
@@ -86,6 +87,7 @@ pub(crate) fn format(
|
|||||||
.flatten()
|
.flatten()
|
||||||
.map(ResolvedFile::path)
|
.map(ResolvedFile::path)
|
||||||
.collect::<Vec<_>>(),
|
.collect::<Vec<_>>(),
|
||||||
|
&pyproject_config,
|
||||||
);
|
);
|
||||||
|
|
||||||
let caches = if cli.no_cache {
|
let caches = if cli.no_cache {
|
||||||
@@ -96,7 +98,11 @@ pub(crate) fn format(
|
|||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
crate::warn_user!("Detected debug build without --no-cache.");
|
crate::warn_user!("Detected debug build without --no-cache.");
|
||||||
|
|
||||||
Some(PackageCacheMap::init(&package_roots, &resolver))
|
Some(PackageCacheMap::init(
|
||||||
|
&pyproject_config,
|
||||||
|
&package_roots,
|
||||||
|
&resolver,
|
||||||
|
))
|
||||||
};
|
};
|
||||||
|
|
||||||
let start = Instant::now();
|
let start = Instant::now();
|
||||||
@@ -111,7 +117,7 @@ pub(crate) fn format(
|
|||||||
return None;
|
return None;
|
||||||
};
|
};
|
||||||
|
|
||||||
let settings = resolver.resolve(path);
|
let settings = resolver.resolve(path, &pyproject_config);
|
||||||
|
|
||||||
// Ignore files that are excluded from formatting
|
// Ignore files that are excluded from formatting
|
||||||
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
|
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
|
||||||
@@ -237,7 +243,7 @@ pub(crate) fn format_path(
|
|||||||
// Extract the sources from the file.
|
// Extract the sources from the file.
|
||||||
let unformatted = match SourceKind::from_path(path, source_type) {
|
let unformatted = match SourceKind::from_path(path, source_type) {
|
||||||
Ok(Some(source_kind)) => source_kind,
|
Ok(Some(source_kind)) => source_kind,
|
||||||
// Non-Python Jupyter notebook.
|
// Non Python Jupyter notebook
|
||||||
Ok(None) => return Ok(FormatResult::Skipped),
|
Ok(None) => return Ok(FormatResult::Skipped),
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
return Err(FormatCommandError::Read(Some(path.to_path_buf()), err));
|
return Err(FormatCommandError::Read(Some(path.to_path_buf()), err));
|
||||||
@@ -314,22 +320,12 @@ pub(crate) fn format_source(
|
|||||||
path: Option<&Path>,
|
path: Option<&Path>,
|
||||||
settings: &FormatterSettings,
|
settings: &FormatterSettings,
|
||||||
) -> Result<FormattedSource, FormatCommandError> {
|
) -> Result<FormattedSource, FormatCommandError> {
|
||||||
match &source_kind {
|
match source_kind {
|
||||||
SourceKind::Python(unformatted) => {
|
SourceKind::Python(unformatted) => {
|
||||||
let options = settings.to_format_options(source_type, unformatted);
|
let options = settings.to_format_options(source_type, unformatted);
|
||||||
|
|
||||||
let formatted = format_module_source(unformatted, options).map_err(|err| {
|
let formatted = format_module_source(unformatted, options)
|
||||||
if let FormatModuleError::ParseError(err) = err {
|
.map_err(|err| FormatCommandError::Format(path.map(Path::to_path_buf), err))?;
|
||||||
DisplayParseError::from_source_kind(
|
|
||||||
err,
|
|
||||||
path.map(Path::to_path_buf),
|
|
||||||
source_kind,
|
|
||||||
)
|
|
||||||
.into()
|
|
||||||
} else {
|
|
||||||
FormatCommandError::Format(path.map(Path::to_path_buf), err)
|
|
||||||
}
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let formatted = formatted.into_code();
|
let formatted = formatted.into_code();
|
||||||
if formatted.len() == unformatted.len() && formatted == *unformatted {
|
if formatted.len() == unformatted.len() && formatted == *unformatted {
|
||||||
@@ -355,19 +351,8 @@ pub(crate) fn format_source(
|
|||||||
let unformatted = ¬ebook.source_code()[range];
|
let unformatted = ¬ebook.source_code()[range];
|
||||||
|
|
||||||
// Format the cell.
|
// Format the cell.
|
||||||
let formatted =
|
let formatted = format_module_source(unformatted, options.clone())
|
||||||
format_module_source(unformatted, options.clone()).map_err(|err| {
|
.map_err(|err| FormatCommandError::Format(path.map(Path::to_path_buf), err))?;
|
||||||
if let FormatModuleError::ParseError(err) = err {
|
|
||||||
DisplayParseError::from_source_kind(
|
|
||||||
err,
|
|
||||||
path.map(Path::to_path_buf),
|
|
||||||
source_kind,
|
|
||||||
)
|
|
||||||
.into()
|
|
||||||
} else {
|
|
||||||
FormatCommandError::Format(path.map(Path::to_path_buf), err)
|
|
||||||
}
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// If the cell is unchanged, skip it.
|
// If the cell is unchanged, skip it.
|
||||||
let formatted = formatted.as_code();
|
let formatted = formatted.as_code();
|
||||||
@@ -422,13 +407,11 @@ pub(crate) fn format_source(
|
|||||||
pub(crate) enum FormatResult {
|
pub(crate) enum FormatResult {
|
||||||
/// The file was formatted.
|
/// The file was formatted.
|
||||||
Formatted,
|
Formatted,
|
||||||
|
|
||||||
/// The file was formatted, [`SourceKind`] contains the formatted code
|
/// The file was formatted, [`SourceKind`] contains the formatted code
|
||||||
Diff {
|
Diff {
|
||||||
unformatted: SourceKind,
|
unformatted: SourceKind,
|
||||||
formatted: SourceKind,
|
formatted: SourceKind,
|
||||||
},
|
},
|
||||||
|
|
||||||
/// The file was unchanged, as the formatted contents matched the existing contents.
|
/// The file was unchanged, as the formatted contents matched the existing contents.
|
||||||
Unchanged,
|
Unchanged,
|
||||||
|
|
||||||
@@ -531,7 +514,7 @@ impl<'a> FormatResults<'a> {
|
|||||||
if changed > 0 && unchanged > 0 {
|
if changed > 0 && unchanged > 0 {
|
||||||
writeln!(
|
writeln!(
|
||||||
f,
|
f,
|
||||||
"{} file{} {}, {} file{} {}",
|
"{} file{} {}, {} file{} left unchanged",
|
||||||
changed,
|
changed,
|
||||||
if changed == 1 { "" } else { "s" },
|
if changed == 1 { "" } else { "s" },
|
||||||
match self.mode {
|
match self.mode {
|
||||||
@@ -540,10 +523,6 @@ impl<'a> FormatResults<'a> {
|
|||||||
},
|
},
|
||||||
unchanged,
|
unchanged,
|
||||||
if unchanged == 1 { "" } else { "s" },
|
if unchanged == 1 { "" } else { "s" },
|
||||||
match self.mode {
|
|
||||||
FormatMode::Write => "left unchanged",
|
|
||||||
FormatMode::Check | FormatMode::Diff => "already formatted",
|
|
||||||
},
|
|
||||||
)
|
)
|
||||||
} else if changed > 0 {
|
} else if changed > 0 {
|
||||||
writeln!(
|
writeln!(
|
||||||
@@ -559,13 +538,9 @@ impl<'a> FormatResults<'a> {
|
|||||||
} else if unchanged > 0 {
|
} else if unchanged > 0 {
|
||||||
writeln!(
|
writeln!(
|
||||||
f,
|
f,
|
||||||
"{} file{} {}",
|
"{} file{} left unchanged",
|
||||||
unchanged,
|
unchanged,
|
||||||
if unchanged == 1 { "" } else { "s" },
|
if unchanged == 1 { "" } else { "s" },
|
||||||
match self.mode {
|
|
||||||
FormatMode::Write => "left unchanged",
|
|
||||||
FormatMode::Check | FormatMode::Diff => "already formatted",
|
|
||||||
},
|
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
Ok(())
|
Ok(())
|
||||||
@@ -577,7 +552,6 @@ impl<'a> FormatResults<'a> {
|
|||||||
#[derive(Error, Debug)]
|
#[derive(Error, Debug)]
|
||||||
pub(crate) enum FormatCommandError {
|
pub(crate) enum FormatCommandError {
|
||||||
Ignore(#[from] ignore::Error),
|
Ignore(#[from] ignore::Error),
|
||||||
Parse(#[from] DisplayParseError),
|
|
||||||
Panic(Option<PathBuf>, PanicError),
|
Panic(Option<PathBuf>, PanicError),
|
||||||
Read(Option<PathBuf>, SourceError),
|
Read(Option<PathBuf>, SourceError),
|
||||||
Format(Option<PathBuf>, FormatModuleError),
|
Format(Option<PathBuf>, FormatModuleError),
|
||||||
@@ -595,7 +569,6 @@ impl FormatCommandError {
|
|||||||
None
|
None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Self::Parse(err) => err.path(),
|
|
||||||
Self::Panic(path, _)
|
Self::Panic(path, _)
|
||||||
| Self::Read(path, _)
|
| Self::Read(path, _)
|
||||||
| Self::Format(path, _)
|
| Self::Format(path, _)
|
||||||
@@ -629,9 +602,6 @@ impl Display for FormatCommandError {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Self::Parse(err) => {
|
|
||||||
write!(f, "{err}")
|
|
||||||
}
|
|
||||||
Self::Read(path, err) => {
|
Self::Read(path, err) => {
|
||||||
if let Some(path) = path {
|
if let Some(path) = path {
|
||||||
write!(
|
write!(
|
||||||
@@ -690,12 +660,12 @@ impl Display for FormatCommandError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
Self::Panic(path, err) => {
|
Self::Panic(path, err) => {
|
||||||
let message = r"This indicates a bug in Ruff. If you could open an issue at:
|
let message = r#"This indicates a bug in Ruff. If you could open an issue at:
|
||||||
|
|
||||||
https://github.com/astral-sh/ruff/issues/new?title=%5BFormatter%20panic%5D
|
https://github.com/astral-sh/ruff/issues/new?title=%5BFormatter%20panic%5D
|
||||||
|
|
||||||
...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
|
...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
|
||||||
";
|
"#;
|
||||||
if let Some(path) = path {
|
if let Some(path) = path {
|
||||||
write!(
|
write!(
|
||||||
f,
|
f,
|
||||||
@@ -716,10 +686,15 @@ impl Display for FormatCommandError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
|
pub(super) fn warn_incompatible_formatter_settings(
|
||||||
|
pyproject_config: &PyprojectConfig,
|
||||||
|
resolver: Option<&Resolver>,
|
||||||
|
) {
|
||||||
// First, collect all rules that are incompatible regardless of the linter-specific settings.
|
// First, collect all rules that are incompatible regardless of the linter-specific settings.
|
||||||
let mut incompatible_rules = FxHashSet::default();
|
let mut incompatible_rules = FxHashSet::default();
|
||||||
for setting in resolver.settings() {
|
for setting in std::iter::once(&pyproject_config.settings)
|
||||||
|
.chain(resolver.iter().flat_map(|resolver| resolver.settings()))
|
||||||
|
{
|
||||||
for rule in [
|
for rule in [
|
||||||
// The formatter might collapse implicit string concatenation on a single line.
|
// The formatter might collapse implicit string concatenation on a single line.
|
||||||
Rule::SingleLineImplicitStringConcatenation,
|
Rule::SingleLineImplicitStringConcatenation,
|
||||||
@@ -748,7 +723,9 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Next, validate settings-specific incompatibilities.
|
// Next, validate settings-specific incompatibilities.
|
||||||
for setting in resolver.settings() {
|
for setting in std::iter::once(&pyproject_config.settings)
|
||||||
|
.chain(resolver.iter().flat_map(|resolver| resolver.settings()))
|
||||||
|
{
|
||||||
// Validate all rules that rely on tab styles.
|
// Validate all rules that rely on tab styles.
|
||||||
if setting.linter.rules.enabled(Rule::TabIndentation)
|
if setting.linter.rules.enabled(Rule::TabIndentation)
|
||||||
&& setting.formatter.indent_style.is_tab()
|
&& setting.formatter.indent_style.is_tab()
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ use log::error;
|
|||||||
|
|
||||||
use ruff_linter::source_kind::SourceKind;
|
use ruff_linter::source_kind::SourceKind;
|
||||||
use ruff_python_ast::{PySourceType, SourceType};
|
use ruff_python_ast::{PySourceType, SourceType};
|
||||||
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, Resolver};
|
use ruff_workspace::resolver::{match_exclusion, python_file_at_path};
|
||||||
use ruff_workspace::FormatterSettings;
|
use ruff_workspace::FormatterSettings;
|
||||||
|
|
||||||
use crate::args::{CliOverrides, FormatArguments};
|
use crate::args::{CliOverrides, FormatArguments};
|
||||||
@@ -15,7 +15,7 @@ use crate::commands::format::{
|
|||||||
FormatResult, FormattedSource,
|
FormatResult, FormattedSource,
|
||||||
};
|
};
|
||||||
use crate::resolve::resolve;
|
use crate::resolve::resolve;
|
||||||
use crate::stdin::{parrot_stdin, read_from_stdin};
|
use crate::stdin::read_from_stdin;
|
||||||
use crate::ExitStatus;
|
use crate::ExitStatus;
|
||||||
|
|
||||||
/// Run the formatter over a single file, read from `stdin`.
|
/// Run the formatter over a single file, read from `stdin`.
|
||||||
@@ -27,26 +27,21 @@ pub(crate) fn format_stdin(cli: &FormatArguments, overrides: &CliOverrides) -> R
|
|||||||
cli.stdin_filename.as_deref(),
|
cli.stdin_filename.as_deref(),
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
let mut resolver = Resolver::new(&pyproject_config);
|
warn_incompatible_formatter_settings(&pyproject_config, None);
|
||||||
warn_incompatible_formatter_settings(&resolver);
|
|
||||||
|
|
||||||
let mode = FormatMode::from_cli(cli);
|
let mode = FormatMode::from_cli(cli);
|
||||||
|
|
||||||
if resolver.force_exclude() {
|
if pyproject_config.settings.file_resolver.force_exclude {
|
||||||
if let Some(filename) = cli.stdin_filename.as_deref() {
|
if let Some(filename) = cli.stdin_filename.as_deref() {
|
||||||
if !python_file_at_path(filename, &mut resolver, overrides)? {
|
if !python_file_at_path(filename, &pyproject_config, overrides)? {
|
||||||
if mode.is_write() {
|
|
||||||
parrot_stdin()?;
|
|
||||||
}
|
|
||||||
return Ok(ExitStatus::Success);
|
return Ok(ExitStatus::Success);
|
||||||
}
|
}
|
||||||
|
|
||||||
if filename.file_name().is_some_and(|name| {
|
let format_settings = &pyproject_config.settings.formatter;
|
||||||
match_exclusion(filename, name, &resolver.base_settings().formatter.exclude)
|
if filename
|
||||||
}) {
|
.file_name()
|
||||||
if mode.is_write() {
|
.is_some_and(|name| match_exclusion(filename, name, &format_settings.exclude))
|
||||||
parrot_stdin()?;
|
{
|
||||||
}
|
|
||||||
return Ok(ExitStatus::Success);
|
return Ok(ExitStatus::Success);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -55,14 +50,16 @@ pub(crate) fn format_stdin(cli: &FormatArguments, overrides: &CliOverrides) -> R
|
|||||||
let path = cli.stdin_filename.as_deref();
|
let path = cli.stdin_filename.as_deref();
|
||||||
|
|
||||||
let SourceType::Python(source_type) = path.map(SourceType::from).unwrap_or_default() else {
|
let SourceType::Python(source_type) = path.map(SourceType::from).unwrap_or_default() else {
|
||||||
if mode.is_write() {
|
|
||||||
parrot_stdin()?;
|
|
||||||
}
|
|
||||||
return Ok(ExitStatus::Success);
|
return Ok(ExitStatus::Success);
|
||||||
};
|
};
|
||||||
|
|
||||||
// Format the file.
|
// Format the file.
|
||||||
match format_source_code(path, &resolver.base_settings().formatter, source_type, mode) {
|
match format_source_code(
|
||||||
|
path,
|
||||||
|
&pyproject_config.settings.formatter,
|
||||||
|
source_type,
|
||||||
|
mode,
|
||||||
|
) {
|
||||||
Ok(result) => match mode {
|
Ok(result) => match mode {
|
||||||
FormatMode::Write => Ok(ExitStatus::Success),
|
FormatMode::Write => Ok(ExitStatus::Success),
|
||||||
FormatMode::Check | FormatMode::Diff => {
|
FormatMode::Check | FormatMode::Diff => {
|
||||||
|
|||||||
@@ -18,7 +18,6 @@ struct Explanation<'a> {
|
|||||||
summary: &'a str,
|
summary: &'a str,
|
||||||
message_formats: &'a [&'a str],
|
message_formats: &'a [&'a str],
|
||||||
fix: String,
|
fix: String,
|
||||||
#[allow(clippy::struct_field_names)]
|
|
||||||
explanation: Option<&'a str>,
|
explanation: Option<&'a str>,
|
||||||
preview: bool,
|
preview: bool,
|
||||||
}
|
}
|
||||||
@@ -64,7 +63,7 @@ fn format_rule_text(rule: Rule) -> String {
|
|||||||
|
|
||||||
if rule.is_preview() || rule.is_nursery() {
|
if rule.is_preview() || rule.is_nursery() {
|
||||||
output.push_str(
|
output.push_str(
|
||||||
r"This rule is in preview and is not stable. The `--preview` flag is required for use.",
|
r#"This rule is in preview and is not stable. The `--preview` flag is required for use."#,
|
||||||
);
|
);
|
||||||
output.push('\n');
|
output.push('\n');
|
||||||
output.push('\n');
|
output.push('\n');
|
||||||
|
|||||||
@@ -29,7 +29,7 @@ pub(crate) fn show_settings(
|
|||||||
bail!("No files found under the given path");
|
bail!("No files found under the given path");
|
||||||
};
|
};
|
||||||
|
|
||||||
let settings = resolver.resolve(&path);
|
let settings = resolver.resolve(&path, pyproject_config);
|
||||||
|
|
||||||
writeln!(writer, "Resolved settings for: {path:?}")?;
|
writeln!(writer, "Resolved settings for: {path:?}")?;
|
||||||
if let Some(settings_path) = pyproject_config.path.as_ref() {
|
if let Some(settings_path) = pyproject_config.path.as_ref() {
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
#![cfg_attr(target_family = "wasm", allow(dead_code))]
|
#![cfg_attr(target_family = "wasm", allow(dead_code))]
|
||||||
|
|
||||||
use std::borrow::Cow;
|
|
||||||
use std::fs::File;
|
use std::fs::File;
|
||||||
use std::io;
|
use std::io;
|
||||||
use std::ops::{Add, AddAssign};
|
use std::ops::{Add, AddAssign};
|
||||||
@@ -11,25 +10,24 @@ use colored::Colorize;
|
|||||||
use log::{debug, error, warn};
|
use log::{debug, error, warn};
|
||||||
use rustc_hash::FxHashMap;
|
use rustc_hash::FxHashMap;
|
||||||
|
|
||||||
|
use crate::cache::{Cache, FileCacheKey, LintCacheData};
|
||||||
use ruff_diagnostics::Diagnostic;
|
use ruff_diagnostics::Diagnostic;
|
||||||
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult, ParseSource};
|
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult};
|
||||||
use ruff_linter::logging::DisplayParseError;
|
use ruff_linter::logging::DisplayParseError;
|
||||||
use ruff_linter::message::Message;
|
use ruff_linter::message::Message;
|
||||||
use ruff_linter::pyproject_toml::lint_pyproject_toml;
|
use ruff_linter::pyproject_toml::lint_pyproject_toml;
|
||||||
use ruff_linter::registry::AsRule;
|
use ruff_linter::registry::AsRule;
|
||||||
use ruff_linter::settings::types::{ExtensionMapping, UnsafeFixes};
|
use ruff_linter::settings::types::UnsafeFixes;
|
||||||
use ruff_linter::settings::{flags, LinterSettings};
|
use ruff_linter::settings::{flags, LinterSettings};
|
||||||
use ruff_linter::source_kind::{SourceError, SourceKind};
|
use ruff_linter::source_kind::{SourceError, SourceKind};
|
||||||
use ruff_linter::{fs, IOError, SyntaxError};
|
use ruff_linter::{fs, IOError, SyntaxError};
|
||||||
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
|
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
|
||||||
use ruff_python_ast::imports::ImportMap;
|
use ruff_python_ast::imports::ImportMap;
|
||||||
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
|
use ruff_python_ast::{SourceType, TomlSourceType};
|
||||||
use ruff_source_file::SourceFileBuilder;
|
use ruff_source_file::{LineIndex, SourceCode, SourceFileBuilder};
|
||||||
use ruff_text_size::{TextRange, TextSize};
|
use ruff_text_size::{TextRange, TextSize};
|
||||||
use ruff_workspace::Settings;
|
use ruff_workspace::Settings;
|
||||||
|
|
||||||
use crate::cache::{Cache, FileCacheKey, LintCacheData};
|
|
||||||
|
|
||||||
#[derive(Debug, Default, PartialEq)]
|
#[derive(Debug, Default, PartialEq)]
|
||||||
pub(crate) struct Diagnostics {
|
pub(crate) struct Diagnostics {
|
||||||
pub(crate) messages: Vec<Message>,
|
pub(crate) messages: Vec<Message>,
|
||||||
@@ -179,11 +177,6 @@ impl AddAssign for FixMap {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn override_source_type(path: Option<&Path>, extension: &ExtensionMapping) -> Option<PySourceType> {
|
|
||||||
let ext = path?.extension()?.to_str()?;
|
|
||||||
extension.get(ext).map(PySourceType::from)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Lint the source code at the given `Path`.
|
/// Lint the source code at the given `Path`.
|
||||||
pub(crate) fn lint_path(
|
pub(crate) fn lint_path(
|
||||||
path: &Path,
|
path: &Path,
|
||||||
@@ -228,35 +221,31 @@ pub(crate) fn lint_path(
|
|||||||
|
|
||||||
debug!("Checking: {}", path.display());
|
debug!("Checking: {}", path.display());
|
||||||
|
|
||||||
let source_type = match override_source_type(Some(path), &settings.extension) {
|
let source_type = match SourceType::from(path) {
|
||||||
Some(source_type) => source_type,
|
SourceType::Toml(TomlSourceType::Pyproject) => {
|
||||||
None => match SourceType::from(path) {
|
let messages = if settings
|
||||||
SourceType::Toml(TomlSourceType::Pyproject) => {
|
.rules
|
||||||
let messages = if settings
|
.iter_enabled()
|
||||||
.rules
|
.any(|rule_code| rule_code.lint_source().is_pyproject_toml())
|
||||||
.iter_enabled()
|
{
|
||||||
.any(|rule_code| rule_code.lint_source().is_pyproject_toml())
|
let contents = match std::fs::read_to_string(path).map_err(SourceError::from) {
|
||||||
{
|
Ok(contents) => contents,
|
||||||
let contents = match std::fs::read_to_string(path).map_err(SourceError::from) {
|
Err(err) => {
|
||||||
Ok(contents) => contents,
|
return Ok(Diagnostics::from_source_error(&err, Some(path), settings));
|
||||||
Err(err) => {
|
}
|
||||||
return Ok(Diagnostics::from_source_error(&err, Some(path), settings));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let source_file =
|
|
||||||
SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
|
|
||||||
lint_pyproject_toml(source_file, settings)
|
|
||||||
} else {
|
|
||||||
vec![]
|
|
||||||
};
|
};
|
||||||
return Ok(Diagnostics {
|
let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
|
||||||
messages,
|
lint_pyproject_toml(source_file, settings)
|
||||||
..Diagnostics::default()
|
} else {
|
||||||
});
|
vec![]
|
||||||
}
|
};
|
||||||
SourceType::Toml(_) => return Ok(Diagnostics::default()),
|
return Ok(Diagnostics {
|
||||||
SourceType::Python(source_type) => source_type,
|
messages,
|
||||||
},
|
..Diagnostics::default()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
SourceType::Toml(_) => return Ok(Diagnostics::default()),
|
||||||
|
SourceType::Python(source_type) => source_type,
|
||||||
};
|
};
|
||||||
|
|
||||||
// Extract the sources from the file.
|
// Extract the sources from the file.
|
||||||
@@ -274,7 +263,6 @@ pub(crate) fn lint_path(
|
|||||||
data: (messages, imports),
|
data: (messages, imports),
|
||||||
error: parse_error,
|
error: parse_error,
|
||||||
},
|
},
|
||||||
transformed,
|
|
||||||
fixed,
|
fixed,
|
||||||
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
|
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
|
||||||
if let Ok(FixerResult {
|
if let Ok(FixerResult {
|
||||||
@@ -303,40 +291,17 @@ pub(crate) fn lint_path(
|
|||||||
flags::FixMode::Generate => {}
|
flags::FixMode::Generate => {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let transformed = if let Cow::Owned(transformed) = transformed {
|
(result, fixed)
|
||||||
transformed
|
|
||||||
} else {
|
|
||||||
source_kind
|
|
||||||
};
|
|
||||||
(result, transformed, fixed)
|
|
||||||
} else {
|
} else {
|
||||||
// If we fail to fix, lint the original source code.
|
// If we fail to fix, lint the original source code.
|
||||||
let result = lint_only(
|
let result = lint_only(path, package, settings, noqa, &source_kind, source_type);
|
||||||
path,
|
|
||||||
package,
|
|
||||||
settings,
|
|
||||||
noqa,
|
|
||||||
&source_kind,
|
|
||||||
source_type,
|
|
||||||
ParseSource::None,
|
|
||||||
);
|
|
||||||
let transformed = source_kind;
|
|
||||||
let fixed = FxHashMap::default();
|
let fixed = FxHashMap::default();
|
||||||
(result, transformed, fixed)
|
(result, fixed)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
let result = lint_only(
|
let result = lint_only(path, package, settings, noqa, &source_kind, source_type);
|
||||||
path,
|
|
||||||
package,
|
|
||||||
settings,
|
|
||||||
noqa,
|
|
||||||
&source_kind,
|
|
||||||
source_type,
|
|
||||||
ParseSource::None,
|
|
||||||
);
|
|
||||||
let transformed = source_kind;
|
|
||||||
let fixed = FxHashMap::default();
|
let fixed = FxHashMap::default();
|
||||||
(result, transformed, fixed)
|
(result, fixed)
|
||||||
};
|
};
|
||||||
|
|
||||||
let imports = imports.unwrap_or_default();
|
let imports = imports.unwrap_or_default();
|
||||||
@@ -344,7 +309,7 @@ pub(crate) fn lint_path(
|
|||||||
if let Some((cache, relative_path, key)) = caching {
|
if let Some((cache, relative_path, key)) = caching {
|
||||||
// We don't cache parsing errors.
|
// We don't cache parsing errors.
|
||||||
if parse_error.is_none() {
|
if parse_error.is_none() {
|
||||||
// `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
|
// `FixMode::Generate` and `FixMode::Diff` rely on side-effects (writing to disk,
|
||||||
// and writing the diff to stdout, respectively). If a file has diagnostics, we
|
// and writing the diff to stdout, respectively). If a file has diagnostics, we
|
||||||
// need to avoid reading from and writing to the cache in these modes.
|
// need to avoid reading from and writing to the cache in these modes.
|
||||||
if match fix_mode {
|
if match fix_mode {
|
||||||
@@ -359,21 +324,28 @@ pub(crate) fn lint_path(
|
|||||||
LintCacheData::from_messages(
|
LintCacheData::from_messages(
|
||||||
&messages,
|
&messages,
|
||||||
imports.clone(),
|
imports.clone(),
|
||||||
transformed.as_ipy_notebook().map(Notebook::index).cloned(),
|
source_kind.as_ipy_notebook().map(Notebook::index).cloned(),
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(error) = parse_error {
|
if let Some(err) = parse_error {
|
||||||
error!(
|
error!(
|
||||||
"{}",
|
"{}",
|
||||||
DisplayParseError::from_source_kind(error, Some(path.to_path_buf()), &transformed)
|
DisplayParseError::new(
|
||||||
|
err,
|
||||||
|
SourceCode::new(
|
||||||
|
source_kind.source_code(),
|
||||||
|
&LineIndex::from_source_text(source_kind.source_code())
|
||||||
|
),
|
||||||
|
&source_kind,
|
||||||
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
|
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = source_kind {
|
||||||
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook.into_index())])
|
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook.into_index())])
|
||||||
} else {
|
} else {
|
||||||
FxHashMap::default()
|
FxHashMap::default()
|
||||||
@@ -398,15 +370,8 @@ pub(crate) fn lint_stdin(
|
|||||||
fix_mode: flags::FixMode,
|
fix_mode: flags::FixMode,
|
||||||
) -> Result<Diagnostics> {
|
) -> Result<Diagnostics> {
|
||||||
// TODO(charlie): Support `pyproject.toml`.
|
// TODO(charlie): Support `pyproject.toml`.
|
||||||
let source_type = if let Some(source_type) =
|
let SourceType::Python(source_type) = path.map(SourceType::from).unwrap_or_default() else {
|
||||||
override_source_type(path, &settings.linter.extension)
|
return Ok(Diagnostics::default());
|
||||||
{
|
|
||||||
source_type
|
|
||||||
} else {
|
|
||||||
let SourceType::Python(source_type) = path.map(SourceType::from).unwrap_or_default() else {
|
|
||||||
return Ok(Diagnostics::default());
|
|
||||||
};
|
|
||||||
source_type
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// Extract the sources from the file.
|
// Extract the sources from the file.
|
||||||
@@ -424,7 +389,6 @@ pub(crate) fn lint_stdin(
|
|||||||
data: (messages, imports),
|
data: (messages, imports),
|
||||||
error: parse_error,
|
error: parse_error,
|
||||||
},
|
},
|
||||||
transformed,
|
|
||||||
fixed,
|
fixed,
|
||||||
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
|
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
|
||||||
if let Ok(FixerResult {
|
if let Ok(FixerResult {
|
||||||
@@ -453,12 +417,8 @@ pub(crate) fn lint_stdin(
|
|||||||
}
|
}
|
||||||
flags::FixMode::Generate => {}
|
flags::FixMode::Generate => {}
|
||||||
}
|
}
|
||||||
let transformed = if let Cow::Owned(transformed) = transformed {
|
|
||||||
transformed
|
(result, fixed)
|
||||||
} else {
|
|
||||||
source_kind
|
|
||||||
};
|
|
||||||
(result, transformed, fixed)
|
|
||||||
} else {
|
} else {
|
||||||
// If we fail to fix, lint the original source code.
|
// If we fail to fix, lint the original source code.
|
||||||
let result = lint_only(
|
let result = lint_only(
|
||||||
@@ -468,17 +428,15 @@ pub(crate) fn lint_stdin(
|
|||||||
noqa,
|
noqa,
|
||||||
&source_kind,
|
&source_kind,
|
||||||
source_type,
|
source_type,
|
||||||
ParseSource::None,
|
|
||||||
);
|
);
|
||||||
|
let fixed = FxHashMap::default();
|
||||||
|
|
||||||
// Write the contents to stdout anyway.
|
// Write the contents to stdout anyway.
|
||||||
if fix_mode.is_apply() {
|
if fix_mode.is_apply() {
|
||||||
source_kind.write(&mut io::stdout().lock())?;
|
source_kind.write(&mut io::stdout().lock())?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let transformed = source_kind;
|
(result, fixed)
|
||||||
let fixed = FxHashMap::default();
|
|
||||||
(result, transformed, fixed)
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
let result = lint_only(
|
let result = lint_only(
|
||||||
@@ -488,23 +446,21 @@ pub(crate) fn lint_stdin(
|
|||||||
noqa,
|
noqa,
|
||||||
&source_kind,
|
&source_kind,
|
||||||
source_type,
|
source_type,
|
||||||
ParseSource::None,
|
|
||||||
);
|
);
|
||||||
let transformed = source_kind;
|
|
||||||
let fixed = FxHashMap::default();
|
let fixed = FxHashMap::default();
|
||||||
(result, transformed, fixed)
|
(result, fixed)
|
||||||
};
|
};
|
||||||
|
|
||||||
let imports = imports.unwrap_or_default();
|
let imports = imports.unwrap_or_default();
|
||||||
|
|
||||||
if let Some(error) = parse_error {
|
if let Some(err) = parse_error {
|
||||||
error!(
|
error!(
|
||||||
"{}",
|
"Failed to parse {}: {err}",
|
||||||
DisplayParseError::from_source_kind(error, path.map(Path::to_path_buf), &transformed)
|
path.map_or_else(|| "-".into(), fs::relativize_path).bold()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
|
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = source_kind {
|
||||||
FxHashMap::from_iter([(
|
FxHashMap::from_iter([(
|
||||||
path.map_or_else(|| "-".into(), |path| path.to_string_lossy().to_string()),
|
path.map_or_else(|| "-".into(), |path| path.to_string_lossy().to_string()),
|
||||||
notebook.into_index(),
|
notebook.into_index(),
|
||||||
|
|||||||
@@ -101,19 +101,6 @@ fn is_stdin(files: &[PathBuf], stdin_filename: Option<&Path>) -> bool {
|
|||||||
file == Path::new("-")
|
file == Path::new("-")
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the default set of files if none are provided, otherwise returns `None`.
|
|
||||||
fn resolve_default_files(files: Vec<PathBuf>, is_stdin: bool) -> Vec<PathBuf> {
|
|
||||||
if files.is_empty() {
|
|
||||||
if is_stdin {
|
|
||||||
vec![Path::new("-").to_path_buf()]
|
|
||||||
} else {
|
|
||||||
vec![Path::new(".").to_path_buf()]
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
files
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the actual value of the `format` desired from either `output_format`
|
/// Get the actual value of the `format` desired from either `output_format`
|
||||||
/// or `format`, and warn the user if they're using the deprecated form.
|
/// or `format`, and warn the user if they're using the deprecated form.
|
||||||
fn resolve_help_output_format(output_format: HelpFormat, format: Option<HelpFormat>) -> HelpFormat {
|
fn resolve_help_output_format(output_format: HelpFormat, format: Option<HelpFormat>) -> HelpFormat {
|
||||||
@@ -209,7 +196,7 @@ fn format(args: FormatCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
|||||||
if is_stdin(&cli.files, cli.stdin_filename.as_deref()) {
|
if is_stdin(&cli.files, cli.stdin_filename.as_deref()) {
|
||||||
commands::format_stdin::format_stdin(&cli, &overrides)
|
commands::format_stdin::format_stdin(&cli, &overrides)
|
||||||
} else {
|
} else {
|
||||||
commands::format::format(cli, &overrides, log_level)
|
commands::format::format(&cli, &overrides, log_level)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -235,15 +222,17 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
|||||||
};
|
};
|
||||||
let stderr_writer = Box::new(BufWriter::new(io::stderr()));
|
let stderr_writer = Box::new(BufWriter::new(io::stderr()));
|
||||||
|
|
||||||
let is_stdin = is_stdin(&cli.files, cli.stdin_filename.as_deref());
|
|
||||||
let files = resolve_default_files(cli.files, is_stdin);
|
|
||||||
|
|
||||||
if cli.show_settings {
|
if cli.show_settings {
|
||||||
commands::show_settings::show_settings(&files, &pyproject_config, &overrides, &mut writer)?;
|
commands::show_settings::show_settings(
|
||||||
|
&cli.files,
|
||||||
|
&pyproject_config,
|
||||||
|
&overrides,
|
||||||
|
&mut writer,
|
||||||
|
)?;
|
||||||
return Ok(ExitStatus::Success);
|
return Ok(ExitStatus::Success);
|
||||||
}
|
}
|
||||||
if cli.show_files {
|
if cli.show_files {
|
||||||
commands::show_files::show_files(&files, &pyproject_config, &overrides, &mut writer)?;
|
commands::show_files::show_files(&cli.files, &pyproject_config, &overrides, &mut writer)?;
|
||||||
return Ok(ExitStatus::Success);
|
return Ok(ExitStatus::Success);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -306,7 +295,8 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
|||||||
if !fix_mode.is_generate() {
|
if !fix_mode.is_generate() {
|
||||||
warn_user!("--fix is incompatible with --add-noqa.");
|
warn_user!("--fix is incompatible with --add-noqa.");
|
||||||
}
|
}
|
||||||
let modifications = commands::add_noqa::add_noqa(&files, &pyproject_config, &overrides)?;
|
let modifications =
|
||||||
|
commands::add_noqa::add_noqa(&cli.files, &pyproject_config, &overrides)?;
|
||||||
if modifications > 0 && log_level >= LogLevel::Default {
|
if modifications > 0 && log_level >= LogLevel::Default {
|
||||||
let s = if modifications == 1 { "" } else { "s" };
|
let s = if modifications == 1 { "" } else { "s" };
|
||||||
#[allow(clippy::print_stderr)]
|
#[allow(clippy::print_stderr)]
|
||||||
@@ -333,7 +323,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
|||||||
// Configure the file watcher.
|
// Configure the file watcher.
|
||||||
let (tx, rx) = channel();
|
let (tx, rx) = channel();
|
||||||
let mut watcher = recommended_watcher(tx)?;
|
let mut watcher = recommended_watcher(tx)?;
|
||||||
for file in &files {
|
for file in &cli.files {
|
||||||
watcher.watch(file, RecursiveMode::Recursive)?;
|
watcher.watch(file, RecursiveMode::Recursive)?;
|
||||||
}
|
}
|
||||||
if let Some(file) = pyproject_config.path.as_ref() {
|
if let Some(file) = pyproject_config.path.as_ref() {
|
||||||
@@ -345,7 +335,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
|||||||
printer.write_to_user("Starting linter in watch mode...\n");
|
printer.write_to_user("Starting linter in watch mode...\n");
|
||||||
|
|
||||||
let messages = commands::check::check(
|
let messages = commands::check::check(
|
||||||
&files,
|
&cli.files,
|
||||||
&pyproject_config,
|
&pyproject_config,
|
||||||
&overrides,
|
&overrides,
|
||||||
cache.into(),
|
cache.into(),
|
||||||
@@ -378,7 +368,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
|||||||
printer.write_to_user("File change detected...\n");
|
printer.write_to_user("File change detected...\n");
|
||||||
|
|
||||||
let messages = commands::check::check(
|
let messages = commands::check::check(
|
||||||
&files,
|
&cli.files,
|
||||||
&pyproject_config,
|
&pyproject_config,
|
||||||
&overrides,
|
&overrides,
|
||||||
cache.into(),
|
cache.into(),
|
||||||
@@ -392,6 +382,8 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
let is_stdin = is_stdin(&cli.files, cli.stdin_filename.as_deref());
|
||||||
|
|
||||||
// Generate lint violations.
|
// Generate lint violations.
|
||||||
let diagnostics = if is_stdin {
|
let diagnostics = if is_stdin {
|
||||||
commands::check_stdin::check_stdin(
|
commands::check_stdin::check_stdin(
|
||||||
@@ -403,7 +395,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
|||||||
)?
|
)?
|
||||||
} else {
|
} else {
|
||||||
commands::check::check(
|
commands::check::check(
|
||||||
&files,
|
&cli.files,
|
||||||
&pyproject_config,
|
&pyproject_config,
|
||||||
&overrides,
|
&overrides,
|
||||||
cache.into(),
|
cache.into(),
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ use ruff_linter::fs::relativize_path;
|
|||||||
use ruff_linter::logging::LogLevel;
|
use ruff_linter::logging::LogLevel;
|
||||||
use ruff_linter::message::{
|
use ruff_linter::message::{
|
||||||
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
|
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
|
||||||
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, SarifEmitter, TextEmitter,
|
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, TextEmitter,
|
||||||
};
|
};
|
||||||
use ruff_linter::notify_user;
|
use ruff_linter::notify_user;
|
||||||
use ruff_linter::registry::{AsRule, Rule};
|
use ruff_linter::registry::{AsRule, Rule};
|
||||||
@@ -125,7 +125,15 @@ impl Printer {
|
|||||||
if let Some(fixables) = fixables {
|
if let Some(fixables) = fixables {
|
||||||
let fix_prefix = format!("[{}]", "*".cyan());
|
let fix_prefix = format!("[{}]", "*".cyan());
|
||||||
|
|
||||||
if self.unsafe_fixes.is_hint() {
|
if self.unsafe_fixes.is_enabled() {
|
||||||
|
if fixables.applicable > 0 {
|
||||||
|
writeln!(
|
||||||
|
writer,
|
||||||
|
"{fix_prefix} {} fixable with the --fix option.",
|
||||||
|
fixables.applicable
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
if fixables.applicable > 0 && fixables.unapplicable_unsafe > 0 {
|
if fixables.applicable > 0 && fixables.unapplicable_unsafe > 0 {
|
||||||
let es = if fixables.unapplicable_unsafe == 1 {
|
let es = if fixables.unapplicable_unsafe == 1 {
|
||||||
""
|
""
|
||||||
@@ -155,14 +163,6 @@ impl Printer {
|
|||||||
fixables.unapplicable_unsafe
|
fixables.unapplicable_unsafe
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
if fixables.applicable > 0 {
|
|
||||||
writeln!(
|
|
||||||
writer,
|
|
||||||
"{fix_prefix} {} fixable with the --fix option.",
|
|
||||||
fixables.applicable
|
|
||||||
)?;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
@@ -291,9 +291,6 @@ impl Printer {
|
|||||||
SerializationFormat::Azure => {
|
SerializationFormat::Azure => {
|
||||||
AzureEmitter.emit(writer, &diagnostics.messages, &context)?;
|
AzureEmitter.emit(writer, &diagnostics.messages, &context)?;
|
||||||
}
|
}
|
||||||
SerializationFormat::Sarif => {
|
|
||||||
SarifEmitter.emit(writer, &diagnostics.messages, &context)?;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
writer.flush()?;
|
writer.flush()?;
|
||||||
|
|||||||
@@ -43,7 +43,7 @@ pub fn resolve(
|
|||||||
{
|
{
|
||||||
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, overrides)?;
|
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, overrides)?;
|
||||||
debug!(
|
debug!(
|
||||||
"Using user-specified configuration file at: {}",
|
"Using user specified pyproject.toml at {}",
|
||||||
pyproject.display()
|
pyproject.display()
|
||||||
);
|
);
|
||||||
return Ok(PyprojectConfig::new(
|
return Ok(PyprojectConfig::new(
|
||||||
@@ -63,10 +63,7 @@ pub fn resolve(
|
|||||||
.as_ref()
|
.as_ref()
|
||||||
.unwrap_or(&path_dedot::CWD.as_path()),
|
.unwrap_or(&path_dedot::CWD.as_path()),
|
||||||
)? {
|
)? {
|
||||||
debug!(
|
debug!("Using pyproject.toml (parent) at {}", pyproject.display());
|
||||||
"Using configuration file (via parent) at: {}",
|
|
||||||
pyproject.display()
|
|
||||||
);
|
|
||||||
let settings = resolve_root_settings(&pyproject, Relativity::Parent, overrides)?;
|
let settings = resolve_root_settings(&pyproject, Relativity::Parent, overrides)?;
|
||||||
return Ok(PyprojectConfig::new(
|
return Ok(PyprojectConfig::new(
|
||||||
PyprojectDiscoveryStrategy::Hierarchical,
|
PyprojectDiscoveryStrategy::Hierarchical,
|
||||||
@@ -80,10 +77,7 @@ pub fn resolve(
|
|||||||
// end up the "closest" `pyproject.toml` file for every Python file later on, so
|
// end up the "closest" `pyproject.toml` file for every Python file later on, so
|
||||||
// these act as the "default" settings.)
|
// these act as the "default" settings.)
|
||||||
if let Some(pyproject) = pyproject::find_user_settings_toml() {
|
if let Some(pyproject) = pyproject::find_user_settings_toml() {
|
||||||
debug!(
|
debug!("Using pyproject.toml (cwd) at {}", pyproject.display());
|
||||||
"Using configuration file (via cwd) at: {}",
|
|
||||||
pyproject.display()
|
|
||||||
);
|
|
||||||
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, overrides)?;
|
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, overrides)?;
|
||||||
return Ok(PyprojectConfig::new(
|
return Ok(PyprojectConfig::new(
|
||||||
PyprojectDiscoveryStrategy::Hierarchical,
|
PyprojectDiscoveryStrategy::Hierarchical,
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
use std::io;
|
use std::io;
|
||||||
use std::io::{Read, Write};
|
use std::io::Read;
|
||||||
|
|
||||||
/// Read a string from `stdin`.
|
/// Read a string from `stdin`.
|
||||||
pub(crate) fn read_from_stdin() -> Result<String, io::Error> {
|
pub(crate) fn read_from_stdin() -> Result<String, io::Error> {
|
||||||
@@ -7,11 +7,3 @@ pub(crate) fn read_from_stdin() -> Result<String, io::Error> {
|
|||||||
io::stdin().lock().read_to_string(&mut buffer)?;
|
io::stdin().lock().read_to_string(&mut buffer)?;
|
||||||
Ok(buffer)
|
Ok(buffer)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Read bytes from `stdin` and write them to `stdout`.
|
|
||||||
pub(crate) fn parrot_stdin() -> Result<(), io::Error> {
|
|
||||||
let mut buffer = String::new();
|
|
||||||
io::stdin().lock().read_to_string(&mut buffer)?;
|
|
||||||
io::stdout().write_all(buffer.as_bytes())?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -43,53 +43,6 @@ if condition:
|
|||||||
"###);
|
"###);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn default_files() -> Result<()> {
|
|
||||||
let tempdir = TempDir::new()?;
|
|
||||||
fs::write(
|
|
||||||
tempdir.path().join("foo.py"),
|
|
||||||
r#"
|
|
||||||
foo = "needs formatting"
|
|
||||||
"#,
|
|
||||||
)?;
|
|
||||||
fs::write(
|
|
||||||
tempdir.path().join("bar.py"),
|
|
||||||
r#"
|
|
||||||
bar = "needs formatting"
|
|
||||||
"#,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.args(["format", "--isolated", "--no-cache", "--check"]).current_dir(tempdir.path()), @r###"
|
|
||||||
success: false
|
|
||||||
exit_code: 1
|
|
||||||
----- stdout -----
|
|
||||||
Would reformat: bar.py
|
|
||||||
Would reformat: foo.py
|
|
||||||
2 files would be reformatted
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
"###);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn format_warn_stdin_filename_with_files() {
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.args(["format", "--isolated", "--stdin-filename", "foo.py"])
|
|
||||||
.arg("foo.py")
|
|
||||||
.pass_stdin("foo = 1"), @r###"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
foo = 1
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
warning: Ignoring file foo.py in favor of standard input.
|
|
||||||
"###);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn format_options() -> Result<()> {
|
fn format_options() -> Result<()> {
|
||||||
let tempdir = TempDir::new()?;
|
let tempdir = TempDir::new()?;
|
||||||
@@ -139,99 +92,6 @@ if condition:
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn docstring_options() -> Result<()> {
|
|
||||||
let tempdir = TempDir::new()?;
|
|
||||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
|
||||||
fs::write(
|
|
||||||
&ruff_toml,
|
|
||||||
r#"
|
|
||||||
[format]
|
|
||||||
docstring-code-format = true
|
|
||||||
docstring-code-line-length = 20
|
|
||||||
"#,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.args(["format", "--config"])
|
|
||||||
.arg(&ruff_toml)
|
|
||||||
.arg("-")
|
|
||||||
.pass_stdin(r#"
|
|
||||||
def f(x):
|
|
||||||
'''
|
|
||||||
Something about `f`. And an example:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
|
|
||||||
|
|
||||||
Another example:
|
|
||||||
|
|
||||||
```py
|
|
||||||
foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
|
|
||||||
```
|
|
||||||
|
|
||||||
And another:
|
|
||||||
|
|
||||||
>>> foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
|
|
||||||
'''
|
|
||||||
pass
|
|
||||||
"#), @r###"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
def f(x):
|
|
||||||
"""
|
|
||||||
Something about `f`. And an example:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
(
|
|
||||||
foo,
|
|
||||||
bar,
|
|
||||||
quux,
|
|
||||||
) = this_is_a_long_line(
|
|
||||||
lion,
|
|
||||||
hippo,
|
|
||||||
lemur,
|
|
||||||
bear,
|
|
||||||
)
|
|
||||||
|
|
||||||
Another example:
|
|
||||||
|
|
||||||
```py
|
|
||||||
(
|
|
||||||
foo,
|
|
||||||
bar,
|
|
||||||
quux,
|
|
||||||
) = this_is_a_long_line(
|
|
||||||
lion,
|
|
||||||
hippo,
|
|
||||||
lemur,
|
|
||||||
bear,
|
|
||||||
)
|
|
||||||
```
|
|
||||||
|
|
||||||
And another:
|
|
||||||
|
|
||||||
>>> (
|
|
||||||
... foo,
|
|
||||||
... bar,
|
|
||||||
... quux,
|
|
||||||
... ) = this_is_a_long_line(
|
|
||||||
... lion,
|
|
||||||
... hippo,
|
|
||||||
... lemur,
|
|
||||||
... bear,
|
|
||||||
... )
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
"###);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn mixed_line_endings() -> Result<()> {
|
fn mixed_line_endings() -> Result<()> {
|
||||||
let tempdir = TempDir::new()?;
|
let tempdir = TempDir::new()?;
|
||||||
@@ -255,7 +115,7 @@ fn mixed_line_endings() -> Result<()> {
|
|||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
2 files already formatted
|
2 files left unchanged
|
||||||
"###);
|
"###);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -328,86 +188,6 @@ OTHER = "OTHER"
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn syntax_error() -> Result<()> {
|
|
||||||
let tempdir = TempDir::new()?;
|
|
||||||
|
|
||||||
fs::write(
|
|
||||||
tempdir.path().join("main.py"),
|
|
||||||
r#"
|
|
||||||
from module import =
|
|
||||||
"#,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.current_dir(tempdir.path())
|
|
||||||
.args(["format", "--no-cache", "--isolated", "--check"])
|
|
||||||
.arg("main.py"), @r###"
|
|
||||||
success: false
|
|
||||||
exit_code: 2
|
|
||||||
----- stdout -----
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
error: Failed to parse main.py:2:20: Unexpected token '='
|
|
||||||
"###);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn messages() -> Result<()> {
|
|
||||||
let tempdir = TempDir::new()?;
|
|
||||||
|
|
||||||
fs::write(
|
|
||||||
tempdir.path().join("main.py"),
|
|
||||||
r#"
|
|
||||||
from test import say_hy
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
say_hy("dear Ruff contributor")
|
|
||||||
"#,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.current_dir(tempdir.path())
|
|
||||||
.args(["format", "--no-cache", "--isolated", "--check"])
|
|
||||||
.arg("main.py"), @r###"
|
|
||||||
success: false
|
|
||||||
exit_code: 1
|
|
||||||
----- stdout -----
|
|
||||||
Would reformat: main.py
|
|
||||||
1 file would be reformatted
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
"###);
|
|
||||||
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.current_dir(tempdir.path())
|
|
||||||
.args(["format", "--no-cache", "--isolated"])
|
|
||||||
.arg("main.py"), @r###"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
1 file reformatted
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
"###);
|
|
||||||
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.current_dir(tempdir.path())
|
|
||||||
.args(["format", "--no-cache", "--isolated"])
|
|
||||||
.arg("main.py"), @r###"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
1 file left unchanged
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
"###);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn force_exclude() -> Result<()> {
|
fn force_exclude() -> Result<()> {
|
||||||
let tempdir = TempDir::new()?;
|
let tempdir = TempDir::new()?;
|
||||||
@@ -540,11 +320,6 @@ if __name__ == '__main__':
|
|||||||
exit_code: 0
|
exit_code: 0
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
from test import say_hy
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
say_hy("dear Ruff contributor")
|
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
"###);
|
"###);
|
||||||
Ok(())
|
Ok(())
|
||||||
@@ -615,9 +390,9 @@ fn deprecated_options() -> Result<()> {
|
|||||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||||
fs::write(
|
fs::write(
|
||||||
&ruff_toml,
|
&ruff_toml,
|
||||||
r"
|
r#"
|
||||||
tab-size = 2
|
tab-size = 2
|
||||||
",
|
"#,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
insta::with_settings!({filters => vec![
|
insta::with_settings!({filters => vec![
|
||||||
@@ -627,10 +402,10 @@ tab-size = 2
|
|||||||
.args(["format", "--config"])
|
.args(["format", "--config"])
|
||||||
.arg(&ruff_toml)
|
.arg(&ruff_toml)
|
||||||
.arg("-")
|
.arg("-")
|
||||||
.pass_stdin(r"
|
.pass_stdin(r#"
|
||||||
if True:
|
if True:
|
||||||
pass
|
pass
|
||||||
"), @r###"
|
"#), @r###"
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
@@ -663,17 +438,16 @@ format = "json"
|
|||||||
.args(["check", "--select", "F401", "--no-cache", "--config"])
|
.args(["check", "--select", "F401", "--no-cache", "--config"])
|
||||||
.arg(&ruff_toml)
|
.arg(&ruff_toml)
|
||||||
.arg("-")
|
.arg("-")
|
||||||
.pass_stdin(r"
|
.pass_stdin(r#"
|
||||||
import os
|
import os
|
||||||
"), @r###"
|
"#), @r###"
|
||||||
success: false
|
success: false
|
||||||
exit_code: 2
|
exit_code: 2
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
ruff failed
|
ruff failed
|
||||||
Cause: Failed to parse [RUFF-TOML-PATH]
|
Cause: Failed to parse `[RUFF-TOML-PATH]`: TOML parse error at line 2, column 10
|
||||||
Cause: TOML parse error at line 2, column 10
|
|
||||||
|
|
|
|
||||||
2 | format = "json"
|
2 | format = "json"
|
||||||
| ^^^^^^
|
| ^^^^^^
|
||||||
@@ -957,7 +731,7 @@ fn test_diff() {
|
|||||||
|
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
2 files would be reformatted, 1 file already formatted
|
2 files would be reformatted, 1 file left unchanged
|
||||||
"###);
|
"###);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -1039,432 +813,3 @@ fn test_diff_stdin_formatted() {
|
|||||||
----- stderr -----
|
----- stderr -----
|
||||||
"###);
|
"###);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_notebook_trailing_semicolon() {
|
|
||||||
let fixtures = Path::new("resources").join("test").join("fixtures");
|
|
||||||
let unformatted = fs::read(fixtures.join("trailing_semicolon.ipynb")).unwrap();
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.args(["format", "--isolated", "--stdin-filename", "test.ipynb"])
|
|
||||||
.arg("-")
|
|
||||||
.pass_stdin(unformatted), @r###"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
{
|
|
||||||
"cells": [
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 1,
|
|
||||||
"id": "4f8ce941-1492-4d4e-8ab5-70d733fe891a",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"%config ZMQInteractiveShell.ast_node_interactivity=\"last_expr_or_assign\""
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 2,
|
|
||||||
"id": "721ec705-0c65-4bfb-9809-7ed8bc534186",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 2,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"# Assignment statement without a semicolon\n",
|
|
||||||
"x = 1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 3,
|
|
||||||
"id": "de50e495-17e5-41cc-94bd-565757555d7e",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Assignment statement with a semicolon\n",
|
|
||||||
"x = 1\n",
|
|
||||||
"x = 1;"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 4,
|
|
||||||
"id": "39e31201-23da-44eb-8684-41bba3663991",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"2"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 4,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"# Augmented assignment without a semicolon\n",
|
|
||||||
"x += 1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 5,
|
|
||||||
"id": "6b73d3dd-c73a-4697-9e97-e109a6c1fbab",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Augmented assignment without a semicolon\n",
|
|
||||||
"x += 1\n",
|
|
||||||
"x += 1; # comment\n",
|
|
||||||
"# comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 6,
|
|
||||||
"id": "2a3e5b86-aa5b-46ba-b9c6-0386d876f58c",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Multiple assignment without a semicolon\n",
|
|
||||||
"x = y = 1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 7,
|
|
||||||
"id": "07f89e51-9357-4cfb-8fc5-76fb75e35949",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Multiple assignment with a semicolon\n",
|
|
||||||
"x = y = 1\n",
|
|
||||||
"x = y = 1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 8,
|
|
||||||
"id": "c22b539d-473e-48f8-a236-625e58c47a00",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Tuple unpacking without a semicolon\n",
|
|
||||||
"x, y = 1, 2"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 9,
|
|
||||||
"id": "12c87940-a0d5-403b-a81c-7507eb06dc7e",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Tuple unpacking with a semicolon (irrelevant)\n",
|
|
||||||
"x, y = 1, 2\n",
|
|
||||||
"x, y = 1, 2 # comment\n",
|
|
||||||
"# comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 10,
|
|
||||||
"id": "5a768c76-6bc4-470c-b37e-8cc14bc6caf4",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 10,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"# Annotated assignment statement without a semicolon\n",
|
|
||||||
"x: int = 1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 11,
|
|
||||||
"id": "21bfda82-1a9a-4ba1-9078-74ac480804b5",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Annotated assignment statement without a semicolon\n",
|
|
||||||
"x: int = 1\n",
|
|
||||||
"x: int = 1; # comment\n",
|
|
||||||
"# comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 12,
|
|
||||||
"id": "09929999-ff29-4d10-ad2b-e665af15812d",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 12,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"# Assignment expression without a semicolon\n",
|
|
||||||
"(x := 1)"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 13,
|
|
||||||
"id": "32a83217-1bad-4f61-855e-ffcdb119c763",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Assignment expression with a semicolon\n",
|
|
||||||
"(x := 1)\n",
|
|
||||||
"(x := 1); # comment\n",
|
|
||||||
"# comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 14,
|
|
||||||
"id": "61b81865-277e-4964-b03e-eb78f1f318eb",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 14,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"x = 1\n",
|
|
||||||
"# Expression without a semicolon\n",
|
|
||||||
"x"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 15,
|
|
||||||
"id": "974c29be-67e1-4000-95fa-6ca118a63bad",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"x = 1\n",
|
|
||||||
"# Expression with a semicolon\n",
|
|
||||||
"x;"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 16,
|
|
||||||
"id": "cfeb1757-46d6-4f13-969f-a283b6d0304f",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"class Point:\n",
|
|
||||||
" def __init__(self, x, y):\n",
|
|
||||||
" self.x = x\n",
|
|
||||||
" self.y = y\n",
|
|
||||||
"\n",
|
|
||||||
"\n",
|
|
||||||
"p = Point(0, 0);"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 17,
|
|
||||||
"id": "2ee7f1a5-ccfe-4004-bfa4-ef834a58da97",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Assignment statement where the left is an attribute access doesn't\n",
|
|
||||||
"# print the value.\n",
|
|
||||||
"p.x = 1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 18,
|
|
||||||
"id": "3e49370a-048b-474d-aa0a-3d1d4a73ad37",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"data = {}\n",
|
|
||||||
"\n",
|
|
||||||
"# Neither does the subscript node\n",
|
|
||||||
"data[\"foo\"] = 1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 19,
|
|
||||||
"id": "d594bdd3-eaa9-41ef-8cda-cf01bc273b2d",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"if x := 1:\n",
|
|
||||||
" # It should be the top level statement\n",
|
|
||||||
" x"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 20,
|
|
||||||
"id": "e532f0cf-80c7-42b7-8226-6002fcf74fb6",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 20,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"# Parentheses with comments\n",
|
|
||||||
"(\n",
|
|
||||||
" x := 1 # comment\n",
|
|
||||||
") # comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 21,
|
|
||||||
"id": "473c5d62-871b-46ed-8a34-27095243f462",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# Parentheses with comments\n",
|
|
||||||
"(\n",
|
|
||||||
" x := 1 # comment\n",
|
|
||||||
"); # comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 22,
|
|
||||||
"id": "8c3c2361-f49f-45fe-bbe3-7e27410a8a86",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"'Hello world!'"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 22,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"\"\"\"Hello world!\"\"\""
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 23,
|
|
||||||
"id": "23dbe9b5-3f68-4890-ab2d-ab0dbfd0712a",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"\"\"\"Hello world!\"\"\"; # comment\n",
|
|
||||||
"# comment"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 24,
|
|
||||||
"id": "3ce33108-d95d-4c70-83d1-0d4fd36a2951",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"'x = 1'"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 24,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"x = 1\n",
|
|
||||||
"f\"x = {x}\""
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 25,
|
|
||||||
"id": "654a4a67-de43-4684-824a-9451c67db48f",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"x = 1\n",
|
|
||||||
"f\"x = {x}\"\n",
|
|
||||||
"f\"x = {x}\"; # comment\n",
|
|
||||||
"# comment"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"metadata": {
|
|
||||||
"kernelspec": {
|
|
||||||
"display_name": "Python (ruff-playground)",
|
|
||||||
"language": "python",
|
|
||||||
"name": "ruff-playground"
|
|
||||||
},
|
|
||||||
"language_info": {
|
|
||||||
"codemirror_mode": {
|
|
||||||
"name": "ipython",
|
|
||||||
"version": 3
|
|
||||||
},
|
|
||||||
"file_extension": ".py",
|
|
||||||
"mimetype": "text/x-python",
|
|
||||||
"name": "python",
|
|
||||||
"nbconvert_exporter": "python",
|
|
||||||
"pygments_lexer": "ipython3",
|
|
||||||
"version": "3.11.3"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"nbformat": 4,
|
|
||||||
"nbformat_minor": 5
|
|
||||||
}
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
"###);
|
|
||||||
}
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -396,43 +396,3 @@ if __name__ == "__main__":
|
|||||||
"###);
|
"###);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Regression test for [#8858](https://github.com/astral-sh/ruff/issues/8858)
|
|
||||||
#[test]
|
|
||||||
fn parent_configuration_override() -> Result<()> {
|
|
||||||
let tempdir = TempDir::new()?;
|
|
||||||
let root_ruff = tempdir.path().join("ruff.toml");
|
|
||||||
fs::write(
|
|
||||||
root_ruff,
|
|
||||||
r#"
|
|
||||||
[lint]
|
|
||||||
select = ["ALL"]
|
|
||||||
"#,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
let sub_dir = tempdir.path().join("subdirectory");
|
|
||||||
fs::create_dir(&sub_dir)?;
|
|
||||||
|
|
||||||
let subdirectory_ruff = sub_dir.join("ruff.toml");
|
|
||||||
fs::write(
|
|
||||||
subdirectory_ruff,
|
|
||||||
r#"
|
|
||||||
[lint]
|
|
||||||
ignore = ["D203", "D212"]
|
|
||||||
"#,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.current_dir(sub_dir)
|
|
||||||
.arg("check")
|
|
||||||
.args(STDIN_BASE_OPTIONS)
|
|
||||||
, @r###"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
warning: No Python files found under the given path(s)
|
|
||||||
"###);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,101 +0,0 @@
|
|||||||
#![cfg(not(target_family = "wasm"))]
|
|
||||||
|
|
||||||
use std::path::Path;
|
|
||||||
use std::process::Command;
|
|
||||||
use std::str;
|
|
||||||
|
|
||||||
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
|
|
||||||
const BIN_NAME: &str = "ruff";
|
|
||||||
|
|
||||||
#[cfg(not(target_os = "windows"))]
|
|
||||||
const TEST_FILTERS: &[(&str, &str)] = &[(".*/resources/test/fixtures/", "[BASEPATH]/")];
|
|
||||||
#[cfg(target_os = "windows")]
|
|
||||||
const TEST_FILTERS: &[(&str, &str)] = &[
|
|
||||||
(r".*\\resources\\test\\fixtures\\", "[BASEPATH]\\"),
|
|
||||||
(r"\\", "/"),
|
|
||||||
];
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn check_project_include_defaults() {
|
|
||||||
// Defaults to checking the current working directory
|
|
||||||
//
|
|
||||||
// The test directory includes:
|
|
||||||
// - A pyproject.toml which specifies an include
|
|
||||||
// - A nested pyproject.toml which has a Ruff section
|
|
||||||
//
|
|
||||||
// The nested project should all be checked instead of respecting the parent includes
|
|
||||||
|
|
||||||
insta::with_settings!({
|
|
||||||
filters => TEST_FILTERS.to_vec()
|
|
||||||
}, {
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
[BASEPATH]/include-test/a.py
|
|
||||||
[BASEPATH]/include-test/nested-project/e.py
|
|
||||||
[BASEPATH]/include-test/nested-project/pyproject.toml
|
|
||||||
[BASEPATH]/include-test/subdirectory/c.py
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
"###);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn check_project_respects_direct_paths() {
|
|
||||||
// Given a direct path not included in the project `includes`, it should be checked
|
|
||||||
|
|
||||||
insta::with_settings!({
|
|
||||||
filters => TEST_FILTERS.to_vec()
|
|
||||||
}, {
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.args(["check", "--show-files", "b.py"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
[BASEPATH]/include-test/b.py
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
"###);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn check_project_respects_subdirectory_includes() {
|
|
||||||
// Given a direct path to a subdirectory, the include should be respected
|
|
||||||
|
|
||||||
insta::with_settings!({
|
|
||||||
filters => TEST_FILTERS.to_vec()
|
|
||||||
}, {
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.args(["check", "--show-files", "subdirectory"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
[BASEPATH]/include-test/subdirectory/c.py
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
"###);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn check_project_from_project_subdirectory_respects_includes() {
|
|
||||||
// Run from a project subdirectory, the include specified in the parent directory should be respected
|
|
||||||
|
|
||||||
insta::with_settings!({
|
|
||||||
filters => TEST_FILTERS.to_vec()
|
|
||||||
}, {
|
|
||||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
|
||||||
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test/subdirectory")), @r###"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
[BASEPATH]/include-test/subdirectory/c.py
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
"###);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@@ -19,6 +19,7 @@ ruff_python_ast = { path = "../ruff_python_ast" }
|
|||||||
ruff_python_codegen = { path = "../ruff_python_codegen" }
|
ruff_python_codegen = { path = "../ruff_python_codegen" }
|
||||||
ruff_python_formatter = { path = "../ruff_python_formatter" }
|
ruff_python_formatter = { path = "../ruff_python_formatter" }
|
||||||
ruff_notebook = { path = "../ruff_notebook" }
|
ruff_notebook = { path = "../ruff_notebook" }
|
||||||
|
ruff_python_literal = { path = "../ruff_python_literal" }
|
||||||
ruff_python_parser = { path = "../ruff_python_parser" }
|
ruff_python_parser = { path = "../ruff_python_parser" }
|
||||||
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
|
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
|
||||||
ruff_python_trivia = { path = "../ruff_python_trivia" }
|
ruff_python_trivia = { path = "../ruff_python_trivia" }
|
||||||
@@ -27,31 +28,29 @@ ruff_workspace = { path = "../ruff_workspace", features = ["schemars"]}
|
|||||||
anyhow = { workspace = true }
|
anyhow = { workspace = true }
|
||||||
clap = { workspace = true }
|
clap = { workspace = true }
|
||||||
ignore = { workspace = true }
|
ignore = { workspace = true }
|
||||||
imara-diff = { workspace = true }
|
indicatif = "0.17.7"
|
||||||
indicatif = { workspace = true }
|
|
||||||
itertools = { workspace = true }
|
itertools = { workspace = true }
|
||||||
libcst = { workspace = true }
|
libcst = { workspace = true }
|
||||||
once_cell = { workspace = true }
|
once_cell = { workspace = true }
|
||||||
pretty_assertions = { workspace = true }
|
pretty_assertions = { version = "1.3.0" }
|
||||||
rayon = { workspace = true }
|
rayon = "1.8.0"
|
||||||
regex = { workspace = true }
|
regex = { workspace = true }
|
||||||
schemars = { workspace = true }
|
schemars = { workspace = true }
|
||||||
serde = { workspace = true, features = ["derive"] }
|
serde = { workspace = true, features = ["derive"] }
|
||||||
serde_json = { workspace = true }
|
serde_json = { workspace = true }
|
||||||
similar = { workspace = true }
|
similar = { workspace = true }
|
||||||
strum = { workspace = true }
|
strum = { workspace = true }
|
||||||
tempfile = { workspace = true }
|
strum_macros = { workspace = true }
|
||||||
|
tempfile = "3.8.1"
|
||||||
toml = { workspace = true, features = ["parse"] }
|
toml = { workspace = true, features = ["parse"] }
|
||||||
tracing = { workspace = true }
|
tracing = { workspace = true }
|
||||||
tracing-indicatif = { workspace = true }
|
tracing-indicatif = { workspace = true }
|
||||||
tracing-subscriber = { workspace = true, features = ["env-filter"] }
|
tracing-subscriber = { workspace = true, features = ["env-filter"] }
|
||||||
|
imara-diff = "0.1.5"
|
||||||
[dev-dependencies]
|
|
||||||
indoc = { workspace = true }
|
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
# Turn off rayon for profiling
|
# Turn off rayon for profiling
|
||||||
singlethreaded = []
|
singlethreaded = []
|
||||||
|
|
||||||
[lints]
|
[dev-dependencies]
|
||||||
workspace = true
|
indoc = "2.0.4"
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ use tracing_subscriber::layer::SubscriberExt;
|
|||||||
use tracing_subscriber::util::SubscriberInitExt;
|
use tracing_subscriber::util::SubscriberInitExt;
|
||||||
use tracing_subscriber::EnvFilter;
|
use tracing_subscriber::EnvFilter;
|
||||||
|
|
||||||
use ruff_cli::args::{CliOverrides, FormatArguments, FormatCommand, LogLevelArgs};
|
use ruff_cli::args::{FormatCommand, LogLevelArgs};
|
||||||
use ruff_cli::resolve::resolve;
|
use ruff_cli::resolve::resolve;
|
||||||
use ruff_formatter::{FormatError, LineWidth, PrintError};
|
use ruff_formatter::{FormatError, LineWidth, PrintError};
|
||||||
use ruff_linter::logging::LogLevel;
|
use ruff_linter::logging::LogLevel;
|
||||||
@@ -35,27 +35,26 @@ use ruff_linter::settings::types::{FilePattern, FilePatternSet};
|
|||||||
use ruff_python_formatter::{
|
use ruff_python_formatter::{
|
||||||
format_module_source, FormatModuleError, MagicTrailingComma, PreviewMode, PyFormatOptions,
|
format_module_source, FormatModuleError, MagicTrailingComma, PreviewMode, PyFormatOptions,
|
||||||
};
|
};
|
||||||
use ruff_python_parser::ParseError;
|
|
||||||
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile, Resolver};
|
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile, Resolver};
|
||||||
|
|
||||||
fn parse_cli(dirs: &[PathBuf]) -> anyhow::Result<(FormatArguments, CliOverrides)> {
|
/// Find files that ruff would check so we can format them. Adapted from `ruff_cli`.
|
||||||
|
#[allow(clippy::type_complexity)]
|
||||||
|
fn ruff_check_paths(
|
||||||
|
dirs: &[PathBuf],
|
||||||
|
) -> anyhow::Result<(
|
||||||
|
Vec<Result<ResolvedFile, ignore::Error>>,
|
||||||
|
Resolver,
|
||||||
|
PyprojectConfig,
|
||||||
|
)> {
|
||||||
let args_matches = FormatCommand::command()
|
let args_matches = FormatCommand::command()
|
||||||
.no_binary_name(true)
|
.no_binary_name(true)
|
||||||
.get_matches_from(dirs);
|
.get_matches_from(dirs);
|
||||||
let arguments: FormatCommand = FormatCommand::from_arg_matches(&args_matches)?;
|
let arguments: FormatCommand = FormatCommand::from_arg_matches(&args_matches)?;
|
||||||
let (cli, overrides) = arguments.partition();
|
let (cli, overrides) = arguments.partition();
|
||||||
Ok((cli, overrides))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Find the [`PyprojectConfig`] to use for formatting.
|
|
||||||
fn find_pyproject_config(
|
|
||||||
cli: &FormatArguments,
|
|
||||||
overrides: &CliOverrides,
|
|
||||||
) -> anyhow::Result<PyprojectConfig> {
|
|
||||||
let mut pyproject_config = resolve(
|
let mut pyproject_config = resolve(
|
||||||
cli.isolated,
|
cli.isolated,
|
||||||
cli.config.as_deref(),
|
cli.config.as_deref(),
|
||||||
overrides,
|
&overrides,
|
||||||
cli.stdin_filename.as_deref(),
|
cli.stdin_filename.as_deref(),
|
||||||
)?;
|
)?;
|
||||||
// We don't want to format pyproject.toml
|
// We don't want to format pyproject.toml
|
||||||
@@ -64,18 +63,11 @@ fn find_pyproject_config(
|
|||||||
FilePattern::Builtin("*.pyi"),
|
FilePattern::Builtin("*.pyi"),
|
||||||
])
|
])
|
||||||
.unwrap();
|
.unwrap();
|
||||||
Ok(pyproject_config)
|
let (paths, resolver) = python_files_in_path(&cli.files, &pyproject_config, &overrides)?;
|
||||||
}
|
if paths.is_empty() {
|
||||||
|
bail!("no python files in {:?}", dirs)
|
||||||
/// Find files that ruff would check so we can format them. Adapted from `ruff_cli`.
|
}
|
||||||
#[allow(clippy::type_complexity)]
|
Ok((paths, resolver, pyproject_config))
|
||||||
fn ruff_check_paths<'a>(
|
|
||||||
pyproject_config: &'a PyprojectConfig,
|
|
||||||
cli: &FormatArguments,
|
|
||||||
overrides: &CliOverrides,
|
|
||||||
) -> anyhow::Result<(Vec<Result<ResolvedFile, ignore::Error>>, Resolver<'a>)> {
|
|
||||||
let (paths, resolver) = python_files_in_path(&cli.files, pyproject_config, overrides)?;
|
|
||||||
Ok((paths, resolver))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Collects statistics over the formatted files to compute the Jaccard index or the similarity
|
/// Collects statistics over the formatted files to compute the Jaccard index or the similarity
|
||||||
@@ -223,7 +215,6 @@ pub(crate) struct Args {
|
|||||||
#[arg(long)]
|
#[arg(long)]
|
||||||
pub(crate) files_with_errors: Option<u32>,
|
pub(crate) files_with_errors: Option<u32>,
|
||||||
#[clap(flatten)]
|
#[clap(flatten)]
|
||||||
#[allow(clippy::struct_field_names)]
|
|
||||||
pub(crate) log_level_args: LogLevelArgs,
|
pub(crate) log_level_args: LogLevelArgs,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -459,17 +450,11 @@ fn format_dev_project(
|
|||||||
files[0].display()
|
files[0].display()
|
||||||
);
|
);
|
||||||
|
|
||||||
// TODO(konstin): Respect black's excludes.
|
// TODO(konstin): black excludes
|
||||||
|
|
||||||
// Find files to check (or in this case, format twice). Adapted from ruff_cli
|
// Find files to check (or in this case, format twice). Adapted from ruff_cli
|
||||||
// First argument is ignored
|
// First argument is ignored
|
||||||
let (cli, overrides) = parse_cli(files)?;
|
let (paths, resolver, pyproject_config) = ruff_check_paths(files)?;
|
||||||
let pyproject_config = find_pyproject_config(&cli, &overrides)?;
|
|
||||||
let (paths, resolver) = ruff_check_paths(&pyproject_config, &cli, &overrides)?;
|
|
||||||
|
|
||||||
if paths.is_empty() {
|
|
||||||
bail!("No Python files found under the given path(s)");
|
|
||||||
}
|
|
||||||
|
|
||||||
let results = {
|
let results = {
|
||||||
let pb_span =
|
let pb_span =
|
||||||
@@ -482,7 +467,14 @@ fn format_dev_project(
|
|||||||
#[cfg(feature = "singlethreaded")]
|
#[cfg(feature = "singlethreaded")]
|
||||||
let iter = { paths.into_iter() };
|
let iter = { paths.into_iter() };
|
||||||
iter.map(|path| {
|
iter.map(|path| {
|
||||||
let result = format_dir_entry(path, stability_check, write, &black_options, &resolver);
|
let result = format_dir_entry(
|
||||||
|
path,
|
||||||
|
stability_check,
|
||||||
|
write,
|
||||||
|
&black_options,
|
||||||
|
&resolver,
|
||||||
|
&pyproject_config,
|
||||||
|
);
|
||||||
pb_span.pb_inc(1);
|
pb_span.pb_inc(1);
|
||||||
result
|
result
|
||||||
})
|
})
|
||||||
@@ -532,13 +524,14 @@ fn format_dev_project(
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Error handling in between walkdir and `format_dev_file`.
|
/// Error handling in between walkdir and `format_dev_file`
|
||||||
fn format_dir_entry(
|
fn format_dir_entry(
|
||||||
resolved_file: Result<ResolvedFile, ignore::Error>,
|
resolved_file: Result<ResolvedFile, ignore::Error>,
|
||||||
stability_check: bool,
|
stability_check: bool,
|
||||||
write: bool,
|
write: bool,
|
||||||
options: &BlackOptions,
|
options: &BlackOptions,
|
||||||
resolver: &Resolver,
|
resolver: &Resolver,
|
||||||
|
pyproject_config: &PyprojectConfig,
|
||||||
) -> anyhow::Result<(Result<Statistics, CheckFileError>, PathBuf), Error> {
|
) -> anyhow::Result<(Result<Statistics, CheckFileError>, PathBuf), Error> {
|
||||||
let resolved_file = resolved_file.context("Iterating the files in the repository failed")?;
|
let resolved_file = resolved_file.context("Iterating the files in the repository failed")?;
|
||||||
// For some reason it does not filter in the beginning
|
// For some reason it does not filter in the beginning
|
||||||
@@ -549,7 +542,7 @@ fn format_dir_entry(
|
|||||||
let path = resolved_file.into_path();
|
let path = resolved_file.into_path();
|
||||||
let mut options = options.to_py_format_options(&path);
|
let mut options = options.to_py_format_options(&path);
|
||||||
|
|
||||||
let settings = resolver.resolve(&path);
|
let settings = resolver.resolve(&path, pyproject_config);
|
||||||
// That's a bad way of doing this but it's not worth doing something better for format_dev
|
// That's a bad way of doing this but it's not worth doing something better for format_dev
|
||||||
if settings.formatter.line_width != LineWidth::default() {
|
if settings.formatter.line_width != LineWidth::default() {
|
||||||
options = options.with_line_width(settings.formatter.line_width);
|
options = options.with_line_width(settings.formatter.line_width);
|
||||||
@@ -749,11 +742,11 @@ enum CheckFileError {
|
|||||||
reformatted: String,
|
reformatted: String,
|
||||||
},
|
},
|
||||||
/// The input file was already invalid (not a bug)
|
/// The input file was already invalid (not a bug)
|
||||||
SyntaxErrorInInput(ParseError),
|
SyntaxErrorInInput(FormatModuleError),
|
||||||
/// The formatter introduced a syntax error
|
/// The formatter introduced a syntax error
|
||||||
SyntaxErrorInOutput {
|
SyntaxErrorInOutput {
|
||||||
formatted: String,
|
formatted: String,
|
||||||
error: ParseError,
|
error: FormatModuleError,
|
||||||
},
|
},
|
||||||
/// The formatter failed (bug)
|
/// The formatter failed (bug)
|
||||||
FormatError(FormatError),
|
FormatError(FormatError),
|
||||||
@@ -803,7 +796,7 @@ fn format_dev_file(
|
|||||||
let start = Instant::now();
|
let start = Instant::now();
|
||||||
let printed = match format_module_source(&content, options.clone()) {
|
let printed = match format_module_source(&content, options.clone()) {
|
||||||
Ok(printed) => printed,
|
Ok(printed) => printed,
|
||||||
Err(FormatModuleError::ParseError(err)) => {
|
Err(err @ (FormatModuleError::LexError(_) | FormatModuleError::ParseError(_))) => {
|
||||||
return Err(CheckFileError::SyntaxErrorInInput(err));
|
return Err(CheckFileError::SyntaxErrorInInput(err));
|
||||||
}
|
}
|
||||||
Err(FormatModuleError::FormatError(err)) => {
|
Err(FormatModuleError::FormatError(err)) => {
|
||||||
@@ -830,7 +823,7 @@ fn format_dev_file(
|
|||||||
if stability_check {
|
if stability_check {
|
||||||
let reformatted = match format_module_source(formatted, options) {
|
let reformatted = match format_module_source(formatted, options) {
|
||||||
Ok(reformatted) => reformatted,
|
Ok(reformatted) => reformatted,
|
||||||
Err(FormatModuleError::ParseError(err)) => {
|
Err(err @ (FormatModuleError::LexError(_) | FormatModuleError::ParseError(_))) => {
|
||||||
return Err(CheckFileError::SyntaxErrorInOutput {
|
return Err(CheckFileError::SyntaxErrorInOutput {
|
||||||
formatted: formatted.to_string(),
|
formatted: formatted.to_string(),
|
||||||
error: err,
|
error: err,
|
||||||
|
|||||||
@@ -49,7 +49,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
|
|||||||
|
|
||||||
if rule.is_preview() || rule.is_nursery() {
|
if rule.is_preview() || rule.is_nursery() {
|
||||||
output.push_str(
|
output.push_str(
|
||||||
r"This rule is unstable and in [preview](../preview.md). The `--preview` flag is required for use.",
|
r#"This rule is unstable and in [preview](../preview.md). The `--preview` flag is required for use."#,
|
||||||
);
|
);
|
||||||
output.push('\n');
|
output.push('\n');
|
||||||
output.push('\n');
|
output.push('\n');
|
||||||
|
|||||||
@@ -3,7 +3,6 @@
|
|||||||
//! Used for <https://docs.astral.sh/ruff/settings/>.
|
//! Used for <https://docs.astral.sh/ruff/settings/>.
|
||||||
use std::fmt::Write;
|
use std::fmt::Write;
|
||||||
|
|
||||||
use ruff_python_trivia::textwrap;
|
|
||||||
use ruff_workspace::options::Options;
|
use ruff_workspace::options::Options;
|
||||||
use ruff_workspace::options_base::{OptionField, OptionSet, OptionsMetadata, Visit};
|
use ruff_workspace::options_base::{OptionField, OptionSet, OptionsMetadata, Visit};
|
||||||
|
|
||||||
@@ -126,87 +125,22 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parent_set:
|
|||||||
output.push('\n');
|
output.push('\n');
|
||||||
output.push_str(&format!("**Type**: `{}`\n", field.value_type));
|
output.push_str(&format!("**Type**: `{}`\n", field.value_type));
|
||||||
output.push('\n');
|
output.push('\n');
|
||||||
output.push_str("**Example usage**:\n\n");
|
output.push_str(&format!(
|
||||||
output.push_str(&format_tab(
|
"**Example usage**:\n\n```toml\n[tool.ruff{}]\n{}\n```\n",
|
||||||
"pyproject.toml",
|
if let Some(set_name) = parent_set.name() {
|
||||||
&format_header(field.scope, parent_set, ConfigurationFile::PyprojectToml),
|
if set_name == "format" {
|
||||||
field.example,
|
String::from(".format")
|
||||||
));
|
} else {
|
||||||
output.push_str(&format_tab(
|
format!(".lint.{set_name}")
|
||||||
"ruff.toml",
|
}
|
||||||
&format_header(field.scope, parent_set, ConfigurationFile::RuffToml),
|
} else {
|
||||||
field.example,
|
String::new()
|
||||||
|
},
|
||||||
|
field.example
|
||||||
));
|
));
|
||||||
output.push('\n');
|
output.push('\n');
|
||||||
}
|
}
|
||||||
|
|
||||||
fn format_tab(tab_name: &str, header: &str, content: &str) -> String {
|
|
||||||
format!(
|
|
||||||
"=== \"{}\"\n\n ```toml\n {}\n{}\n ```\n",
|
|
||||||
tab_name,
|
|
||||||
header,
|
|
||||||
textwrap::indent(content, " ")
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Format the TOML header for the example usage for a given option.
|
|
||||||
///
|
|
||||||
/// For example: `[tool.ruff.format]` or `[tool.ruff.lint.isort]`.
|
|
||||||
fn format_header(
|
|
||||||
scope: Option<&str>,
|
|
||||||
parent_set: &Set,
|
|
||||||
configuration: ConfigurationFile,
|
|
||||||
) -> String {
|
|
||||||
match configuration {
|
|
||||||
ConfigurationFile::PyprojectToml => {
|
|
||||||
let mut header = if let Some(set_name) = parent_set.name() {
|
|
||||||
if set_name == "format" {
|
|
||||||
String::from("tool.ruff.format")
|
|
||||||
} else {
|
|
||||||
format!("tool.ruff.lint.{set_name}")
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
"tool.ruff".to_string()
|
|
||||||
};
|
|
||||||
if let Some(scope) = scope {
|
|
||||||
if !header.is_empty() {
|
|
||||||
header.push('.');
|
|
||||||
}
|
|
||||||
header.push_str(scope);
|
|
||||||
}
|
|
||||||
format!("[{header}]")
|
|
||||||
}
|
|
||||||
ConfigurationFile::RuffToml => {
|
|
||||||
let mut header = if let Some(set_name) = parent_set.name() {
|
|
||||||
if set_name == "format" {
|
|
||||||
String::from("format")
|
|
||||||
} else {
|
|
||||||
format!("lint.{set_name}")
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
String::new()
|
|
||||||
};
|
|
||||||
if let Some(scope) = scope {
|
|
||||||
if !header.is_empty() {
|
|
||||||
header.push('.');
|
|
||||||
}
|
|
||||||
header.push_str(scope);
|
|
||||||
}
|
|
||||||
if header.is_empty() {
|
|
||||||
String::new()
|
|
||||||
} else {
|
|
||||||
format!("[{header}]")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
enum ConfigurationFile {
|
|
||||||
PyprojectToml,
|
|
||||||
RuffToml,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
struct CollectOptionsVisitor {
|
struct CollectOptionsVisitor {
|
||||||
groups: Vec<(String, OptionSet)>,
|
groups: Vec<(String, OptionSet)>,
|
||||||
|
|||||||
@@ -3,7 +3,6 @@
|
|||||||
//! Used for <https://docs.astral.sh/ruff/rules/>.
|
//! Used for <https://docs.astral.sh/ruff/rules/>.
|
||||||
|
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use std::borrow::Cow;
|
|
||||||
use strum::IntoEnumIterator;
|
use strum::IntoEnumIterator;
|
||||||
|
|
||||||
use ruff_diagnostics::FixAvailability;
|
use ruff_diagnostics::FixAvailability;
|
||||||
@@ -38,16 +37,6 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
|
|||||||
|
|
||||||
let rule_name = rule.as_ref();
|
let rule_name = rule.as_ref();
|
||||||
|
|
||||||
// If the message ends in a bracketed expression (like: "Use {replacement}"), escape the
|
|
||||||
// brackets. Otherwise, it'll be interpreted as an HTML attribute via the `attr_list`
|
|
||||||
// plugin. (Above, we'd convert to "Use {replacement\}".)
|
|
||||||
let message = rule.message_formats()[0];
|
|
||||||
let message = if let Some(prefix) = message.strip_suffix('}') {
|
|
||||||
Cow::Owned(format!("{prefix}\\}}"))
|
|
||||||
} else {
|
|
||||||
Cow::Borrowed(message)
|
|
||||||
};
|
|
||||||
|
|
||||||
#[allow(clippy::or_fun_call)]
|
#[allow(clippy::or_fun_call)]
|
||||||
table_out.push_str(&format!(
|
table_out.push_str(&format!(
|
||||||
"| {0}{1} {{ #{0}{1} }} | {2} | {3} | {4} |",
|
"| {0}{1} {{ #{0}{1} }} | {2} | {3} | {4} |",
|
||||||
@@ -57,7 +46,7 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
|
|||||||
.is_some()
|
.is_some()
|
||||||
.then_some(format_args!("[{rule_name}](rules/{rule_name}.md)"))
|
.then_some(format_args!("[{rule_name}](rules/{rule_name}.md)"))
|
||||||
.unwrap_or(format_args!("{rule_name}")),
|
.unwrap_or(format_args!("{rule_name}")),
|
||||||
message,
|
rule.message_formats()[0],
|
||||||
status_token,
|
status_token,
|
||||||
));
|
));
|
||||||
table_out.push('\n');
|
table_out.push('\n');
|
||||||
|
|||||||
@@ -1,30 +1,30 @@
|
|||||||
//! Print the AST for a given Python file.
|
//! Print the AST for a given Python file.
|
||||||
#![allow(clippy::print_stdout, clippy::print_stderr)]
|
#![allow(clippy::print_stdout, clippy::print_stderr)]
|
||||||
|
|
||||||
|
use std::fs;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
|
use ruff_python_parser::{parse, Mode};
|
||||||
use ruff_linter::source_kind::SourceKind;
|
|
||||||
use ruff_python_ast::PySourceType;
|
|
||||||
use ruff_python_parser::{parse, AsMode};
|
|
||||||
|
|
||||||
#[derive(clap::Args)]
|
#[derive(clap::Args)]
|
||||||
pub(crate) struct Args {
|
pub(crate) struct Args {
|
||||||
/// Python file for which to generate the AST.
|
/// Python file for which to generate the AST.
|
||||||
#[arg(required = true)]
|
#[arg(required = true)]
|
||||||
file: PathBuf,
|
file: PathBuf,
|
||||||
|
/// Run in Jupyter mode i.e., allow line magics.
|
||||||
|
#[arg(long)]
|
||||||
|
jupyter: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn main(args: &Args) -> Result<()> {
|
pub(crate) fn main(args: &Args) -> Result<()> {
|
||||||
let source_type = PySourceType::from(&args.file);
|
let contents = fs::read_to_string(&args.file)?;
|
||||||
let source_kind = SourceKind::from_path(&args.file, source_type)?.ok_or_else(|| {
|
let mode = if args.jupyter {
|
||||||
anyhow::anyhow!(
|
Mode::Ipython
|
||||||
"Could not determine source kind for file: {}",
|
} else {
|
||||||
args.file.display()
|
Mode::Module
|
||||||
)
|
};
|
||||||
})?;
|
let python_ast = parse(&contents, mode, &args.file.to_string_lossy())?;
|
||||||
let python_ast = parse(source_kind.source_code(), source_type.as_mode())?;
|
|
||||||
println!("{python_ast:#?}");
|
println!("{python_ast:#?}");
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,30 +1,30 @@
|
|||||||
//! Print the token stream for a given Python file.
|
//! Print the token stream for a given Python file.
|
||||||
#![allow(clippy::print_stdout, clippy::print_stderr)]
|
#![allow(clippy::print_stdout, clippy::print_stderr)]
|
||||||
|
|
||||||
|
use std::fs;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
|
use ruff_python_parser::{lexer, Mode};
|
||||||
use ruff_linter::source_kind::SourceKind;
|
|
||||||
use ruff_python_ast::PySourceType;
|
|
||||||
use ruff_python_parser::{lexer, AsMode};
|
|
||||||
|
|
||||||
#[derive(clap::Args)]
|
#[derive(clap::Args)]
|
||||||
pub(crate) struct Args {
|
pub(crate) struct Args {
|
||||||
/// Python file for which to generate the AST.
|
/// Python file for which to generate the AST.
|
||||||
#[arg(required = true)]
|
#[arg(required = true)]
|
||||||
file: PathBuf,
|
file: PathBuf,
|
||||||
|
/// Run in Jupyter mode i.e., allow line magics (`%`, `!`, `?`, `/`, `,`, `;`).
|
||||||
|
#[arg(long)]
|
||||||
|
jupyter: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn main(args: &Args) -> Result<()> {
|
pub(crate) fn main(args: &Args) -> Result<()> {
|
||||||
let source_type = PySourceType::from(&args.file);
|
let contents = fs::read_to_string(&args.file)?;
|
||||||
let source_kind = SourceKind::from_path(&args.file, source_type)?.ok_or_else(|| {
|
let mode = if args.jupyter {
|
||||||
anyhow::anyhow!(
|
Mode::Ipython
|
||||||
"Could not determine source kind for file: {}",
|
} else {
|
||||||
args.file.display()
|
Mode::Module
|
||||||
)
|
};
|
||||||
})?;
|
for (tok, range) in lexer::lex(&contents, mode).flatten() {
|
||||||
for (tok, range) in lexer::lex(source_kind.source_code(), source_type.as_mode()).flatten() {
|
|
||||||
println!(
|
println!(
|
||||||
"{start:#?} {tok:#?} {end:#?}",
|
"{start:#?} {tok:#?} {end:#?}",
|
||||||
start = range.start(),
|
start = range.start(),
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
|
|||||||
println!("{}", ruff_notebook::round_trip(path)?);
|
println!("{}", ruff_notebook::round_trip(path)?);
|
||||||
} else {
|
} else {
|
||||||
let contents = fs::read_to_string(&args.file)?;
|
let contents = fs::read_to_string(&args.file)?;
|
||||||
println!("{}", round_trip(&contents)?);
|
println!("{}", round_trip(&contents, &args.file.to_string_lossy())?);
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize};
|
|||||||
|
|
||||||
/// A text edit to be applied to a source file. Inserts, deletes, or replaces
|
/// A text edit to be applied to a source file. Inserts, deletes, or replaces
|
||||||
/// content at a given location.
|
/// content at a given location.
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||||
pub struct Edit {
|
pub struct Edit {
|
||||||
/// The start location of the edit.
|
/// The start location of the edit.
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ use crate::edit::Edit;
|
|||||||
pub enum Applicability {
|
pub enum Applicability {
|
||||||
/// The fix is unsafe and should only be displayed for manual application by the user.
|
/// The fix is unsafe and should only be displayed for manual application by the user.
|
||||||
/// The fix is likely to be incorrect or the resulting code may have invalid syntax.
|
/// The fix is likely to be incorrect or the resulting code may have invalid syntax.
|
||||||
DisplayOnly,
|
Display,
|
||||||
|
|
||||||
/// The fix is unsafe and should only be applied with user opt-in.
|
/// The fix is unsafe and should only be applied with user opt-in.
|
||||||
/// The fix may be what the user intended, but it is uncertain; the resulting code will have valid syntax.
|
/// The fix may be what the user intended, but it is uncertain; the resulting code will have valid syntax.
|
||||||
@@ -87,46 +87,22 @@ impl Fix {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a new [`Fix`] that should only [display](Applicability::DisplayOnly) and not apply from an [`Edit`] element .
|
/// Create a new [`Fix`] that should only [display](Applicability::Display) and not apply from an [`Edit`] element .
|
||||||
pub fn display_only_edit(edit: Edit) -> Self {
|
pub fn display_edit(edit: Edit) -> Self {
|
||||||
Self {
|
Self {
|
||||||
edits: vec![edit],
|
edits: vec![edit],
|
||||||
applicability: Applicability::DisplayOnly,
|
applicability: Applicability::Display,
|
||||||
isolation_level: IsolationLevel::default(),
|
isolation_level: IsolationLevel::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a new [`Fix`] that should only [display](Applicability::DisplayOnly) and not apply from multiple [`Edit`] elements.
|
/// Create a new [`Fix`] that should only [display](Applicability::Display) and not apply from multiple [`Edit`] elements.
|
||||||
pub fn display_only_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
|
pub fn display_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
|
||||||
let mut edits: Vec<Edit> = std::iter::once(edit).chain(rest).collect();
|
let mut edits: Vec<Edit> = std::iter::once(edit).chain(rest).collect();
|
||||||
edits.sort_by_key(|edit| (edit.start(), edit.end()));
|
edits.sort_by_key(|edit| (edit.start(), edit.end()));
|
||||||
Self {
|
Self {
|
||||||
edits,
|
edits,
|
||||||
applicability: Applicability::DisplayOnly,
|
applicability: Applicability::Display,
|
||||||
isolation_level: IsolationLevel::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a new [`Fix`] with the specified [`Applicability`] to apply an [`Edit`] element.
|
|
||||||
pub fn applicable_edit(edit: Edit, applicability: Applicability) -> Self {
|
|
||||||
Self {
|
|
||||||
edits: vec![edit],
|
|
||||||
applicability,
|
|
||||||
isolation_level: IsolationLevel::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a new [`Fix`] with the specified [`Applicability`] to apply multiple [`Edit`] elements.
|
|
||||||
pub fn applicable_edits(
|
|
||||||
edit: Edit,
|
|
||||||
rest: impl IntoIterator<Item = Edit>,
|
|
||||||
applicability: Applicability,
|
|
||||||
) -> Self {
|
|
||||||
let mut edits: Vec<Edit> = std::iter::once(edit).chain(rest).collect();
|
|
||||||
edits.sort_by_key(|edit| (edit.start(), edit.end()));
|
|
||||||
Self {
|
|
||||||
edits,
|
|
||||||
applicability,
|
|
||||||
isolation_level: IsolationLevel::default(),
|
isolation_level: IsolationLevel::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ ruff_cache = { path = "../ruff_cache" }
|
|||||||
ruff_macros = { path = "../ruff_macros" }
|
ruff_macros = { path = "../ruff_macros" }
|
||||||
ruff_text_size = { path = "../ruff_text_size" }
|
ruff_text_size = { path = "../ruff_text_size" }
|
||||||
|
|
||||||
drop_bomb = { workspace = true }
|
drop_bomb = { version = "0.1.5" }
|
||||||
rustc-hash = { workspace = true }
|
rustc-hash = { workspace = true }
|
||||||
schemars = { workspace = true, optional = true }
|
schemars = { workspace = true, optional = true }
|
||||||
serde = { workspace = true, optional = true }
|
serde = { workspace = true, optional = true }
|
||||||
@@ -29,6 +29,3 @@ insta = { workspace = true }
|
|||||||
[features]
|
[features]
|
||||||
serde = ["dep:serde", "ruff_text_size/serde"]
|
serde = ["dep:serde", "ruff_text_size/serde"]
|
||||||
schemars = ["dep:schemars", "ruff_text_size/schemars"]
|
schemars = ["dep:schemars", "ruff_text_size/schemars"]
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|||||||
@@ -1,9 +1,23 @@
|
|||||||
use super::{Buffer, Format, Formatter};
|
use super::{Buffer, Format, Formatter};
|
||||||
use crate::FormatResult;
|
use crate::FormatResult;
|
||||||
|
use std::ffi::c_void;
|
||||||
|
use std::marker::PhantomData;
|
||||||
|
|
||||||
/// A convenience wrapper for representing a formattable argument.
|
/// Mono-morphed type to format an object. Used by the [`crate::format`!], [`crate::format_args`!], and
|
||||||
|
/// [`crate::write`!] macros.
|
||||||
|
///
|
||||||
|
/// This struct is similar to a dynamic dispatch (using `dyn Format`) because it stores a pointer to the value.
|
||||||
|
/// However, it doesn't store the pointer to `dyn Format`'s vtable, instead it statically resolves the function
|
||||||
|
/// pointer of `Format::format` and stores it in `formatter`.
|
||||||
pub struct Argument<'fmt, Context> {
|
pub struct Argument<'fmt, Context> {
|
||||||
value: &'fmt dyn Format<Context>,
|
/// The value to format stored as a raw pointer where `lifetime` stores the value's lifetime.
|
||||||
|
value: *const c_void,
|
||||||
|
|
||||||
|
/// Stores the lifetime of the value. To get the most out of our dear borrow checker.
|
||||||
|
lifetime: PhantomData<&'fmt ()>,
|
||||||
|
|
||||||
|
/// The function pointer to `value`'s `Format::format` method
|
||||||
|
formatter: fn(*const c_void, &mut Formatter<'_, Context>) -> FormatResult<()>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<Context> Clone for Argument<'_, Context> {
|
impl<Context> Clone for Argument<'_, Context> {
|
||||||
@@ -14,19 +28,32 @@ impl<Context> Clone for Argument<'_, Context> {
|
|||||||
impl<Context> Copy for Argument<'_, Context> {}
|
impl<Context> Copy for Argument<'_, Context> {}
|
||||||
|
|
||||||
impl<'fmt, Context> Argument<'fmt, Context> {
|
impl<'fmt, Context> Argument<'fmt, Context> {
|
||||||
/// Called by the [ruff_formatter::format_args] macro.
|
/// Called by the [ruff_formatter::format_args] macro. Creates a mono-morphed value for formatting
|
||||||
|
/// an object.
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn new<F: Format<Context>>(value: &'fmt F) -> Self {
|
pub fn new<F: Format<Context>>(value: &'fmt F) -> Self {
|
||||||
Self { value }
|
#[inline]
|
||||||
|
fn formatter<F: Format<Context>, Context>(
|
||||||
|
ptr: *const c_void,
|
||||||
|
fmt: &mut Formatter<Context>,
|
||||||
|
) -> FormatResult<()> {
|
||||||
|
// SAFETY: Safe because the 'fmt lifetime is captured by the 'lifetime' field.
|
||||||
|
#[allow(unsafe_code)]
|
||||||
|
F::fmt(unsafe { &*ptr.cast::<F>() }, fmt)
|
||||||
|
}
|
||||||
|
|
||||||
|
Self {
|
||||||
|
value: (value as *const F).cast::<std::ffi::c_void>(),
|
||||||
|
lifetime: PhantomData,
|
||||||
|
formatter: formatter::<F, Context>,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Formats the value stored by this argument using the given formatter.
|
/// Formats the value stored by this argument using the given formatter.
|
||||||
#[inline]
|
#[inline]
|
||||||
// Seems to only be triggered on wasm32 and looks like a false positive?
|
|
||||||
#[allow(clippy::trivially_copy_pass_by_ref)]
|
|
||||||
pub(super) fn format(&self, f: &mut Formatter<Context>) -> FormatResult<()> {
|
pub(super) fn format(&self, f: &mut Formatter<Context>) -> FormatResult<()> {
|
||||||
self.value.fmt(f)
|
(self.formatter)(self.value, f)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -2555,17 +2555,17 @@ pub struct BestFitting<'a, Context> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, Context> BestFitting<'a, Context> {
|
impl<'a, Context> BestFitting<'a, Context> {
|
||||||
/// Creates a new best fitting IR with the given variants.
|
/// Creates a new best fitting IR with the given variants. The method itself isn't unsafe
|
||||||
///
|
/// but it is to discourage people from using it because the printer will panic if
|
||||||
/// Callers are required to ensure that the number of variants given
|
/// the slice doesn't contain at least the least and most expanded variants.
|
||||||
/// is at least 2.
|
|
||||||
///
|
///
|
||||||
/// You're looking for a way to create a `BestFitting` object, use the `best_fitting![least_expanded, most_expanded]` macro.
|
/// You're looking for a way to create a `BestFitting` object, use the `best_fitting![least_expanded, most_expanded]` macro.
|
||||||
///
|
///
|
||||||
/// # Panics
|
/// ## Safety
|
||||||
///
|
|
||||||
/// When the slice contains less than two variants.
|
/// The slice must contain at least two variants.
|
||||||
pub fn from_arguments_unchecked(variants: Arguments<'a, Context>) -> Self {
|
#[allow(unsafe_code)]
|
||||||
|
pub unsafe fn from_arguments_unchecked(variants: Arguments<'a, Context>) -> Self {
|
||||||
assert!(
|
assert!(
|
||||||
variants.0.len() >= 2,
|
variants.0.len() >= 2,
|
||||||
"Requires at least the least expanded and most expanded variants"
|
"Requires at least the least expanded and most expanded variants"
|
||||||
@@ -2696,12 +2696,14 @@ impl<Context> Format<Context> for BestFitting<'_, Context> {
|
|||||||
buffer.write_element(FormatElement::Tag(EndBestFittingEntry));
|
buffer.write_element(FormatElement::Tag(EndBestFittingEntry));
|
||||||
}
|
}
|
||||||
|
|
||||||
// OK because the constructor guarantees that there are always at
|
// SAFETY: The constructor guarantees that there are always at least two variants. It's, therefore,
|
||||||
// least two variants.
|
// safe to call into the unsafe `from_vec_unchecked` function
|
||||||
let variants = BestFittingVariants::from_vec_unchecked(buffer.into_vec());
|
#[allow(unsafe_code)]
|
||||||
let element = FormatElement::BestFitting {
|
let element = unsafe {
|
||||||
variants,
|
FormatElement::BestFitting {
|
||||||
mode: self.mode,
|
variants: BestFittingVariants::from_vec_unchecked(buffer.into_vec()),
|
||||||
|
mode: self.mode,
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
f.write_element(element);
|
f.write_element(element);
|
||||||
|
|||||||
@@ -332,14 +332,17 @@ pub enum BestFittingMode {
|
|||||||
pub struct BestFittingVariants(Box<[FormatElement]>);
|
pub struct BestFittingVariants(Box<[FormatElement]>);
|
||||||
|
|
||||||
impl BestFittingVariants {
|
impl BestFittingVariants {
|
||||||
/// Creates a new best fitting IR with the given variants.
|
/// Creates a new best fitting IR with the given variants. The method itself isn't unsafe
|
||||||
///
|
/// but it is to discourage people from using it because the printer will panic if
|
||||||
/// Callers are required to ensure that the number of variants given
|
/// the slice doesn't contain at least the least and most expanded variants.
|
||||||
/// is at least 2 when using `most_expanded` or `most_flag`.
|
|
||||||
///
|
///
|
||||||
/// You're looking for a way to create a `BestFitting` object, use the `best_fitting![least_expanded, most_expanded]` macro.
|
/// You're looking for a way to create a `BestFitting` object, use the `best_fitting![least_expanded, most_expanded]` macro.
|
||||||
|
///
|
||||||
|
/// ## Safety
|
||||||
|
/// The slice must contain at least two variants.
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
pub fn from_vec_unchecked(variants: Vec<FormatElement>) -> Self {
|
#[allow(unsafe_code)]
|
||||||
|
pub unsafe fn from_vec_unchecked(variants: Vec<FormatElement>) -> Self {
|
||||||
debug_assert!(
|
debug_assert!(
|
||||||
variants
|
variants
|
||||||
.iter()
|
.iter()
|
||||||
@@ -348,23 +351,12 @@ impl BestFittingVariants {
|
|||||||
>= 2,
|
>= 2,
|
||||||
"Requires at least the least expanded and most expanded variants"
|
"Requires at least the least expanded and most expanded variants"
|
||||||
);
|
);
|
||||||
|
|
||||||
Self(variants.into_boxed_slice())
|
Self(variants.into_boxed_slice())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the most expanded variant
|
/// Returns the most expanded variant
|
||||||
///
|
|
||||||
/// # Panics
|
|
||||||
///
|
|
||||||
/// When the number of variants is less than two.
|
|
||||||
pub fn most_expanded(&self) -> &[FormatElement] {
|
pub fn most_expanded(&self) -> &[FormatElement] {
|
||||||
assert!(
|
|
||||||
self.as_slice()
|
|
||||||
.iter()
|
|
||||||
.filter(|element| matches!(element, FormatElement::Tag(Tag::StartBestFittingEntry)))
|
|
||||||
.count()
|
|
||||||
>= 2,
|
|
||||||
"Requires at least the least expanded and most expanded variants"
|
|
||||||
);
|
|
||||||
self.into_iter().last().unwrap()
|
self.into_iter().last().unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -373,19 +365,7 @@ impl BestFittingVariants {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the least expanded variant
|
/// Returns the least expanded variant
|
||||||
///
|
|
||||||
/// # Panics
|
|
||||||
///
|
|
||||||
/// When the number of variants is less than two.
|
|
||||||
pub fn most_flat(&self) -> &[FormatElement] {
|
pub fn most_flat(&self) -> &[FormatElement] {
|
||||||
assert!(
|
|
||||||
self.as_slice()
|
|
||||||
.iter()
|
|
||||||
.filter(|element| matches!(element, FormatElement::Tag(Tag::StartBestFittingEntry)))
|
|
||||||
.count()
|
|
||||||
>= 2,
|
|
||||||
"Requires at least the least expanded and most expanded variants"
|
|
||||||
);
|
|
||||||
self.into_iter().next().unwrap()
|
self.into_iter().next().unwrap()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -329,8 +329,10 @@ macro_rules! format {
|
|||||||
#[macro_export]
|
#[macro_export]
|
||||||
macro_rules! best_fitting {
|
macro_rules! best_fitting {
|
||||||
($least_expanded:expr, $($tail:expr),+ $(,)?) => {{
|
($least_expanded:expr, $($tail:expr),+ $(,)?) => {{
|
||||||
// OK because the macro syntax requires at least two variants.
|
#[allow(unsafe_code)]
|
||||||
$crate::BestFitting::from_arguments_unchecked($crate::format_args!($least_expanded, $($tail),+))
|
unsafe {
|
||||||
|
$crate::BestFitting::from_arguments_unchecked($crate::format_args!($least_expanded, $($tail),+))
|
||||||
|
}
|
||||||
}}
|
}}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1472,11 +1472,6 @@ impl<'a, 'print> FitsMeasurer<'a, 'print> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn fits_text(&mut self, text: Text, args: PrintElementArgs) -> Fits {
|
fn fits_text(&mut self, text: Text, args: PrintElementArgs) -> Fits {
|
||||||
fn exceeds_width(fits: &FitsMeasurer, args: PrintElementArgs) -> bool {
|
|
||||||
fits.state.line_width > fits.options().line_width.into()
|
|
||||||
&& !args.measure_mode().allows_text_overflow()
|
|
||||||
}
|
|
||||||
|
|
||||||
let indent = std::mem::take(&mut self.state.pending_indent);
|
let indent = std::mem::take(&mut self.state.pending_indent);
|
||||||
self.state.line_width +=
|
self.state.line_width +=
|
||||||
u32::from(indent.level()) * self.options().indent_width() + u32::from(indent.align());
|
u32::from(indent.level()) * self.options().indent_width() + u32::from(indent.align());
|
||||||
@@ -1498,13 +1493,7 @@ impl<'a, 'print> FitsMeasurer<'a, 'print> {
|
|||||||
return Fits::No;
|
return Fits::No;
|
||||||
}
|
}
|
||||||
match args.measure_mode() {
|
match args.measure_mode() {
|
||||||
MeasureMode::FirstLine => {
|
MeasureMode::FirstLine => return Fits::Yes,
|
||||||
return if exceeds_width(self, args) {
|
|
||||||
Fits::No
|
|
||||||
} else {
|
|
||||||
Fits::Yes
|
|
||||||
};
|
|
||||||
}
|
|
||||||
MeasureMode::AllLines
|
MeasureMode::AllLines
|
||||||
| MeasureMode::AllLinesAllowTextOverflow => {
|
| MeasureMode::AllLinesAllowTextOverflow => {
|
||||||
self.state.line_width = 0;
|
self.state.line_width = 0;
|
||||||
@@ -1522,7 +1511,9 @@ impl<'a, 'print> FitsMeasurer<'a, 'print> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if exceeds_width(self, args) {
|
if self.state.line_width > self.options().line_width.into()
|
||||||
|
&& !args.measure_mode().allows_text_overflow()
|
||||||
|
{
|
||||||
return Fits::No;
|
return Fits::No;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1720,14 +1711,14 @@ mod tests {
|
|||||||
));
|
));
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
"a
|
r#"a
|
||||||
b
|
b
|
||||||
c
|
c
|
||||||
d
|
d
|
||||||
d
|
d
|
||||||
c
|
c
|
||||||
b
|
b
|
||||||
a",
|
a"#,
|
||||||
formatted.as_code()
|
formatted.as_code()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -2056,10 +2047,10 @@ two lines`,
|
|||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
printed.as_code(),
|
printed.as_code(),
|
||||||
"Group with id-2
|
r#"Group with id-2
|
||||||
Group with id-1 does not fit on the line because it exceeds the line width of 80 characters by
|
Group with id-1 does not fit on the line because it exceeds the line width of 80 characters by
|
||||||
Group 2 fits
|
Group 2 fits
|
||||||
Group 1 breaks"
|
Group 1 breaks"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -16,7 +16,4 @@ license = { workspace = true }
|
|||||||
ruff_macros = { path = "../ruff_macros" }
|
ruff_macros = { path = "../ruff_macros" }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
static_assertions = { workspace = true }
|
static_assertions = "1.1.0"
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "ruff_linter"
|
name = "ruff_linter"
|
||||||
version = "0.1.11"
|
version = "0.1.3"
|
||||||
publish = false
|
publish = false
|
||||||
authors = { workspace = true }
|
authors = { workspace = true }
|
||||||
edition = { workspace = true }
|
edition = { workspace = true }
|
||||||
@@ -29,38 +29,37 @@ ruff_python_parser = { path = "../ruff_python_parser" }
|
|||||||
ruff_source_file = { path = "../ruff_source_file", features = ["serde"] }
|
ruff_source_file = { path = "../ruff_source_file", features = ["serde"] }
|
||||||
ruff_text_size = { path = "../ruff_text_size" }
|
ruff_text_size = { path = "../ruff_text_size" }
|
||||||
|
|
||||||
aho-corasick = { workspace = true }
|
aho-corasick = { version = "1.1.2" }
|
||||||
annotate-snippets = { workspace = true, features = ["color"] }
|
annotate-snippets = { version = "0.9.1", features = ["color"] }
|
||||||
anyhow = { workspace = true }
|
anyhow = { workspace = true }
|
||||||
bitflags = { workspace = true }
|
bitflags = { workspace = true }
|
||||||
chrono = { workspace = true }
|
chrono = { workspace = true }
|
||||||
clap = { workspace = true, features = ["derive", "string"], optional = true }
|
clap = { workspace = true, features = ["derive", "string"], optional = true }
|
||||||
colored = { workspace = true }
|
colored = { workspace = true }
|
||||||
fern = { workspace = true }
|
fern = { version = "0.6.1" }
|
||||||
glob = { workspace = true }
|
glob = { workspace = true }
|
||||||
globset = { workspace = true }
|
globset = { workspace = true }
|
||||||
imperative = { workspace = true }
|
imperative = { version = "1.0.4" }
|
||||||
is-macro = { workspace = true }
|
is-macro = { workspace = true }
|
||||||
is-wsl = { workspace = true }
|
|
||||||
itertools = { workspace = true }
|
itertools = { workspace = true }
|
||||||
libcst = { workspace = true }
|
libcst = { workspace = true }
|
||||||
log = { workspace = true }
|
log = { workspace = true }
|
||||||
memchr = { workspace = true }
|
memchr = { workspace = true }
|
||||||
natord = { workspace = true }
|
natord = { version = "1.0.9" }
|
||||||
once_cell = { workspace = true }
|
once_cell = { workspace = true }
|
||||||
path-absolutize = { workspace = true, features = [
|
path-absolutize = { workspace = true, features = [
|
||||||
"once_cell_cache",
|
"once_cell_cache",
|
||||||
"use_unix_paths_on_wasm",
|
"use_unix_paths_on_wasm",
|
||||||
] }
|
] }
|
||||||
pathdiff = { workspace = true }
|
pathdiff = { version = "0.2.1" }
|
||||||
pep440_rs = { workspace = true, features = ["serde"] }
|
pep440_rs = { version = "0.3.12", features = ["serde"] }
|
||||||
pyproject-toml = { workspace = true }
|
pyproject-toml = { version = "0.8.0" }
|
||||||
quick-junit = { workspace = true }
|
quick-junit = { version = "0.3.2" }
|
||||||
regex = { workspace = true }
|
regex = { workspace = true }
|
||||||
result-like = { workspace = true }
|
result-like = { version = "0.4.6" }
|
||||||
rustc-hash = { workspace = true }
|
rustc-hash = { workspace = true }
|
||||||
schemars = { workspace = true, optional = true }
|
schemars = { workspace = true, optional = true }
|
||||||
semver = { workspace = true }
|
semver = { version = "1.0.20" }
|
||||||
serde = { workspace = true }
|
serde = { workspace = true }
|
||||||
serde_json = { workspace = true }
|
serde_json = { workspace = true }
|
||||||
similar = { workspace = true }
|
similar = { workspace = true }
|
||||||
@@ -69,24 +68,21 @@ strum = { workspace = true }
|
|||||||
strum_macros = { workspace = true }
|
strum_macros = { workspace = true }
|
||||||
thiserror = { workspace = true }
|
thiserror = { workspace = true }
|
||||||
toml = { workspace = true }
|
toml = { workspace = true }
|
||||||
typed-arena = { workspace = true }
|
typed-arena = { version = "2.0.2" }
|
||||||
unicode-width = { workspace = true }
|
unicode-width = { workspace = true }
|
||||||
unicode_names2 = { workspace = true }
|
unicode_names2 = { workspace = true }
|
||||||
url = { workspace = true }
|
wsl = { version = "0.1.0" }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
insta = { workspace = true }
|
insta = { workspace = true }
|
||||||
pretty_assertions = { workspace = true }
|
pretty_assertions = "1.3.0"
|
||||||
test-case = { workspace = true }
|
test-case = { workspace = true }
|
||||||
# Disable colored output in tests
|
# Disable colored output in tests
|
||||||
colored = { workspace = true, features = ["no-color"] }
|
colored = { workspace = true, features = ["no-color"] }
|
||||||
tempfile = { workspace = true }
|
tempfile = "3.8.1"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = []
|
default = []
|
||||||
schemars = ["dep:schemars"]
|
schemars = ["dep:schemars"]
|
||||||
# Enables the UnreachableCode rule
|
# Enables the UnreachableCode rule
|
||||||
unreachable-code = []
|
unreachable-code = []
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|||||||
3
crates/ruff_linter/resources/test/fixtures/README.md
vendored
Normal file
3
crates/ruff_linter/resources/test/fixtures/README.md
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
# fixtures
|
||||||
|
|
||||||
|
Fixture files used for snapshot testing.
|
||||||
@@ -39,18 +39,3 @@ def func():
|
|||||||
for i in range(1110):
|
for i in range(1110):
|
||||||
if True:
|
if True:
|
||||||
break
|
break
|
||||||
|
|
||||||
# TODO(charlie): The `pass` here does not get properly redirected to the top of the
|
|
||||||
# loop, unlike below.
|
|
||||||
def func():
|
|
||||||
for i in range(5):
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
return 1
|
|
||||||
|
|
||||||
def func():
|
|
||||||
for i in range(5):
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
return 1
|
|
||||||
x = 1
|
|
||||||
|
|||||||
@@ -129,11 +129,3 @@ def func():
|
|||||||
print("Grass is green")
|
print("Grass is green")
|
||||||
case Color.BLUE:
|
case Color.BLUE:
|
||||||
print("I'm feeling the blues :(")
|
print("I'm feeling the blues :(")
|
||||||
|
|
||||||
|
|
||||||
def func(point):
|
|
||||||
match point:
|
|
||||||
case (0, 0):
|
|
||||||
print("Origin")
|
|
||||||
case foo:
|
|
||||||
raise ValueError("oops")
|
|
||||||
|
|||||||
@@ -1,304 +0,0 @@
|
|||||||
def func():
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
def func():
|
|
||||||
return 1.5
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if x > 0:
|
|
||||||
return 1
|
|
||||||
else:
|
|
||||||
return 1.5
|
|
||||||
|
|
||||||
|
|
||||||
def func():
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if x > 0:
|
|
||||||
return None
|
|
||||||
else:
|
|
||||||
return
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
return 1 or 2.5 if x > 0 else 1.5 or "str"
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
return 1 + 2.5 if x > 0 else 1.5 or "str"
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if not x:
|
|
||||||
return None
|
|
||||||
return {"foo": 1}
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
return {"foo": 1}
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if not x:
|
|
||||||
return 1
|
|
||||||
else:
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if not x:
|
|
||||||
return 1
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if not x:
|
|
||||||
return 1
|
|
||||||
elif x > 5:
|
|
||||||
return "str"
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if x:
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
def func():
|
|
||||||
x = 1
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if x > 0:
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
match x:
|
|
||||||
case [1, 2, 3]:
|
|
||||||
return 1
|
|
||||||
case 4 as y:
|
|
||||||
return "foo"
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
for i in range(5):
|
|
||||||
if i > 0:
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
for i in range(5):
|
|
||||||
if i > 0:
|
|
||||||
return 1
|
|
||||||
else:
|
|
||||||
return 4
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
for i in range(5):
|
|
||||||
if i > 0:
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
return 4
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
try:
|
|
||||||
pass
|
|
||||||
except:
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
try:
|
|
||||||
pass
|
|
||||||
except:
|
|
||||||
return 1
|
|
||||||
finally:
|
|
||||||
return 2
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
try:
|
|
||||||
pass
|
|
||||||
except:
|
|
||||||
return 1
|
|
||||||
else:
|
|
||||||
return 2
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
try:
|
|
||||||
return 1
|
|
||||||
except:
|
|
||||||
return 2
|
|
||||||
else:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
while x > 0:
|
|
||||||
break
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
import abc
|
|
||||||
from abc import abstractmethod
|
|
||||||
|
|
||||||
|
|
||||||
class Foo(abc.ABC):
|
|
||||||
@abstractmethod
|
|
||||||
def method(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def method(self):
|
|
||||||
"""Docstring."""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def method(self):
|
|
||||||
...
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
@abstractmethod
|
|
||||||
def method():
|
|
||||||
pass
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
@abstractmethod
|
|
||||||
def method(cls):
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def method(self):
|
|
||||||
if self.x > 0:
|
|
||||||
return 1
|
|
||||||
else:
|
|
||||||
return 1.5
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
try:
|
|
||||||
pass
|
|
||||||
except:
|
|
||||||
return 2
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
try:
|
|
||||||
pass
|
|
||||||
except:
|
|
||||||
return 2
|
|
||||||
else:
|
|
||||||
return 3
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if not x:
|
|
||||||
raise ValueError
|
|
||||||
else:
|
|
||||||
raise TypeError
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if not x:
|
|
||||||
raise ValueError
|
|
||||||
else:
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
from typing import overload
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
|
||||||
def overloaded(i: int) -> "int":
|
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
|
||||||
def overloaded(i: "str") -> "str":
|
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
def overloaded(i):
|
|
||||||
return i
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if not x:
|
|
||||||
return 1
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if not x:
|
|
||||||
return 1
|
|
||||||
else:
|
|
||||||
return 2
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
|
|
||||||
def func():
|
|
||||||
try:
|
|
||||||
raise ValueError
|
|
||||||
except:
|
|
||||||
return 2
|
|
||||||
|
|
||||||
|
|
||||||
def func():
|
|
||||||
try:
|
|
||||||
return 1
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
for _ in range(3):
|
|
||||||
if x > 0:
|
|
||||||
return 1
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if x > 5:
|
|
||||||
raise ValueError
|
|
||||||
else:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if x > 5:
|
|
||||||
raise ValueError
|
|
||||||
elif x > 10:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
if x > 5:
|
|
||||||
raise ValueError
|
|
||||||
elif x > 10:
|
|
||||||
return 5
|
|
||||||
|
|
||||||
|
|
||||||
def func():
|
|
||||||
try:
|
|
||||||
return 5
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
|
|
||||||
def func(x: int):
|
|
||||||
match x:
|
|
||||||
case [1, 2, 3]:
|
|
||||||
return 1
|
|
||||||
case y:
|
|
||||||
return "foo"
|
|
||||||
@@ -8,7 +8,6 @@ def func(address):
|
|||||||
# Error
|
# Error
|
||||||
"0.0.0.0"
|
"0.0.0.0"
|
||||||
'0.0.0.0'
|
'0.0.0.0'
|
||||||
f"0.0.0.0"
|
|
||||||
|
|
||||||
|
|
||||||
# Error
|
# Error
|
||||||
|
|||||||
@@ -5,9 +5,6 @@ with open("/abc/tmp", "w") as f:
|
|||||||
with open("/tmp/abc", "w") as f:
|
with open("/tmp/abc", "w") as f:
|
||||||
f.write("def")
|
f.write("def")
|
||||||
|
|
||||||
with open(f"/tmp/abc", "w") as f:
|
|
||||||
f.write("def")
|
|
||||||
|
|
||||||
with open("/var/tmp/123", "w") as f:
|
with open("/var/tmp/123", "w") as f:
|
||||||
f.write("def")
|
f.write("def")
|
||||||
|
|
||||||
|
|||||||
@@ -1,65 +0,0 @@
|
|||||||
import sys
|
|
||||||
import tarfile
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
|
|
||||||
def unsafe_archive_handler(filename):
|
|
||||||
tar = tarfile.open(filename)
|
|
||||||
tar.extractall(path=tempfile.mkdtemp())
|
|
||||||
tar.close()
|
|
||||||
|
|
||||||
|
|
||||||
def managed_members_archive_handler(filename):
|
|
||||||
tar = tarfile.open(filename)
|
|
||||||
tar.extractall(path=tempfile.mkdtemp(), members=members_filter(tar))
|
|
||||||
tar.close()
|
|
||||||
|
|
||||||
|
|
||||||
def list_members_archive_handler(filename):
|
|
||||||
tar = tarfile.open(filename)
|
|
||||||
tar.extractall(path=tempfile.mkdtemp(), members=[])
|
|
||||||
tar.close()
|
|
||||||
|
|
||||||
|
|
||||||
def provided_members_archive_handler(filename):
|
|
||||||
tar = tarfile.open(filename)
|
|
||||||
tarfile.extractall(path=tempfile.mkdtemp(), members=tar)
|
|
||||||
tar.close()
|
|
||||||
|
|
||||||
|
|
||||||
def filter_data(filename):
|
|
||||||
tar = tarfile.open(filename)
|
|
||||||
tarfile.extractall(path=tempfile.mkdtemp(), filter="data")
|
|
||||||
tar.close()
|
|
||||||
|
|
||||||
|
|
||||||
def filter_fully_trusted(filename):
|
|
||||||
tar = tarfile.open(filename)
|
|
||||||
tarfile.extractall(path=tempfile.mkdtemp(), filter="fully_trusted")
|
|
||||||
tar.close()
|
|
||||||
|
|
||||||
|
|
||||||
def filter_tar(filename):
|
|
||||||
tar = tarfile.open(filename)
|
|
||||||
tarfile.extractall(path=tempfile.mkdtemp(), filter="tar")
|
|
||||||
tar.close()
|
|
||||||
|
|
||||||
|
|
||||||
def members_filter(tarfile):
|
|
||||||
result = []
|
|
||||||
for member in tarfile.getmembers():
|
|
||||||
if '../' in member.name:
|
|
||||||
print('Member name container directory traversal sequence')
|
|
||||||
continue
|
|
||||||
elif (member.issym() or member.islnk()) and ('../' in member.linkname):
|
|
||||||
print('Symlink to external resource')
|
|
||||||
continue
|
|
||||||
result.append(member)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
if len(sys.argv) > 1:
|
|
||||||
filename = sys.argv[1]
|
|
||||||
unsafe_archive_handler(filename)
|
|
||||||
managed_members_archive_handler(filename)
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
import telnetlib # S401
|
|
||||||
from telnetlib import Telnet # S401
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
import ftplib # S402
|
|
||||||
from ftplib import FTP # S402
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
import dill # S403
|
|
||||||
from dill import objects # S403
|
|
||||||
import shelve
|
|
||||||
from shelve import open
|
|
||||||
import cPickle
|
|
||||||
from cPickle import load
|
|
||||||
import pickle
|
|
||||||
from pickle import load
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
import subprocess # S404
|
|
||||||
from subprocess import Popen # S404
|
|
||||||
from subprocess import Popen as pop # S404
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
import xml.etree.cElementTree # S405
|
|
||||||
from xml.etree import cElementTree # S405
|
|
||||||
import xml.etree.ElementTree # S405
|
|
||||||
from xml.etree import ElementTree # S405
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user