Compare commits
1 commit: indent-lam ... zanie/ecos

| Author | SHA1 | Date |
|---|---|---|
|  | 105fb1c682 |  |

202  .github/workflows/ci.yaml (vendored)
@@ -30,7 +30,7 @@ jobs:
with:
fetch-depth: 0

- uses: tj-actions/changed-files@v40
- uses: tj-actions/changed-files@v39
id: changed
with:
files_yaml: |
@@ -43,7 +43,6 @@ jobs:
- "!crates/ruff_dev/**"
- "!crates/ruff_shrinking/**"
- scripts/*
- .github/workflows/ci.yaml

formatter:
- Cargo.toml
@@ -58,7 +57,6 @@ jobs:
- crates/ruff_python_parser/**
- crates/ruff_dev/**
- scripts/*
- .github/workflows/ci.yaml

cargo-fmt:
name: "cargo fmt"
@@ -84,9 +82,12 @@ jobs:
- name: "Clippy (wasm)"
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings

cargo-test-linux:
runs-on: ubuntu-latest
name: "cargo test (linux)"
cargo-test:
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
runs-on: ${{ matrix.os }}
name: "cargo test | ${{ matrix.os }}"
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -96,54 +97,25 @@ jobs:
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
- name: "Run tests (Ubuntu)"
if: ${{ matrix.os == 'ubuntu-latest' }}
run: cargo insta test --all --all-features --unreferenced reject
- name: "Run tests (Windows)"
if: ${{ matrix.os == 'windows-latest' }}
shell: bash
# We can't reject unreferenced snapshots on windows because flake8_executable can't run on windows
run: cargo insta test --all --all-features
# Check for broken links in the documentation.
- run: cargo doc --all --no-deps
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"
- uses: actions/upload-artifact@v3
if: ${{ matrix.os == 'ubuntu-latest' }}
with:
name: ruff
path: target/debug/ruff

cargo-test-windows:
runs-on: windows-latest
name: "cargo test (windows)"
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo insta"
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
# We can't reject unreferenced snapshots on windows because flake8_executable can't run on windows
run: cargo insta test --all --all-features

cargo-test-wasm:
runs-on: ubuntu-latest
name: "cargo test (wasm)"
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
with:
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
- uses: Swatinem/rust-cache@v2
- name: "Run wasm-pack"
run: |
cd crates/ruff_wasm
wasm-pack test --node

cargo-fuzz:
runs-on: ubuntu-latest
name: "cargo fuzz"
@@ -160,6 +132,25 @@ jobs:
tool: cargo-fuzz@0.11
- run: cargo fuzz build -s none

cargo-test-wasm:
runs-on: ubuntu-latest
name: "cargo test (wasm)"
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v3
with:
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
- uses: Swatinem/rust-cache@v2
- name: "Run wasm-pack"
run: |
cd crates/ruff_wasm
wasm-pack test --node

scripts:
name: "test scripts"
runs-on: ubuntu-latest
@@ -181,10 +172,10 @@ jobs:
name: "ecosystem"
runs-on: ubuntu-latest
needs:
- cargo-test-linux
- cargo-test
- determine_changes
# Only runs on pull requests, since that is the only way we can find the base version for comparison.
if: github.event_name == 'pull_request'
if: github.event_name == 'pull_request' && needs.determine_changes.outputs.linter == 'true'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
@@ -192,89 +183,27 @@ jobs:
python-version: ${{ env.PYTHON_VERSION }}

- uses: actions/download-artifact@v3
name: Download comparison Ruff binary
name: Download Ruff binary
id: ruff-target
with:
name: ruff
path: target/debug

- uses: dawidd6/action-download-artifact@v2
name: Download baseline Ruff binary
name: Download base results
with:
name: ruff
branch: ${{ github.event.pull_request.base.ref }}
check_artifacts: true

- name: Install ruff-ecosystem
run: |
pip install ./python/ruff-ecosystem

- name: Run `ruff check` stable ecosystem check
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
- name: Run ecosystem check
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
chmod +x ruff ${{ steps.ruff-target.outputs.download-path }}/ruff

# Set pipefail to avoid hiding errors with tee
set -eo pipefail
scripts/check_ecosystem.py ruff ${{ steps.ruff-target.outputs.download-path }}/ruff | tee ecosystem-result
cat ecosystem-result > $GITHUB_STEP_SUMMARY

ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable

cat ecosystem-result-check-stable > $GITHUB_STEP_SUMMARY
echo "### Linter (stable)" > ecosystem-result
cat ecosystem-result-check-stable >> ecosystem-result
echo "" >> ecosystem-result

- name: Run `ruff check` preview ecosystem check
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff

# Set pipefail to avoid hiding errors with tee
set -eo pipefail

ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview

cat ecosystem-result-check-preview > $GITHUB_STEP_SUMMARY
echo "### Linter (preview)" >> ecosystem-result
cat ecosystem-result-check-preview >> ecosystem-result
echo "" >> ecosystem-result

- name: Run `ruff format` stable ecosystem check
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff

# Set pipefail to avoid hiding errors with tee
set -eo pipefail

ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable

cat ecosystem-result-format-stable > $GITHUB_STEP_SUMMARY
echo "### Formatter (stable)" >> ecosystem-result
cat ecosystem-result-format-stable >> ecosystem-result
echo "" >> ecosystem-result

- name: Run `ruff format` preview ecosystem check
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff

# Set pipefail to avoid hiding errors with tee
set -eo pipefail

ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview

cat ecosystem-result-format-preview > $GITHUB_STEP_SUMMARY
echo "### Formatter (preview)" >> ecosystem-result
cat ecosystem-result-format-preview >> ecosystem-result
echo "" >> ecosystem-result

- name: Export pull request number
run: |
echo ${{ github.event.number }} > pr-number

- uses: actions/upload-artifact@v3
@@ -296,12 +225,12 @@ jobs:
- uses: actions/checkout@v4
- name: "Install nightly Rust toolchain"
# Only pinned to make caching work, update freely
run: rustup toolchain install nightly-2023-10-15
run: rustup toolchain install nightly-2023-06-08
- uses: Swatinem/rust-cache@v2
- name: "Install cargo-udeps"
uses: taiki-e/install-action@cargo-udeps
- name: "Run cargo-udeps"
run: cargo +nightly-2023-10-15 udeps
run: cargo +nightly-2023-06-08 udeps

python-package:
name: "python package"
@@ -390,8 +319,8 @@ jobs:
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.generated.yml

check-formatter-instability-and-black-similarity:
name: "formatter instabilities and black similarity"
check-formatter-ecosystem:
name: "Formatter ecosystem and progress checks"
runs-on: ubuntu-latest
needs: determine_changes
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
@@ -408,45 +337,6 @@ jobs:
- name: "Remove checkouts from cache"
run: rm -r target/progress_projects

check-ruff-lsp:
name: "test ruff-lsp"
runs-on: ubuntu-latest
needs: cargo-test-linux
steps:
- uses: extractions/setup-just@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- uses: actions/checkout@v4
name: "Download ruff-lsp source"
with:
repository: "astral-sh/ruff-lsp"

- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}

- uses: actions/download-artifact@v3
name: Download development ruff binary
id: ruff-target
with:
name: ruff
path: target/debug

- name: Install ruff-lsp dependencies
run: |
just install

- name: Run ruff-lsp tests
run: |
# Setup development binary
pip uninstall --yes ruff
chmod +x ${{ steps.ruff-target.outputs.download-path }}/ruff
export PATH=${{ steps.ruff-target.outputs.download-path }}:$PATH
ruff version

just test

benchmarks:
runs-on: ubuntu-latest
steps:
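The `ecosystem` job above downloads two `ruff` binaries (the PR build and a baseline artifact from the base branch), runs `ruff-ecosystem` on both, and pipes the Markdown report into the step summary. A rough local sketch of the same comparison; the baseline binary path is a placeholder, not the workflow's exact layout:

```shell
# Build the binary for the current branch (assumes a Rust toolchain is installed).
cargo build --bin ruff

# Install the comparison tool the workflow uses.
pip install ./python/ruff-ecosystem

# Fail the pipeline if either side of the `tee` fails, as the workflow does.
set -eo pipefail

# Compare a baseline binary against the fresh build and keep the Markdown report;
# `./ruff-baseline` stands in for the artifact downloaded from the base branch.
ruff-ecosystem check ./ruff-baseline ./target/debug/ruff \
    --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
```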
2  .github/workflows/docs.yaml (vendored)

@@ -47,7 +47,7 @@ jobs:
run: mkdocs build --strict -f mkdocs.generated.yml
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.3.2
uses: cloudflare/wrangler-action@v3.3.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
4  .github/workflows/playground.yaml (vendored)

@@ -20,7 +20,7 @@ jobs:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: 18
cache: "npm"
@@ -40,7 +40,7 @@ jobs:
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.3.2
uses: cloudflare/wrangler-action@v3.3.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
21  .github/workflows/pr-comment.yaml (vendored)

@@ -1,4 +1,4 @@
name: Ecosystem check comment
name: PR Check Comment

on:
workflow_run:
@@ -18,13 +18,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: dawidd6/action-download-artifact@v2
name: Download pull request number
name: Download PR Number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore

- name: Parse pull request number
- name: Extract PR Number
id: pr-number
run: |
if [[ -f pr-number ]]
@@ -33,7 +33,7 @@ jobs:
fi

- uses: dawidd6/action-download-artifact@v2
name: "Download ecosystem results"
name: "Download Ecosystem Result"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
with:
@@ -41,18 +41,15 @@ jobs:
workflow: ci.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/ecosystem
workflow_conclusion: completed
if_no_artifact_found: ignore

- name: Generate comment content
- name: Generate Comment
id: generate-comment
if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
run: |
# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment ecosystem -->' >> comment.txt
echo '## PR Check Results' >> comment.txt

echo '## `ruff-ecosystem` results' >> comment.txt
echo "### Ecosystem" >> comment.txt
cat pr/ecosystem/ecosystem-result >> comment.txt
echo "" >> comment.txt

@@ -60,14 +57,14 @@ jobs:
cat comment.txt >> $GITHUB_OUTPUT
echo 'EOF' >> $GITHUB_OUTPUT

- name: Find existing comment
- name: Find Comment
uses: peter-evans/find-comment@v2
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment ecosystem -->"
body-includes: PR Check Results

- name: Create or update comment
if: steps.find-comment.outcome == 'success'
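The comment-generation step writes a multi-line value into `$GITHUB_OUTPUT` by appending the comment body between a delimiter pair; the closing `EOF` is visible in the hunk above, while the opening delimiter line sits outside the hunk. A minimal sketch of that pattern, with `comment` as an assumed output name:

```shell
# Multi-line step outputs use a delimiter pair; "EOF" mirrors the closing line in the hunk.
echo "comment<<EOF" >> "$GITHUB_OUTPUT"   # assumed output name; the real one is outside the hunk
cat comment.txt >> "$GITHUB_OUTPUT"       # append the generated comment body
echo "EOF" >> "$GITHUB_OUTPUT"            # close the delimiter
```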
1  .github/workflows/release.yaml (vendored)

@@ -48,6 +48,7 @@ jobs:
args: --out dist
- name: "Test sdist"
run: |
rustup default $(cat rust-toolchain)
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
ruff --help
python -m ruff --help
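The `Test sdist` step installs the freshly built source distribution and smoke-tests both entry points. A hedged local equivalent, assuming `maturin` is the build backend (suggested by the `args: --out dist` context above) and using `ruff` as a placeholder for the package name:

```shell
# Build a source distribution into ./dist (maturin is an assumption based on the workflow context).
maturin sdist --out dist

# Install it and check that both entry points respond, mirroring the workflow's smoke test.
pip install dist/ruff-*.tar.gz --force-reinstall
ruff --help
python -m ruff --help
```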
@@ -13,8 +13,3 @@ MD041: false

# MD013/line-length
MD013: false

# MD024/no-duplicate-heading
MD024:
# Allow when nested under different parents e.g. CHANGELOG.md
allow_different_nesting: true
@@ -24,10 +24,6 @@ repos:
additional_dependencies:
- mdformat-mkdocs
- mdformat-admon
exclude: |
(?x)^(
docs/formatter/black.md
)$

- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.33.0
@@ -47,13 +43,10 @@ repos:
language: system
types: [rust]
pass_filenames: false # This makes it a lot faster

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.3
hooks:
- id: ruff-format
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
name: ruff
entry: cargo run --bin ruff -- check --no-cache --force-exclude --fix --exit-non-zero-on-fix
language: system
types_or: [python, pyi]
require_serial: true
exclude: |
@@ -62,6 +55,12 @@ repos:
crates/ruff_python_formatter/resources/.*
)$

# Black
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black

# Prettier
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.0.0
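With the hooks configured as above, pre-commit runs them against staged files on every commit; they can also be exercised on demand. A brief usage sketch:

```shell
# Install the git hook once per clone.
pre-commit install

# Run every configured hook (ruff, ruff-format, markdownlint, prettier, ...) against all files.
pre-commit run --all-files

# Or run a single hook by its id, e.g. just the Ruff linter hook.
pre-commit run ruff --all-files
```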
163  CHANGELOG.md
@@ -1,170 +1,11 @@
|
||||
# Changelog
|
||||
|
||||
## 0.1.3
|
||||
|
||||
This release includes a variety of improvements to the Ruff formatter, removing several known and
|
||||
unintentional deviations from Black.
|
||||
|
||||
### Formatter
|
||||
|
||||
- Avoid space around pow for `None`, `True` and `False` ([#8189](https://github.com/astral-sh/ruff/pull/8189))
|
||||
- Avoid sorting all paths in the format command ([#8181](https://github.com/astral-sh/ruff/pull/8181))
|
||||
- Insert necessary blank line between class and leading comments ([#8224](https://github.com/astral-sh/ruff/pull/8224))
|
||||
- Avoid introducing new parentheses in annotated assignments ([#8233](https://github.com/astral-sh/ruff/pull/8233))
|
||||
- Refine the warnings about incompatible linter options ([#8196](https://github.com/astral-sh/ruff/pull/8196))
|
||||
- Add test and basic implementation for formatter preview mode ([#8044](https://github.com/astral-sh/ruff/pull/8044))
|
||||
- Refine warning about incompatible `isort` settings ([#8192](https://github.com/astral-sh/ruff/pull/8192))
|
||||
- Only omit optional parentheses for starting or ending with parentheses ([#8238](https://github.com/astral-sh/ruff/pull/8238))
|
||||
- Use source type to determine parser mode for formatting ([#8205](https://github.com/astral-sh/ruff/pull/8205))
|
||||
- Don't warn about magic trailing comma when `isort.force-single-line` is true ([#8244](https://github.com/astral-sh/ruff/pull/8244))
|
||||
- Use `SourceKind::diff` for formatter ([#8240](https://github.com/astral-sh/ruff/pull/8240))
|
||||
- Fix `fmt:off` with trailing child comment ([#8234](https://github.com/astral-sh/ruff/pull/8234))
|
||||
- Formatter parentheses support for `IpyEscapeCommand` ([#8207](https://github.com/astral-sh/ruff/pull/8207))
|
||||
|
||||
### Linter
|
||||
|
||||
- \[`pylint`\] Add buffer methods to `bad-dunder-method-name` (`PLW3201`) exclusions ([#8190](https://github.com/astral-sh/ruff/pull/8190))
|
||||
- Match rule prefixes from `external` codes setting in `unused-noqa` ([#8177](https://github.com/astral-sh/ruff/pull/8177))
|
||||
- Use `line-length` setting for isort in lieu of `pycodestyle.max-line-length` ([#8235](https://github.com/astral-sh/ruff/pull/8235))
|
||||
- Update fix for `unnecessary-paren-on-raise-exception` to unsafe for unknown types ([#8231](https://github.com/astral-sh/ruff/pull/8231))
|
||||
- Correct quick fix message for `W605` ([#8255](https://github.com/astral-sh/ruff/pull/8255))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix typo in max-doc-length documentation ([#8201](https://github.com/astral-sh/ruff/pull/8201))
|
||||
- Improve documentation around linter-formatter conflicts ([#8257](https://github.com/astral-sh/ruff/pull/8257))
|
||||
- Fix link to error suppression documentation in `unused-noqa` ([#8172](https://github.com/astral-sh/ruff/pull/8172))
|
||||
- Add `external` option to `unused-noqa` documentation ([#8171](https://github.com/astral-sh/ruff/pull/8171))
|
||||
- Add title attribute to icons ([#8060](https://github.com/astral-sh/ruff/pull/8060))
|
||||
- Clarify unsafe case in RSE102 ([#8256](https://github.com/astral-sh/ruff/pull/8256))
|
||||
- Fix skipping formatting examples ([#8210](https://github.com/astral-sh/ruff/pull/8210))
|
||||
- docs: fix name of `magic-trailing-comma` option in README ([#8200](https://github.com/astral-sh/ruff/pull/8200))
|
||||
- Add note about scope of rule changing in versioning policy ([#8169](https://github.com/astral-sh/ruff/pull/8169))
|
||||
- Document: Fix default lint rules ([#8218](https://github.com/astral-sh/ruff/pull/8218))
|
||||
- Fix a wrong setting in configuration.md ([#8186](https://github.com/astral-sh/ruff/pull/8186))
|
||||
- Fix misspelled TOML headers in the tutorial ([#8209](https://github.com/astral-sh/ruff/pull/8209))
|
||||
|
||||
## 0.1.2
|
||||
|
||||
This release includes the Beta version of the Ruff formatter — an extremely fast, Black-compatible Python formatter.
|
||||
Try it today with `ruff format`! [Check out the blog post](https://astral.sh/blog/the-ruff-formatter) and [read the docs](https://docs.astral.sh/ruff/formatter/).
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pylint`\] Implement `non-ascii-module-import` (`C2403`) ([#8056](https://github.com/astral-sh/ruff/pull/8056))
|
||||
- \[`pylint`\] implement `non-ascii-name` (`C2401`) ([#8038](https://github.com/astral-sh/ruff/pull/8038))
|
||||
- \[`pylint`\] Implement unnecessary-lambda (W0108) ([#7953](https://github.com/astral-sh/ruff/pull/7953))
|
||||
- \[`refurb`\] Implement `read-whole-file` (`FURB101`) ([#7682](https://github.com/astral-sh/ruff/pull/7682))
|
||||
- Add fix for `E223`, `E224`, and `E242` ([#8143](https://github.com/astral-sh/ruff/pull/8143))
|
||||
- Add fix for `E225`, `E226`, `E227`, and `E228` ([#8136](https://github.com/astral-sh/ruff/pull/8136))
|
||||
- Add fix for `E252` ([#8142](https://github.com/astral-sh/ruff/pull/8142))
|
||||
- Add fix for `E261` ([#8114](https://github.com/astral-sh/ruff/pull/8114))
|
||||
- Add fix for `E273` and `E274` ([#8144](https://github.com/astral-sh/ruff/pull/8144))
|
||||
- Add fix for `E275` ([#8133](https://github.com/astral-sh/ruff/pull/8133))
|
||||
- Update `SIM401` to catch ternary operations ([#7415](https://github.com/astral-sh/ruff/pull/7415))
|
||||
- Update `E721` to allow `is` and `is` not for direct type comparisons ([#7905](https://github.com/astral-sh/ruff/pull/7905))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Add `backports.strenum` to `deprecated-imports` ([#8113](https://github.com/astral-sh/ruff/pull/8113))
|
||||
- Update `SIM112` to ignore `https_proxy`, `http_proxy`, and `no_proxy` ([#8140](https://github.com/astral-sh/ruff/pull/8140))
|
||||
- Update fix for `literal-membership` (`PLR6201`) to be unsafe ([#8097](https://github.com/astral-sh/ruff/pull/8097))
|
||||
- Update fix for `mutable-argument-defaults` (`B006`) to be unsafe ([#8108](https://github.com/astral-sh/ruff/pull/8108))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Change `line-ending` default to `auto` ([#8057](https://github.com/astral-sh/ruff/pull/8057))
|
||||
- Respect parenthesized generators in `has_own_parentheses` ([#8100](https://github.com/astral-sh/ruff/pull/8100))
|
||||
- Add caching to formatter ([#8089](https://github.com/astral-sh/ruff/pull/8089))
|
||||
- Remove `--line-length` option from `format` command ([#8131](https://github.com/astral-sh/ruff/pull/8131))
|
||||
- Add formatter to `line-length` documentation ([#8150](https://github.com/astral-sh/ruff/pull/8150))
|
||||
- Warn about incompatible formatter options ([#8088](https://github.com/astral-sh/ruff/pull/8088))
|
||||
- Fix range of unparenthesized tuple subject in match statement ([#8101](https://github.com/astral-sh/ruff/pull/8101))
|
||||
- Remove experimental formatter warning ([#8148](https://github.com/astral-sh/ruff/pull/8148))
|
||||
- Don't move type param opening parenthesis comment ([#8163](https://github.com/astral-sh/ruff/pull/8163))
|
||||
- Update versions in format benchmark script ([#8110](https://github.com/astral-sh/ruff/pull/8110))
|
||||
- Avoid loading files for cached format results ([#8134](https://github.com/astral-sh/ruff/pull/8134))
|
||||
|
||||
### CLI
|
||||
|
||||
- Show the `ruff format` command in help menus ([#8167](https://github.com/astral-sh/ruff/pull/8167))
|
||||
- Add `ruff version` command with long version display ([#8034](https://github.com/astral-sh/ruff/pull/8034))
|
||||
|
||||
### Configuration
|
||||
|
||||
- New `pycodestyle.max-line-length` option ([#8039](https://github.com/astral-sh/ruff/pull/8039))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Detect `sys.version_info` slices in `outdated-version-block` ([#8112](https://github.com/astral-sh/ruff/pull/8112))
|
||||
- Avoid if-else simplification for `TYPE_CHECKING` blocks ([#8072](https://github.com/astral-sh/ruff/pull/8072))
|
||||
- Avoid false-positive print separator diagnostic with starred argument ([#8079](https://github.com/astral-sh/ruff/pull/8079))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix message for `too-many-arguments` lint ([#8092](https://github.com/astral-sh/ruff/pull/8092))
|
||||
- Fix `extend-unsafe-fixes` and `extend-safe-fixes` example ([#8139](https://github.com/astral-sh/ruff/pull/8139))
|
||||
- Add links to `flake8-import-conventions` options ([#8115](https://github.com/astral-sh/ruff/pull/8115))
|
||||
- Rework the documentation to incorporate the Ruff formatter ([#7732](https://github.com/astral-sh/ruff/pull/7732))
|
||||
- Fix `Options` JSON schema description ([#8081](https://github.com/astral-sh/ruff/pull/8081))
|
||||
- Fix typo (`pytext` -> `pytest`) ([#8117](https://github.com/astral-sh/ruff/pull/8117))
|
||||
- Improve `magic-value-comparison` example in docs ([#8111](https://github.com/astral-sh/ruff/pull/8111))
|
||||
|
||||
## 0.1.1
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Add unsafe fix for `escape-sequence-in-docstring` (`D301`) ([#7970](https://github.com/astral-sh/ruff/pull/7970))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Respect `#(deprecated)` attribute in configuration options ([#8035](https://github.com/astral-sh/ruff/pull/8035))
|
||||
- Add `[format|lint].exclude` options ([#8000](https://github.com/astral-sh/ruff/pull/8000))
|
||||
- Respect `tab-size` setting in formatter ([#8006](https://github.com/astral-sh/ruff/pull/8006))
|
||||
- Add `lint.preview` ([#8002](https://github.com/astral-sh/ruff/pull/8002))
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pylint`\] Implement `literal-membership` (`PLR6201`) ([#7973](https://github.com/astral-sh/ruff/pull/7973))
|
||||
- \[`pylint`\] Implement `too-many-boolean-expressions` (`PLR0916`) ([#7975](https://github.com/astral-sh/ruff/pull/7975))
|
||||
- \[`pylint`\] Implement `misplaced-bare-raise` (`E0704`) ([#7961](https://github.com/astral-sh/ruff/pull/7961))
|
||||
- \[`pylint`\] Implement `global-at-module-level` (`W0604`) ([#8058](https://github.com/astral-sh/ruff/pull/8058))
|
||||
- \[`pylint`\] Implement `unspecified-encoding` (`PLW1514`) ([#7939](https://github.com/astral-sh/ruff/pull/7939))
|
||||
- Add fix for `triple-single-quotes` (`D300`) ([#7967](https://github.com/astral-sh/ruff/pull/7967))
|
||||
|
||||
### Formatter
|
||||
|
||||
- New code style badge for `ruff format` ([#7878](https://github.com/astral-sh/ruff/pull/7878))
|
||||
- Fix comments outside expression parentheses ([#7873](https://github.com/astral-sh/ruff/pull/7873))
|
||||
- Add `--target-version` to `ruff format` ([#8055](https://github.com/astral-sh/ruff/pull/8055))
|
||||
- Skip over parentheses when detecting `in` keyword ([#8054](https://github.com/astral-sh/ruff/pull/8054))
|
||||
- Add `--diff` option to `ruff format` ([#7937](https://github.com/astral-sh/ruff/pull/7937))
|
||||
- Insert newline after nested function or class statements ([#7946](https://github.com/astral-sh/ruff/pull/7946))
|
||||
- Use `pass` over ellipsis in non-function/class contexts ([#8049](https://github.com/astral-sh/ruff/pull/8049))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Lazily evaluate all PEP 695 type alias values ([#8033](https://github.com/astral-sh/ruff/pull/8033))
|
||||
- Avoid failed assertion when showing fixes from stdin ([#8029](https://github.com/astral-sh/ruff/pull/8029))
|
||||
- Avoid flagging HTTP and HTTPS literals in urllib-open ([#8046](https://github.com/astral-sh/ruff/pull/8046))
|
||||
- Avoid flagging `bad-dunder-method-name` for `_` ([#8015](https://github.com/astral-sh/ruff/pull/8015))
|
||||
- Remove Python 2-only methods from `URLOpen` audit ([#8047](https://github.com/astral-sh/ruff/pull/8047))
|
||||
- Use set bracket replacement for `iteration-over-set` to preserve whitespace and comments ([#8001](https://github.com/astral-sh/ruff/pull/8001))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Update tutorial to match revised Ruff defaults ([#8066](https://github.com/astral-sh/ruff/pull/8066))
|
||||
- Update rule `B005` docs ([#8028](https://github.com/astral-sh/ruff/pull/8028))
|
||||
- Update GitHub actions example in docs to use `--output-format` ([#8014](https://github.com/astral-sh/ruff/pull/8014))
|
||||
- Document `lint.preview` and `format.preview` ([#8032](https://github.com/astral-sh/ruff/pull/8032))
|
||||
- Clarify that new rules should be added to `RuleGroup::Preview`. ([#7989](https://github.com/astral-sh/ruff/pull/7989))
|
||||
|
||||
## 0.1.0
|
||||
|
||||
This is the first release which uses the `CHANGELOG` file. See [GitHub Releases](https://github.com/astral-sh/ruff/releases) for prior changelog entries.
|
||||
|
||||
Read Ruff's new [versioning policy](https://docs.astral.sh/ruff/versioning/).
|
||||
|
||||
## 0.1.0
|
||||
|
||||
### Breaking changes
|
||||
|
||||
- Unsafe fixes are no longer displayed or applied without opt-in ([#7769](https://github.com/astral-sh/ruff/pull/7769))
|
||||
|
||||
@@ -114,7 +114,7 @@ such that all crates are contained in a flat `crates` directory.
The vast majority of the code, including all lint rules, lives in the `ruff` crate (located at
`crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.

At the time of writing, the repository includes the following crates:
At time of writing, the repository includes the following crates:

- `crates/ruff_linter`: library crate containing all lint rules and the core logic for running them.
If you're working on a rule, this is the crate for you.
@@ -337,15 +337,16 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
## Ecosystem CI

GitHub Actions will run your changes against a number of real-world projects from GitHub and
report on any linter or formatter differences. You can also run those checks locally via:
report on any diagnostic differences. You can also run those checks locally via:

```shell
pip install -e ./python/ruff-ecosystem
ruff-ecosystem check ruff "./target/debug/ruff"
ruff-ecosystem format ruff "./target/debug/ruff"
python scripts/check_ecosystem.py path/to/your/ruff path/to/older/ruff
```
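For the local comparison above, both arguments to `ruff-ecosystem` are executables, so one workable setup is to keep a baseline build of `main` next to the branch build. A sketch under that assumption; the worktree directory name is illustrative:

```shell
# Build a baseline from main in a separate worktree (path is illustrative).
git worktree add ../ruff-main main
cargo build --bin ruff --manifest-path ../ruff-main/Cargo.toml

# Build the current branch.
cargo build --bin ruff

# Compare the two binaries; `check` reports linter diffs, `format` reports formatter diffs.
ruff-ecosystem check ../ruff-main/target/debug/ruff ./target/debug/ruff
ruff-ecosystem format ../ruff-main/target/debug/ruff ./target/debug/ruff
```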
See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
You can also run the Ecosystem CI check in a Docker container across a larger set of projects by
downloading the [`known-github-tomls.json`](https://github.com/akx/ruff-usage-aggregate/blob/master/data/known-github-tomls.jsonl)
as `github_search.jsonl` and following the instructions in [scripts/Dockerfile.ecosystem](https://github.com/astral-sh/ruff/blob/main/scripts/Dockerfile.ecosystem).
Note that this check will take a while to run.

## Benchmarking and Profiling

@@ -876,5 +877,5 @@ By default, `src` is set to the project root. In the above example, we'd want to
`src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
the same-package heuristic; but your project contains multiple packages, you'll want to set `src`
explicitly.
105  Cargo.lock (generated)
@@ -313,9 +313,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.4.7"
|
||||
version = "4.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac495e00dcec98c83465d5ad66c5c4fabd652fd6686e7c6269b117e729a6f17b"
|
||||
checksum = "d04704f56c2cde07f43e8e2c154b43f216dc5c92fc98ada720177362f953b956"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
@@ -323,9 +323,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.4.7"
|
||||
version = "4.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c77ed9a32a62e6ca27175d00d29d05ca32e396ea1eb5fb01d8256b669cec7663"
|
||||
checksum = "0e231faeaca65ebd1ea3c737966bf858971cd38c3849107aa3ea7de90a804e45"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
@@ -376,9 +376,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.4.7"
|
||||
version = "4.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442"
|
||||
checksum = "0862016ff20d69b84ef8247369fabf5c008a7417002411897d40ee1f4532b873"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
@@ -388,9 +388,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_lex"
|
||||
version = "0.6.0"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1"
|
||||
checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961"
|
||||
|
||||
[[package]]
|
||||
name = "clearscreen"
|
||||
@@ -407,9 +407,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "codspeed"
|
||||
version = "2.3.0"
|
||||
version = "2.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d680ccd1eedd2dd7c7a3649a78c7d06e0f16b191b30d81cc58e7bc906488d344"
|
||||
checksum = "9b3238416c10f19985b52a937c5b3efc3ed7efe8f7ae263d2aab29a09bca9f57"
|
||||
dependencies = [
|
||||
"colored",
|
||||
"libc",
|
||||
@@ -418,9 +418,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "codspeed-criterion-compat"
|
||||
version = "2.3.0"
|
||||
version = "2.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "58b48b6c8e890d7d4ad0ed85e9ab4949bf7023198c006000ef6338ba84cf5b71"
|
||||
checksum = "fecc18f65b942d2b033545bb3bd8430a23eecbbe53fad3b1342fb0e5514bca7b"
|
||||
dependencies = [
|
||||
"codspeed",
|
||||
"colored",
|
||||
@@ -810,7 +810,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
||||
|
||||
[[package]]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.1.3"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@@ -1262,9 +1262,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.149"
|
||||
version = "0.2.147"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b"
|
||||
checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
|
||||
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
@@ -1309,9 +1309,9 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.4.10"
|
||||
version = "0.4.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"
|
||||
checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503"
|
||||
|
||||
[[package]]
|
||||
name = "lock_api"
|
||||
@@ -1801,9 +1801,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyproject-toml"
|
||||
version = "0.8.0"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0774c13ff0b8b7ebb4791c050c497aefcfe3f6a222c0829c7017161ed38391ff"
|
||||
checksum = "569e259cd132eb8cec5df8b672d187c5260f82ad352156b5da9549d4472e64b0"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"pep440_rs",
|
||||
@@ -1912,15 +1912,6 @@ dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_users"
|
||||
version = "0.4.3"
|
||||
@@ -2060,7 +2051,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_cli"
|
||||
version = "0.1.3"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"annotate-snippets 0.9.1",
|
||||
"anyhow",
|
||||
@@ -2081,6 +2072,7 @@ dependencies = [
|
||||
"insta-cmd",
|
||||
"is-macro",
|
||||
"itertools 0.11.0",
|
||||
"itoa",
|
||||
"log",
|
||||
"mimalloc",
|
||||
"notify",
|
||||
@@ -2196,7 +2188,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.1.3"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"annotate-snippets 0.9.1",
|
||||
@@ -2389,7 +2381,6 @@ dependencies = [
|
||||
"itertools 0.11.0",
|
||||
"lalrpop",
|
||||
"lalrpop-util",
|
||||
"memchr",
|
||||
"ruff_python_ast",
|
||||
"ruff_text_size",
|
||||
"rustc-hash",
|
||||
@@ -2447,7 +2438,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_shrinking"
|
||||
version = "0.1.3"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@@ -2559,9 +2550,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "0.38.21"
|
||||
version = "0.38.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3"
|
||||
checksum = "ed6248e1caa625eb708e266e06159f135e8c26f2bb7ceb72dc4b2766d0340964"
|
||||
dependencies = [
|
||||
"bitflags 2.4.0",
|
||||
"errno",
|
||||
@@ -2673,9 +2664,9 @@ checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.190"
|
||||
version = "1.0.188"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "91d3c334ca1ee894a2c6f6ad698fe8c435b76d504b13d436f0685d648d6d96f7"
|
||||
checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
@@ -2693,9 +2684,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.190"
|
||||
version = "1.0.188"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "67c5609f394e5c2bd7fc51efda478004ea80ef42fee983d5c67a65e34f32c0e3"
|
||||
checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2744,9 +2735,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_with"
|
||||
version = "3.4.0"
|
||||
version = "3.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23"
|
||||
checksum = "1ca3b16a3d82c4088f343b7480a93550b3eabe1a358569c2dfe38bbcead07237"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"serde_with_macros",
|
||||
@@ -2754,9 +2745,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_with_macros"
|
||||
version = "3.4.0"
|
||||
version = "3.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788"
|
||||
checksum = "2e6be15c453eb305019bfa438b1593c731f36a289a7853f7707ee29e870b3b3c"
|
||||
dependencies = [
|
||||
"darling",
|
||||
"proc-macro2",
|
||||
@@ -2848,9 +2839,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "strum_macros"
|
||||
version = "0.25.3"
|
||||
version = "0.25.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0"
|
||||
checksum = "ad8d03b598d3d0fff69bf533ee3ef19b8eeb342729596df84bcc7e1f96ec4059"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
@@ -2892,13 +2883,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tempfile"
|
||||
version = "3.8.1"
|
||||
version = "3.8.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5"
|
||||
checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"fastrand",
|
||||
"redox_syscall 0.4.1",
|
||||
"redox_syscall 0.3.5",
|
||||
"rustix",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
@@ -2979,18 +2970,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.50"
|
||||
version = "1.0.49"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2"
|
||||
checksum = "1177e8c6d7ede7afde3585fd2513e611227efd6481bd78d2e82ba1ce16557ed4"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "1.0.50"
|
||||
version = "1.0.49"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8"
|
||||
checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -3113,9 +3104,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing"
|
||||
version = "0.1.40"
|
||||
version = "0.1.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
|
||||
checksum = "ee2ef2af84856a50c1d430afce2fdded0a4ec7eda868db86409b4543df0797f9"
|
||||
dependencies = [
|
||||
"log",
|
||||
"pin-project-lite",
|
||||
@@ -3331,9 +3322,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
|
||||
|
||||
[[package]]
|
||||
name = "uuid"
|
||||
version = "1.5.0"
|
||||
version = "1.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc"
|
||||
checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d"
|
||||
dependencies = [
|
||||
"getrandom",
|
||||
"rand",
|
||||
@@ -3343,9 +3334,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "uuid-macro-internal"
|
||||
version = "1.5.0"
|
||||
version = "1.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3d8c6bba9b149ee82950daefc9623b32bb1dacbfb1890e352f6b887bd582adaf"
|
||||
checksum = "f7e1ba1f333bd65ce3c9f27de592fcbc256dafe3af2717f56d7c87761fbaccf4"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
||||
12  Cargo.toml

@@ -15,7 +15,7 @@ license = "MIT"
anyhow = { version = "1.0.69" }
bitflags = { version = "2.3.1" }
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
clap = { version = "4.4.7", features = ["derive"] }
clap = { version = "4.4.6", features = ["derive"] }
colored = { version = "2.0.0" }
filetime = { version = "0.2.20" }
glob = { version = "0.3.1" }
@@ -34,25 +34,25 @@ quote = { version = "1.0.23" }
regex = { version = "1.10.2" }
rustc-hash = { version = "1.1.0" }
schemars = { version = "0.8.15" }
serde = { version = "1.0.190", features = ["derive"] }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = { version = "1.0.107" }
shellexpand = { version = "3.0.0" }
similar = { version = "2.3.0", features = ["inline"] }
smallvec = { version = "1.11.1" }
static_assertions = "1.1.0"
strum = { version = "0.25.0", features = ["strum_macros"] }
strum_macros = { version = "0.25.3" }
strum_macros = { version = "0.25.2" }
syn = { version = "2.0.38" }
test-case = { version = "3.2.1" }
thiserror = { version = "1.0.50" }
thiserror = { version = "1.0.49" }
toml = { version = "0.7.8" }
tracing = { version = "0.1.40" }
tracing = { version = "0.1.39" }
tracing-indicatif = { version = "0.3.4" }
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
unicode-ident = { version = "1.0.12" }
unicode_names2 = { version = "1.2.0" }
unicode-width = { version = "0.1.11" }
uuid = { version = "1.5.0", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
uuid = { version = "1.4.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
wsl = { version = "0.1.0" }

[profile.release]
25  LICENSE

@@ -1269,31 +1269,6 @@ are:
SOFTWARE.
"""

- flake8-trio, licensed as follows:
"""
MIT License

Copyright (c) 2022 Zac Hatfield-Dodds

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

- Pyright, licensed as follows:
"""
MIT License
104  README.md

@@ -10,7 +10,7 @@

[**Discord**](https://discord.gg/c9MhzV8aU5) | [**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)

An extremely fast Python linter and code formatter, written in Rust.
An extremely fast Python linter, written in Rust.

<p align="center">
<picture align="center">
@@ -24,16 +24,17 @@ An extremely fast Python linter and code formatter, written in Rust.
<i>Linting the CPython codebase from scratch.</i>
</p>

- ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
- ⚡️ 10-100x faster than existing linters
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.12 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [700 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
- 📏 Over [700 built-in rules](https://docs.astral.sh/ruff/rules/)
- ⚖️ [Near-parity](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8) with the
built-in Flake8 rule set
- 🔌 Native re-implementations of dozens of Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/editor-integrations/) for
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)

@@ -41,10 +42,10 @@ Ruff aims to be orders of magnitude faster than alternative tools while integrat
functionality behind a single, common interface.

Ruff can be used to replace [Flake8](https://pypi.org/project/flake8/) (plus dozens of plugins),
[Black](https://github.com/psf/black), [isort](https://pypi.org/project/isort/),
[pydocstyle](https://pypi.org/project/pydocstyle/), [pyupgrade](https://pypi.org/project/pyupgrade/),
[autoflake](https://pypi.org/project/autoflake/), and more, all while executing tens or hundreds of
times faster than any individual tool.
[isort](https://pypi.org/project/isort/), [pydocstyle](https://pypi.org/project/pydocstyle/),
[yesqa](https://github.com/asottile/yesqa), [eradicate](https://pypi.org/project/eradicate/),
[pyupgrade](https://pypi.org/project/pyupgrade/), and [autoflake](https://pypi.org/project/autoflake/),
all while executing tens or hundreds of times faster than any individual tool.

Ruff is extremely actively developed and used in major open-source projects like:

@@ -125,38 +126,23 @@ and with [a variety of other package managers](https://docs.astral.sh/ruff/insta

### Usage

To run Ruff as a linter, try any of the following:
To run Ruff, try any of the following:

```shell
ruff check . # Lint all files in the current directory (and any subdirectories).
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories).
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`.
ruff check path/to/code/to/file.py # Lint `file.py`.
ruff check @arguments.txt # Lint using an input file, treating its contents as newline-delimited command-line arguments.
ruff check . # Lint all files in the current directory (and any subdirectories)
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories)
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`
ruff check path/to/code/to/file.py # Lint `file.py`
```

Or, to run Ruff as a formatter:

```shell
ruff format . # Format all files in the current directory (and any subdirectories).
ruff format path/to/code/ # Format all files in `/path/to/code` (and any subdirectories).
ruff format path/to/code/*.py # Format all `.py` files in `/path/to/code`.
ruff format path/to/code/to/file.py # Format `file.py`.
ruff format @arguments.txt # Format using an input file, treating its contents as newline-delimited command-line arguments.
```

Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff-pre-commit`](https://github.com/astral-sh/ruff-pre-commit):
Ruff can also be used as a [pre-commit](https://pre-commit.com) hook:

```yaml
# Run the Ruff linter.
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.3
rev: v0.1.0
hooks:
# Run the Ruff linter.
- id: ruff
# Run the Ruff formatter.
- id: ruff-format
```

Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
@@ -182,10 +168,18 @@ Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml`
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
for a complete list of all configuration options).

If left unspecified, Ruff's default configuration is equivalent to:
If left unspecified, the default configuration is equivalent to:

```toml
[tool.ruff]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
select = ["E4", "E7", "E9", "F"]
ignore = []

# Allow fix for all enabled rules (when `--fix`) is provided.
fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"]
unfixable = []

# Exclude a variety of commonly ignored directories.
exclude = [
".bzr",
@@ -213,46 +207,27 @@ exclude = [

# Same as Black.
line-length = 88
indent-width = 4

# Assume Python 3.8
target-version = "py38"

[tool.ruff.lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
select = ["E4", "E7", "E9", "F"]
ignore = []

# Allow fix for all enabled rules (when `--fix`) is provided.
fixable = ["ALL"]
unfixable = []

# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

[tool.ruff.format]
# Like Black, use double quotes for strings.
quote-style = "double"
# Assume Python 3.8
target-version = "py38"

# Like Black, indent with spaces, rather than tabs.
indent-style = "space"

# Like Black, respect magic trailing commas.
skip-magic-trailing-comma = false

# Like Black, automatically detect the appropriate line ending.
line-ending = "auto"
[tool.ruff.mccabe]
# Unlike Flake8, default to a complexity level of 10.
max-complexity = 10
```

Some configuration options can be provided via the command-line, such as those related to
rule enablement and disablement, file discovery, and logging level:
rule enablement and disablement, file discovery, logging level, and more:

```shell
ruff check path/to/code/ --select F401 --select F403 --quiet
```

See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` for more on the
linting command.

## Rules

@@ -262,8 +237,9 @@ for more on the linting and formatting commands, respectively.
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
Rust as a first-party feature.

By default, Ruff enables Flake8's `F` rules, along with a subset of the `E` rules, omitting any
stylistic rules that overlap with the use of a formatter, like `ruff format` or
By default, Ruff enables Flake8's `E` and `F` rules. Ruff supports all rules from the `F` category,
and a [subset](https://docs.astral.sh/ruff/rules/#error-e) of the `E` category, omitting those
stylistic rules made obsolete by the use of a formatter, like
[Black](https://github.com/psf/black).

If you're just getting started with Ruff, **the default rule set is a great place to start**: it
@@ -314,7 +290,6 @@ quality tools, including:
- [flake8-super](https://pypi.org/project/flake8-super/)
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
- [flake8-todos](https://pypi.org/project/flake8-todos/)
- [flake8-trio](https://pypi.org/project/flake8-trio/)
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/astral-sh/ruff/issues/2102))
@@ -356,7 +331,7 @@ In some cases, Ruff includes a "direct" Rust port of the corresponding tool.
We're grateful to the maintainers of these tools for their work, and for all
the value they've provided to the Python community.

Ruff's formatter is built on a fork of Rome's [`rome_formatter`](https://github.com/rome/tools/tree/main/crates/rome_formatter),
Ruff's autoformatter is built on a fork of Rome's [`rome_formatter`](https://github.com/rome/tools/tree/main/crates/rome_formatter),
and again draws on both API and implementation details from [Rome](https://github.com/rome/tools),
[Prettier](https://github.com/prettier/prettier), and [Black](https://github.com/psf/black).

@@ -410,7 +385,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Mypy](https://github.com/python/mypy)
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
- [Neon](https://github.com/neondatabase/neon)
- [NoneBot](https://github.com/nonebot/nonebot2)
- [ONNX](https://github.com/onnx/onnx)
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
- [PDM](https://github.com/pdm-project/pdm)
@@ -1,8 +0,0 @@
{
"label": "code style",
"message": "Ruff",
"logoSvg": "<svg width=\"510\" height=\"622\" viewBox=\"0 0 510 622\" fill=\"none\" xmlns=\"http://www.w3.org/2000/svg\"><path fill-rule=\"evenodd\" clip-rule=\"evenodd\" d=\"M206.701 0C200.964 0 196.314 4.64131 196.314 10.3667V41.4667C196.314 47.192 191.663 51.8333 185.927 51.8333H156.843C151.107 51.8333 146.456 56.4746 146.456 62.2V145.133C146.456 150.859 141.806 155.5 136.069 155.5H106.986C101.249 155.5 96.5988 160.141 96.5988 165.867V222.883C96.5988 228.609 91.9484 233.25 86.2118 233.25H57.1283C51.3917 233.25 46.7413 237.891 46.7413 243.617V300.633C46.7413 306.359 42.0909 311 36.3544 311H10.387C4.6504 311 0 315.641 0 321.367V352.467C0 358.192 4.6504 362.833 10.387 362.833H145.418C151.154 362.833 155.804 367.475 155.804 373.2V430.217C155.804 435.942 151.154 440.583 145.418 440.583H116.334C110.597 440.583 105.947 445.225 105.947 450.95V507.967C105.947 513.692 101.297 518.333 95.5601 518.333H66.4766C60.74 518.333 56.0896 522.975 56.0896 528.7V611.633C56.0896 617.359 60.74 622 66.4766 622H149.572C155.309 622 159.959 617.359 159.959 611.633V570.167H201.507C207.244 570.167 211.894 565.525 211.894 559.8V528.7C211.894 522.975 216.544 518.333 222.281 518.333H251.365C257.101 518.333 261.752 513.692 261.752 507.967V476.867C261.752 471.141 266.402 466.5 272.138 466.5H301.222C306.959 466.5 311.609 461.859 311.609 456.133V425.033C311.609 419.308 316.259 414.667 321.996 414.667H351.079C356.816 414.667 361.466 410.025 361.466 404.3V373.2C361.466 367.475 366.117 362.833 371.853 362.833H400.937C406.673 362.833 411.324 358.192 411.324 352.467V321.367C411.324 315.641 415.974 311 421.711 311H450.794C456.531 311 461.181 306.359 461.181 300.633V217.7C461.181 211.975 456.531 207.333 450.794 207.333H420.672C414.936 207.333 410.285 202.692 410.285 196.967V165.867C410.285 160.141 414.936 155.5 420.672 155.5H449.756C455.492 155.5 460.143 150.859 460.143 145.133V114.033C460.143 108.308 464.793 103.667 470.53 103.667H499.613C505.35 103.667 510 99.0253 510 93.3V10.3667C510 4.64132 505.35 0 499.613 0H206.701ZM168.269 440.583C162.532 440.583 157.882 445.225 157.882 450.95V507.967C157.882 513.692 153.231 518.333 147.495 518.333H118.411C112.675 518.333 108.024 522.975 108.024 528.7V559.8C108.024 565.525 112.675 570.167 118.411 570.167H159.959V528.7C159.959 522.975 164.61 518.333 170.346 518.333H199.43C205.166 518.333 209.817 513.692 209.817 507.967V476.867C209.817 471.141 214.467 466.5 220.204 466.5H249.287C255.024 466.5 259.674 461.859 259.674 456.133V425.033C259.674 419.308 264.325 414.667 270.061 414.667H299.145C304.881 414.667 309.532 410.025 309.532 404.3V373.2C309.532 367.475 314.182 362.833 319.919 362.833H349.002C354.739 362.833 359.389 358.192 359.389 352.467V321.367C359.389 315.641 364.039 311 369.776 311H398.859C404.596 311 409.246 306.359 409.246 300.633V269.533C409.246 263.808 404.596 259.167 398.859 259.167H318.88C313.143 259.167 308.493 254.525 308.493 248.8V217.7C308.493 211.975 313.143 207.333 318.88 207.333H347.963C353.7 207.333 358.35 202.692 358.35 196.967V165.867C358.35 160.141 363.001 155.5 368.737 155.5H397.821C403.557 155.5 408.208 150.859 408.208 145.133V114.033C408.208 108.308 412.858 103.667 418.595 103.667H447.678C453.415 103.667 458.065 99.0253 458.065 93.3V62.2C458.065 56.4746 453.415 51.8333 447.678 51.8333H208.778C203.041 51.8333 198.391 56.4746 198.391 62.2V145.133C198.391 150.859 193.741 155.5 188.004 155.5H158.921C153.184 155.5 148.534 160.141 148.534 165.867V222.883C148.534 228.609 143.883 233.25 138.147 233.25H109.063C103.327 233.25 98.6762 237.891 98.6762 243.617V300.633C98.6762 
306.359 103.327 311 109.063 311H197.352C203.089 311 207.739 315.641 207.739 321.367V430.217C207.739 435.942 203.089 440.583 197.352 440.583H168.269Z\" fill=\"#D7FF64\"/></svg>",
"logoWidth": 10,
"labelColor": "grey",
"color": "#261230"
}
@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.1.3"
version = "0.1.0"
description = """
Convert Flake8 configuration files to Ruff configuration files.
"""

@@ -17,8 +17,8 @@ use ruff_linter::settings::DEFAULT_SELECTORS;
use ruff_linter::warn_user;
use ruff_workspace::options::{
    Flake8AnnotationsOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, Flake8ErrMsgOptions,
    Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, LintCommonOptions,
    LintOptions, McCabeOptions, Options, Pep8NamingOptions, PydocstyleOptions,
    Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, LintOptions,
    McCabeOptions, Options, Pep8NamingOptions, PydocstyleOptions,
};
use ruff_workspace::pyproject::Pyproject;

@@ -99,7 +99,7 @@ pub(crate) fn convert(
|
||||
|
||||
// Parse each supported option.
|
||||
let mut options = Options::default();
|
||||
let mut lint_options = LintCommonOptions::default();
|
||||
let mut lint_options = LintOptions::default();
|
||||
let mut flake8_annotations = Flake8AnnotationsOptions::default();
|
||||
let mut flake8_bugbear = Flake8BugbearOptions::default();
|
||||
let mut flake8_builtins = Flake8BuiltinsOptions::default();
|
||||
@@ -433,11 +433,8 @@ pub(crate) fn convert(
|
||||
}
|
||||
}
|
||||
|
||||
if lint_options != LintCommonOptions::default() {
|
||||
options.lint = Some(LintOptions {
|
||||
common: lint_options,
|
||||
..LintOptions::default()
|
||||
});
|
||||
if lint_options != LintOptions::default() {
|
||||
options.lint = Some(lint_options);
|
||||
}
|
||||
|
||||
// Create the pyproject.toml.
|
||||
@@ -468,9 +465,7 @@ mod tests {
|
||||
use ruff_linter::rules::flake8_quotes;
|
||||
use ruff_linter::rules::pydocstyle::settings::Convention;
|
||||
use ruff_linter::settings::types::PythonVersion;
|
||||
use ruff_workspace::options::{
|
||||
Flake8QuotesOptions, LintCommonOptions, LintOptions, Options, PydocstyleOptions,
|
||||
};
|
||||
use ruff_workspace::options::{Flake8QuotesOptions, LintOptions, Options, PydocstyleOptions};
|
||||
use ruff_workspace::pyproject::Pyproject;
|
||||
|
||||
use crate::converter::DEFAULT_SELECTORS;
|
||||
@@ -480,8 +475,8 @@ mod tests {
|
||||
use super::super::plugin::Plugin;
|
||||
use super::convert;
|
||||
|
||||
fn lint_default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> LintCommonOptions {
|
||||
LintCommonOptions {
|
||||
fn lint_default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> LintOptions {
|
||||
LintOptions {
|
||||
ignore: Some(vec![]),
|
||||
select: Some(
|
||||
DEFAULT_SELECTORS
|
||||
@@ -491,7 +486,7 @@ mod tests {
|
||||
.sorted_by_key(RuleSelector::prefix_and_code)
|
||||
.collect(),
|
||||
),
|
||||
..LintCommonOptions::default()
|
||||
..LintOptions::default()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -503,10 +498,7 @@ mod tests {
|
||||
None,
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
lint: Some(LintOptions {
|
||||
common: lint_default_options([]),
|
||||
..LintOptions::default()
|
||||
}),
|
||||
lint: Some(lint_default_options([])),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
@@ -524,10 +516,7 @@ mod tests {
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
line_length: Some(LineLength::try_from(100).unwrap()),
|
||||
lint: Some(LintOptions {
|
||||
common: lint_default_options([]),
|
||||
..LintOptions::default()
|
||||
}),
|
||||
lint: Some(lint_default_options([])),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
@@ -545,10 +534,7 @@ mod tests {
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
line_length: Some(LineLength::try_from(100).unwrap()),
|
||||
lint: Some(LintOptions {
|
||||
common: lint_default_options([]),
|
||||
..LintOptions::default()
|
||||
}),
|
||||
lint: Some(lint_default_options([])),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
@@ -565,10 +551,7 @@ mod tests {
|
||||
Some(vec![]),
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
lint: Some(LintOptions {
|
||||
common: lint_default_options([]),
|
||||
..LintOptions::default()
|
||||
}),
|
||||
lint: Some(lint_default_options([])),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
@@ -586,16 +569,13 @@ mod tests {
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
lint: Some(LintOptions {
|
||||
common: LintCommonOptions {
|
||||
flake8_quotes: Some(Flake8QuotesOptions {
|
||||
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
|
||||
multiline_quotes: None,
|
||||
docstring_quotes: None,
|
||||
avoid_escape: None,
|
||||
}),
|
||||
..lint_default_options([])
|
||||
},
|
||||
..LintOptions::default()
|
||||
flake8_quotes: Some(Flake8QuotesOptions {
|
||||
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
|
||||
multiline_quotes: None,
|
||||
docstring_quotes: None,
|
||||
avoid_escape: None,
|
||||
}),
|
||||
..lint_default_options([])
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
@@ -617,15 +597,12 @@ mod tests {
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
lint: Some(LintOptions {
|
||||
common: LintCommonOptions {
|
||||
pydocstyle: Some(PydocstyleOptions {
|
||||
convention: Some(Convention::Numpy),
|
||||
ignore_decorators: None,
|
||||
property_decorators: None,
|
||||
}),
|
||||
..lint_default_options([Linter::Pydocstyle.into()])
|
||||
},
|
||||
..LintOptions::default()
|
||||
pydocstyle: Some(PydocstyleOptions {
|
||||
convention: Some(Convention::Numpy),
|
||||
ignore_decorators: None,
|
||||
property_decorators: None,
|
||||
}),
|
||||
..lint_default_options([Linter::Pydocstyle.into()])
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
@@ -644,16 +621,13 @@ mod tests {
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
lint: Some(LintOptions {
|
||||
common: LintCommonOptions {
|
||||
flake8_quotes: Some(Flake8QuotesOptions {
|
||||
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
|
||||
multiline_quotes: None,
|
||||
docstring_quotes: None,
|
||||
avoid_escape: None,
|
||||
}),
|
||||
..lint_default_options([Linter::Flake8Quotes.into()])
|
||||
},
|
||||
..LintOptions::default()
|
||||
flake8_quotes: Some(Flake8QuotesOptions {
|
||||
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
|
||||
multiline_quotes: None,
|
||||
docstring_quotes: None,
|
||||
avoid_escape: None,
|
||||
}),
|
||||
..lint_default_options([Linter::Flake8Quotes.into()])
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
@@ -674,10 +648,7 @@ mod tests {
|
||||
);
|
||||
let expected = Pyproject::new(Options {
|
||||
target_version: Some(PythonVersion::Py38),
|
||||
lint: Some(LintOptions {
|
||||
common: lint_default_options([]),
|
||||
..LintOptions::default()
|
||||
}),
|
||||
lint: Some(lint_default_options([])),
|
||||
..Options::default()
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
@@ -184,7 +184,7 @@ pub(crate) fn collect_per_file_ignores(
    for pair in pairs {
        per_file_ignores
            .entry(pair.pattern)
            .or_default()
            .or_insert_with(Vec::new)
            .push(pair.prefix);
    }
    per_file_ignores
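
The hunk above toggles between `Entry::or_default` and `or_insert_with(Vec::new)`; for a map whose values are `Vec`s the two are interchangeable, since both insert an empty vector for a missing key. A self-contained sketch of that equivalence (illustrative only, not code from the Ruff tree):

```rust
use std::collections::HashMap;

fn main() {
    let pairs = vec![("*.py", "E501"), ("*.py", "F401"), ("tests/*", "S101")];

    // Variant 1: `or_default` inserts `Vec::default()` (an empty Vec) for missing keys.
    let mut a: HashMap<&str, Vec<&str>> = HashMap::new();
    for &(pattern, prefix) in &pairs {
        a.entry(pattern).or_default().push(prefix);
    }

    // Variant 2: `or_insert_with(Vec::new)` spells out the same default explicitly.
    let mut b: HashMap<&str, Vec<&str>> = HashMap::new();
    for &(pattern, prefix) in &pairs {
        b.entry(pattern).or_insert_with(Vec::new).push(prefix);
    }

    // Both builds produce identical per-pattern lists.
    assert_eq!(a, b);
}
```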
@@ -37,7 +37,7 @@ serde_json.workspace = true
|
||||
url = "2.3.1"
|
||||
ureq = "2.8.0"
|
||||
criterion = { version = "0.5.1", default-features = false }
|
||||
codspeed-criterion-compat = { version="2.3.0", default-features = false, optional = true}
|
||||
codspeed-criterion-compat = { version="2.2.0", default-features = false, optional = true}
|
||||
|
||||
[dev-dependencies]
|
||||
ruff_linter.path = "../ruff_linter"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_cli"
|
||||
version = "0.1.3"
|
||||
version = "0.1.0"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
@@ -44,6 +44,7 @@ glob = { workspace = true }
|
||||
ignore = { workspace = true }
|
||||
is-macro = { workspace = true }
|
||||
itertools = { workspace = true }
|
||||
itoa = { version = "1.0.6" }
|
||||
log = { workspace = true }
|
||||
notify = { version = "6.1.1" }
|
||||
path-absolutize = { workspace = true, features = ["once_cell_cache"] }
|
||||
@@ -65,9 +66,9 @@ wild = { version = "2" }
|
||||
assert_cmd = { version = "2.0.8" }
|
||||
# Avoid writing colored snapshots when running tests from the terminal
|
||||
colored = { workspace = true, features = ["no-color"]}
|
||||
insta = { workspace = true, features = ["filters", "json"] }
|
||||
insta = { workspace = true, features = ["filters"] }
|
||||
insta-cmd = { version = "0.4.0" }
|
||||
tempfile = "3.8.1"
|
||||
tempfile = "3.6.0"
|
||||
test-case = { workspace = true }
|
||||
ureq = { version = "2.8.0", features = [] }
|
||||
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
use std::{fs, path::Path, process::Command};
|
||||
|
||||
fn main() {
|
||||
// The workspace root directory is not available without walking up the tree
|
||||
// https://github.com/rust-lang/cargo/issues/3946
|
||||
let workspace_root = Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap())
|
||||
.join("..")
|
||||
.join("..");
|
||||
|
||||
commit_info(&workspace_root);
|
||||
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let target = std::env::var("TARGET").unwrap();
|
||||
println!("cargo:rustc-env=RUST_HOST_TARGET={target}");
|
||||
}
|
||||
|
||||
fn commit_info(workspace_root: &Path) {
|
||||
// If not in a git repository, do not attempt to retrieve commit information
|
||||
let git_dir = workspace_root.join(".git");
|
||||
if !git_dir.exists() {
|
||||
return;
|
||||
}
|
||||
|
||||
let git_head_path = git_dir.join("HEAD");
|
||||
println!(
|
||||
"cargo:rerun-if-changed={}",
|
||||
git_head_path.as_path().display()
|
||||
);
|
||||
|
||||
let git_head_contents = fs::read_to_string(git_head_path);
|
||||
if let Ok(git_head_contents) = git_head_contents {
|
||||
// The contents are either a commit or a reference in the following formats
|
||||
// - "<commit>" when the head is detached
|
||||
// - "ref <ref>" when working on a branch
|
||||
// If a commit, checking if the HEAD file has changed is sufficient
|
||||
// If a ref, we need to add the head file for that ref to rebuild on commit
|
||||
let mut git_ref_parts = git_head_contents.split_whitespace();
|
||||
git_ref_parts.next();
|
||||
if let Some(git_ref) = git_ref_parts.next() {
|
||||
let git_ref_path = git_dir.join(git_ref);
|
||||
println!(
|
||||
"cargo:rerun-if-changed={}",
|
||||
git_ref_path.as_path().display()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let output = match Command::new("git")
|
||||
.arg("log")
|
||||
.arg("-1")
|
||||
.arg("--date=short")
|
||||
.arg("--abbrev=9")
|
||||
.arg("--format=%H %h %cd %(describe)")
|
||||
.output()
|
||||
{
|
||||
Ok(output) if output.status.success() => output,
|
||||
_ => return,
|
||||
};
|
||||
let stdout = String::from_utf8(output.stdout).unwrap();
|
||||
let mut parts = stdout.split_whitespace();
|
||||
let mut next = || parts.next().unwrap();
|
||||
println!("cargo:rustc-env=RUFF_COMMIT_HASH={}", next());
|
||||
println!("cargo:rustc-env=RUFF_COMMIT_SHORT_HASH={}", next());
|
||||
println!("cargo:rustc-env=RUFF_COMMIT_DATE={}", next());
|
||||
|
||||
// Describe can fail for some commits
|
||||
// https://git-scm.com/docs/pretty-formats#Documentation/pretty-formats.txt-emdescribeoptionsem
|
||||
if let Some(describe) = parts.next() {
|
||||
let mut describe_parts = describe.split('-');
|
||||
println!(
|
||||
"cargo:rustc-env=RUFF_LAST_TAG={}",
|
||||
describe_parts.next().unwrap()
|
||||
);
|
||||
// If this is the tagged commit, this component will be missing
|
||||
println!(
|
||||
"cargo:rustc-env=RUFF_LAST_TAG_DISTANCE={}",
|
||||
describe_parts.next().unwrap_or("0")
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
print("All formatted!")
|
||||
@@ -1,72 +0,0 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "98e1dd71-14a2-454d-9be0-061dde560b07",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy\n",
|
||||
"maths = (numpy.arange(100)**2).sum()\n",
|
||||
"stats= numpy.asarray([1,2,3,4]).median()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "83a0b1b8",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"A markdown cell"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "ae12f012",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# A cell with IPython escape command\n",
|
||||
"def some_function(foo, bar):\n",
|
||||
" pass\n",
|
||||
"%matplotlib inline"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "10f3bbf9",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"foo = %pwd\n",
|
||||
"def some_function(foo,bar,):\n",
|
||||
" # Another cell with IPython escape command\n",
|
||||
" foo = %pwd\n",
|
||||
" print(foo)"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.12"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
x = 1
|
||||
y=2
|
||||
z = 3
|
||||
@@ -13,7 +13,6 @@ use ruff_linter::settings::types::{
|
||||
};
|
||||
use ruff_linter::{RuleParser, RuleSelector, RuleSelectorParser};
|
||||
use ruff_workspace::configuration::{Configuration, RuleSelection};
|
||||
use ruff_workspace::options::PycodestyleOptions;
|
||||
use ruff_workspace::resolver::ConfigurationTransformer;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
@@ -50,11 +49,7 @@ pub enum Command {
|
||||
|
||||
/// Output format
|
||||
#[arg(long, value_enum, default_value = "text")]
|
||||
output_format: HelpFormat,
|
||||
|
||||
/// Output format (Deprecated: Use `--output-format` instead).
|
||||
#[arg(long, value_enum, conflicts_with = "output_format", hide = true)]
|
||||
format: Option<HelpFormat>,
|
||||
format: HelpFormat,
|
||||
},
|
||||
/// List or describe the available configuration options.
|
||||
Config { option: Option<String> },
|
||||
@@ -62,11 +57,7 @@ pub enum Command {
|
||||
Linter {
|
||||
/// Output format
|
||||
#[arg(long, value_enum, default_value = "text")]
|
||||
output_format: HelpFormat,
|
||||
|
||||
/// Output format (Deprecated: Use `--output-format` instead).
|
||||
#[arg(long, value_enum, conflicts_with = "output_format", hide = true)]
|
||||
format: Option<HelpFormat>,
|
||||
format: HelpFormat,
|
||||
},
|
||||
/// Clear any caches in the current directory and any subdirectories.
|
||||
#[clap(alias = "--clean")]
|
||||
@@ -75,12 +66,9 @@ pub enum Command {
|
||||
#[clap(alias = "--generate-shell-completion", hide = true)]
|
||||
GenerateShellCompletion { shell: clap_complete_command::Shell },
|
||||
/// Run the Ruff formatter on the given files or directories.
|
||||
#[doc(hidden)]
|
||||
#[clap(hide = true)]
|
||||
Format(FormatCommand),
|
||||
/// Display Ruff's version
|
||||
Version {
|
||||
#[arg(long, value_enum, default_value = "text")]
|
||||
output_format: HelpFormat,
|
||||
},
|
||||
}
|
||||
|
||||
// The `Parser` derive is for ruff_dev, for ruff_cli `Args` would be sufficient
|
||||
@@ -365,21 +353,9 @@ pub struct FormatCommand {
|
||||
/// files would have been modified, and zero otherwise.
|
||||
#[arg(long)]
|
||||
pub check: bool,
|
||||
/// Avoid writing any formatted files back; instead, exit with a non-zero status code and the
|
||||
/// difference between the current file and what the formatted file would look like.
|
||||
#[arg(long)]
|
||||
pub diff: bool,
|
||||
/// Path to the `pyproject.toml` or `ruff.toml` file to use for configuration.
|
||||
#[arg(long, conflicts_with = "isolated")]
|
||||
pub config: Option<PathBuf>,
|
||||
|
||||
/// Disable cache reads.
|
||||
#[arg(short, long, help_heading = "Miscellaneous")]
|
||||
pub no_cache: bool,
|
||||
/// Path to the cache directory.
|
||||
#[arg(long, env = "RUFF_CACHE_DIR", help_heading = "Miscellaneous")]
|
||||
pub cache_dir: Option<PathBuf>,
|
||||
|
||||
/// Respect file exclusions via `.gitignore` and other standard ignore files.
|
||||
/// Use `--no-respect-gitignore` to disable.
|
||||
#[arg(
|
||||
@@ -390,15 +366,6 @@ pub struct FormatCommand {
|
||||
respect_gitignore: bool,
|
||||
#[clap(long, overrides_with("respect_gitignore"), hide = true)]
|
||||
no_respect_gitignore: bool,
|
||||
/// List of paths, used to omit files and/or directories from analysis.
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
value_name = "FILE_PATTERN",
|
||||
help_heading = "File selection"
|
||||
)]
|
||||
pub exclude: Option<Vec<FilePattern>>,
|
||||
|
||||
/// Enforce exclusions, even for paths passed to Ruff directly on the command-line.
|
||||
/// Use `--no-force-exclude` to disable.
|
||||
#[arg(
|
||||
@@ -410,7 +377,7 @@ pub struct FormatCommand {
|
||||
#[clap(long, overrides_with("force_exclude"), hide = true)]
|
||||
no_force_exclude: bool,
|
||||
/// Set the line-length.
|
||||
#[arg(long, help_heading = "Format configuration")]
|
||||
#[arg(long, help_heading = "Rule configuration", hide = true)]
|
||||
pub line_length: Option<LineLength>,
|
||||
/// Ignore all configuration files.
|
||||
#[arg(long, conflicts_with = "config", help_heading = "Miscellaneous")]
|
||||
@@ -418,12 +385,10 @@ pub struct FormatCommand {
|
||||
/// The name of the file when passing it through stdin.
|
||||
#[arg(long, help_heading = "Miscellaneous")]
|
||||
pub stdin_filename: Option<PathBuf>,
|
||||
/// The minimum Python version that should be supported.
|
||||
#[arg(long, value_enum)]
|
||||
pub target_version: Option<PythonVersion>,
|
||||
/// Enable preview mode; enables unstable formatting.
|
||||
|
||||
/// Enable preview mode; checks will include unstable rules and fixes.
|
||||
/// Use `--no-preview` to disable.
|
||||
#[arg(long, overrides_with("no_preview"))]
|
||||
#[arg(long, overrides_with("no_preview"), hide = true)]
|
||||
preview: bool,
|
||||
#[clap(long, overrides_with("preview"), hide = true)]
|
||||
no_preview: bool,
|
||||
@@ -510,7 +475,6 @@ impl CheckCommand {
|
||||
extend_exclude: self.extend_exclude,
|
||||
extend_fixable: self.extend_fixable,
|
||||
extend_ignore: self.extend_ignore,
|
||||
extend_per_file_ignores: self.extend_per_file_ignores,
|
||||
extend_select: self.extend_select,
|
||||
extend_unfixable: self.extend_unfixable,
|
||||
fixable: self.fixable,
|
||||
@@ -547,11 +511,9 @@ impl FormatCommand {
|
||||
(
|
||||
FormatArguments {
|
||||
check: self.check,
|
||||
diff: self.diff,
|
||||
config: self.config,
|
||||
files: self.files,
|
||||
isolated: self.isolated,
|
||||
no_cache: self.no_cache,
|
||||
stdin_filename: self.stdin_filename,
|
||||
},
|
||||
CliOverrides {
|
||||
@@ -560,12 +522,8 @@ impl FormatCommand {
|
||||
self.respect_gitignore,
|
||||
self.no_respect_gitignore,
|
||||
),
|
||||
exclude: self.exclude,
|
||||
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
|
||||
force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
|
||||
target_version: self.target_version,
|
||||
cache_dir: self.cache_dir,
|
||||
|
||||
// Unsupported on the formatter CLI, but required on `Overrides`.
|
||||
..CliOverrides::default()
|
||||
},
|
||||
@@ -609,8 +567,6 @@ pub struct CheckArguments {
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct FormatArguments {
|
||||
pub check: bool,
|
||||
pub no_cache: bool,
|
||||
pub diff: bool,
|
||||
pub config: Option<PathBuf>,
|
||||
pub files: Vec<PathBuf>,
|
||||
pub isolated: bool,
|
||||
@@ -632,7 +588,6 @@ pub struct CliOverrides {
|
||||
pub ignore: Option<Vec<RuleSelector>>,
|
||||
pub line_length: Option<LineLength>,
|
||||
pub per_file_ignores: Option<Vec<PatternPrefixPair>>,
|
||||
pub extend_per_file_ignores: Option<Vec<PatternPrefixPair>>,
|
||||
pub preview: Option<PreviewMode>,
|
||||
pub respect_gitignore: Option<bool>,
|
||||
pub select: Option<Vec<RuleSelector>>,
|
||||
@@ -663,12 +618,6 @@ impl ConfigurationTransformer for CliOverrides {
|
||||
if let Some(extend_exclude) = &self.extend_exclude {
|
||||
config.extend_exclude.extend(extend_exclude.clone());
|
||||
}
|
||||
if let Some(extend_per_file_ignores) = &self.extend_per_file_ignores {
|
||||
config
|
||||
.lint
|
||||
.extend_per_file_ignores
|
||||
.extend(collect_per_file_ignores(extend_per_file_ignores.clone()));
|
||||
}
|
||||
if let Some(fix) = &self.fix {
|
||||
config.fix = Some(*fix);
|
||||
}
|
||||
@@ -704,17 +653,11 @@ impl ConfigurationTransformer for CliOverrides {
|
||||
if let Some(force_exclude) = &self.force_exclude {
|
||||
config.force_exclude = Some(*force_exclude);
|
||||
}
|
||||
if let Some(line_length) = self.line_length {
|
||||
config.line_length = Some(line_length);
|
||||
config.lint.pycodestyle = Some(PycodestyleOptions {
|
||||
max_line_length: Some(line_length),
|
||||
..config.lint.pycodestyle.unwrap_or_default()
|
||||
});
|
||||
if let Some(line_length) = &self.line_length {
|
||||
config.line_length = Some(*line_length);
|
||||
}
|
||||
if let Some(preview) = &self.preview {
|
||||
config.preview = Some(*preview);
|
||||
config.lint.preview = Some(*preview);
|
||||
config.format.preview = Some(*preview);
|
||||
}
|
||||
if let Some(per_file_ignores) = &self.per_file_ignores {
|
||||
config.lint.per_file_ignores = Some(collect_per_file_ignores(per_file_ignores.clone()));
|
||||
@@ -742,7 +685,7 @@ pub fn collect_per_file_ignores(pairs: Vec<PatternPrefixPair>) -> Vec<PerFileIgn
|
||||
for pair in pairs {
|
||||
per_file_ignores
|
||||
.entry(pair.pattern)
|
||||
.or_default()
|
||||
.or_insert_with(Vec::new)
|
||||
.push(pair.prefix);
|
||||
}
|
||||
per_file_ignores
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use std::fmt::Debug;
|
||||
use std::collections::HashMap;
|
||||
use std::fs::{self, File};
|
||||
use std::hash::Hasher;
|
||||
use std::io::{self, BufReader, BufWriter, Write};
|
||||
@@ -8,62 +8,29 @@ use std::sync::Mutex;
|
||||
use std::time::{Duration, SystemTime};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use filetime::FileTime;
|
||||
use itertools::Itertools;
|
||||
use log::{debug, error};
|
||||
use rayon::iter::ParallelIterator;
|
||||
use rayon::iter::{IntoParallelIterator, ParallelBridge};
|
||||
use rustc_hash::FxHashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use ruff_cache::{CacheKey, CacheKeyHasher};
|
||||
use ruff_diagnostics::{DiagnosticKind, Fix};
|
||||
use ruff_linter::message::Message;
|
||||
use ruff_linter::{warn_user, VERSION};
|
||||
use ruff_macros::CacheKey;
|
||||
use ruff_linter::warn_user;
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_python_ast::imports::ImportMap;
|
||||
use ruff_source_file::SourceFileBuilder;
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
use ruff_workspace::resolver::{PyprojectConfig, PyprojectDiscoveryStrategy, Resolver};
|
||||
use ruff_workspace::Settings;
|
||||
|
||||
use crate::cache;
|
||||
use crate::diagnostics::Diagnostics;
|
||||
|
||||
/// Maximum duration for which we keep a file in cache that hasn't been seen.
|
||||
const MAX_LAST_SEEN: Duration = Duration::from_secs(30 * 24 * 60 * 60); // 30 days.
|
||||
|
||||
/// [`Path`] that is relative to the package root in [`PackageCache`].
|
||||
pub(crate) type RelativePath = Path;
|
||||
/// [`PathBuf`] that is relative to the package root in [`PackageCache`].
|
||||
pub(crate) type RelativePathBuf = PathBuf;
|
||||
|
||||
#[derive(CacheKey)]
pub(crate) struct FileCacheKey {
    /// Timestamp when the file was last modified before the (cached) check.
    file_last_modified: FileTime,
    /// Permissions of the file before the (cached) check.
    file_permissions_mode: u32,
}

impl FileCacheKey {
    pub(crate) fn from_path(path: &Path) -> io::Result<FileCacheKey> {
        // Construct a cache key for the file
        let metadata = path.metadata()?;

        #[cfg(unix)]
        let permissions = {
            use std::os::unix::fs::PermissionsExt;
            metadata.permissions().mode()
        };
        #[cfg(windows)]
        let permissions: u32 = metadata.permissions().readonly().into();

        Ok(FileCacheKey {
            file_last_modified: FileTime::from_last_modification_time(&metadata),
            file_permissions_mode: permissions,
        })
    }
}
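
`FileCacheKey` records a file's modification time and permission bits so a cached result can be invalidated when either changes. A rough, standard-library-only illustration of the same idea follows; `FileKey` and `needs_recheck` are hypothetical names, not part of the cache module above:

```rust
use std::fs;
use std::io;
use std::path::Path;
use std::time::SystemTime;

/// Cheap change-detection key: last-modified time plus a permissions fingerprint.
#[derive(Debug, PartialEq, Eq)]
struct FileKey {
    modified: SystemTime,
    readonly: bool,
}

impl FileKey {
    fn from_path(path: &Path) -> io::Result<FileKey> {
        let metadata = fs::metadata(path)?;
        Ok(FileKey {
            modified: metadata.modified()?,
            readonly: metadata.permissions().readonly(),
        })
    }
}

/// Re-check the file only if its key no longer matches the cached one.
fn needs_recheck(path: &Path, cached: &FileKey) -> io::Result<bool> {
    Ok(FileKey::from_path(path)? != *cached)
}
```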
/// Cache.
|
||||
///
|
||||
/// `Cache` holds everything required to display the diagnostics for a single
|
||||
@@ -83,7 +50,7 @@ pub(crate) struct Cache {
|
||||
/// Files that are linted, but are not in `package.files` or are in
|
||||
/// `package.files` but are outdated. This gets merged with `package.files`
|
||||
/// when the cache is written back to disk in [`Cache::store`].
|
||||
changes: Mutex<Vec<Change>>,
|
||||
new_files: Mutex<HashMap<RelativePathBuf, FileCache>>,
|
||||
/// The "current" timestamp used as cache for the updates of
|
||||
/// [`FileCache::last_seen`]
|
||||
last_seen_cache: u64,
|
||||
@@ -98,11 +65,12 @@ impl Cache {
|
||||
///
|
||||
/// Finally `settings` is used to ensure we don't open a cache for different
|
||||
/// settings. It also defines the directory where to store the cache.
|
||||
pub(crate) fn open(package_root: PathBuf, settings: &Settings) -> Self {
|
||||
pub(crate) fn open(package_root: PathBuf, settings: &Settings) -> Cache {
|
||||
debug_assert!(package_root.is_absolute(), "package root not canonicalized");
|
||||
|
||||
let key = format!("{}", cache_key(&package_root, settings));
|
||||
let path = PathBuf::from_iter([&settings.cache_dir, Path::new(VERSION), Path::new(&key)]);
|
||||
let mut buf = itoa::Buffer::new();
|
||||
let key = Path::new(buf.format(cache_key(&package_root, settings)));
|
||||
let path = PathBuf::from_iter([&settings.cache_dir, Path::new("content"), key]);
|
||||
|
||||
let file = match File::open(&path) {
|
||||
Ok(file) => file,
|
||||
@@ -137,56 +105,36 @@ impl Cache {
|
||||
}
|
||||
|
||||
/// Create an empty `Cache`.
|
||||
fn empty(path: PathBuf, package_root: PathBuf) -> Self {
|
||||
fn empty(path: PathBuf, package_root: PathBuf) -> Cache {
|
||||
let package = PackageCache {
|
||||
package_root,
|
||||
files: FxHashMap::default(),
|
||||
files: HashMap::new(),
|
||||
};
|
||||
Cache::new(path, package)
|
||||
}
|
||||
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
fn new(path: PathBuf, package: PackageCache) -> Self {
|
||||
fn new(path: PathBuf, package: PackageCache) -> Cache {
|
||||
Cache {
|
||||
path,
|
||||
package,
|
||||
changes: Mutex::new(Vec::new()),
|
||||
new_files: Mutex::new(HashMap::new()),
|
||||
// SAFETY: this will be truncated to the year ~2554 (so don't use
|
||||
// this code after that!).
|
||||
last_seen_cache: SystemTime::UNIX_EPOCH.elapsed().unwrap().as_millis() as u64,
|
||||
}
|
||||
}
|
||||
|
||||
/// Applies the pending changes and persists the cache to disk, if it has been changed.
|
||||
pub(crate) fn persist(mut self) -> Result<()> {
|
||||
if !self.save() {
|
||||
/// Store the cache to disk, if it has been changed.
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
pub(crate) fn store(mut self) -> Result<()> {
|
||||
let new_files = self.new_files.into_inner().unwrap();
|
||||
if new_files.is_empty() {
|
||||
// No changes made, no need to write the same cache file back to
|
||||
// disk.
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let file = File::create(&self.path)
|
||||
.with_context(|| format!("Failed to create cache file '{}'", self.path.display()))?;
|
||||
let writer = BufWriter::new(file);
|
||||
bincode::serialize_into(writer, &self.package).with_context(|| {
|
||||
format!(
|
||||
"Failed to serialise cache to file '{}'",
|
||||
self.path.display()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/// Applies the pending changes without storing the cache to disk.
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
pub(crate) fn save(&mut self) -> bool {
|
||||
/// Maximum duration for which we keep a file in cache that hasn't been seen.
|
||||
const MAX_LAST_SEEN: Duration = Duration::from_secs(30 * 24 * 60 * 60); // 30 days.
|
||||
|
||||
let changes = std::mem::take(self.changes.get_mut().unwrap());
|
||||
if changes.is_empty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Remove cached files that we haven't seen in a while.
|
||||
let now = self.last_seen_cache;
|
||||
self.package.files.retain(|_, file| {
|
||||
@@ -195,32 +143,17 @@ impl Cache {
|
||||
});
|
||||
|
||||
// Apply any changes made and keep track of when we last saw files.
|
||||
for change in changes {
|
||||
let entry = self
|
||||
.package
|
||||
.files
|
||||
.entry(change.path)
|
||||
.and_modify(|existing| {
|
||||
if existing.key != change.new_key {
|
||||
// Reset the data if the key changed.
|
||||
existing.data = FileCacheData::default();
|
||||
}
|
||||
self.package.files.extend(new_files);
|
||||
|
||||
existing.key = change.new_key;
|
||||
existing
|
||||
.last_seen
|
||||
.store(self.last_seen_cache, Ordering::Relaxed);
|
||||
})
|
||||
.or_insert_with(|| FileCache {
|
||||
key: change.new_key,
|
||||
last_seen: AtomicU64::new(self.last_seen_cache),
|
||||
data: FileCacheData::default(),
|
||||
});
|
||||
|
||||
change.new_data.apply(&mut entry.data);
|
||||
}
|
||||
|
||||
true
|
||||
let file = File::create(&self.path)
|
||||
.with_context(|| format!("Failed to create cache file '{}'", self.path.display()))?;
|
||||
let writer = BufWriter::new(file);
|
||||
bincode::serialize_into(writer, &self.package).with_context(|| {
|
||||
format!(
|
||||
"Failed to serialise cache to file '{}'",
|
||||
self.path.display()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the relative path based on `path` and the package root.
|
||||
@@ -236,7 +169,7 @@ impl Cache {
|
||||
///
|
||||
/// This returns `None` if `key` differs from the cached key or if the
|
||||
/// cache doesn't contain results for the file.
|
||||
pub(crate) fn get(&self, path: &RelativePath, key: &FileCacheKey) -> Option<&FileCache> {
|
||||
pub(crate) fn get<T: CacheKey>(&self, path: &RelativePath, key: &T) -> Option<&FileCache> {
|
||||
let file = self.package.files.get(path)?;
|
||||
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
@@ -252,34 +185,50 @@ impl Cache {
|
||||
Some(file)
|
||||
}
|
||||
|
||||
pub(crate) fn is_formatted(&self, path: &RelativePath, key: &FileCacheKey) -> bool {
|
||||
self.get(path, key)
|
||||
.is_some_and(|entry| entry.data.formatted)
|
||||
}
|
||||
|
||||
/// Add or update a file cache at `path` relative to the package root.
|
||||
fn update(&self, path: RelativePathBuf, key: &FileCacheKey, data: ChangeData) {
|
||||
pub(crate) fn update<T: CacheKey>(
|
||||
&self,
|
||||
path: RelativePathBuf,
|
||||
key: T,
|
||||
messages: &[Message],
|
||||
imports: &ImportMap,
|
||||
notebook_index: Option<&NotebookIndex>,
|
||||
) {
|
||||
let source = if let Some(msg) = messages.first() {
|
||||
msg.file.source_text().to_owned()
|
||||
} else {
|
||||
String::new() // No messages, no need to keep the source!
|
||||
};
|
||||
|
||||
let messages = messages
|
||||
.iter()
|
||||
.map(|msg| {
|
||||
// Make sure that all messages use the same source file.
|
||||
assert!(
|
||||
msg.file == messages.first().unwrap().file,
|
||||
"message uses a different source file"
|
||||
);
|
||||
CacheMessage {
|
||||
kind: msg.kind.clone(),
|
||||
range: msg.range,
|
||||
fix: msg.fix.clone(),
|
||||
noqa_offset: msg.noqa_offset,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
key.cache_key(&mut hasher);
|
||||
|
||||
self.changes.lock().unwrap().push(Change {
|
||||
path,
|
||||
new_key: hasher.finish(),
|
||||
new_data: data,
|
||||
});
|
||||
}
|
||||
|
||||
pub(crate) fn update_lint(
|
||||
&self,
|
||||
path: RelativePathBuf,
|
||||
key: &FileCacheKey,
|
||||
data: LintCacheData,
|
||||
) {
|
||||
self.update(path, key, ChangeData::Lint(data));
|
||||
}
|
||||
|
||||
pub(crate) fn set_formatted(&self, path: RelativePathBuf, key: &FileCacheKey) {
|
||||
self.update(path, key, ChangeData::Formatted);
|
||||
let file = FileCache {
|
||||
key: hasher.finish(),
|
||||
last_seen: AtomicU64::new(self.last_seen_cache),
|
||||
imports: imports.clone(),
|
||||
messages,
|
||||
source,
|
||||
notebook_index: notebook_index.cloned(),
|
||||
};
|
||||
self.new_files.lock().unwrap().insert(path, file);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -292,7 +241,7 @@ struct PackageCache {
|
||||
/// single file "packages", e.g. scripts.
|
||||
package_root: PathBuf,
|
||||
/// Mapping of source file path to it's cached data.
|
||||
files: FxHashMap<RelativePathBuf, FileCache>,
|
||||
files: HashMap<RelativePathBuf, FileCache>,
|
||||
}
|
||||
|
||||
/// On disk representation of the cache per source file.
|
||||
@@ -305,59 +254,71 @@ pub(crate) struct FileCache {
|
||||
/// Represented as the number of milliseconds since Unix epoch. This will
|
||||
/// break in 1970 + ~584 years (~2554).
|
||||
last_seen: AtomicU64,
|
||||
|
||||
data: FileCacheData,
|
||||
/// Imports made.
|
||||
imports: ImportMap,
|
||||
/// Diagnostic messages.
|
||||
messages: Vec<CacheMessage>,
|
||||
/// Source code of the file.
|
||||
///
|
||||
/// # Notes
|
||||
///
|
||||
/// This will be empty if `messages` is empty.
|
||||
source: String,
|
||||
/// Notebook index if this file is a Jupyter Notebook.
|
||||
notebook_index: Option<NotebookIndex>,
|
||||
}
|
||||
|
||||
impl FileCache {
|
||||
/// Convert the file cache into `Diagnostics`, using `path` as file name.
|
||||
pub(crate) fn to_diagnostics(&self, path: &Path) -> Option<Diagnostics> {
|
||||
self.data.lint.as_ref().map(|lint| {
|
||||
let messages = if lint.messages.is_empty() {
|
||||
Vec::new()
|
||||
} else {
|
||||
let file = SourceFileBuilder::new(path.to_string_lossy(), &*lint.source).finish();
|
||||
lint.messages
|
||||
.iter()
|
||||
.map(|msg| Message {
|
||||
kind: msg.kind.clone(),
|
||||
range: msg.range,
|
||||
fix: msg.fix.clone(),
|
||||
file: file.clone(),
|
||||
noqa_offset: msg.noqa_offset,
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
let notebook_indexes = if let Some(notebook_index) = lint.notebook_index.as_ref() {
|
||||
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook_index.clone())])
|
||||
} else {
|
||||
FxHashMap::default()
|
||||
};
|
||||
Diagnostics::new(messages, lint.imports.clone(), notebook_indexes)
|
||||
})
|
||||
pub(crate) fn as_diagnostics(&self, path: &Path) -> Diagnostics {
|
||||
let messages = if self.messages.is_empty() {
|
||||
Vec::new()
|
||||
} else {
|
||||
let file = SourceFileBuilder::new(path.to_string_lossy(), &*self.source).finish();
|
||||
self.messages
|
||||
.iter()
|
||||
.map(|msg| Message {
|
||||
kind: msg.kind.clone(),
|
||||
range: msg.range,
|
||||
fix: msg.fix.clone(),
|
||||
file: file.clone(),
|
||||
noqa_offset: msg.noqa_offset,
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
let notebook_indexes = if let Some(notebook_index) = self.notebook_index.as_ref() {
|
||||
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook_index.clone())])
|
||||
} else {
|
||||
FxHashMap::default()
|
||||
};
|
||||
Diagnostics::new(messages, self.imports.clone(), notebook_indexes)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Deserialize, Serialize)]
|
||||
struct FileCacheData {
|
||||
lint: Option<LintCacheData>,
|
||||
formatted: bool,
|
||||
/// On disk representation of a diagnostic message.
|
||||
#[derive(Deserialize, Debug, Serialize)]
|
||||
struct CacheMessage {
|
||||
kind: DiagnosticKind,
|
||||
/// Range into the message's [`FileCache::source`].
|
||||
range: TextRange,
|
||||
fix: Option<Fix>,
|
||||
noqa_offset: TextSize,
|
||||
}
|
||||
|
||||
/// Returns a hash key based on the `package_root`, `settings` and the crate
/// version.
fn cache_key(package_root: &Path, settings: &Settings) -> u64 {
    let mut hasher = CacheKeyHasher::new();
    env!("CARGO_PKG_VERSION").cache_key(&mut hasher);
    package_root.cache_key(&mut hasher);
    settings.cache_key(&mut hasher);

    hasher.finish()
}
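
Because the crate version is hashed into the key, every new release naturally starts from a fresh cache entry rather than reusing results produced by an older binary. A minimal sketch of the same pattern using the standard library's `DefaultHasher` (illustrative; `example_cache_key` and `settings_fingerprint` are made-up names, and Ruff itself uses its own `CacheKeyHasher`):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::path::Path;

/// Combine the tool version, the project root, and a settings fingerprint
/// into a single cache key; changing any of them yields a different key.
fn example_cache_key(package_root: &Path, settings_fingerprint: u64) -> u64 {
    let mut hasher = DefaultHasher::new();
    env!("CARGO_PKG_VERSION").hash(&mut hasher);
    package_root.hash(&mut hasher);
    settings_fingerprint.hash(&mut hasher);
    hasher.finish()
}
```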
/// Initialize the cache at the specified `Path`.
|
||||
pub(crate) fn init(path: &Path) -> Result<()> {
|
||||
// Create the cache directories.
|
||||
fs::create_dir_all(path.join(VERSION))?;
|
||||
fs::create_dir_all(path.join("content"))?;
|
||||
|
||||
// Add the CACHEDIR.TAG.
|
||||
if !cachedir::is_tagged(path)? {
|
||||
@@ -374,209 +335,33 @@ pub(crate) fn init(path: &Path) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Serialize, PartialEq)]
|
||||
pub(crate) struct LintCacheData {
|
||||
/// Imports made.
|
||||
pub(super) imports: ImportMap,
|
||||
/// Diagnostic messages.
|
||||
pub(super) messages: Vec<CacheMessage>,
|
||||
/// Source code of the file.
|
||||
///
|
||||
/// # Notes
|
||||
///
|
||||
/// This will be empty if `messages` is empty.
|
||||
pub(super) source: String,
|
||||
/// Notebook index if this file is a Jupyter Notebook.
|
||||
pub(super) notebook_index: Option<NotebookIndex>,
|
||||
}
|
||||
|
||||
impl LintCacheData {
|
||||
pub(crate) fn from_messages(
|
||||
messages: &[Message],
|
||||
imports: ImportMap,
|
||||
notebook_index: Option<NotebookIndex>,
|
||||
) -> Self {
|
||||
let source = if let Some(msg) = messages.first() {
|
||||
msg.file.source_text().to_owned()
|
||||
} else {
|
||||
String::new() // No messages, no need to keep the source!
|
||||
};
|
||||
|
||||
let messages = messages
|
||||
.iter()
|
||||
.map(|msg| {
|
||||
// Make sure that all messages use the same source file.
|
||||
assert_eq!(
|
||||
msg.file,
|
||||
messages.first().unwrap().file,
|
||||
"message uses a different source file"
|
||||
);
|
||||
CacheMessage {
|
||||
kind: msg.kind.clone(),
|
||||
range: msg.range,
|
||||
fix: msg.fix.clone(),
|
||||
noqa_offset: msg.noqa_offset,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
Self {
|
||||
imports,
|
||||
messages,
|
||||
source,
|
||||
notebook_index,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// On disk representation of a diagnostic message.
|
||||
#[derive(Deserialize, Debug, Serialize, PartialEq)]
|
||||
pub(super) struct CacheMessage {
|
||||
kind: DiagnosticKind,
|
||||
/// Range into the message's [`FileCache::source`].
|
||||
range: TextRange,
|
||||
fix: Option<Fix>,
|
||||
noqa_offset: TextSize,
|
||||
}
|
||||
|
||||
pub(crate) trait PackageCaches {
|
||||
fn get(&self, package_root: &Path) -> Option<&Cache>;
|
||||
|
||||
fn persist(self) -> anyhow::Result<()>;
|
||||
}
|
||||
|
||||
impl<T> PackageCaches for Option<T>
|
||||
where
|
||||
T: PackageCaches,
|
||||
{
|
||||
fn get(&self, package_root: &Path) -> Option<&Cache> {
|
||||
match self {
|
||||
None => None,
|
||||
Some(caches) => caches.get(package_root),
|
||||
}
|
||||
}
|
||||
|
||||
fn persist(self) -> Result<()> {
|
||||
match self {
|
||||
None => Ok(()),
|
||||
Some(caches) => caches.persist(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct PackageCacheMap<'a>(FxHashMap<&'a Path, Cache>);
|
||||
|
||||
impl<'a> PackageCacheMap<'a> {
|
||||
pub(crate) fn init(
|
||||
pyproject_config: &PyprojectConfig,
|
||||
package_roots: &FxHashMap<&'a Path, Option<&'a Path>>,
|
||||
resolver: &Resolver,
|
||||
) -> Self {
|
||||
fn init_cache(path: &Path) {
|
||||
if let Err(e) = cache::init(path) {
|
||||
error!("Failed to initialize cache at {}: {e:?}", path.display());
|
||||
}
|
||||
}
|
||||
|
||||
match pyproject_config.strategy {
|
||||
PyprojectDiscoveryStrategy::Fixed => {
|
||||
init_cache(&pyproject_config.settings.cache_dir);
|
||||
}
|
||||
PyprojectDiscoveryStrategy::Hierarchical => {
|
||||
for settings in
|
||||
std::iter::once(&pyproject_config.settings).chain(resolver.settings())
|
||||
{
|
||||
init_cache(&settings.cache_dir);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Self(
|
||||
package_roots
|
||||
.iter()
|
||||
.map(|(package, package_root)| package_root.unwrap_or(package))
|
||||
.unique()
|
||||
.par_bridge()
|
||||
.map(|cache_root| {
|
||||
let settings = resolver.resolve(cache_root, pyproject_config);
|
||||
let cache = Cache::open(cache_root.to_path_buf(), settings);
|
||||
(cache_root, cache)
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl PackageCaches for PackageCacheMap<'_> {
|
||||
fn get(&self, package_root: &Path) -> Option<&Cache> {
|
||||
let cache = self.0.get(package_root);
|
||||
|
||||
if cache.is_none() {
|
||||
debug!("No cache found for {}", package_root.display());
|
||||
}
|
||||
|
||||
cache
|
||||
}
|
||||
|
||||
fn persist(self) -> Result<()> {
|
||||
self.0
|
||||
.into_par_iter()
|
||||
.try_for_each(|(_, cache)| cache.persist())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Change {
|
||||
path: PathBuf,
|
||||
new_key: u64,
|
||||
new_data: ChangeData,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ChangeData {
|
||||
Lint(LintCacheData),
|
||||
Formatted,
|
||||
}
|
||||
|
||||
impl ChangeData {
|
||||
fn apply(self, data: &mut FileCacheData) {
|
||||
match self {
|
||||
ChangeData::Lint(new_lint) => {
|
||||
data.lint = Some(new_lint);
|
||||
}
|
||||
ChangeData::Formatted => {
|
||||
data.formatted = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use filetime::{set_file_mtime, FileTime};
|
||||
use ruff_linter::settings::types::UnsafeFixes;
|
||||
use std::env::temp_dir;
|
||||
use std::fs;
|
||||
use std::io;
|
||||
use std::io::Write;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::atomic::AtomicU64;
|
||||
use std::time::SystemTime;
|
||||
|
||||
use anyhow::Result;
|
||||
use filetime::{set_file_mtime, FileTime};
|
||||
use itertools::Itertools;
|
||||
use test_case::test_case;
|
||||
|
||||
use ruff_cache::CACHE_DIR_NAME;
|
||||
use ruff_linter::settings::flags;
|
||||
use ruff_linter::settings::types::UnsafeFixes;
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_workspace::Settings;
|
||||
|
||||
use crate::cache::{self, FileCache, FileCacheData, FileCacheKey};
|
||||
use crate::cache::{Cache, RelativePathBuf};
|
||||
use crate::commands::format::{format_path, FormatCommandError, FormatMode, FormatResult};
|
||||
use crate::cache::RelativePathBuf;
|
||||
use crate::cache::{self, Cache, FileCache};
|
||||
use crate::diagnostics::{lint_path, Diagnostics};
|
||||
|
||||
use std::sync::atomic::AtomicU64;
|
||||
|
||||
use anyhow::Result;
|
||||
use ruff_python_ast::imports::ImportMap;
|
||||
|
||||
use ruff_workspace::Settings;
|
||||
use test_case::test_case;
|
||||
|
||||
#[test_case("../ruff_linter/resources/test/fixtures", "ruff_tests/cache_same_results_ruff_linter"; "ruff_linter_fixtures")]
|
||||
#[test_case("../ruff_notebook/resources/test/fixtures", "ruff_tests/cache_same_results_ruff_notebook"; "ruff_notebook_fixtures")]
|
||||
fn same_results(package_root: &str, cache_dir_path: &str) {
|
||||
@@ -592,7 +377,7 @@ mod tests {
|
||||
|
||||
let package_root = fs::canonicalize(package_root).unwrap();
|
||||
let cache = Cache::open(package_root.clone(), &settings);
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
assert_eq!(cache.new_files.lock().unwrap().len(), 0);
|
||||
|
||||
let mut paths = Vec::new();
|
||||
let mut parse_errors = Vec::new();
|
||||
@@ -642,7 +427,7 @@ mod tests {
|
||||
}
|
||||
assert_ne!(paths, &[] as &[std::path::PathBuf], "no files checked");
|
||||
|
||||
cache.persist().unwrap();
|
||||
cache.store().unwrap();
|
||||
|
||||
let cache = Cache::open(package_root.clone(), &settings);
|
||||
assert_ne!(cache.package.files.len(), 0);
|
||||
@@ -687,21 +472,21 @@ mod tests {
|
||||
let test_cache = TestCache::new("cache_adds_file_on_lint");
|
||||
let cache = test_cache.open();
|
||||
test_cache.write_source_file("source.py", source);
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
assert_eq!(cache.new_files.lock().unwrap().len(), 0);
|
||||
|
||||
cache.persist().unwrap();
|
||||
cache.store().unwrap();
|
||||
let cache = test_cache.open();
|
||||
|
||||
test_cache
|
||||
.lint_file_with_cache("source.py", &cache)
|
||||
.expect("Failed to lint test file");
|
||||
assert_eq!(
|
||||
cache.changes.lock().unwrap().len(),
|
||||
cache.new_files.lock().unwrap().len(),
|
||||
1,
|
||||
"A single new file should be added to the cache"
|
||||
);
|
||||
|
||||
cache.persist().unwrap();
|
||||
cache.store().unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -712,9 +497,9 @@ mod tests {
|
||||
let cache = test_cache.open();
|
||||
test_cache.write_source_file("source_1.py", source);
|
||||
test_cache.write_source_file("source_2.py", source);
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
assert_eq!(cache.new_files.lock().unwrap().len(), 0);
|
||||
|
||||
cache.persist().unwrap();
|
||||
cache.store().unwrap();
|
||||
let cache = test_cache.open();
|
||||
|
||||
test_cache
|
||||
@@ -724,39 +509,12 @@ mod tests {
|
||||
.lint_file_with_cache("source_2.py", &cache)
|
||||
.expect("Failed to lint test file");
|
||||
assert_eq!(
|
||||
cache.changes.lock().unwrap().len(),
|
||||
2,
|
||||
"Both files should be added to the cache"
|
||||
);
|
||||
cache.persist().unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_adds_files_on_format() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("cache_adds_files_on_format");
|
||||
let cache = test_cache.open();
|
||||
test_cache.write_source_file("source_1.py", source);
|
||||
test_cache.write_source_file("source_2.py", source);
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
|
||||
cache.persist().unwrap();
|
||||
let cache = test_cache.open();
|
||||
|
||||
test_cache
|
||||
.format_file_with_cache("source_1.py", &cache)
|
||||
.expect("Failed to format test file");
|
||||
test_cache
|
||||
.format_file_with_cache("source_2.py", &cache)
|
||||
.expect("Failed to format test file");
|
||||
assert_eq!(
|
||||
cache.changes.lock().unwrap().len(),
|
||||
cache.new_files.lock().unwrap().len(),
|
||||
2,
|
||||
"Both files should be added to the cache"
|
||||
);
|
||||
|
||||
cache.persist().unwrap();
|
||||
cache.store().unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -766,13 +524,13 @@ mod tests {
|
||||
let test_cache = TestCache::new("cache_invalidated_on_file_modified_time");
|
||||
let cache = test_cache.open();
|
||||
let source_path = test_cache.write_source_file("source.py", source);
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
assert_eq!(cache.new_files.lock().unwrap().len(), 0);
|
||||
|
||||
let expected_diagnostics = test_cache
|
||||
.lint_file_with_cache("source.py", &cache)
|
||||
.expect("Failed to lint test file");
|
||||
|
||||
cache.persist().unwrap();
|
||||
cache.store().unwrap();
|
||||
let cache = test_cache.open();
|
||||
|
||||
// Update the modified time of the file to a time in the future
|
||||
@@ -787,7 +545,7 @@ mod tests {
|
||||
.expect("Failed to lint test file");
|
||||
|
||||
assert_eq!(
|
||||
cache.changes.lock().unwrap().len(),
|
||||
cache.new_files.lock().unwrap().len(),
|
||||
1,
|
||||
"Cache should not be used, the file should be treated as new and added to the cache"
|
||||
);
|
||||
@@ -825,13 +583,13 @@ mod tests {
|
||||
let test_cache = TestCache::new("cache_invalidated_on_permission_change");
|
||||
let cache = test_cache.open();
|
||||
let path = test_cache.write_source_file("source.py", source);
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
assert_eq!(cache.new_files.lock().unwrap().len(), 0);
|
||||
|
||||
let expected_diagnostics = test_cache
|
||||
.lint_file_with_cache("source.py", &cache)
|
||||
.unwrap();
|
||||
|
||||
cache.persist().unwrap();
|
||||
cache.store().unwrap();
|
||||
let cache = test_cache.open();
|
||||
|
||||
// Flip the permissions on the file
|
||||
@@ -845,7 +603,7 @@ mod tests {
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
cache.changes.lock().unwrap().len(),
|
||||
cache.new_files.lock().unwrap().len(),
|
||||
1,
|
||||
"Cache should not be used, the file should be treated as new and added to the cache"
|
||||
);
|
||||
@@ -857,8 +615,8 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_removes_stale_files_on_persist() {
|
||||
let test_cache = TestCache::new("cache_removes_stale_files_on_persist");
|
||||
fn cache_removes_stale_files_on_store() {
|
||||
let test_cache = TestCache::new("cache_removes_stale_files_on_store");
|
||||
let mut cache = test_cache.open();
|
||||
|
||||
// Add a file to the cache that hasn't been linted or seen since the '70s!
|
||||
@@ -868,7 +626,10 @@ mod tests {
|
||||
FileCache {
|
||||
key: 123,
|
||||
last_seen: AtomicU64::new(123),
|
||||
data: FileCacheData::default(),
|
||||
imports: ImportMap::new(),
|
||||
messages: Vec::new(),
|
||||
source: String::new(),
|
||||
notebook_index: None,
|
||||
},
|
||||
);
|
||||
|
||||
@@ -876,125 +637,34 @@ mod tests {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
test_cache.write_source_file("new.py", source);
|
||||
let new_path_key = RelativePathBuf::from("new.py");
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
assert_eq!(cache.new_files.lock().unwrap().len(), 0);
|
||||
|
||||
test_cache
|
||||
.lint_file_with_cache("new.py", &cache)
|
||||
.expect("Failed to lint test file");
|
||||
|
||||
// Storing the cache should remove the old (`old.py`) file.
|
||||
cache.persist().unwrap();
|
||||
cache.store().unwrap();
|
||||
// So we when we open the cache again it shouldn't contain `old.py`.
|
||||
let cache = test_cache.open();
|
||||
|
||||
assert_eq!(
|
||||
cache.package.files.keys().collect_vec(),
|
||||
vec![&new_path_key],
|
||||
assert!(
|
||||
cache.package.files.keys().collect_vec() == vec![&new_path_key],
|
||||
"Only the new file should be present"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_updates_cache_entry() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("format_updates_cache_entry");
|
||||
let cache = test_cache.open();
|
||||
test_cache.write_source_file("source.py", source);
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
|
||||
cache.persist().unwrap();
|
||||
let cache = test_cache.open();
|
||||
|
||||
// Cache the lint results
|
||||
test_cache
|
||||
.lint_file_with_cache("source.py", &cache)
|
||||
.expect("Failed to lint test file");
|
||||
cache.persist().unwrap();
|
||||
|
||||
let mut cache = test_cache.open();
|
||||
|
||||
// Now lint the file
|
||||
test_cache
|
||||
.format_file_with_cache("source.py", &cache)
|
||||
.expect("Failed to format test file");
|
||||
|
||||
cache.save();
|
||||
|
||||
assert_eq!(cache.package.files.len(), 1);
|
||||
|
||||
let Some(file_cache) = cache.get(
|
||||
Path::new("source.py"),
|
||||
&FileCacheKey::from_path(&test_cache.package_root.join("source.py")).unwrap(),
|
||||
) else {
|
||||
panic!("Cache entry for `source.py` is missing.");
|
||||
};
|
||||
|
||||
assert!(file_cache.data.lint.is_some());
|
||||
assert!(file_cache.data.formatted);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn file_changes_invalidate_file_cache() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("file_changes_invalidate_file_cache");
|
||||
let cache = test_cache.open();
|
||||
let source_path = test_cache.write_source_file("source.py", source);
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
|
||||
cache.persist().unwrap();
|
||||
let cache = test_cache.open();
|
||||
|
||||
// Cache the format and lint results
|
||||
test_cache
|
||||
.lint_file_with_cache("source.py", &cache)
|
||||
.expect("Failed to lint test file");
|
||||
test_cache
|
||||
.format_file_with_cache("source.py", &cache)
|
||||
.expect("Failed to format test file");
|
||||
|
||||
cache.persist().unwrap();
|
||||
|
||||
let mut cache = test_cache.open();
|
||||
assert_eq!(cache.package.files.len(), 1);
|
||||
|
||||
set_file_mtime(
|
||||
&source_path,
|
||||
FileTime::from_system_time(SystemTime::now() + std::time::Duration::from_secs(1)),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
test_cache
|
||||
.format_file_with_cache("source.py", &cache)
|
||||
.expect("Failed to format test file");
|
||||
|
||||
cache.save();
|
||||
|
||||
assert_eq!(cache.package.files.len(), 1);
|
||||
|
||||
let Some(file_cache) = cache.get(
|
||||
Path::new("source.py"),
|
||||
&FileCacheKey::from_path(&source_path).unwrap(),
|
||||
) else {
|
||||
panic!("Cache entry for `source.py` is missing.");
|
||||
};
|
||||
|
||||
assert_eq!(file_cache.data.lint, None);
|
||||
assert!(file_cache.data.formatted);
|
||||
}
|
||||
|
||||
struct TestCache {
|
||||
package_root: PathBuf,
|
||||
settings: Settings,
|
||||
}
|
||||
|
||||
impl TestCache {
|
||||
fn new(test_case: &str) -> Self {
|
||||
fn new(name: &str) -> Self {
|
||||
// Build a new cache directory and clear it
|
||||
let mut test_dir = temp_dir();
|
||||
test_dir.push("ruff_tests/cache");
|
||||
test_dir.push(test_case);
|
||||
test_dir.push(name);
|
||||
|
||||
let _ = fs::remove_dir_all(&test_dir);
|
||||
|
||||
@@ -1048,21 +718,6 @@ mod tests {
|
||||
UnsafeFixes::Enabled,
|
||||
)
|
||||
}
|
||||
|
||||
fn format_file_with_cache(
|
||||
&self,
|
||||
path: &str,
|
||||
cache: &Cache,
|
||||
) -> Result<FormatResult, FormatCommandError> {
|
||||
let file_path = self.package_root.join(path);
|
||||
format_path(
|
||||
&file_path,
|
||||
&self.settings.formatter,
|
||||
PySourceType::Python,
|
||||
FormatMode::Write,
|
||||
Some(cache),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for TestCache {
|
||||
|
||||
@@ -10,7 +10,7 @@ use ruff_linter::linter::add_noqa_to_path;
|
||||
use ruff_linter::source_kind::SourceKind;
|
||||
use ruff_linter::warn_user_once;
|
||||
use ruff_python_ast::{PySourceType, SourceType};
|
||||
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};
|
||||
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig};
|
||||
|
||||
use crate::args::CliOverrides;
|
||||
|
||||
@@ -36,7 +36,7 @@ pub(crate) fn add_noqa(
|
||||
&paths
|
||||
.iter()
|
||||
.flatten()
|
||||
.map(ResolvedFile::path)
|
||||
.map(ignore::DirEntry::path)
|
||||
.collect::<Vec<_>>(),
|
||||
pyproject_config,
|
||||
);
|
||||
@@ -45,15 +45,14 @@ pub(crate) fn add_noqa(
|
||||
let modifications: usize = paths
|
||||
.par_iter()
|
||||
.flatten()
|
||||
.filter_map(|resolved_file| {
|
||||
.filter_map(|entry| {
|
||||
let path = entry.path();
|
||||
let SourceType::Python(source_type @ (PySourceType::Python | PySourceType::Stub)) =
|
||||
SourceType::from(resolved_file.path())
|
||||
SourceType::from(path)
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
let path = resolved_file.path();
|
||||
let package = resolved_file
|
||||
.path()
|
||||
let package = path
|
||||
.parent()
|
||||
.and_then(|parent| package_roots.get(parent))
|
||||
.and_then(|package| *package);
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use std::collections::HashMap;
|
||||
use std::fmt::Write;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
@@ -6,26 +7,25 @@ use std::time::Instant;
|
||||
use anyhow::Result;
|
||||
use colored::Colorize;
|
||||
use ignore::Error;
|
||||
use itertools::Itertools;
|
||||
use log::{debug, error, warn};
|
||||
#[cfg(not(target_family = "wasm"))]
|
||||
use rayon::prelude::*;
|
||||
use ruff_linter::settings::types::UnsafeFixes;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_linter::message::Message;
|
||||
use ruff_linter::registry::Rule;
|
||||
use ruff_linter::settings::types::UnsafeFixes;
|
||||
use ruff_linter::settings::{flags, LinterSettings};
|
||||
use ruff_linter::{fs, warn_user_once, IOError};
|
||||
use ruff_python_ast::imports::ImportMap;
|
||||
use ruff_source_file::SourceFileBuilder;
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
use ruff_workspace::resolver::{
|
||||
match_exclusion, python_files_in_path, PyprojectConfig, ResolvedFile,
|
||||
};
|
||||
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, PyprojectDiscoveryStrategy};
|
||||
|
||||
use crate::args::CliOverrides;
|
||||
use crate::cache::{Cache, PackageCacheMap, PackageCaches};
|
||||
use crate::cache::{self, Cache};
|
||||
use crate::diagnostics::Diagnostics;
|
||||
use crate::panic::catch_unwind;
|
||||
|
||||
@@ -42,144 +42,163 @@ pub(crate) fn check(
|
||||
// Collect all the Python files to check.
|
||||
let start = Instant::now();
|
||||
let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
|
||||
debug!("Identified files to lint in: {:?}", start.elapsed());
|
||||
let duration = start.elapsed();
|
||||
debug!("Identified files to lint in: {:?}", duration);
|
||||
|
||||
if paths.is_empty() {
|
||||
warn_user_once!("No Python files found under the given path(s)");
|
||||
return Ok(Diagnostics::default());
|
||||
}
|
||||
|
||||
// Initialize the cache.
|
||||
if cache.into() {
|
||||
fn init_cache(path: &Path) {
|
||||
if let Err(e) = cache::init(path) {
|
||||
error!("Failed to initialize cache at {}: {e:?}", path.display());
|
||||
}
|
||||
}
|
||||
|
||||
match pyproject_config.strategy {
|
||||
PyprojectDiscoveryStrategy::Fixed => {
|
||||
init_cache(&pyproject_config.settings.cache_dir);
|
||||
}
|
||||
PyprojectDiscoveryStrategy::Hierarchical => {
|
||||
for settings in
|
||||
std::iter::once(&pyproject_config.settings).chain(resolver.settings())
|
||||
{
|
||||
init_cache(&settings.cache_dir);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Discover the package root for each Python file.
|
||||
let package_roots = resolver.package_roots(
|
||||
&paths
|
||||
.iter()
|
||||
.flatten()
|
||||
.map(ResolvedFile::path)
|
||||
.map(ignore::DirEntry::path)
|
||||
.collect::<Vec<_>>(),
|
||||
pyproject_config,
|
||||
);
|
||||
|
||||
// Load the caches.
|
||||
let caches = if bool::from(cache) {
|
||||
Some(PackageCacheMap::init(
|
||||
pyproject_config,
|
||||
&package_roots,
|
||||
&resolver,
|
||||
))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let start = Instant::now();
|
||||
let diagnostics_per_file = paths.par_iter().filter_map(|resolved_file| {
|
||||
let result = match resolved_file {
|
||||
Ok(resolved_file) => {
|
||||
let path = resolved_file.path();
|
||||
let package = path
|
||||
.parent()
|
||||
.and_then(|parent| package_roots.get(parent))
|
||||
.and_then(|package| *package);
|
||||
|
||||
let settings = resolver.resolve(path, pyproject_config);
|
||||
|
||||
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
|
||||
&& match_exclusion(
|
||||
resolved_file.path(),
|
||||
resolved_file.file_name(),
|
||||
&settings.linter.exclude,
|
||||
)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
let cache_root = package.unwrap_or_else(|| path.parent().unwrap_or(path));
|
||||
let cache = caches.get(cache_root);
|
||||
|
||||
lint_path(
|
||||
path,
|
||||
package,
|
||||
&settings.linter,
|
||||
cache,
|
||||
noqa,
|
||||
fix_mode,
|
||||
unsafe_fixes,
|
||||
)
|
||||
.map_err(|e| {
|
||||
(Some(path.to_path_buf()), {
|
||||
let mut error = e.to_string();
|
||||
for cause in e.chain() {
|
||||
write!(&mut error, "\n Cause: {cause}").unwrap();
|
||||
}
|
||||
error
|
||||
})
|
||||
})
|
||||
}
|
||||
Err(e) => Err((
|
||||
if let Error::WithPath { path, .. } = e {
|
||||
Some(path.clone())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
e.io_error()
|
||||
.map_or_else(|| e.to_string(), io::Error::to_string),
|
||||
)),
|
||||
};
|
||||
|
||||
Some(result.unwrap_or_else(|(path, message)| {
|
||||
if let Some(path) = &path {
|
||||
let settings = resolver.resolve(path, pyproject_config);
|
||||
if settings.linter.rules.enabled(Rule::IOError) {
|
||||
let dummy =
|
||||
SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();
|
||||
|
||||
Diagnostics::new(
|
||||
vec![Message::from_diagnostic(
|
||||
Diagnostic::new(IOError { message }, TextRange::default()),
|
||||
dummy,
|
||||
TextSize::default(),
|
||||
)],
|
||||
ImportMap::default(),
|
||||
FxHashMap::default(),
|
||||
)
|
||||
} else {
|
||||
warn!(
|
||||
"{}{}{} {message}",
|
||||
"Failed to lint ".bold(),
|
||||
fs::relativize_path(path).bold(),
|
||||
":".bold()
|
||||
);
|
||||
Diagnostics::default()
|
||||
}
|
||||
} else {
|
||||
warn!("{} {message}", "Encountered error:".bold());
|
||||
Diagnostics::default()
|
||||
}
|
||||
}))
|
||||
let caches = bool::from(cache).then(|| {
|
||||
package_roots
|
||||
.iter()
|
||||
.map(|(package, package_root)| package_root.unwrap_or(package))
|
||||
.unique()
|
||||
.par_bridge()
|
||||
.map(|cache_root| {
|
||||
let settings = resolver.resolve(cache_root, pyproject_config);
|
||||
let cache = Cache::open(cache_root.to_path_buf(), settings);
|
||||
(cache_root, cache)
|
||||
})
|
||||
.collect::<HashMap<&Path, Cache>>()
|
||||
});
|
||||
|
||||
// Aggregate the diagnostics of all checked files and count the checked files.
|
||||
// This can't be a regular for loop because we use `par_iter`.
|
||||
let (mut all_diagnostics, checked_files) = diagnostics_per_file
|
||||
.fold(
|
||||
|| (Diagnostics::default(), 0u64),
|
||||
|(all_diagnostics, checked_files), file_diagnostics| {
|
||||
(all_diagnostics + file_diagnostics, checked_files + 1)
|
||||
},
|
||||
)
|
||||
.reduce(
|
||||
|| (Diagnostics::default(), 0u64),
|
||||
|a, b| (a.0 + b.0, a.1 + b.1),
|
||||
);
|
||||
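The `fold`/`reduce` pair above is the usual way to aggregate results out of a rayon parallel iterator, since a plain `for` loop cannot consume one. A self-contained sketch with simple counters standing in for `Diagnostics` (assuming the `rayon` crate as a dependency; the variable names are illustrative):

```rust
use rayon::prelude::*;

fn main() {
    let file_line_counts = vec![10_u64, 3, 42, 7];

    // Each worker folds its share of the items into a local accumulator, and
    // `reduce` then combines the per-worker partial results into a single
    // (total_lines, checked_files) pair.
    let (total_lines, checked_files) = file_line_counts
        .par_iter()
        .fold(
            || (0_u64, 0_u64),
            |(lines, files), count| (lines + count, files + 1),
        )
        .reduce(|| (0_u64, 0_u64), |a, b| (a.0 + b.0, a.1 + b.1));

    assert_eq!((total_lines, checked_files), (62, 4));
}
```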
let start = Instant::now();
|
||||
let mut diagnostics: Diagnostics = paths
|
||||
.par_iter()
|
||||
.map(|entry| {
|
||||
match entry {
|
||||
Ok(entry) => {
|
||||
let path = entry.path();
|
||||
let package = path
|
||||
.parent()
|
||||
.and_then(|parent| package_roots.get(parent))
|
||||
.and_then(|package| *package);
|
||||
|
||||
all_diagnostics.messages.sort();
|
||||
let settings = resolver.resolve(path, pyproject_config);
|
||||
|
||||
let cache_root = package.unwrap_or_else(|| path.parent().unwrap_or(path));
|
||||
let cache = caches.as_ref().and_then(|caches| {
|
||||
if let Some(cache) = caches.get(&cache_root) {
|
||||
Some(cache)
|
||||
} else {
|
||||
debug!("No cache found for {}", cache_root.display());
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
lint_path(
|
||||
path,
|
||||
package,
|
||||
&settings.linter,
|
||||
cache,
|
||||
noqa,
|
||||
fix_mode,
|
||||
unsafe_fixes,
|
||||
)
|
||||
.map_err(|e| {
|
||||
(Some(path.to_owned()), {
|
||||
let mut error = e.to_string();
|
||||
for cause in e.chain() {
|
||||
write!(&mut error, "\n Cause: {cause}").unwrap();
|
||||
}
|
||||
error
|
||||
})
|
||||
})
|
||||
}
|
||||
Err(e) => Err((
|
||||
if let Error::WithPath { path, .. } = e {
|
||||
Some(path.clone())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
e.io_error()
|
||||
.map_or_else(|| e.to_string(), io::Error::to_string),
|
||||
)),
|
||||
}
|
||||
.unwrap_or_else(|(path, message)| {
|
||||
if let Some(path) = &path {
|
||||
let settings = resolver.resolve(path, pyproject_config);
|
||||
if settings.linter.rules.enabled(Rule::IOError) {
|
||||
let dummy =
|
||||
SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();
|
||||
|
||||
Diagnostics::new(
|
||||
vec![Message::from_diagnostic(
|
||||
Diagnostic::new(IOError { message }, TextRange::default()),
|
||||
dummy,
|
||||
TextSize::default(),
|
||||
)],
|
||||
ImportMap::default(),
|
||||
FxHashMap::default(),
|
||||
)
|
||||
} else {
|
||||
warn!(
|
||||
"{}{}{} {message}",
|
||||
"Failed to lint ".bold(),
|
||||
fs::relativize_path(path).bold(),
|
||||
":".bold()
|
||||
);
|
||||
Diagnostics::default()
|
||||
}
|
||||
} else {
|
||||
warn!("{} {message}", "Encountered error:".bold());
|
||||
Diagnostics::default()
|
||||
}
|
||||
})
|
||||
})
|
||||
.reduce(Diagnostics::default, |mut acc, item| {
|
||||
acc += item;
|
||||
acc
|
||||
});
|
||||
|
||||
diagnostics.messages.sort();
|
||||
|
||||
// Store the caches.
|
||||
caches.persist()?;
|
||||
if let Some(caches) = caches {
|
||||
caches
|
||||
.into_par_iter()
|
||||
.try_for_each(|(_, cache)| cache.store())?;
|
||||
}
|
||||
|
||||
let duration = start.elapsed();
|
||||
debug!("Checked {:?} files in: {:?}", checked_files, duration);
|
||||
debug!("Checked {:?} files in: {:?}", paths.len(), duration);
|
||||
|
||||
Ok(all_diagnostics)
|
||||
Ok(diagnostics)
|
||||
}
|
||||
|
||||
/// Wraps [`lint_path`](crate::diagnostics::lint_path) in a [`catch_unwind`](std::panic::catch_unwind) and emits
|
||||
@@ -226,12 +245,13 @@ mod test {
|
||||
use std::os::unix::fs::OpenOptionsExt;
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use ruff_linter::settings::types::UnsafeFixes;
|
||||
use rustc_hash::FxHashMap;
|
||||
use tempfile::TempDir;
|
||||
|
||||
use ruff_linter::message::{Emitter, EmitterContext, TextEmitter};
|
||||
use ruff_linter::registry::Rule;
|
||||
use ruff_linter::settings::types::UnsafeFixes;
|
||||
use ruff_linter::settings::{flags, LinterSettings};
|
||||
use ruff_workspace::resolver::{PyprojectConfig, PyprojectDiscoveryStrategy};
|
||||
use ruff_workspace::Settings;
|
||||
|
||||
@@ -4,7 +4,7 @@ use anyhow::Result;
|
||||
|
||||
use ruff_linter::packaging;
|
||||
use ruff_linter::settings::flags;
|
||||
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, PyprojectConfig};
|
||||
use ruff_workspace::resolver::{python_file_at_path, PyprojectConfig};
|
||||
|
||||
use crate::args::CliOverrides;
|
||||
use crate::diagnostics::{lint_stdin, Diagnostics};
|
||||
@@ -18,19 +18,9 @@ pub(crate) fn check_stdin(
|
||||
noqa: flags::Noqa,
|
||||
fix_mode: flags::FixMode,
|
||||
) -> Result<Diagnostics> {
|
||||
if pyproject_config.settings.file_resolver.force_exclude {
|
||||
if let Some(filename) = filename {
|
||||
if !python_file_at_path(filename, pyproject_config, overrides)? {
|
||||
return Ok(Diagnostics::default());
|
||||
}
|
||||
|
||||
let lint_settings = &pyproject_config.settings.linter;
|
||||
if filename
|
||||
.file_name()
|
||||
.is_some_and(|name| match_exclusion(filename, name, &lint_settings.exclude))
|
||||
{
|
||||
return Ok(Diagnostics::default());
|
||||
}
|
||||
if let Some(filename) = filename {
|
||||
if !python_file_at_path(filename, pyproject_config, overrides)? {
|
||||
return Ok(Diagnostics::default());
|
||||
}
|
||||
}
|
||||
let package_root = filename.and_then(Path::parent).and_then(|path| {
|
||||
|
||||
@@ -1,37 +1,29 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::fs::File;
|
||||
use std::io;
|
||||
use std::io::{stderr, stdout, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::time::Instant;
|
||||
|
||||
use anyhow::Result;
|
||||
use colored::Colorize;
|
||||
use itertools::Itertools;
|
||||
use log::{error, warn};
|
||||
use log::error;
|
||||
use rayon::iter::Either::{Left, Right};
|
||||
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
|
||||
use rustc_hash::FxHashSet;
|
||||
use rayon::iter::{IntoParallelIterator, ParallelIterator};
|
||||
use thiserror::Error;
|
||||
use tracing::debug;
|
||||
|
||||
use ruff_diagnostics::SourceMap;
|
||||
use ruff_linter::fs;
|
||||
use ruff_linter::logging::LogLevel;
|
||||
use ruff_linter::registry::Rule;
|
||||
use ruff_linter::rules::flake8_quotes::settings::Quote;
|
||||
use ruff_linter::source_kind::{SourceError, SourceKind};
|
||||
use ruff_linter::warn_user_once;
|
||||
use ruff_python_ast::{PySourceType, SourceType};
|
||||
use ruff_python_formatter::{format_module_source, FormatModuleError, QuoteStyle};
|
||||
use ruff_python_formatter::{format_module_source, FormatModuleError};
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
use ruff_workspace::resolver::{
|
||||
match_exclusion, python_files_in_path, PyprojectConfig, ResolvedFile, Resolver,
|
||||
};
|
||||
use ruff_workspace::resolver::python_files_in_path;
|
||||
use ruff_workspace::FormatterSettings;
|
||||
|
||||
use crate::args::{CliOverrides, FormatArguments};
|
||||
use crate::cache::{Cache, FileCacheKey, PackageCacheMap, PackageCaches};
|
||||
use crate::panic::{catch_unwind, PanicError};
|
||||
use crate::resolve::resolve;
|
||||
use crate::ExitStatus;
|
||||
@@ -42,20 +34,6 @@ pub(crate) enum FormatMode {
|
||||
Write,
|
||||
/// Check if the file is formatted, but do not write the formatted contents back.
|
||||
Check,
|
||||
/// Check if the file is formatted, show a diff if not.
|
||||
Diff,
|
||||
}
|
||||
|
||||
impl FormatMode {
|
||||
pub(crate) fn from_cli(cli: &FormatArguments) -> Self {
|
||||
if cli.diff {
|
||||
FormatMode::Diff
|
||||
} else if cli.check {
|
||||
FormatMode::Check
|
||||
} else {
|
||||
FormatMode::Write
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Format a set of files, and return the exit status.
|
||||
@@ -70,7 +48,11 @@ pub(crate) fn format(
|
||||
overrides,
|
||||
cli.stdin_filename.as_deref(),
|
||||
)?;
|
||||
let mode = FormatMode::from_cli(cli);
|
||||
let mode = if cli.check {
|
||||
FormatMode::Check
|
||||
} else {
|
||||
FormatMode::Write
|
||||
};
|
||||
let (paths, resolver) = python_files_in_path(&cli.files, &pyproject_config, overrides)?;
|
||||
|
||||
if paths.is_empty() {
|
||||
@@ -78,81 +60,31 @@ pub(crate) fn format(
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
|
||||
warn_incompatible_formatter_settings(&pyproject_config, Some(&resolver));
|
||||
|
||||
// Discover the package root for each Python file.
|
||||
let package_roots = resolver.package_roots(
|
||||
&paths
|
||||
.iter()
|
||||
.flatten()
|
||||
.map(ResolvedFile::path)
|
||||
.collect::<Vec<_>>(),
|
||||
&pyproject_config,
|
||||
);
|
||||
|
||||
let caches = if cli.no_cache {
|
||||
None
|
||||
} else {
|
||||
// `--no-cache` doesn't respect code changes, and so is often confusing during
|
||||
// development.
|
||||
#[cfg(debug_assertions)]
|
||||
crate::warn_user!("Detected debug build without --no-cache.");
|
||||
|
||||
Some(PackageCacheMap::init(
|
||||
&pyproject_config,
|
||||
&package_roots,
|
||||
&resolver,
|
||||
))
|
||||
};
|
||||
|
||||
let start = Instant::now();
|
||||
let (results, mut errors): (Vec<_>, Vec<_>) = paths
|
||||
.par_iter()
|
||||
let (results, errors): (Vec<_>, Vec<_>) = paths
|
||||
.into_par_iter()
|
||||
.filter_map(|entry| {
|
||||
match entry {
|
||||
Ok(resolved_file) => {
|
||||
let path = resolved_file.path();
|
||||
Ok(entry) => {
|
||||
let path = entry.into_path();
|
||||
|
||||
let SourceType::Python(source_type) = SourceType::from(&path) else {
|
||||
// Ignore any non-Python files.
|
||||
return None;
|
||||
};
|
||||
|
||||
let settings = resolver.resolve(path, &pyproject_config);
|
||||
|
||||
// Ignore files that are excluded from formatting
|
||||
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
|
||||
&& match_exclusion(
|
||||
path,
|
||||
resolved_file.file_name(),
|
||||
&settings.formatter.exclude,
|
||||
)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
let package = path
|
||||
.parent()
|
||||
.and_then(|parent| package_roots.get(parent).copied())
|
||||
.flatten();
|
||||
let cache_root = package.unwrap_or_else(|| path.parent().unwrap_or(path));
|
||||
let cache = caches.get(cache_root);
|
||||
let resolved_settings = resolver.resolve(&path, &pyproject_config);
|
||||
|
||||
Some(
|
||||
match catch_unwind(|| {
|
||||
format_path(path, &settings.formatter, source_type, mode, cache)
|
||||
format_path(&path, &resolved_settings.formatter, source_type, mode)
|
||||
}) {
|
||||
Ok(inner) => inner.map(|result| FormatPathResult {
|
||||
path: resolved_file.path().to_path_buf(),
|
||||
result,
|
||||
}),
|
||||
Err(error) => Err(FormatCommandError::Panic(
|
||||
Some(resolved_file.path().to_path_buf()),
|
||||
error,
|
||||
)),
|
||||
Ok(inner) => inner.map(|result| FormatPathResult { path, result }),
|
||||
Err(error) => Err(FormatCommandError::Panic(Some(path), error)),
|
||||
},
|
||||
)
|
||||
}
|
||||
Err(err) => Some(Err(FormatCommandError::Ignore(err.clone()))),
|
||||
Err(err) => Some(Err(FormatCommandError::Ignore(err))),
|
||||
}
|
||||
})
|
||||
.partition_map(|result| match result {
|
||||
@@ -167,33 +99,18 @@ pub(crate) fn format(
|
||||
duration
|
||||
);
|
||||
|
||||
caches.persist()?;
|
||||
|
||||
// Report on any errors.
|
||||
errors.sort_unstable_by(|a, b| a.path().cmp(&b.path()));
|
||||
|
||||
for error in &errors {
|
||||
error!("{error}");
|
||||
}
|
||||
|
||||
let results = FormatResults::new(results.as_slice(), mode);
|
||||
match mode {
|
||||
FormatMode::Write => {}
|
||||
FormatMode::Check => {
|
||||
results.write_changed(&mut stdout().lock())?;
|
||||
}
|
||||
FormatMode::Diff => {
|
||||
results.write_diff(&mut stdout().lock())?;
|
||||
}
|
||||
}
|
||||
let summary = FormatSummary::new(results.as_slice(), mode);
|
||||
|
||||
// Report on the formatting changes.
|
||||
if log_level >= LogLevel::Default {
|
||||
if mode.is_diff() {
|
||||
// Allow piping the diff to e.g. a file by writing the summary to stderr
|
||||
results.write_summary(&mut stderr().lock())?;
|
||||
} else {
|
||||
results.write_summary(&mut stdout().lock())?;
|
||||
#[allow(clippy::print_stdout)]
|
||||
{
|
||||
println!("{summary}");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -205,9 +122,9 @@ pub(crate) fn format(
|
||||
Ok(ExitStatus::Error)
|
||||
}
|
||||
}
|
||||
FormatMode::Check | FormatMode::Diff => {
|
||||
FormatMode::Check => {
|
||||
if errors.is_empty() {
|
||||
if results.any_formatted() {
|
||||
if summary.any_formatted() {
|
||||
Ok(ExitStatus::Failure)
|
||||
} else {
|
||||
Ok(ExitStatus::Success)
|
||||
@@ -220,94 +137,61 @@ pub(crate) fn format(
|
||||
}
|
||||
|
||||
/// Format the file at the given [`Path`].
|
||||
#[tracing::instrument(level="debug", skip_all, fields(path = %path.display()))]
|
||||
pub(crate) fn format_path(
|
||||
#[tracing::instrument(skip_all, fields(path = %path.display()))]
|
||||
fn format_path(
|
||||
path: &Path,
|
||||
settings: &FormatterSettings,
|
||||
source_type: PySourceType,
|
||||
mode: FormatMode,
|
||||
cache: Option<&Cache>,
|
||||
) -> Result<FormatResult, FormatCommandError> {
|
||||
if let Some(cache) = cache {
|
||||
let relative_path = cache
|
||||
.relative_path(path)
|
||||
.expect("wrong package cache for file");
|
||||
|
||||
if let Ok(cache_key) = FileCacheKey::from_path(path) {
|
||||
if cache.is_formatted(relative_path, &cache_key) {
|
||||
return Ok(FormatResult::Unchanged);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract the sources from the file.
|
||||
let unformatted = match SourceKind::from_path(path, source_type) {
|
||||
let source_kind = match SourceKind::from_path(path, source_type) {
|
||||
Ok(Some(source_kind)) => source_kind,
|
||||
// Non-Python Jupyter notebook
|
||||
Ok(None) => return Ok(FormatResult::Skipped),
|
||||
Ok(None) => return Ok(FormatResult::Unchanged),
|
||||
Err(err) => {
|
||||
return Err(FormatCommandError::Read(Some(path.to_path_buf()), err));
|
||||
}
|
||||
};
|
||||
|
||||
// Format the source.
|
||||
let format_result = match format_source(&unformatted, source_type, Some(path), settings)? {
|
||||
FormattedSource::Formatted(formatted) => match mode {
|
||||
FormatMode::Write => {
|
||||
match format_source(source_kind, source_type, Some(path), settings)? {
|
||||
FormattedSource::Formatted(formatted) => {
|
||||
if mode.is_write() {
|
||||
let mut writer = File::create(path).map_err(|err| {
|
||||
FormatCommandError::Write(Some(path.to_path_buf()), err.into())
|
||||
})?;
|
||||
formatted
|
||||
.write(&mut writer)
|
||||
.map_err(|err| FormatCommandError::Write(Some(path.to_path_buf()), err))?;
|
||||
|
||||
if let Some(cache) = cache {
|
||||
if let Ok(cache_key) = FileCacheKey::from_path(path) {
|
||||
let relative_path = cache
|
||||
.relative_path(path)
|
||||
.expect("wrong package cache for file");
|
||||
cache.set_formatted(relative_path.to_path_buf(), &cache_key);
|
||||
}
|
||||
}
|
||||
|
||||
FormatResult::Formatted
|
||||
}
|
||||
FormatMode::Check => FormatResult::Formatted,
|
||||
FormatMode::Diff => FormatResult::Diff {
|
||||
unformatted,
|
||||
formatted,
|
||||
},
|
||||
},
|
||||
FormattedSource::Unchanged => {
|
||||
if let Some(cache) = cache {
|
||||
if let Ok(cache_key) = FileCacheKey::from_path(path) {
|
||||
let relative_path = cache
|
||||
.relative_path(path)
|
||||
.expect("wrong package cache for file");
|
||||
cache.set_formatted(relative_path.to_path_buf(), &cache_key);
|
||||
}
|
||||
}
|
||||
|
||||
FormatResult::Unchanged
|
||||
Ok(FormatResult::Formatted)
|
||||
}
|
||||
};
|
||||
|
||||
Ok(format_result)
|
||||
FormattedSource::Unchanged(_) => Ok(FormatResult::Unchanged),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum FormattedSource {
|
||||
/// The source was formatted, and the [`SourceKind`] contains the transformed source code.
|
||||
Formatted(SourceKind),
|
||||
/// The source was unchanged.
|
||||
Unchanged,
|
||||
/// The source was unchanged, and the [`SourceKind`] contains the original source code.
|
||||
Unchanged(SourceKind),
|
||||
}
|
||||
|
||||
impl From<FormattedSource> for FormatResult {
|
||||
fn from(value: FormattedSource) -> Self {
|
||||
match value {
|
||||
FormattedSource::Formatted(_) => FormatResult::Formatted,
|
||||
FormattedSource::Unchanged => FormatResult::Unchanged,
|
||||
FormattedSource::Unchanged(_) => FormatResult::Unchanged,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FormattedSource {
|
||||
pub(crate) fn source_kind(&self) -> &SourceKind {
|
||||
match self {
|
||||
FormattedSource::Formatted(source_kind) => source_kind,
|
||||
FormattedSource::Unchanged(source_kind) => source_kind,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -315,28 +199,30 @@ impl From<FormattedSource> for FormatResult {
|
||||
/// Format a [`SourceKind`], returning the transformed [`SourceKind`], or `None` if the source was
|
||||
/// unchanged.
|
||||
pub(crate) fn format_source(
|
||||
source_kind: &SourceKind,
|
||||
source_kind: SourceKind,
|
||||
source_type: PySourceType,
|
||||
path: Option<&Path>,
|
||||
settings: &FormatterSettings,
|
||||
) -> Result<FormattedSource, FormatCommandError> {
|
||||
match source_kind {
|
||||
SourceKind::Python(unformatted) => {
|
||||
let options = settings.to_format_options(source_type, unformatted);
|
||||
let options = settings.to_format_options(source_type, &unformatted);
|
||||
|
||||
let formatted = format_module_source(unformatted, options)
|
||||
let formatted = format_module_source(&unformatted, options)
|
||||
.map_err(|err| FormatCommandError::Format(path.map(Path::to_path_buf), err))?;
|
||||
|
||||
let formatted = formatted.into_code();
|
||||
if formatted.len() == unformatted.len() && formatted == *unformatted {
|
||||
Ok(FormattedSource::Unchanged)
|
||||
Ok(FormattedSource::Unchanged(SourceKind::Python(unformatted)))
|
||||
} else {
|
||||
Ok(FormattedSource::Formatted(SourceKind::Python(formatted)))
|
||||
}
|
||||
}
|
||||
SourceKind::IpyNotebook(notebook) => {
|
||||
if !notebook.is_python_notebook() {
|
||||
return Ok(FormattedSource::Unchanged);
|
||||
return Ok(FormattedSource::Unchanged(SourceKind::IpyNotebook(
|
||||
notebook,
|
||||
)));
|
||||
}
|
||||
|
||||
let options = settings.to_format_options(source_type, notebook.source_code());
|
||||
@@ -384,7 +270,9 @@ pub(crate) fn format_source(
|
||||
|
||||
// If the file was unchanged, return `None`.
|
||||
let (Some(mut output), Some(last)) = (output, last) else {
|
||||
return Ok(FormattedSource::Unchanged);
|
||||
return Ok(FormattedSource::Unchanged(SourceKind::IpyNotebook(
|
||||
notebook,
|
||||
)));
|
||||
};
|
||||
|
||||
// Add the remaining content.
|
||||
@@ -392,31 +280,23 @@ pub(crate) fn format_source(
|
||||
output.push_str(slice);
|
||||
|
||||
// Update the notebook.
|
||||
let mut formatted = notebook.clone();
|
||||
formatted.update(&source_map, output);
|
||||
let mut notebook = notebook.clone();
|
||||
notebook.update(&source_map, output);
|
||||
|
||||
Ok(FormattedSource::Formatted(SourceKind::IpyNotebook(
|
||||
formatted,
|
||||
notebook,
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The result of an individual formatting operation.
|
||||
#[derive(Debug, Clone, is_macro::Is)]
|
||||
#[derive(Debug, Clone, Copy, is_macro::Is)]
|
||||
pub(crate) enum FormatResult {
|
||||
/// The file was formatted.
|
||||
Formatted,
|
||||
/// The file would be reformatted; the [`SourceKind`]s contain the unformatted and formatted code.
|
||||
Diff {
|
||||
unformatted: SourceKind,
|
||||
formatted: SourceKind,
|
||||
},
|
||||
/// The file was unchanged, as the formatted contents matched the existing contents.
|
||||
Unchanged,
|
||||
|
||||
/// Skipped formatting because it's an unsupported file format
|
||||
Skipped,
|
||||
}
|
||||
|
||||
/// The coupling of a [`FormatResult`] with the path of the file that was analyzed.
|
||||
@@ -426,117 +306,77 @@ struct FormatPathResult {
|
||||
result: FormatResult,
|
||||
}
|
||||
|
||||
/// The results of formatting a set of files
|
||||
/// A summary of the formatting results.
|
||||
#[derive(Debug)]
|
||||
struct FormatResults<'a> {
|
||||
struct FormatSummary<'a> {
|
||||
/// The individual formatting results.
|
||||
results: &'a [FormatPathResult],
|
||||
/// The format mode that was used.
|
||||
mode: FormatMode,
|
||||
}
|
||||
|
||||
impl<'a> FormatResults<'a> {
|
||||
impl<'a> FormatSummary<'a> {
|
||||
fn new(results: &'a [FormatPathResult], mode: FormatMode) -> Self {
|
||||
Self { results, mode }
|
||||
}
|
||||
|
||||
/// Returns `true` if any of the files require formatting.
|
||||
fn any_formatted(&self) -> bool {
|
||||
self.results.iter().any(|result| match result.result {
|
||||
FormatResult::Formatted | FormatResult::Diff { .. } => true,
|
||||
FormatResult::Unchanged | FormatResult::Skipped => false,
|
||||
})
|
||||
}
|
||||
|
||||
/// Write a diff of the formatting changes to the given writer.
|
||||
fn write_diff(&self, f: &mut impl Write) -> io::Result<()> {
|
||||
for (path, unformatted, formatted) in self
|
||||
.results
|
||||
self.results
|
||||
.iter()
|
||||
.filter_map(|result| {
|
||||
if let FormatResult::Diff {
|
||||
unformatted,
|
||||
formatted,
|
||||
} = &result.result
|
||||
{
|
||||
Some((result.path.as_path(), unformatted, formatted))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.sorted_unstable_by_key(|(path, _, _)| *path)
|
||||
{
|
||||
unformatted.diff(formatted, Some(path), f)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
.any(|result| result.result.is_formatted())
|
||||
}
|
||||
}
|
||||
|
||||
/// Write a list of the files that would be changed to the given writer.
|
||||
fn write_changed(&self, f: &mut impl Write) -> io::Result<()> {
|
||||
for path in self
|
||||
.results
|
||||
.iter()
|
||||
.filter_map(|result| {
|
||||
if result.result.is_formatted() {
|
||||
Some(result.path.as_path())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.sorted_unstable()
|
||||
{
|
||||
writeln!(f, "Would reformat: {}", fs::relativize_path(path).bold())?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write a summary of the formatting results to the given writer.
|
||||
fn write_summary(&self, f: &mut impl Write) -> io::Result<()> {
|
||||
impl Display for FormatSummary<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
// Compute the number of changed and unchanged files.
|
||||
let mut changed = 0u32;
|
||||
let mut formatted = 0u32;
|
||||
let mut unchanged = 0u32;
|
||||
for result in self.results {
|
||||
match &result.result {
|
||||
match result.result {
|
||||
FormatResult::Formatted => {
|
||||
changed += 1;
|
||||
// If we're running in check mode, report on any files that would be formatted.
|
||||
if self.mode.is_check() {
|
||||
writeln!(
|
||||
f,
|
||||
"Would reformat: {}",
|
||||
fs::relativize_path(&result.path).bold()
|
||||
)?;
|
||||
}
|
||||
formatted += 1;
|
||||
}
|
||||
FormatResult::Unchanged => unchanged += 1,
|
||||
FormatResult::Diff { .. } => {
|
||||
changed += 1;
|
||||
}
|
||||
FormatResult::Skipped => {}
|
||||
}
|
||||
}
|
||||
|
||||
// Write out a summary of the formatting results.
|
||||
if changed > 0 && unchanged > 0 {
|
||||
writeln!(
|
||||
if formatted > 0 && unchanged > 0 {
|
||||
write!(
|
||||
f,
|
||||
"{} file{} {}, {} file{} left unchanged",
|
||||
changed,
|
||||
if changed == 1 { "" } else { "s" },
|
||||
formatted,
|
||||
if formatted == 1 { "" } else { "s" },
|
||||
match self.mode {
|
||||
FormatMode::Write => "reformatted",
|
||||
FormatMode::Check | FormatMode::Diff => "would be reformatted",
|
||||
FormatMode::Check => "would be reformatted",
|
||||
},
|
||||
unchanged,
|
||||
if unchanged == 1 { "" } else { "s" },
|
||||
)
|
||||
} else if changed > 0 {
|
||||
writeln!(
|
||||
} else if formatted > 0 {
|
||||
write!(
|
||||
f,
|
||||
"{} file{} {}",
|
||||
changed,
|
||||
if changed == 1 { "" } else { "s" },
|
||||
formatted,
|
||||
if formatted == 1 { "" } else { "s" },
|
||||
match self.mode {
|
||||
FormatMode::Write => "reformatted",
|
||||
FormatMode::Check | FormatMode::Diff => "would be reformatted",
|
||||
FormatMode::Check => "would be reformatted",
|
||||
}
|
||||
)
|
||||
} else if unchanged > 0 {
|
||||
writeln!(
|
||||
write!(
|
||||
f,
|
||||
"{} file{} left unchanged",
|
||||
unchanged,
|
||||
@@ -556,26 +396,6 @@ pub(crate) enum FormatCommandError {
|
||||
Read(Option<PathBuf>, SourceError),
|
||||
Format(Option<PathBuf>, FormatModuleError),
|
||||
Write(Option<PathBuf>, SourceError),
|
||||
Diff(Option<PathBuf>, io::Error),
|
||||
}
|
||||
|
||||
impl FormatCommandError {
|
||||
fn path(&self) -> Option<&Path> {
|
||||
match self {
|
||||
Self::Ignore(err) => {
|
||||
if let ignore::Error::WithPath { path, .. } = err {
|
||||
Some(path.as_path())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Self::Panic(path, _)
|
||||
| Self::Read(path, _)
|
||||
| Self::Format(path, _)
|
||||
| Self::Write(path, _)
|
||||
| Self::Diff(path, _) => path.as_deref(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for FormatCommandError {
|
||||
@@ -641,24 +461,6 @@ impl Display for FormatCommandError {
|
||||
write!(f, "{}{} {err}", "Failed to format".bold(), ":".bold())
|
||||
}
|
||||
}
|
||||
Self::Diff(path, err) => {
|
||||
if let Some(path) = path {
|
||||
write!(
|
||||
f,
|
||||
"{}{}{} {err}",
|
||||
"Failed to generate diff for ".bold(),
|
||||
fs::relativize_path(path).bold(),
|
||||
":".bold()
|
||||
)
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"{}{} {err}",
|
||||
"Failed to generate diff".bold(),
|
||||
":".bold()
|
||||
)
|
||||
}
|
||||
}
|
||||
Self::Panic(path, err) => {
|
||||
let message = r#"This indicates a bug in Ruff. If you could open an issue at:
|
||||
|
||||
@@ -685,128 +487,3 @@ impl Display for FormatCommandError {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn warn_incompatible_formatter_settings(
|
||||
pyproject_config: &PyprojectConfig,
|
||||
resolver: Option<&Resolver>,
|
||||
) {
|
||||
// First, collect all rules that are incompatible regardless of the linter-specific settings.
|
||||
let mut incompatible_rules = FxHashSet::default();
|
||||
for setting in std::iter::once(&pyproject_config.settings)
|
||||
.chain(resolver.iter().flat_map(|resolver| resolver.settings()))
|
||||
{
|
||||
for rule in [
|
||||
// The formatter might collapse implicit string concatenation on a single line.
|
||||
Rule::SingleLineImplicitStringConcatenation,
|
||||
// Flags missing trailing commas when all arguments are on their own line:
|
||||
// ```python
|
||||
// def args(
|
||||
// aaaaaaaa, bbbbbbbbb, cccccccccc, ddddddddd, eeeeeeee, ffffff, gggggggggggg, hhhh
|
||||
// ):
|
||||
// pass
|
||||
// ```
|
||||
Rule::MissingTrailingComma,
|
||||
] {
|
||||
if setting.linter.rules.enabled(rule) {
|
||||
incompatible_rules.insert(rule);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !incompatible_rules.is_empty() {
|
||||
let mut rule_names: Vec<_> = incompatible_rules
|
||||
.into_iter()
|
||||
.map(|rule| format!("`{}`", rule.noqa_code()))
|
||||
.collect();
|
||||
rule_names.sort();
|
||||
warn_user_once!("The following rules may cause conflicts when used with the formatter: {}. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `select` or `extend-select` configuration, or adding them to the `ignore` configuration.", rule_names.join(", "));
|
||||
}
|
||||
|
||||
// Next, validate settings-specific incompatibilities.
|
||||
for setting in std::iter::once(&pyproject_config.settings)
|
||||
.chain(resolver.iter().flat_map(|resolver| resolver.settings()))
|
||||
{
|
||||
// Validate all rules that rely on tab styles.
|
||||
if setting.linter.rules.enabled(Rule::TabIndentation)
|
||||
&& setting.formatter.indent_style.is_tab()
|
||||
{
|
||||
warn_user_once!("The `format.indent-style=\"tab\"` option is incompatible with `W191`, which lints against all uses of tabs. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `\"space\"`.");
|
||||
}
|
||||
|
||||
// Validate all rules that rely on tab styles.
|
||||
if setting.linter.rules.enabled(Rule::IndentWithSpaces)
|
||||
&& setting.formatter.indent_style.is_tab()
|
||||
{
|
||||
warn_user_once!("The `format.indent-style=\"tab\"` option is incompatible with `D206`, which requires space-based indentation. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `\"space\"`.");
|
||||
}
|
||||
|
||||
// Validate all rules that rely on custom indent widths.
|
||||
if setting.linter.rules.any_enabled(&[
|
||||
Rule::IndentationWithInvalidMultiple,
|
||||
Rule::IndentationWithInvalidMultipleComment,
|
||||
]) && setting.formatter.indent_width.value() != 4
|
||||
{
|
||||
warn_user_once!("The `format.indent-width` option with a value other than 4 is incompatible with `E111` and `E114`. We recommend disabling these rules when using the formatter, which enforces a consistent indentation width. Alternatively, set the `format.indent-width` option to `4`.");
|
||||
}
|
||||
|
||||
// Validate all rules that rely on quote styles.
|
||||
if setting
|
||||
.linter
|
||||
.rules
|
||||
.any_enabled(&[Rule::BadQuotesInlineString, Rule::AvoidableEscapedQuote])
|
||||
{
|
||||
match (
|
||||
setting.linter.flake8_quotes.inline_quotes,
|
||||
setting.formatter.quote_style,
|
||||
) {
|
||||
(Quote::Double, QuoteStyle::Single) => {
|
||||
warn_user_once!("The `flake8-quotes.inline-quotes=\"double\"` option is incompatible with the formatter's `format.quote-style=\"single\"`. We recommend disabling `Q000` and `Q003` when using the formatter, which enforces a consistent quote style. Alternatively, set both options to either `\"single\"` or `\"double\"`.");
|
||||
}
|
||||
(Quote::Single, QuoteStyle::Double) => {
|
||||
warn_user_once!("The `flake8-quotes.inline-quotes=\"single\"` option is incompatible with the formatter's `format.quote-style=\"double\"`. We recommend disabling `Q000` and `Q003` when using the formatter, which enforces a consistent quote style. Alternatively, set both options to either `\"single\"` or `\"double\"`.");
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if setting.linter.rules.enabled(Rule::BadQuotesMultilineString)
|
||||
&& setting.linter.flake8_quotes.multiline_quotes == Quote::Single
|
||||
{
|
||||
warn_user_once!("The `flake8-quotes.multiline-quotes=\"single\"` option is incompatible with the formatter. We recommend disabling `Q001` when using the formatter, which enforces double quotes for multiline strings. Alternatively, set the `flake8-quotes.multiline-quotes` option to `\"double\"`.");
|
||||
}
|
||||
|
||||
if setting.linter.rules.enabled(Rule::BadQuotesDocstring)
|
||||
&& setting.linter.flake8_quotes.docstring_quotes == Quote::Single
|
||||
{
|
||||
warn_user_once!("The `flake8-quotes.docstring-quotes=\"single\"` option is incompatible with the formatter. We recommend disabling `Q002` when using the formatter, which enforces double quotes for docstrings. Alternatively, set the `flake8-quotes.docstring-quotes` option to `\"double\"`.");
|
||||
}
|
||||
|
||||
// Validate all isort settings.
|
||||
if setting.linter.rules.enabled(Rule::UnsortedImports) {
|
||||
// The formatter removes empty lines if the value is larger than 2 but always inserts an empty line after imports.
|
||||
// Two empty lines are okay because `isort` only uses this setting for top-level imports (not in nested blocks).
|
||||
if !matches!(setting.linter.isort.lines_after_imports, 1 | 2 | -1) {
|
||||
warn_user_once!("The isort option `isort.lines-after-imports` with a value other than `-1`, `1` or `2` is incompatible with the formatter. To avoid unexpected behavior, we recommend setting the option to one of: `2`, `1`, or `-1` (default).");
|
||||
}
|
||||
|
||||
// Values larger than two get reduced to one line by the formatter if the import is in a nested block.
|
||||
if setting.linter.isort.lines_between_types > 1 {
|
||||
warn_user_once!("The isort option `isort.lines-between-types` with a value greater than 1 is incompatible with the formatter. To avoid unexpected behavior, we recommend setting the option to one of: `1` or `0` (default).");
|
||||
}
|
||||
|
||||
// isort inserts a trailing comma which the formatter preserves, but only if `skip-magic-trailing-comma` is false.
|
||||
// This isn't relevant when using `force-single-line`, since isort will never include a trailing comma in that case.
|
||||
if setting.formatter.magic_trailing_comma.is_ignore()
|
||||
&& !setting.linter.isort.force_single_line
|
||||
{
|
||||
if setting.linter.isort.force_wrap_aliases {
|
||||
warn_user_once!("The isort option `isort.force-wrap-aliases` is incompatible with the formatter `format.skip-magic-trailing-comma=true` option. To avoid unexpected behavior, we recommend either setting `isort.force-wrap-aliases=false` or `format.skip-magic-trailing-comma=false`.");
|
||||
}
|
||||
|
||||
if setting.linter.isort.split_on_trailing_comma {
|
||||
warn_user_once!("The isort option `isort.split-on-trailing-comma` is incompatible with the formatter `format.skip-magic-trailing-comma=true` option. To avoid unexpected behavior, we recommend either setting `isort.split-on-trailing-comma=false` or `format.skip-magic-trailing-comma=false`.");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,14 +6,11 @@ use log::error;
|
||||
|
||||
use ruff_linter::source_kind::SourceKind;
|
||||
use ruff_python_ast::{PySourceType, SourceType};
|
||||
use ruff_workspace::resolver::{match_exclusion, python_file_at_path};
|
||||
use ruff_workspace::resolver::python_file_at_path;
|
||||
use ruff_workspace::FormatterSettings;
|
||||
|
||||
use crate::args::{CliOverrides, FormatArguments};
|
||||
use crate::commands::format::{
|
||||
format_source, warn_incompatible_formatter_settings, FormatCommandError, FormatMode,
|
||||
FormatResult, FormattedSource,
|
||||
};
|
||||
use crate::commands::format::{format_source, FormatCommandError, FormatMode, FormatResult};
|
||||
use crate::resolve::resolve;
|
||||
use crate::stdin::read_from_stdin;
|
||||
use crate::ExitStatus;
|
||||
@@ -26,24 +23,15 @@ pub(crate) fn format_stdin(cli: &FormatArguments, overrides: &CliOverrides) -> R
|
||||
overrides,
|
||||
cli.stdin_filename.as_deref(),
|
||||
)?;
|
||||
let mode = if cli.check {
|
||||
FormatMode::Check
|
||||
} else {
|
||||
FormatMode::Write
|
||||
};
|
||||
|
||||
warn_incompatible_formatter_settings(&pyproject_config, None);
|
||||
|
||||
let mode = FormatMode::from_cli(cli);
|
||||
|
||||
if pyproject_config.settings.file_resolver.force_exclude {
|
||||
if let Some(filename) = cli.stdin_filename.as_deref() {
|
||||
if !python_file_at_path(filename, &pyproject_config, overrides)? {
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
|
||||
let format_settings = &pyproject_config.settings.formatter;
|
||||
if filename
|
||||
.file_name()
|
||||
.is_some_and(|name| match_exclusion(filename, name, &format_settings.exclude))
|
||||
{
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
if let Some(filename) = cli.stdin_filename.as_deref() {
|
||||
if !python_file_at_path(filename, &pyproject_config, overrides)? {
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -62,7 +50,7 @@ pub(crate) fn format_stdin(cli: &FormatArguments, overrides: &CliOverrides) -> R
|
||||
) {
|
||||
Ok(result) => match mode {
|
||||
FormatMode::Write => Ok(ExitStatus::Success),
|
||||
FormatMode::Check | FormatMode::Diff => {
|
||||
FormatMode::Check => {
|
||||
if result.is_formatted() {
|
||||
Ok(ExitStatus::Failure)
|
||||
} else {
|
||||
@@ -97,32 +85,15 @@ fn format_source_code(
|
||||
};
|
||||
|
||||
// Format the source.
|
||||
let formatted = format_source(&source_kind, source_type, path, settings)?;
|
||||
let formatted = format_source(source_kind, source_type, path, settings)?;
|
||||
|
||||
match &formatted {
|
||||
FormattedSource::Formatted(formatted) => match mode {
|
||||
FormatMode::Write => {
|
||||
let mut writer = stdout().lock();
|
||||
formatted
|
||||
.write(&mut writer)
|
||||
.map_err(|err| FormatCommandError::Write(path.map(Path::to_path_buf), err))?;
|
||||
}
|
||||
FormatMode::Check => {}
|
||||
FormatMode::Diff => {
|
||||
source_kind
|
||||
.diff(formatted, path, &mut stdout().lock())
|
||||
.map_err(|err| FormatCommandError::Diff(path.map(Path::to_path_buf), err))?;
|
||||
}
|
||||
},
|
||||
FormattedSource::Unchanged => {
|
||||
// Write to stdout regardless of whether the source was formatted
|
||||
if mode.is_write() {
|
||||
let mut writer = stdout().lock();
|
||||
source_kind
|
||||
.write(&mut writer)
|
||||
.map_err(|err| FormatCommandError::Write(path.map(Path::to_path_buf), err))?;
|
||||
}
|
||||
}
|
||||
// Write to stdout regardless of whether the source was formatted.
|
||||
if mode.is_write() {
|
||||
let mut writer = stdout().lock();
|
||||
formatted
|
||||
.source_kind()
|
||||
.write(&mut writer)
|
||||
.map_err(|err| FormatCommandError::Write(path.map(Path::to_path_buf), err))?;
|
||||
}
|
||||
|
||||
Ok(FormatResult::from(formatted))
|
||||
|
||||
@@ -9,4 +9,3 @@ pub(crate) mod linter;
|
||||
pub(crate) mod rule;
|
||||
pub(crate) mod show_files;
|
||||
pub(crate) mod show_settings;
|
||||
pub(crate) mod version;
|
||||
|
||||
@@ -5,7 +5,7 @@ use anyhow::Result;
|
||||
use itertools::Itertools;
|
||||
|
||||
use ruff_linter::warn_user_once;
|
||||
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};
|
||||
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig};
|
||||
|
||||
use crate::args::CliOverrides;
|
||||
|
||||
@@ -25,13 +25,12 @@ pub(crate) fn show_files(
|
||||
}
|
||||
|
||||
// Print the list of files.
|
||||
for path in paths
|
||||
.into_iter()
|
||||
for entry in paths
|
||||
.iter()
|
||||
.flatten()
|
||||
.map(ResolvedFile::into_path)
|
||||
.sorted_unstable()
|
||||
.sorted_by(|a, b| a.path().cmp(b.path()))
|
||||
{
|
||||
writeln!(writer, "{}", path.to_string_lossy())?;
|
||||
writeln!(writer, "{}", entry.path().to_string_lossy())?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -4,7 +4,7 @@ use std::path::PathBuf;
|
||||
use anyhow::{bail, Result};
|
||||
use itertools::Itertools;
|
||||
|
||||
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};
|
||||
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig};
|
||||
|
||||
use crate::args::CliOverrides;
|
||||
|
||||
@@ -19,17 +19,16 @@ pub(crate) fn show_settings(
|
||||
let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
|
||||
|
||||
// Print the list of files.
|
||||
let Some(path) = paths
|
||||
.into_iter()
|
||||
let Some(entry) = paths
|
||||
.iter()
|
||||
.flatten()
|
||||
.map(ResolvedFile::into_path)
|
||||
.sorted_unstable()
|
||||
.sorted_by(|a, b| a.path().cmp(b.path()))
|
||||
.next()
|
||||
else {
|
||||
bail!("No files found under the given path");
|
||||
};
|
||||
|
||||
let settings = resolver.resolve(&path, pyproject_config);
|
||||
let path = entry.path();
|
||||
let settings = resolver.resolve(path, pyproject_config);
|
||||
|
||||
writeln!(writer, "Resolved settings for: {path:?}")?;
|
||||
if let Some(settings_path) = pyproject_config.path.as_ref() {
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
use std::io::{self, BufWriter, Write};
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::args::HelpFormat;
|
||||
|
||||
/// Display version information
|
||||
pub(crate) fn version(output_format: HelpFormat) -> Result<()> {
|
||||
let mut stdout = BufWriter::new(io::stdout().lock());
|
||||
let version_info = crate::version::version();
|
||||
|
||||
match output_format {
|
||||
HelpFormat::Text => {
|
||||
writeln!(stdout, "ruff {}", &version_info)?;
|
||||
}
|
||||
HelpFormat::Json => {
|
||||
serde_json::to_writer_pretty(stdout, &version_info)?;
|
||||
}
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
@@ -2,25 +2,28 @@
|
||||
|
||||
use std::fs::File;
|
||||
use std::io;
|
||||
use std::ops::{Add, AddAssign};
|
||||
use std::ops::AddAssign;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use colored::Colorize;
|
||||
use filetime::FileTime;
|
||||
use log::{debug, error, warn};
|
||||
use ruff_linter::settings::types::UnsafeFixes;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::cache::{Cache, FileCacheKey, LintCacheData};
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult};
|
||||
use ruff_linter::logging::DisplayParseError;
|
||||
use ruff_linter::message::Message;
|
||||
use ruff_linter::pyproject_toml::lint_pyproject_toml;
|
||||
use ruff_linter::registry::AsRule;
|
||||
use ruff_linter::settings::types::UnsafeFixes;
|
||||
use ruff_linter::settings::{flags, LinterSettings};
|
||||
use ruff_linter::source_kind::{SourceError, SourceKind};
|
||||
use ruff_linter::{fs, IOError, SyntaxError};
|
||||
use ruff_macros::CacheKey;
|
||||
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
|
||||
use ruff_python_ast::imports::ImportMap;
|
||||
use ruff_python_ast::{SourceType, TomlSourceType};
|
||||
@@ -28,10 +31,37 @@ use ruff_source_file::{LineIndex, SourceCode, SourceFileBuilder};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
use ruff_workspace::Settings;
|
||||
|
||||
use crate::cache::Cache;
|
||||
|
||||
#[derive(CacheKey)]
|
||||
pub(crate) struct FileCacheKey {
|
||||
/// Timestamp when the file was last modified before the (cached) check.
|
||||
file_last_modified: FileTime,
|
||||
/// Permissions of the file before the (cached) check.
|
||||
file_permissions_mode: u32,
|
||||
}
|
||||
|
||||
impl FileCacheKey {
|
||||
fn from_path(path: &Path) -> io::Result<FileCacheKey> {
|
||||
// Construct a cache key for the file
|
||||
let metadata = path.metadata()?;
|
||||
|
||||
#[cfg(unix)]
|
||||
let permissions = metadata.permissions().mode();
|
||||
#[cfg(windows)]
|
||||
let permissions: u32 = metadata.permissions().readonly().into();
|
||||
|
||||
Ok(FileCacheKey {
|
||||
file_last_modified: FileTime::from_last_modification_time(&metadata),
|
||||
file_permissions_mode: permissions,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
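A rough illustration of how this key behaves (the `Key` struct below is a simplified stand-in, not the real `FileCacheKey`, which stores a `filetime::FileTime` and, on Windows, the read-only bit): any change to the modification time or to the permission bits produces a different key, so the cached entry is bypassed and the file is re-checked, as exercised by the permission-change test earlier in this diff.

```rust
use std::time::{Duration, SystemTime};

// Simplified stand-in for the cache key used only for this sketch.
#[derive(Debug, PartialEq, Eq)]
struct Key {
    modified: SystemTime,
    permissions_mode: u32,
}

fn main() {
    let modified = SystemTime::now();
    let before = Key { modified, permissions_mode: 0o644 };

    // Touching the file (newer mtime) or flipping its permissions yields a
    // different key, so the stale cache entry no longer matches.
    let touched = Key { modified: modified + Duration::from_secs(1), permissions_mode: 0o644 };
    let chmodded = Key { modified, permissions_mode: 0o600 };

    assert_ne!(before, touched);
    assert_ne!(before, chmodded);
}
```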
#[derive(Debug, Default, PartialEq)]
|
||||
pub(crate) struct Diagnostics {
|
||||
pub(crate) messages: Vec<Message>,
|
||||
pub(crate) fixed: FixMap,
|
||||
pub(crate) fixed: FxHashMap<String, FixTable>,
|
||||
pub(crate) imports: ImportMap,
|
||||
pub(crate) notebook_indexes: FxHashMap<String, NotebookIndex>,
|
||||
}
|
||||
@@ -44,7 +74,7 @@ impl Diagnostics {
|
||||
) -> Self {
|
||||
Self {
|
||||
messages,
|
||||
fixed: FixMap::default(),
|
||||
fixed: FxHashMap::default(),
|
||||
imports,
|
||||
notebook_indexes,
|
||||
}
|
||||
@@ -112,68 +142,22 @@ impl Diagnostics {
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for Diagnostics {
|
||||
type Output = Diagnostics;
|
||||
|
||||
fn add(mut self, other: Self) -> Self::Output {
|
||||
self += other;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign for Diagnostics {
|
||||
fn add_assign(&mut self, other: Self) {
|
||||
self.messages.extend(other.messages);
|
||||
self.imports.extend(other.imports);
|
||||
self.fixed += other.fixed;
|
||||
self.notebook_indexes.extend(other.notebook_indexes);
|
||||
}
|
||||
}
|
||||
|
||||
/// A collection of fixes indexed by file path.
|
||||
#[derive(Debug, Default, PartialEq)]
|
||||
pub(crate) struct FixMap(FxHashMap<String, FixTable>);
|
||||
|
||||
impl FixMap {
|
||||
/// Returns `true` if there are no fixes in the map.
|
||||
pub(crate) fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
}
|
||||
|
||||
/// Returns an iterator over the fixes in the map, along with the file path.
|
||||
pub(crate) fn iter(&self) -> impl Iterator<Item = (&String, &FixTable)> {
|
||||
self.0.iter()
|
||||
}
|
||||
|
||||
/// Returns an iterator over the fixes in the map.
|
||||
pub(crate) fn values(&self) -> impl Iterator<Item = &FixTable> {
|
||||
self.0.values()
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<(String, FixTable)> for FixMap {
|
||||
fn from_iter<T: IntoIterator<Item = (String, FixTable)>>(iter: T) -> Self {
|
||||
Self(
|
||||
iter.into_iter()
|
||||
.filter(|(_, fixes)| !fixes.is_empty())
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign for FixMap {
|
||||
fn add_assign(&mut self, rhs: Self) {
|
||||
for (filename, fixed) in rhs.0 {
|
||||
for (filename, fixed) in other.fixed {
|
||||
if fixed.is_empty() {
|
||||
continue;
|
||||
}
|
||||
let fixed_in_file = self.0.entry(filename).or_default();
|
||||
let fixed_in_file = self.fixed.entry(filename).or_default();
|
||||
for (rule, count) in fixed {
|
||||
if count > 0 {
|
||||
*fixed_in_file.entry(rule).or_default() += count;
|
||||
}
|
||||
}
|
||||
}
|
||||
self.notebook_indexes.extend(other.notebook_indexes);
|
||||
}
|
||||
}
|
||||
|
||||
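The merge above folds the per-file fix counts of one `Diagnostics` into another: files with empty fix tables are skipped, and counts for the same rule in the same file are summed. A small, std-only sketch of that merge (the `Fixes` alias and `merge` function are illustrative; Ruff keys its `FixTable` by `Rule`, not by string):

```rust
use std::collections::HashMap;

// Per-file fix counts, keyed by file name and then by rule code.
type Fixes = HashMap<String, HashMap<String, usize>>;

fn merge(acc: &mut Fixes, other: Fixes) {
    for (filename, fixed) in other {
        // Skip files with no fixes so empty tables don't clutter the map.
        if fixed.is_empty() {
            continue;
        }
        let fixed_in_file = acc.entry(filename).or_default();
        for (rule, count) in fixed {
            if count > 0 {
                *fixed_in_file.entry(rule).or_default() += count;
            }
        }
    }
}

fn main() {
    let mut acc: Fixes = HashMap::from([(
        "a.py".to_string(),
        HashMap::from([("E501".to_string(), 2)]),
    )]);
    let other: Fixes = HashMap::from([
        ("a.py".to_string(), HashMap::from([("E501".to_string(), 1)])),
        ("b.py".to_string(), HashMap::new()),
    ]);
    merge(&mut acc, other);
    assert_eq!(acc["a.py"]["E501"], 3);
    assert!(!acc.contains_key("b.py"));
}
```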
@@ -188,28 +172,21 @@ pub(crate) fn lint_path(
unsafe_fixes: UnsafeFixes,
) -> Result<Diagnostics> {
// Check the cache.
// TODO(charlie): `fixer::Mode::Apply` and `fixer::Mode::Diff` both have
// side-effects that aren't captured in the cache. (In practice, it's fine
// to cache `fixer::Mode::Apply`, since a file either has no fixes, or we'll
// write the fixes to disk, thus invalidating the cache. But it's a bit hard
// to reason about. We need to come up with a better solution here.)
let caching = match cache {
Some(cache) if noqa.into() => {
Some(cache) if noqa.into() && fix_mode.is_generate() => {
let relative_path = cache
.relative_path(path)
.expect("wrong package cache for file");

let cache_key = FileCacheKey::from_path(path).context("Failed to create cache key")?;
let cached_diagnostics = cache
.get(relative_path, &cache_key)
.and_then(|entry| entry.to_diagnostics(path));
if let Some(diagnostics) = cached_diagnostics {
// `FixMode::Generate` and `FixMode::Diff` rely on side-effects (writing to disk,
// and writing the diff to stdout, respectively). If a file has diagnostics, we
// need to avoid reading from and writing to the cache in these modes.
if match fix_mode {
flags::FixMode::Generate => true,
flags::FixMode::Apply | flags::FixMode::Diff => {
diagnostics.messages.is_empty() && diagnostics.fixed.is_empty()
}
} {
return Ok(diagnostics);
}

if let Some(cache) = cache.get(relative_path, &cache_key) {
return Ok(cache.as_diagnostics(path));
}

// Stash the file metadata for later so when we update the cache it reflects the prerun
@@ -309,25 +286,13 @@ pub(crate) fn lint_path(
if let Some((cache, relative_path, key)) = caching {
// We don't cache parsing errors.
if parse_error.is_none() {
// `FixMode::Generate` and `FixMode::Diff` rely on side-effects (writing to disk,
// and writing the diff to stdout, respectively). If a file has diagnostics, we
// need to avoid reading from and writing to the cache in these modes.
if match fix_mode {
flags::FixMode::Generate => true,
flags::FixMode::Apply | flags::FixMode::Diff => {
messages.is_empty() && fixed.is_empty()
}
} {
cache.update_lint(
relative_path.to_owned(),
&key,
LintCacheData::from_messages(
&messages,
imports.clone(),
source_kind.as_ipy_notebook().map(Notebook::index).cloned(),
),
);
}
cache.update(
relative_path.to_owned(),
key,
&messages,
&imports,
source_kind.as_ipy_notebook().map(Notebook::index),
);
}
}

@@ -353,7 +318,7 @@ pub(crate) fn lint_path(

Ok(Diagnostics {
messages,
fixed: FixMap::from_iter([(fs::relativize_path(path), fixed)]),
fixed: FxHashMap::from_iter([(fs::relativize_path(path), fixed)]),
imports,
notebook_indexes,
})
@@ -471,7 +436,7 @@ pub(crate) fn lint_stdin(

Ok(Diagnostics {
messages,
fixed: FixMap::from_iter([(
fixed: FxHashMap::from_iter([(
fs::relativize_path(path.unwrap_or_else(|| Path::new("-"))),
fixed,
)]),

@@ -1,5 +1,3 @@
#![allow(clippy::print_stdout)]

use std::fs::File;
use std::io::{self, stdout, BufWriter, Write};
use std::path::{Path, PathBuf};
@@ -18,7 +16,7 @@ use ruff_linter::settings::types::SerializationFormat;
use ruff_linter::{fs, warn_user, warn_user_once};
use ruff_workspace::Settings;

use crate::args::{Args, CheckCommand, Command, FormatCommand, HelpFormat};
use crate::args::{Args, CheckCommand, Command, FormatCommand};
use crate::printer::{Flags as PrinterFlags, Printer};

pub mod args;
@@ -29,7 +27,6 @@ mod panic;
mod printer;
pub mod resolve;
mod stdin;
mod version;

#[derive(Copy, Clone)]
pub enum ExitStatus {
@@ -101,15 +98,6 @@ fn is_stdin(files: &[PathBuf], stdin_filename: Option<&Path>) -> bool {
file == Path::new("-")
}

/// Get the actual value of the `format` desired from either `output_format`
/// or `format`, and warn the user if they're using the deprecated form.
fn resolve_help_output_format(output_format: HelpFormat, format: Option<HelpFormat>) -> HelpFormat {
if format.is_some() {
warn_user!("The `--format` argument is deprecated. Use `--output-format` instead.");
}
format.unwrap_or(output_format)
}

pub fn run(
Args {
command,
@@ -146,22 +134,12 @@ pub fn run(
set_up_logging(&log_level)?;

match command {
Command::Version { output_format } => {
commands::version::version(output_format)?;
Ok(ExitStatus::Success)
}
Command::Rule {
rule,
all,
format,
mut output_format,
} => {
output_format = resolve_help_output_format(output_format, format);
Command::Rule { rule, all, format } => {
if all {
commands::rule::rules(output_format)?;
commands::rule::rules(format)?;
}
if let Some(rule) = rule {
commands::rule::rule(rule, output_format)?;
commands::rule::rule(rule, format)?;
}
Ok(ExitStatus::Success)
}
@@ -169,12 +147,8 @@ pub fn run(
commands::config::config(option.as_deref())?;
Ok(ExitStatus::Success)
}
Command::Linter {
format,
mut output_format,
} => {
output_format = resolve_help_output_format(output_format, format);
commands::linter::linter(output_format)?;
Command::Linter { format } => {
commands::linter::linter(format)?;
Ok(ExitStatus::Success)
}
Command::Clean => {
@@ -191,6 +165,8 @@ pub fn run(
}

fn format(args: FormatCommand, log_level: LogLevel) -> Result<ExitStatus> {
warn_user_once!("`ruff format` is not yet stable, and subject to change in future versions.");

let (cli, overrides) = args.partition();

if is_stdin(&cli.files, cli.stdin_filename.as_deref()) {

@@ -7,9 +7,11 @@ use anyhow::Result;
use bitflags::bitflags;
use colored::Colorize;
use itertools::{iterate, Itertools};
use rustc_hash::FxHashMap;
use serde::Serialize;

use ruff_linter::fs::relativize_path;
use ruff_linter::linter::FixTable;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
@@ -20,7 +22,7 @@ use ruff_linter::registry::{AsRule, Rule};
use ruff_linter::settings::flags::{self};
use ruff_linter::settings::types::{SerializationFormat, UnsafeFixes};

use crate::diagnostics::{Diagnostics, FixMap};
use crate::diagnostics::Diagnostics;

bitflags! {
#[derive(Default, Debug, Copy, Clone)]
@@ -460,7 +462,7 @@ fn show_fix_status(fix_mode: flags::FixMode, fixables: Option<&FixableStatistics
(!fix_mode.is_apply()) && fixables.is_some_and(FixableStatistics::any_applicable_fixes)
}

fn print_fix_summary(writer: &mut dyn Write, fixed: &FixMap) -> Result<()> {
fn print_fix_summary(writer: &mut dyn Write, fixed: &FxHashMap<String, FixTable>) -> Result<()> {
let total = fixed
.values()
.map(|table| table.values().sum::<usize>())

@@ -1,5 +0,0 @@
---
source: crates/ruff_cli/src/version.rs
expression: version
---
0.0.0
@@ -1,5 +0,0 @@
---
source: crates/ruff_cli/src/version.rs
expression: version
---
0.0.0 (53b0f5d92 2023-10-19)
@@ -1,5 +0,0 @@
---
source: crates/ruff_cli/src/version.rs
expression: version
---
0.0.0+24 (53b0f5d92 2023-10-19)
@@ -1,14 +0,0 @@
---
source: crates/ruff_cli/src/version.rs
expression: version
---
{
"version": "0.0.0",
"commit_info": {
"short_commit_hash": "53b0f5d92",
"commit_hash": "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7",
"commit_date": "2023-10-19",
"last_tag": "v0.0.1",
"commits_since_last_tag": 0
}
}
@@ -1,130 +0,0 @@
//! Code for representing Ruff's release version number.
use serde::Serialize;
use std::fmt;

/// Information about the git repository where Ruff was built from.
#[derive(Serialize)]
pub(crate) struct CommitInfo {
short_commit_hash: String,
commit_hash: String,
commit_date: String,
last_tag: Option<String>,
commits_since_last_tag: u32,
}

/// Ruff's version.
#[derive(Serialize)]
pub(crate) struct VersionInfo {
/// Ruff's version, such as "0.5.1"
version: String,
/// Information about the git commit we may have been built from.
///
/// `None` if not built from a git repo or if retrieval failed.
commit_info: Option<CommitInfo>,
}

impl fmt::Display for VersionInfo {
/// Formatted version information: "<version>[+<commits>] (<commit> <date>)"
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.version)?;

if let Some(ref ci) = self.commit_info {
if ci.commits_since_last_tag > 0 {
write!(f, "+{}", ci.commits_since_last_tag)?;
}
write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
}

Ok(())
}
}

/// Returns information about Ruff's version.
pub(crate) fn version() -> VersionInfo {
// Environment variables are only read at compile-time
macro_rules! option_env_str {
($name:expr) => {
option_env!($name).map(|s| s.to_string())
};
}

// This version is pulled from Cargo.toml and set by Cargo
let version = option_env_str!("CARGO_PKG_VERSION").unwrap();

// Commit info is pulled from git and set by `build.rs`
let commit_info = option_env_str!("RUFF_COMMIT_HASH").map(|commit_hash| CommitInfo {
short_commit_hash: option_env_str!("RUFF_COMMIT_SHORT_HASH").unwrap(),
commit_hash,
commit_date: option_env_str!("RUFF_COMMIT_DATE").unwrap(),
last_tag: option_env_str!("RUFF_LAST_TAG"),
commits_since_last_tag: option_env_str!("RUFF_LAST_TAG_DISTANCE")
.as_deref()
.map_or(0, |value| value.parse::<u32>().unwrap_or(0)),
});

VersionInfo {
version,
commit_info,
}
}

#[cfg(test)]
mod tests {
use insta::{assert_display_snapshot, assert_json_snapshot};

use super::{CommitInfo, VersionInfo};

#[test]
fn version_formatting() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: None,
};
assert_display_snapshot!(version);
}

#[test]
fn version_formatting_with_commit_info() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: Some(CommitInfo {
short_commit_hash: "53b0f5d92".to_string(),
commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
last_tag: Some("v0.0.1".to_string()),
commit_date: "2023-10-19".to_string(),
commits_since_last_tag: 0,
}),
};
assert_display_snapshot!(version);
}

#[test]
fn version_formatting_with_commits_since_last_tag() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: Some(CommitInfo {
short_commit_hash: "53b0f5d92".to_string(),
commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
last_tag: Some("v0.0.1".to_string()),
commit_date: "2023-10-19".to_string(),
commits_since_last_tag: 24,
}),
};
assert_display_snapshot!(version);
}

#[test]
fn version_serializable() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: Some(CommitInfo {
short_commit_hash: "53b0f5d92".to_string(),
commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
last_tag: Some("v0.0.1".to_string()),
commit_date: "2023-10-19".to_string(),
commits_since_last_tag: 0,
}),
};
assert_json_snapshot!(version);
}
}
@@ -1,7 +1,6 @@
|
||||
#![cfg(not(target_family = "wasm"))]
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
use std::str;
|
||||
|
||||
@@ -14,7 +13,7 @@ const BIN_NAME: &str = "ruff";
|
||||
#[test]
|
||||
fn default_options() {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["format", "--isolated", "--stdin-filename", "test.py"])
|
||||
.args(["format", "--isolated"])
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
def foo(arg1, arg2,):
|
||||
@@ -40,6 +39,7 @@ if condition:
|
||||
print('Hy "Micha"') # Should not change quotes
|
||||
|
||||
----- stderr -----
|
||||
warning: `ruff format` is not yet stable, and subject to change in future versions.
|
||||
"###);
|
||||
}
|
||||
|
||||
@@ -50,9 +50,6 @@ fn format_options() -> Result<()> {
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
indent-width = 8
|
||||
line-length = 84
|
||||
|
||||
[format]
|
||||
indent-style = "tab"
|
||||
quote-style = "single"
|
||||
@@ -67,7 +64,7 @@ line-ending = "cr-lf"
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
def foo(arg1, arg2,):
|
||||
print("Shouldn't change quotes. It exceeds the line width with the tab size 8")
|
||||
print("Shouldn't change quotes")
|
||||
|
||||
|
||||
if condition:
|
||||
@@ -79,248 +76,14 @@ if condition:
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
def foo(arg1, arg2):
|
||||
print(
|
||||
"Shouldn't change quotes. It exceeds the line width with the tab size 8"
|
||||
)
|
||||
print("Shouldn't change quotes")
|
||||
|
||||
|
||||
if condition:
|
||||
print('Should change quotes')
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mixed_line_endings() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
|
||||
fs::write(
|
||||
tempdir.path().join("main.py"),
|
||||
"from test import say_hy\n\nif __name__ == \"__main__\":\n say_hy(\"dear Ruff contributor\")\n",
|
||||
)?;
|
||||
|
||||
fs::write(
|
||||
tempdir.path().join("test.py"),
|
||||
"def say_hy(name: str):\r\n print(f\"Hy {name}\")\r\n",
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(["format", "--no-cache", "--diff", "--isolated"])
|
||||
.arg("."), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
2 files left unchanged
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn exclude() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-exclude = ["out"]
|
||||
|
||||
[format]
|
||||
exclude = ["test.py", "generated.py"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
fs::write(
|
||||
tempdir.path().join("main.py"),
|
||||
r#"
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == "__main__":
|
||||
say_hy("dear Ruff contributor")
|
||||
"#,
|
||||
)?;
|
||||
|
||||
// Excluded file but passed to the CLI directly, should be formatted
|
||||
let test_path = tempdir.path().join("test.py");
|
||||
fs::write(
|
||||
&test_path,
|
||||
r#"
|
||||
def say_hy(name: str):
|
||||
print(f"Hy {name}")"#,
|
||||
)?;
|
||||
|
||||
fs::write(
|
||||
tempdir.path().join("generated.py"),
|
||||
r#"NUMBERS = [
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
|
||||
10, 11, 12, 13, 14, 15, 16, 17, 18, 19
|
||||
]
|
||||
OTHER = "OTHER"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let out_dir = tempdir.path().join("out");
|
||||
fs::create_dir(&out_dir)?;
|
||||
|
||||
fs::write(out_dir.join("a.py"), "a = a")?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(["format", "--no-cache", "--check", "--config"])
|
||||
.arg(ruff_toml.file_name().unwrap())
|
||||
// Explicitly pass test.py, should be formatted regardless of it being excluded by format.exclude
|
||||
.arg(test_path.file_name().unwrap())
|
||||
// Format all other files in the directory, should respect the `exclude` and `format.exclude` options
|
||||
.arg("."), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
Would reformat: main.py
|
||||
Would reformat: test.py
|
||||
2 files would be reformatted
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn force_exclude() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-exclude = ["out"]
|
||||
|
||||
[format]
|
||||
exclude = ["test.py", "generated.py"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
fs::write(
|
||||
tempdir.path().join("main.py"),
|
||||
r#"
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == "__main__":
|
||||
say_hy("dear Ruff contributor")
|
||||
"#,
|
||||
)?;
|
||||
|
||||
// Excluded file but passed to the CLI directly, should be formatted
|
||||
let test_path = tempdir.path().join("test.py");
|
||||
fs::write(
|
||||
&test_path,
|
||||
r#"
|
||||
def say_hy(name: str):
|
||||
print(f"Hy {name}")"#,
|
||||
)?;
|
||||
|
||||
fs::write(
|
||||
tempdir.path().join("generated.py"),
|
||||
r#"NUMBERS = [
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
|
||||
10, 11, 12, 13, 14, 15, 16, 17, 18, 19
|
||||
]
|
||||
OTHER = "OTHER"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let out_dir = tempdir.path().join("out");
|
||||
fs::create_dir(&out_dir)?;
|
||||
|
||||
fs::write(out_dir.join("a.py"), "a = a")?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(["format", "--no-cache", "--force-exclude", "--check", "--config"])
|
||||
.arg(ruff_toml.file_name().unwrap())
|
||||
// Explicitly pass test.py, should be respect the `format.exclude` when `--force-exclude` is present
|
||||
.arg(test_path.file_name().unwrap())
|
||||
// Format all other files in the directory, should respect the `exclude` and `format.exclude` options
|
||||
.arg("."), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
Would reformat: main.py
|
||||
1 file would be reformatted
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn exclude_stdin() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-select = ["B", "Q"]
|
||||
ignore = ["Q000", "Q001", "Q002", "Q003"]
|
||||
|
||||
[format]
|
||||
exclude = ["generated.py"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(["format", "--config", &ruff_toml.file_name().unwrap().to_string_lossy(), "--stdin-filename", "generated.py", "-"])
|
||||
.pass_stdin(r#"
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == '__main__':
|
||||
say_hy("dear Ruff contributor")
|
||||
"#), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == "__main__":
|
||||
say_hy("dear Ruff contributor")
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn force_exclude_stdin() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-select = ["B", "Q"]
|
||||
ignore = ["Q000", "Q001", "Q002", "Q003"]
|
||||
|
||||
[format]
|
||||
exclude = ["generated.py"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(["format", "--config", &ruff_toml.file_name().unwrap().to_string_lossy(), "--stdin-filename", "generated.py", "--force-exclude", "-"])
|
||||
.pass_stdin(r#"
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == '__main__':
|
||||
say_hy("dear Ruff contributor")
|
||||
"#), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
warning: `ruff format` is not yet stable, and subject to change in future versions.
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
@@ -335,9 +98,6 @@ fn format_option_inheritance() -> Result<()> {
|
||||
r#"
|
||||
extend = "base.toml"
|
||||
|
||||
[lint]
|
||||
extend-select = ["COM812"]
|
||||
|
||||
[format]
|
||||
quote-style = "single"
|
||||
"#,
|
||||
@@ -379,46 +139,11 @@ if condition:
|
||||
print('Should change quotes')
|
||||
|
||||
----- stderr -----
|
||||
warning: The following rules may cause conflicts when used with the formatter: `COM812`. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `select` or `extend-select` configuration, or adding them to the `ignore` configuration.
|
||||
warning: `ruff format` is not yet stable, and subject to change in future versions.
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deprecated_options() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
tab-size = 2
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({filters => vec![
|
||||
(&*regex::escape(ruff_toml.to_str().unwrap()), "[RUFF-TOML-PATH]"),
|
||||
]}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["format", "--config"])
|
||||
.arg(&ruff_toml)
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
if True:
|
||||
pass
|
||||
"#), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
if True:
|
||||
pass
|
||||
|
||||
----- stderr -----
|
||||
warning: The `tab-size` option has been renamed to `indent-width` to emphasize that it configures the indentation used by the formatter as well as the tab width. Please update your configuration to use `indent-width = <value>` instead.
|
||||
"###);
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Since 0.1.0 the legacy format option is no longer supported
|
||||
#[test]
|
||||
fn legacy_format_option() -> Result<()> {
|
||||
@@ -457,359 +182,3 @@ format = "json"
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conflicting_options() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
indent-width = 2
|
||||
|
||||
[lint]
|
||||
select = ["ALL"]
|
||||
ignore = ["D203", "D212"]
|
||||
|
||||
[lint.isort]
|
||||
lines-after-imports = 3
|
||||
lines-between-types = 2
|
||||
force-wrap-aliases = true
|
||||
combine-as-imports = true
|
||||
split-on-trailing-comma = true
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
docstring-quotes = "single"
|
||||
multiline-quotes = "single"
|
||||
|
||||
[format]
|
||||
skip-magic-trailing-comma = true
|
||||
indent-style = "tab"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let test_path = tempdir.path().join("test.py");
|
||||
fs::write(
|
||||
&test_path,
|
||||
r#"
|
||||
def say_hy(name: str):
|
||||
print(f"Hy {name}")"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["format", "--no-cache", "--config"])
|
||||
.arg(&ruff_toml)
|
||||
.arg(test_path), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
1 file reformatted
|
||||
|
||||
----- stderr -----
|
||||
warning: The following rules may cause conflicts when used with the formatter: `COM812`, `ISC001`. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `select` or `extend-select` configuration, or adding them to the `ignore` configuration.
|
||||
warning: The `format.indent-style="tab"` option is incompatible with `W191`, which lints against all uses of tabs. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `"space"`.
|
||||
warning: The `format.indent-style="tab"` option is incompatible with `D206`, with requires space-based indentation. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `"space"`.
|
||||
warning: The `flake8-quotes.inline-quotes="single"` option is incompatible with the formatter's `format.quote-style="double"`. We recommend disabling `Q000` and `Q003` when using the formatter, which enforces a consistent quote style. Alternatively, set both options to either `"single"` or `"double"`.
|
||||
warning: The `flake8-quotes.multiline-quotes="single"` option is incompatible with the formatter. We recommend disabling `Q001` when using the formatter, which enforces double quotes for multiline strings. Alternatively, set the `flake8-quotes.multiline-quotes` option to `"double"`.`
|
||||
warning: The `flake8-quotes.multiline-quotes="single"` option is incompatible with the formatter. We recommend disabling `Q002` when using the formatter, which enforces double quotes for docstrings. Alternatively, set the `flake8-quotes.docstring-quotes` option to `"double"`.`
|
||||
warning: The isort option `isort.lines-after-imports` with a value other than `-1`, `1` or `2` is incompatible with the formatter. To avoid unexpected behavior, we recommend setting the option to one of: `2`, `1`, or `-1` (default).
|
||||
warning: The isort option `isort.lines-between-types` with a value greater than 1 is incompatible with the formatter. To avoid unexpected behavior, we recommend setting the option to one of: `1` or `0` (default).
|
||||
warning: The isort option `isort.force-wrap-aliases` is incompatible with the formatter `format.skip-magic-trailing-comma=true` option. To avoid unexpected behavior, we recommend either setting `isort.force-wrap-aliases=false` or `format.skip-magic-trailing-comma=false`.
|
||||
warning: The isort option `isort.split-on-trailing-comma` is incompatible with the formatter `format.skip-magic-trailing-comma=true` option. To avoid unexpected behavior, we recommend either setting `isort.split-on-trailing-comma=false` or `format.skip-magic-trailing-comma=false`.
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conflicting_options_stdin() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
indent-width = 2
|
||||
|
||||
[lint]
|
||||
select = ["ALL"]
|
||||
ignore = ["D203", "D212"]
|
||||
|
||||
[lint.isort]
|
||||
lines-after-imports = 3
|
||||
lines-between-types = 2
|
||||
force-wrap-aliases = true
|
||||
combine-as-imports = true
|
||||
split-on-trailing-comma = true
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
docstring-quotes = "single"
|
||||
multiline-quotes = "single"
|
||||
|
||||
[format]
|
||||
skip-magic-trailing-comma = true
|
||||
indent-style = "tab"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["format", "--config"])
|
||||
.arg(&ruff_toml)
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
def say_hy(name: str):
|
||||
print(f"Hy {name}")"#), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
def say_hy(name: str):
|
||||
print(f"Hy {name}")
|
||||
|
||||
----- stderr -----
|
||||
warning: The following rules may cause conflicts when used with the formatter: `COM812`, `ISC001`. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `select` or `extend-select` configuration, or adding them to the `ignore` configuration.
|
||||
warning: The `format.indent-style="tab"` option is incompatible with `W191`, which lints against all uses of tabs. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `"space"`.
|
||||
warning: The `format.indent-style="tab"` option is incompatible with `D206`, with requires space-based indentation. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `"space"`.
|
||||
warning: The `flake8-quotes.inline-quotes="single"` option is incompatible with the formatter's `format.quote-style="double"`. We recommend disabling `Q000` and `Q003` when using the formatter, which enforces a consistent quote style. Alternatively, set both options to either `"single"` or `"double"`.
|
||||
warning: The `flake8-quotes.multiline-quotes="single"` option is incompatible with the formatter. We recommend disabling `Q001` when using the formatter, which enforces double quotes for multiline strings. Alternatively, set the `flake8-quotes.multiline-quotes` option to `"double"`.`
|
||||
warning: The `flake8-quotes.multiline-quotes="single"` option is incompatible with the formatter. We recommend disabling `Q002` when using the formatter, which enforces double quotes for docstrings. Alternatively, set the `flake8-quotes.docstring-quotes` option to `"double"`.`
|
||||
warning: The isort option `isort.lines-after-imports` with a value other than `-1`, `1` or `2` is incompatible with the formatter. To avoid unexpected behavior, we recommend setting the option to one of: `2`, `1`, or `-1` (default).
|
||||
warning: The isort option `isort.lines-between-types` with a value greater than 1 is incompatible with the formatter. To avoid unexpected behavior, we recommend setting the option to one of: `1` or `0` (default).
|
||||
warning: The isort option `isort.force-wrap-aliases` is incompatible with the formatter `format.skip-magic-trailing-comma=true` option. To avoid unexpected behavior, we recommend either setting `isort.force-wrap-aliases=false` or `format.skip-magic-trailing-comma=false`.
|
||||
warning: The isort option `isort.split-on-trailing-comma` is incompatible with the formatter `format.skip-magic-trailing-comma=true` option. To avoid unexpected behavior, we recommend either setting `isort.split-on-trailing-comma=false` or `format.skip-magic-trailing-comma=false`.
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn valid_linter_options() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
[lint]
|
||||
select = ["ALL"]
|
||||
ignore = ["D203", "D212", "COM812", "ISC001"]
|
||||
|
||||
[lint.isort]
|
||||
lines-after-imports = 2
|
||||
lines-between-types = 1
|
||||
force-wrap-aliases = true
|
||||
combine-as-imports = true
|
||||
split-on-trailing-comma = true
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
docstring-quotes = "double"
|
||||
multiline-quotes = "double"
|
||||
|
||||
[format]
|
||||
skip-magic-trailing-comma = false
|
||||
quote-style = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let test_path = tempdir.path().join("test.py");
|
||||
fs::write(
|
||||
&test_path,
|
||||
r#"
|
||||
def say_hy(name: str):
|
||||
print(f"Hy {name}")"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["format", "--no-cache", "--config"])
|
||||
.arg(&ruff_toml)
|
||||
.arg(test_path), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
1 file reformatted
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_rules_default_options() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
[lint]
|
||||
select = ["ALL"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let test_path = tempdir.path().join("test.py");
|
||||
fs::write(
|
||||
&test_path,
|
||||
r#"
|
||||
def say_hy(name: str):
|
||||
print(f"Hy {name}")"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["format", "--no-cache", "--config"])
|
||||
.arg(&ruff_toml)
|
||||
.arg(test_path), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
1 file reformatted
|
||||
|
||||
----- stderr -----
|
||||
warning: `one-blank-line-before-class` (D203) and `no-blank-line-before-class` (D211) are incompatible. Ignoring `one-blank-line-before-class`.
|
||||
warning: `multi-line-summary-first-line` (D212) and `multi-line-summary-second-line` (D213) are incompatible. Ignoring `multi-line-summary-second-line`.
|
||||
warning: The following rules may cause conflicts when used with the formatter: `COM812`, `ISC001`. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `select` or `extend-select` configuration, or adding them to the `ignore` configuration.
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_diff() {
|
||||
let args = ["format", "--no-cache", "--isolated", "--diff"];
|
||||
let fixtures = Path::new("resources").join("test").join("fixtures");
|
||||
let paths = [
|
||||
fixtures.join("unformatted.py"),
|
||||
fixtures.join("formatted.py"),
|
||||
fixtures.join("unformatted.ipynb"),
|
||||
];
|
||||
insta::with_settings!({filters => vec![
|
||||
// Replace windows paths
|
||||
(r"\\", "/"),
|
||||
]}, {
|
||||
assert_cmd_snapshot!(
|
||||
Command::new(get_cargo_bin(BIN_NAME)).args(args).args(paths),
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
--- resources/test/fixtures/unformatted.ipynb:cell 1
|
||||
+++ resources/test/fixtures/unformatted.ipynb:cell 1
|
||||
@@ -1,3 +1,4 @@
|
||||
import numpy
|
||||
-maths = (numpy.arange(100)**2).sum()
|
||||
-stats= numpy.asarray([1,2,3,4]).median()
|
||||
+
|
||||
+maths = (numpy.arange(100) ** 2).sum()
|
||||
+stats = numpy.asarray([1, 2, 3, 4]).median()
|
||||
--- resources/test/fixtures/unformatted.ipynb:cell 3
|
||||
+++ resources/test/fixtures/unformatted.ipynb:cell 3
|
||||
@@ -1,4 +1,6 @@
|
||||
# A cell with IPython escape command
|
||||
def some_function(foo, bar):
|
||||
pass
|
||||
+
|
||||
+
|
||||
%matplotlib inline
|
||||
--- resources/test/fixtures/unformatted.ipynb:cell 4
|
||||
+++ resources/test/fixtures/unformatted.ipynb:cell 4
|
||||
@@ -1,5 +1,10 @@
|
||||
foo = %pwd
|
||||
-def some_function(foo,bar,):
|
||||
+
|
||||
+
|
||||
+def some_function(
|
||||
+ foo,
|
||||
+ bar,
|
||||
+):
|
||||
# Another cell with IPython escape command
|
||||
foo = %pwd
|
||||
print(foo)
|
||||
|
||||
--- resources/test/fixtures/unformatted.py
|
||||
+++ resources/test/fixtures/unformatted.py
|
||||
@@ -1,3 +1,3 @@
|
||||
x = 1
|
||||
-y=2
|
||||
+y = 2
|
||||
z = 3
|
||||
|
||||
|
||||
----- stderr -----
|
||||
2 files would be reformatted, 1 file left unchanged
|
||||
"###);
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_diff_no_change() {
|
||||
let args = ["format", "--no-cache", "--isolated", "--diff"];
|
||||
let fixtures = Path::new("resources").join("test").join("fixtures");
|
||||
let paths = [fixtures.join("unformatted.py")];
|
||||
insta::with_settings!({filters => vec![
|
||||
// Replace windows paths
|
||||
(r"\\", "/"),
|
||||
]}, {
|
||||
assert_cmd_snapshot!(
|
||||
Command::new(get_cargo_bin(BIN_NAME)).args(args).args(paths),
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
--- resources/test/fixtures/unformatted.py
|
||||
+++ resources/test/fixtures/unformatted.py
|
||||
@@ -1,3 +1,3 @@
|
||||
x = 1
|
||||
-y=2
|
||||
+y = 2
|
||||
z = 3
|
||||
|
||||
|
||||
----- stderr -----
|
||||
1 file would be reformatted
|
||||
"###
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_diff_stdin_unformatted() {
|
||||
let args = [
|
||||
"format",
|
||||
"--isolated",
|
||||
"--diff",
|
||||
"-",
|
||||
"--stdin-filename",
|
||||
"unformatted.py",
|
||||
];
|
||||
let fixtures = Path::new("resources").join("test").join("fixtures");
|
||||
let unformatted = fs::read(fixtures.join("unformatted.py")).unwrap();
|
||||
assert_cmd_snapshot!(
|
||||
Command::new(get_cargo_bin(BIN_NAME)).args(args).pass_stdin(unformatted),
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
--- unformatted.py
|
||||
+++ unformatted.py
|
||||
@@ -1,3 +1,3 @@
|
||||
x = 1
|
||||
-y=2
|
||||
+y = 2
|
||||
z = 3
|
||||
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_diff_stdin_formatted() {
|
||||
let args = ["format", "--isolated", "--diff", "-"];
|
||||
let fixtures = Path::new("resources").join("test").join("fixtures");
|
||||
let unformatted = fs::read(fixtures.join("formatted.py")).unwrap();
|
||||
assert_cmd_snapshot!(
|
||||
Command::new(get_cargo_bin(BIN_NAME)).args(args).pass_stdin(unformatted),
|
||||
@r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
}
|
||||
|
||||
@@ -485,6 +485,7 @@ fn stdin_format_jupyter() {
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
warning: `ruff format` is not yet stable, and subject to change in future versions.
|
||||
"###);
|
||||
}
|
||||
|
||||
@@ -634,9 +635,8 @@ fn nursery_group_selector_preview_enabled() {
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:1: CPY001 Missing copyright notice at top of file
|
||||
-:1:2: E225 [*] Missing whitespace around operator
|
||||
-:1:2: E225 Missing whitespace around operator
|
||||
Found 2 errors.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
warning: The `NURSERY` selector has been deprecated.
|
||||
@@ -655,9 +655,8 @@ fn preview_enabled_prefix() {
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:1: E741 Ambiguous variable name: `I`
|
||||
-:1:2: E225 [*] Missing whitespace around operator
|
||||
-:1:2: E225 Missing whitespace around operator
|
||||
Found 2 errors.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -676,9 +675,8 @@ fn preview_enabled_all() {
|
||||
-:1:1: E741 Ambiguous variable name: `I`
|
||||
-:1:1: D100 Missing docstring in public module
|
||||
-:1:1: CPY001 Missing copyright notice at top of file
|
||||
-:1:2: E225 [*] Missing whitespace around operator
|
||||
-:1:2: E225 Missing whitespace around operator
|
||||
Found 4 errors.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
warning: `one-blank-line-before-class` (D203) and `no-blank-line-before-class` (D211) are incompatible. Ignoring `one-blank-line-before-class`.
|
||||
@@ -697,9 +695,8 @@ fn preview_enabled_direct() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:2: E225 [*] Missing whitespace around operator
|
||||
-:1:2: E225 Missing whitespace around operator
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1255,8 +1252,8 @@ fn diff_does_not_show_display_only_fixes_with_unsafe_fixes_enabled() {
|
||||
])
|
||||
.pass_stdin("def add_to_list(item, some_list=[]): ..."),
|
||||
@r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
@@ -1279,8 +1276,8 @@ fn diff_only_unsafe_fixes_available() {
|
||||
])
|
||||
.pass_stdin("x = {'a': 1, 'a': 1}\nprint(('foo'))\n"),
|
||||
@r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
@@ -1308,7 +1305,7 @@ extend-unsafe-fixes = ["UP034"]
|
||||
.args([
|
||||
"--output-format",
|
||||
"text",
|
||||
"--no-cache",
|
||||
"--no-cache",
|
||||
"--select",
|
||||
"F601,UP034",
|
||||
])
|
||||
@@ -1347,7 +1344,7 @@ extend-safe-fixes = ["F601"]
|
||||
.args([
|
||||
"--output-format",
|
||||
"text",
|
||||
"--no-cache",
|
||||
"--no-cache",
|
||||
"--select",
|
||||
"F601,UP034",
|
||||
])
|
||||
|
||||
@@ -31,15 +31,14 @@ inline-quotes = "single"
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
.args(["--stdin-filename", "test.py"])
|
||||
.arg("-")
|
||||
.pass_stdin(r#"a = "abcba".strip("aba")"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
test.py:1:5: Q000 [*] Double quotes found but single quotes preferred
|
||||
test.py:1:5: B005 Using `.strip()` with multi-character strings is misleading
|
||||
test.py:1:19: Q000 [*] Double quotes found but single quotes preferred
|
||||
-:1:5: Q000 [*] Double quotes found but single quotes preferred
|
||||
-:1:5: B005 Using `.strip()` with multi-character strings is misleading
|
||||
-:1:19: Q000 [*] Double quotes found but single quotes preferred
|
||||
Found 3 errors.
|
||||
[*] 2 fixable with the `--fix` option.
|
||||
|
||||
@@ -156,243 +155,3 @@ inline-quotes = "single"
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn exclude() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-select = ["B", "Q"]
|
||||
extend-exclude = ["out"]
|
||||
|
||||
[lint]
|
||||
exclude = ["test.py", "generated.py"]
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
fs::write(
|
||||
tempdir.path().join("main.py"),
|
||||
r#"
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == "__main__":
|
||||
say_hy("dear Ruff contributor")
|
||||
"#,
|
||||
)?;
|
||||
|
||||
// Excluded file but passed to the CLI directly, should be linted
|
||||
let test_path = tempdir.path().join("test.py");
|
||||
fs::write(
|
||||
&test_path,
|
||||
r#"
|
||||
def say_hy(name: str):
|
||||
print(f"Hy {name}")"#,
|
||||
)?;
|
||||
|
||||
fs::write(
|
||||
tempdir.path().join("generated.py"),
|
||||
r#"NUMBERS = [
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
|
||||
10, 11, 12, 13, 14, 15, 16, 17, 18, 19
|
||||
]
|
||||
OTHER = "OTHER"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let out_dir = tempdir.path().join("out");
|
||||
fs::create_dir(&out_dir)?;
|
||||
|
||||
fs::write(out_dir.join("a.py"), r#"a = "a""#)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.arg("check")
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
// Explicitly pass test.py, should be linted regardless of it being excluded by lint.exclude
|
||||
.arg(test_path.file_name().unwrap())
|
||||
// Lint all other files in the directory, should respect the `exclude` and `lint.exclude` options
|
||||
.arg("."), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
main.py:4:16: Q000 [*] Double quotes found but single quotes preferred
|
||||
main.py:5:12: Q000 [*] Double quotes found but single quotes preferred
|
||||
test.py:3:15: Q000 [*] Double quotes found but single quotes preferred
|
||||
Found 3 errors.
|
||||
[*] 3 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn exclude_stdin() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-select = ["B", "Q"]
|
||||
|
||||
[lint]
|
||||
exclude = ["generated.py"]
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.arg("check")
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
.args(["--stdin-filename", "generated.py"])
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == "__main__":
|
||||
say_hy("dear Ruff contributor")
|
||||
"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
generated.py:4:16: Q000 [*] Double quotes found but single quotes preferred
|
||||
generated.py:5:12: Q000 [*] Double quotes found but single quotes preferred
|
||||
Found 2 errors.
|
||||
[*] 2 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn line_too_long_width_override() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
line-length = 80
|
||||
select = ["E501"]
|
||||
|
||||
[pycodestyle]
|
||||
max-line-length = 100
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
.args(["--stdin-filename", "test.py"])
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
# longer than 80, but less than 100
|
||||
_ = "---------------------------------------------------------------------------亜亜亜亜亜亜"
|
||||
# longer than 100
|
||||
_ = "---------------------------------------------------------------------------亜亜亜亜亜亜亜亜亜亜亜亜亜亜"
|
||||
"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
test.py:5:91: E501 Line too long (109 > 100)
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn per_file_ignores_stdin() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-select = ["B", "Q"]
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.arg("check")
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
.args(["--stdin-filename", "generated.py"])
|
||||
.args(["--per-file-ignores", "generated.py:Q"])
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
import os
|
||||
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == "__main__":
|
||||
say_hy("dear Ruff contributor")
|
||||
"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
generated.py:2:8: F401 [*] `os` imported but unused
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extend_per_file_ignores_stdin() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
extend-select = ["B", "Q"]
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.arg("check")
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
|
||||
.args(["--stdin-filename", "generated.py"])
|
||||
.args(["--extend-per-file-ignores", "generated.py:Q"])
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
import os
|
||||
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == "__main__":
|
||||
say_hy("dear Ruff contributor")
|
||||
"#), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
generated.py:2:8: F401 [*] `os` imported but unused
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -41,7 +41,7 @@ serde_json = { workspace = true }
|
||||
similar = { workspace = true }
|
||||
strum = { workspace = true }
|
||||
strum_macros = { workspace = true }
|
||||
tempfile = "3.8.1"
|
||||
tempfile = "3.6.0"
|
||||
toml = { workspace = true, features = ["parse"] }
|
||||
tracing = { workspace = true }
|
||||
tracing-indicatif = { workspace = true }
|
||||
|
||||
@@ -11,6 +11,7 @@ use std::{fmt, fs, io, iter};
|
||||
|
||||
use anyhow::{bail, format_err, Context, Error};
|
||||
use clap::{CommandFactory, FromArgMatches};
|
||||
use ignore::DirEntry;
|
||||
use imara_diff::intern::InternedInput;
|
||||
use imara_diff::sink::Counter;
|
||||
use imara_diff::{diff, Algorithm};
|
||||
@@ -33,16 +34,16 @@ use ruff_formatter::{FormatError, LineWidth, PrintError};
|
||||
use ruff_linter::logging::LogLevel;
|
||||
use ruff_linter::settings::types::{FilePattern, FilePatternSet};
|
||||
use ruff_python_formatter::{
|
||||
format_module_source, FormatModuleError, MagicTrailingComma, PreviewMode, PyFormatOptions,
|
||||
format_module_source, FormatModuleError, MagicTrailingComma, PyFormatOptions,
|
||||
};
|
||||
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile, Resolver};
|
||||
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, Resolver};
|
||||
|
||||
/// Find files that ruff would check so we can format them. Adapted from `ruff_cli`.
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn ruff_check_paths(
|
||||
dirs: &[PathBuf],
|
||||
) -> anyhow::Result<(
|
||||
Vec<Result<ResolvedFile, ignore::Error>>,
|
||||
Vec<Result<DirEntry, ignore::Error>>,
|
||||
Resolver,
|
||||
PyprojectConfig,
|
||||
)> {
|
||||
@@ -466,9 +467,9 @@ fn format_dev_project(
|
||||
let iter = { paths.into_par_iter() };
|
||||
#[cfg(feature = "singlethreaded")]
|
||||
let iter = { paths.into_iter() };
|
||||
iter.map(|path| {
|
||||
iter.map(|dir_entry| {
|
||||
let result = format_dir_entry(
|
||||
path,
|
||||
dir_entry,
|
||||
stability_check,
|
||||
write,
|
||||
&black_options,
|
||||
@@ -526,20 +527,24 @@ fn format_dev_project(
|
||||
|
||||
/// Error handling in between walkdir and `format_dev_file`
|
||||
fn format_dir_entry(
|
||||
resolved_file: Result<ResolvedFile, ignore::Error>,
|
||||
dir_entry: Result<DirEntry, ignore::Error>,
|
||||
stability_check: bool,
|
||||
write: bool,
|
||||
options: &BlackOptions,
|
||||
resolver: &Resolver,
|
||||
pyproject_config: &PyprojectConfig,
|
||||
) -> anyhow::Result<(Result<Statistics, CheckFileError>, PathBuf), Error> {
|
||||
let resolved_file = resolved_file.context("Iterating the files in the repository failed")?;
|
||||
let dir_entry = match dir_entry.context("Iterating the files in the repository failed") {
|
||||
Ok(dir_entry) => dir_entry,
|
||||
Err(err) => return Err(err),
|
||||
};
|
||||
let file = dir_entry.path().to_path_buf();
|
||||
// For some reason it does not filter in the beginning
|
||||
if resolved_file.file_name() == "pyproject.toml" {
|
||||
return Ok((Ok(Statistics::default()), resolved_file.into_path()));
|
||||
if dir_entry.file_name() == "pyproject.toml" {
|
||||
return Ok((Ok(Statistics::default()), file));
|
||||
}
|
||||
|
||||
let path = resolved_file.into_path();
|
||||
let path = dir_entry.path().to_path_buf();
|
||||
let mut options = options.to_py_format_options(&path);
|
||||
|
||||
let settings = resolver.resolve(&path, pyproject_config);
|
||||
@@ -871,7 +876,9 @@ struct BlackOptions {
|
||||
line_length: NonZeroU16,
|
||||
#[serde(alias = "skip-magic-trailing-comma")]
|
||||
skip_magic_trailing_comma: bool,
|
||||
preview: bool,
|
||||
#[allow(unused)]
|
||||
#[serde(alias = "force-exclude")]
|
||||
force_exclude: Option<String>,
|
||||
}
|
||||
|
||||
impl Default for BlackOptions {
|
||||
@@ -879,7 +886,7 @@ impl Default for BlackOptions {
|
||||
Self {
|
||||
line_length: NonZeroU16::new(88).unwrap(),
|
||||
skip_magic_trailing_comma: false,
|
||||
preview: false,
|
||||
force_exclude: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -927,11 +934,6 @@ impl BlackOptions {
|
||||
} else {
|
||||
MagicTrailingComma::Respect
|
||||
})
|
||||
.with_preview(if self.preview {
|
||||
PreviewMode::Enabled
|
||||
} else {
|
||||
PreviewMode::Disabled
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -16,11 +16,8 @@ use crate::ROOT_DIR;
|
||||
const COMMAND_HELP_BEGIN_PRAGMA: &str = "<!-- Begin auto-generated command help. -->\n";
|
||||
const COMMAND_HELP_END_PRAGMA: &str = "<!-- End auto-generated command help. -->";
|
||||
|
||||
const CHECK_HELP_BEGIN_PRAGMA: &str = "<!-- Begin auto-generated check help. -->\n";
|
||||
const CHECK_HELP_END_PRAGMA: &str = "<!-- End auto-generated check help. -->";
|
||||
|
||||
const FORMAT_HELP_BEGIN_PRAGMA: &str = "<!-- Begin auto-generated format help. -->\n";
|
||||
const FORMAT_HELP_END_PRAGMA: &str = "<!-- End auto-generated format help. -->";
|
||||
const SUBCOMMAND_HELP_BEGIN_PRAGMA: &str = "<!-- Begin auto-generated subcommand help. -->\n";
|
||||
const SUBCOMMAND_HELP_END_PRAGMA: &str = "<!-- End auto-generated subcommand help. -->";
|
||||
|
||||
#[derive(clap::Args)]
|
||||
pub(crate) struct Args {
|
||||
@@ -59,15 +56,11 @@ pub(super) fn main(args: &Args) -> Result<()> {
|
||||
let command_help = trim_lines(&help_text());
|
||||
|
||||
// Generate `ruff help check`.
|
||||
let check_help = trim_lines(&subcommand_help_text("check")?);
|
||||
|
||||
// Generate `ruff help format`.
|
||||
let format_help = trim_lines(&subcommand_help_text("format")?);
|
||||
let subcommand_help = trim_lines(&check_help_text());
|
||||
|
||||
if args.mode.is_dry_run() {
|
||||
print!("{command_help}");
|
||||
print!("{check_help}");
|
||||
print!("{format_help}");
|
||||
print!("{subcommand_help}");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
@@ -84,15 +77,9 @@ pub(super) fn main(args: &Args) -> Result<()> {
|
||||
)?;
|
||||
let new = replace_docs_section(
|
||||
&new,
|
||||
&format!("```text\n{check_help}\n```\n\n"),
|
||||
CHECK_HELP_BEGIN_PRAGMA,
|
||||
CHECK_HELP_END_PRAGMA,
|
||||
)?;
|
||||
let new = replace_docs_section(
|
||||
&new,
|
||||
&format!("```text\n{format_help}\n```\n\n"),
|
||||
FORMAT_HELP_BEGIN_PRAGMA,
|
||||
FORMAT_HELP_END_PRAGMA,
|
||||
&format!("```text\n{subcommand_help}\n```\n\n"),
|
||||
SUBCOMMAND_HELP_BEGIN_PRAGMA,
|
||||
SUBCOMMAND_HELP_END_PRAGMA,
|
||||
)?;
|
||||
|
||||
match args.mode {
|
||||
@@ -117,19 +104,18 @@ fn help_text() -> String {
|
||||
args::Args::command().render_help().to_string()
|
||||
}
|
||||
|
||||
/// Returns the output of a given subcommand (e.g., `ruff help check`).
|
||||
fn subcommand_help_text(subcommand: &str) -> Result<String> {
|
||||
/// Returns the output of `ruff help check`.
|
||||
fn check_help_text() -> String {
|
||||
let mut cmd = args::Args::command();
|
||||
|
||||
// The build call is necessary for the help output to contain `Usage: ruff
|
||||
// check` instead of `Usage: check` see https://github.com/clap-rs/clap/issues/4685
|
||||
cmd.build();
|
||||
|
||||
Ok(cmd
|
||||
.find_subcommand_mut(subcommand)
|
||||
.with_context(|| format!("Unable to find subcommand `{subcommand}`"))?
|
||||
cmd.find_subcommand_mut("check")
|
||||
.expect("`check` subcommand not found")
|
||||
.render_help()
|
||||
.to_string())
|
||||
.to_string()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
@@ -11,7 +11,7 @@ use strum::IntoEnumIterator;
|
||||
use ruff_diagnostics::FixAvailability;
|
||||
use ruff_linter::registry::{Linter, Rule, RuleNamespace};
|
||||
use ruff_workspace::options::Options;
|
||||
use ruff_workspace::options_base::{OptionEntry, OptionsMetadata};
|
||||
use ruff_workspace::options_base::OptionsMetadata;
|
||||
|
||||
use crate::ROOT_DIR;
|
||||
|
||||
@@ -55,11 +55,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
|
||||
output.push('\n');
|
||||
}
|
||||
|
||||
process_documentation(
|
||||
explanation.trim(),
|
||||
&mut output,
|
||||
&rule.noqa_code().to_string(),
|
||||
);
|
||||
process_documentation(explanation.trim(), &mut output);
|
||||
|
||||
let filename = PathBuf::from(ROOT_DIR)
|
||||
.join("docs")
|
||||
@@ -78,7 +74,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn process_documentation(documentation: &str, out: &mut String, rule_name: &str) {
|
||||
fn process_documentation(documentation: &str, out: &mut String) {
|
||||
let mut in_options = false;
|
||||
let mut after = String::new();
|
||||
|
||||
@@ -104,17 +100,7 @@ fn process_documentation(documentation: &str, out: &mut String, rule_name: &str)
|
||||
if let Some(rest) = line.strip_prefix("- `") {
|
||||
let option = rest.trim_end().trim_end_matches('`');
|
||||
|
||||
match Options::metadata().find(option) {
|
||||
Some(OptionEntry::Field(field)) => {
|
||||
if field.deprecated.is_some() {
|
||||
eprintln!("Rule {rule_name} references deprecated option {option}.");
|
||||
}
|
||||
}
|
||||
Some(_) => {}
|
||||
None => {
|
||||
panic!("Unknown option {option} referenced by rule {rule_name}");
|
||||
}
|
||||
}
|
||||
assert!(Options::metadata().has(option), "unknown option {option}");
|
||||
|
||||
let anchor = option.replace('.', "-");
|
||||
out.push_str(&format!("- [`{option}`][{option}]\n"));
|
||||
@@ -152,7 +138,6 @@ Something [`else`][other].
|
||||
|
||||
[other]: http://example.com.",
|
||||
&mut output,
|
||||
"example",
|
||||
);
|
||||
assert_eq!(
|
||||
output,
|
||||
|
||||
@@ -101,24 +101,6 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parent_set:
output.push_str(&format!("{header_level} [`{name}`](#{name})\n"));
}
output.push('\n');

if let Some(deprecated) = &field.deprecated {
output.push_str("!!! warning \"Deprecated\"\n");
output.push_str(" This option has been deprecated");

if let Some(since) = deprecated.since {
write!(output, " in {since}").unwrap();
}

output.push('.');

if let Some(message) = deprecated.message {
writeln!(output, " {message}").unwrap();
}

output.push('\n');
}

output.push_str(field.doc);
output.push_str("\n\n");
output.push_str(&format!("**Default value**: `{}`\n", field.default));
@@ -128,11 +110,7 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parent_set:
output.push_str(&format!(
"**Example usage**:\n\n```toml\n[tool.ruff{}]\n{}\n```\n",
if let Some(set_name) = parent_set.name() {
if set_name == "format" {
String::from(".format")
} else {
format!(".lint.{set_name}")
}
format!(".{set_name}")
} else {
String::new()
},

@@ -22,16 +22,14 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
for rule in rules {
let fix_token = match rule.fixable() {
FixAvailability::Always | FixAvailability::Sometimes => {
format!("<span title='Automatic fix available'>{FIX_SYMBOL}</span>")
}
FixAvailability::None => {
format!("<span style='opacity: 0.1' aria-hidden='true'>{FIX_SYMBOL}</span>")
format!("<span style='opacity: 1'>{FIX_SYMBOL}</span>")
}
FixAvailability::None => format!("<span style='opacity: 0.1'>{FIX_SYMBOL}</span>"),
};
let preview_token = if rule.is_preview() || rule.is_nursery() {
format!("<span title='Rule is in preview'>{PREVIEW_SYMBOL}</span>")
format!("<span style='opacity: 1'>{PREVIEW_SYMBOL}</span>")
} else {
format!("<span style='opacity: 0.1' aria-hidden='true'>{PREVIEW_SYMBOL}</span>")
format!("<span style='opacity: 0.1'>{PREVIEW_SYMBOL}</span>")
};
let status_token = format!("{fix_token} {preview_token}");

@@ -64,7 +62,7 @@ pub(crate) fn generate() -> String {
table_out.push('\n');

table_out.push_str(&format!(
"The {PREVIEW_SYMBOL} emoji indicates that a rule is in [\"preview\"](faq.md#what-is-preview)."
"The {PREVIEW_SYMBOL} emoji indicates that a rule in [\"preview\"](faq.md#what-is-preview)."
));
table_out.push('\n');
table_out.push('\n');

@@ -2448,7 +2448,7 @@ where

/// Adds a new entry to the join output.
pub fn entry(&mut self, entry: &dyn Format<Context>) -> &mut Self {
self.result = self.result.and_then(|()| {
self.result = self.result.and_then(|_| {
if let Some(with) = &self.with {
if self.has_elements {
with.fmt(self.fmt)?;
@@ -2519,7 +2519,7 @@ impl<'a, 'buf, Context> FillBuilder<'a, 'buf, Context> {
separator: &dyn Format<Context>,
entry: &dyn Format<Context>,
) -> &mut Self {
self.result = self.result.and_then(|()| {
self.result = self.result.and_then(|_| {
if self.empty {
self.empty = false;
} else {

@@ -95,7 +95,7 @@ impl std::fmt::Display for IndentStyle {
///
/// Determines the visual width of a tab character (`\t`) and the number of
/// spaces per indent when using [`IndentStyle::Space`].
#[derive(Clone, Copy, Debug, Eq, PartialEq, CacheKey)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct IndentWidth(NonZeroU8);
@@ -575,10 +575,6 @@ where
context: PhantomData,
}
}

pub fn rule(&self) -> &R {
&self.rule
}
}

impl<T, R, O, C> FormatRefWithRule<'_, T, R, C>

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.1.3"
version = "0.1.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -53,7 +53,7 @@ path-absolutize = { workspace = true, features = [
] }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.3.12", features = ["serde"] }
pyproject-toml = { version = "0.8.0" }
pyproject-toml = { version = "0.7.0" }
quick-junit = { version = "0.3.2" }
regex = { workspace = true }
result-like = { version = "0.4.6" }
@@ -79,7 +79,7 @@ pretty_assertions = "1.3.0"
test-case = { workspace = true }
# Disable colored output in tests
colored = { workspace = true, features = ["no-color"] }
tempfile = "3.8.1"
tempfile = "3.6.0"

[features]
default = []

@@ -1,19 +0,0 @@
|
||||
import urllib.request
|
||||
|
||||
urllib.request.urlopen(url='http://www.google.com')
|
||||
urllib.request.urlopen(url='http://www.google.com', **kwargs)
|
||||
urllib.request.urlopen('http://www.google.com')
|
||||
urllib.request.urlopen('file:///foo/bar/baz')
|
||||
urllib.request.urlopen(url)
|
||||
|
||||
urllib.request.Request(url='http://www.google.com', **kwargs)
|
||||
urllib.request.Request(url='http://www.google.com')
|
||||
urllib.request.Request('http://www.google.com')
|
||||
urllib.request.Request('file:///foo/bar/baz')
|
||||
urllib.request.Request(url)
|
||||
|
||||
urllib.request.URLopener().open(fullurl='http://www.google.com', **kwargs)
|
||||
urllib.request.URLopener().open(fullurl='http://www.google.com')
|
||||
urllib.request.URLopener().open('http://www.google.com')
|
||||
urllib.request.URLopener().open('file:///foo/bar/baz')
|
||||
urllib.request.URLopener().open(url)
|
||||
@@ -79,6 +79,3 @@ from ZeroDivisionError
|
||||
raise IndexError() from ZeroDivisionError
|
||||
|
||||
raise IndexError();
|
||||
|
||||
# RSE102
|
||||
raise Foo()
|
||||
|
||||
@@ -126,12 +126,3 @@ def f():
|
||||
x = yield 3
|
||||
else:
|
||||
x = yield 5
|
||||
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
# OK
|
||||
if TYPE_CHECKING:
|
||||
x = 3
|
||||
else:
|
||||
x = 5
|
||||
|
||||
@@ -38,7 +38,3 @@ env = os.environ['FOO']
|
||||
|
||||
if env := os.environ['FOO']:
|
||||
pass
|
||||
|
||||
os.environ['https_proxy']
|
||||
os.environ.get['http_proxy']
|
||||
os.getenv('no_proxy')
|
||||
|
||||
@@ -114,16 +114,3 @@ elif key in a_dict:
|
||||
vars[idx] = a_dict[key]
|
||||
else:
|
||||
vars[idx] = "default"
|
||||
|
||||
###
|
||||
# Positive cases (preview)
|
||||
###
|
||||
|
||||
# SIM401
|
||||
var = a_dict[key] if key in a_dict else "default3"
|
||||
|
||||
# SIM401
|
||||
var = "default-1" if key not in a_dict else a_dict[key]
|
||||
|
||||
# OK (default contains effect)
|
||||
var = a_dict[key] if key in a_dict else val1 + val2
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
import trio
|
||||
|
||||
|
||||
async def foo():
|
||||
with trio.fail_after():
|
||||
...
|
||||
|
||||
async def foo():
|
||||
with trio.fail_at():
|
||||
await ...
|
||||
|
||||
async def foo():
|
||||
with trio.move_on_after():
|
||||
...
|
||||
|
||||
async def foo():
|
||||
with trio.move_at():
|
||||
await ...
|
||||
@@ -1,7 +1,7 @@
|
||||
[tool.ruff]
|
||||
line-length = 88
|
||||
|
||||
[tool.ruff.lint.isort]
|
||||
[tool.ruff.isort]
|
||||
lines-after-imports = 3
|
||||
lines-between-types = 2
|
||||
known-local-folder = ["ruff"]
|
||||
|
||||
@@ -1,106 +0,0 @@
|
||||
def func():
|
||||
import numpy as np
|
||||
|
||||
np.add_docstring
|
||||
|
||||
np.add_newdoc
|
||||
|
||||
np.add_newdoc_ufunc
|
||||
|
||||
np.asfarray([1,2,3])
|
||||
|
||||
np.byte_bounds(np.array([1,2,3]))
|
||||
|
||||
np.cast
|
||||
|
||||
np.cfloat(12+34j)
|
||||
|
||||
np.clongfloat(12+34j)
|
||||
|
||||
np.compat
|
||||
|
||||
np.complex_(12+34j)
|
||||
|
||||
np.DataSource
|
||||
|
||||
np.deprecate
|
||||
|
||||
np.deprecate_with_doc
|
||||
|
||||
np.disp(10)
|
||||
|
||||
np.fastCopyAndTranspose
|
||||
|
||||
np.find_common_type
|
||||
|
||||
np.get_array_wrap
|
||||
|
||||
np.float_
|
||||
|
||||
np.geterrobj
|
||||
|
||||
np.Inf
|
||||
|
||||
np.Infinity
|
||||
|
||||
np.infty
|
||||
|
||||
np.issctype
|
||||
|
||||
np.issubclass_(np.int32, np.integer)
|
||||
|
||||
np.issubsctype
|
||||
|
||||
np.mat
|
||||
|
||||
np.maximum_sctype
|
||||
|
||||
np.NaN
|
||||
|
||||
np.nbytes[np.int64]
|
||||
|
||||
np.NINF
|
||||
|
||||
np.NZERO
|
||||
|
||||
np.longcomplex(12+34j)
|
||||
|
||||
np.longfloat(12+34j)
|
||||
|
||||
np.lookfor
|
||||
|
||||
np.obj2sctype(int)
|
||||
|
||||
np.PINF
|
||||
|
||||
np.PZERO
|
||||
|
||||
np.recfromcsv
|
||||
|
||||
np.recfromtxt
|
||||
|
||||
np.round_(12.34)
|
||||
|
||||
np.safe_eval
|
||||
|
||||
np.sctype2char
|
||||
|
||||
np.sctypes
|
||||
|
||||
np.seterrobj
|
||||
|
||||
np.set_numeric_ops
|
||||
|
||||
np.set_string_function
|
||||
|
||||
np.singlecomplex(12+1j)
|
||||
|
||||
np.string_("asdf")
|
||||
|
||||
np.source
|
||||
|
||||
np.tracemalloc_domain
|
||||
|
||||
np.unicode_("asf")
|
||||
|
||||
np.who()
|
||||
@@ -12,19 +12,3 @@ dict['key'] = list[index]
|
||||
# This is not prohibited by PEP8, but avoid it.
|
||||
class Foo (Bar, Baz):
|
||||
pass
|
||||
|
||||
|
||||
def fetch_name () -> Union[str, None]:
|
||||
"""Fetch name from --person-name in sys.argv.
|
||||
|
||||
Returns:
|
||||
name of the person if available, otherwise None
|
||||
"""
|
||||
test = len(5)
|
||||
Logger.info(test)
|
||||
# test commented code
|
||||
# Logger.info("test code")
|
||||
for i in range (0, len (sys.argv)) :
|
||||
if sys.argv[i] == "--name" :
|
||||
return sys.argv[i + 1]
|
||||
return None
|
||||
|
||||
@@ -40,11 +40,5 @@ f"{(a:=1)}"
|
||||
f"{(lambda x:x)}"
|
||||
f"normal{f"{a:.3f}"}normal"
|
||||
|
||||
#: Okay
|
||||
snapshot.file_uri[len(f's3://{self.s3_bucket_name}/'):]
|
||||
|
||||
#: E231
|
||||
{len(f's3://{self.s3_bucket_name}/'):1}
|
||||
|
||||
#: Okay
|
||||
a = (1,
|
||||
|
||||
@@ -4,18 +4,18 @@ if type(res) == type(42):
|
||||
#: E721
|
||||
if type(res) != type(""):
|
||||
pass
|
||||
#: Okay
|
||||
#: E721
|
||||
import types
|
||||
|
||||
if res == types.IntType:
|
||||
pass
|
||||
#: Okay
|
||||
#: E721
|
||||
import types
|
||||
|
||||
if type(res) is not types.ListType:
|
||||
pass
|
||||
#: E721
|
||||
assert type(res) == type(False) or type(res) == type(None)
|
||||
assert type(res) == type(False)
|
||||
#: E721
|
||||
assert type(res) == type([])
|
||||
#: E721
|
||||
@@ -25,18 +25,21 @@ assert type(res) == type((0,))
|
||||
#: E721
|
||||
assert type(res) == type((0))
|
||||
#: E721
|
||||
assert type(res) != type((1, ))
|
||||
#: Okay
|
||||
assert type(res) is type((1, ))
|
||||
#: Okay
|
||||
assert type(res) is not type((1, ))
|
||||
assert type(res) != type((1,))
|
||||
#: E721
|
||||
assert type(res) is type((1,))
|
||||
#: E721
|
||||
assert type(res) is not type((1,))
|
||||
#: E211 E721
|
||||
assert type(res) == type ([2, ])
|
||||
assert type(res) == type(
|
||||
[
|
||||
2,
|
||||
]
|
||||
)
|
||||
#: E201 E201 E202 E721
|
||||
assert type(res) == type( ( ) )
|
||||
assert type(res) == type(())
|
||||
#: E201 E202 E721
|
||||
assert type(res) == type( (0, ) )
|
||||
#:
|
||||
assert type(res) == type((0,))
|
||||
|
||||
#: Okay
|
||||
import types
|
||||
@@ -47,48 +50,18 @@ if isinstance(res, str):
|
||||
pass
|
||||
if isinstance(res, types.MethodType):
|
||||
pass
|
||||
#: Okay
|
||||
def func_histype(a, b, c):
|
||||
if type(a) != type(b) or type(a) == type(ccc):
|
||||
pass
|
||||
#: E722
|
||||
try:
|
||||
pass
|
||||
except:
|
||||
pass
|
||||
#: E722
|
||||
try:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
except:
|
||||
pass
|
||||
#: E722 E203 E271
|
||||
try:
|
||||
pass
|
||||
except :
|
||||
pass
|
||||
#: Okay
|
||||
fake_code = """"
|
||||
try:
|
||||
do_something()
|
||||
except:
|
||||
pass
|
||||
"""
|
||||
try:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
#: Okay
|
||||
from . import custom_types as types
|
||||
|
||||
red = types.ColorTypeRED
|
||||
red is types.ColorType.RED
|
||||
#: Okay
|
||||
from . import compute_type
|
||||
assert type(res) == type(None)
|
||||
|
||||
if compute_type(foo) == 5:
|
||||
types = StrEnum
|
||||
if x == types.X:
|
||||
pass
|
||||
|
||||
#: E721
|
||||
assert type(res) is int
|
||||
|
||||
|
||||
class Foo:
|
||||
def asdf(self, value: str | None):
|
||||
|
||||
@@ -10,10 +10,6 @@ def double_quotes_backslash_uppercase():
|
||||
R"""Sum\\mary."""
|
||||
|
||||
|
||||
def shouldnt_add_raw_here():
|
||||
"Ruff \U000026a1"
|
||||
|
||||
|
||||
def make_unique_pod_id(pod_id: str) -> str | None:
|
||||
r"""
|
||||
Generate a unique Pod name.
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
"""Test that type parameters are considered used."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
from .foo import Record as Record1
|
||||
from .bar import Record as Record2
|
||||
|
||||
type RecordCallback[R: Record1] = Callable[[R], None]
|
||||
|
||||
|
||||
def process_record[R: Record2](record: R) -> None:
|
||||
...
|
||||
@@ -1,5 +0,0 @@
|
||||
"""Test lazy evaluation of type alias values."""
|
||||
|
||||
type RecordCallback[R: Record] = Callable[[R], None]
|
||||
|
||||
from collections.abc import Callable
|
||||
@@ -49,13 +49,6 @@ class Apples:
|
||||
def __doc__(self):
|
||||
return "Docstring"
|
||||
|
||||
# Added in Python 3.12
|
||||
def __buffer__(self):
|
||||
return memoryview(b'')
|
||||
|
||||
def __release_buffer__(self, buf):
|
||||
pass
|
||||
|
||||
# Allow dunder methods recommended by attrs.
|
||||
def __attrs_post_init__(self):
|
||||
pass
|
||||
@@ -70,10 +63,6 @@ class Apples:
|
||||
def __html__(self):
|
||||
pass
|
||||
|
||||
# Allow Python's __index__
|
||||
def __index__(self):
|
||||
pass
|
||||
|
||||
# Allow _missing_, used by enum.Enum.
|
||||
@classmethod
|
||||
def _missing_(cls, value):
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
import pathlib
|
||||
|
||||
NAME = "foo.bar"
|
||||
open(NAME, "wb")
|
||||
open(NAME, "w", encoding="utf-8")
|
||||
open(NAME, "rb")
|
||||
open(NAME, "x", encoding="utf-8")
|
||||
open(NAME, "br")
|
||||
open(NAME, "+r", encoding="utf-8")
|
||||
open(NAME, "xb")
|
||||
open(NAME, "rwx") # [bad-open-mode]
|
||||
open(NAME, mode="rwx") # [bad-open-mode]
|
||||
open(NAME, "rwx", encoding="utf-8") # [bad-open-mode]
|
||||
open(NAME, "rr", encoding="utf-8") # [bad-open-mode]
|
||||
open(NAME, "+", encoding="utf-8") # [bad-open-mode]
|
||||
open(NAME, "xw", encoding="utf-8") # [bad-open-mode]
|
||||
open(NAME, "ab+")
|
||||
open(NAME, "a+b")
|
||||
open(NAME, "+ab")
|
||||
open(NAME, "+rUb")
|
||||
open(NAME, "x+b")
|
||||
open(NAME, "Ua", encoding="utf-8") # [bad-open-mode]
|
||||
open(NAME, "Ur++", encoding="utf-8") # [bad-open-mode]
|
||||
open(NAME, "Ut", encoding="utf-8")
|
||||
open(NAME, "Ubr")
|
||||
|
||||
mode = "rw"
|
||||
open(NAME, mode)
|
||||
|
||||
pathlib.Path(NAME).open("wb")
|
||||
pathlib.Path(NAME).open(mode)
|
||||
pathlib.Path(NAME).open("rwx") # [bad-open-mode]
|
||||
pathlib.Path(NAME).open(mode="rwx") # [bad-open-mode]
|
||||
pathlib.Path(NAME).open("rwx", encoding="utf-8") # [bad-open-mode]
|
||||
@@ -57,9 +57,3 @@ r'\%03o' % (ord(c),)
|
||||
'(%r, %r, %r, %r)' % (hostname, address, username, '$PASSWORD')
|
||||
'%r' % ({'server_school_roles': server_school_roles, 'is_school_multiserver_domain': is_school_multiserver_domain}, )
|
||||
"%d" % (1 if x > 0 else 2)
|
||||
|
||||
# Special cases for %c allowing single character strings
|
||||
# https://github.com/astral-sh/ruff/issues/8406
|
||||
"%c" % ("x",)
|
||||
"%c" % "x"
|
||||
"%c" % "œ"
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
global price # W0604
|
||||
|
||||
price = 25
|
||||
|
||||
if True:
|
||||
global X # W0604
|
||||
|
||||
def no_error():
|
||||
global price
|
||||
price = 30
|
||||
@@ -1,22 +0,0 @@
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
# Verify that statements nested in conditionals (such as top-level type-checking blocks)
|
||||
# are still considered top-level
|
||||
if TYPE_CHECKING:
|
||||
import string
|
||||
|
||||
|
||||
def import_in_function():
|
||||
import symtable # [import-outside-toplevel]
|
||||
import os, sys # [import-outside-toplevel]
|
||||
import time as thyme # [import-outside-toplevel]
|
||||
import random as rand, socket as sock # [import-outside-toplevel]
|
||||
from collections import defaultdict # [import-outside-toplevel]
|
||||
from math import sin as sign, cos as cosplay # [import-outside-toplevel]
|
||||
|
||||
|
||||
class ClassWithImports:
|
||||
import tokenize # [import-outside-toplevel]
|
||||
|
||||
def __init__(self):
|
||||
import trace # [import-outside-toplevel]
|
||||
@@ -1,17 +0,0 @@
|
||||
from os.path import join as łos # Error
|
||||
from os.path import join as los # OK
|
||||
|
||||
import os.path.join as łos # Error
|
||||
import os.path.join as los # OK
|
||||
|
||||
import os.path.łos # Error (recommend an ASCII alias)
|
||||
import os.path.los # OK
|
||||
|
||||
from os.path import łos # Error (recommend an ASCII alias)
|
||||
from os.path import los # OK
|
||||
|
||||
from os.path.łos import foo # OK
|
||||
from os.path.los import foo # OK
|
||||
|
||||
from os.path import łos as foo # OK
|
||||
from os.path import los as foo # OK
|
||||
@@ -1,31 +0,0 @@
|
||||
ápple_count: int = 1 # C2401
|
||||
ápple_count += 2 # C2401
|
||||
ápple_count = 3 # C2401
|
||||
|
||||
(ápple_count for ápple_count in y)
|
||||
|
||||
|
||||
def func(ápple_count):
|
||||
global ápple_count
|
||||
nonlocal ápple_count
|
||||
|
||||
|
||||
def ápple_count():
|
||||
pass
|
||||
|
||||
|
||||
match ápple_count:
|
||||
case ápple_count:
|
||||
pass
|
||||
|
||||
ápple_count: int
|
||||
|
||||
try:
|
||||
1/0
|
||||
except ápple_count:
|
||||
pass
|
||||
|
||||
# OK
|
||||
print(ápple_count)
|
||||
ápple_count == 3
|
||||
apple_count = 4
|
||||
@@ -1,59 +0,0 @@
|
||||
_ = lambda: print() # [unnecessary-lambda]
|
||||
_ = lambda x, y: min(x, y) # [unnecessary-lambda]
|
||||
|
||||
_ = lambda *args: f(*args) # [unnecessary-lambda]
|
||||
_ = lambda **kwargs: f(**kwargs) # [unnecessary-lambda]
|
||||
_ = lambda *args, **kwargs: f(*args, **kwargs) # [unnecessary-lambda]
|
||||
_ = lambda x, y, z, *args, **kwargs: f(x, y, z, *args, **kwargs) # [unnecessary-lambda]
|
||||
|
||||
_ = lambda x: f(lambda x: x)(x) # [unnecessary-lambda]
|
||||
_ = lambda x, y: f(lambda x, y: x + y)(x, y) # [unnecessary-lambda]
|
||||
|
||||
# default value in lambda parameters
|
||||
_ = lambda x=42: print(x)
|
||||
|
||||
# lambda body is not a call
|
||||
_ = lambda x: x
|
||||
|
||||
# ignore chained calls
|
||||
_ = lambda: chained().call()
|
||||
|
||||
# lambda has kwargs but not the call
|
||||
_ = lambda **kwargs: f()
|
||||
|
||||
# lambda has kwargs and the call has kwargs named differently
|
||||
_ = lambda **kwargs: f(**dict([('forty-two', 42)]))
|
||||
|
||||
# lambda has kwargs and the call has unnamed kwargs
|
||||
_ = lambda **kwargs: f(**{1: 2})
|
||||
|
||||
# lambda has starred parameters but not the call
|
||||
_ = lambda *args: f()
|
||||
|
||||
# lambda has starred parameters and the call has starargs named differently
|
||||
_ = lambda *args: f(*list([3, 4]))
|
||||
# lambda has starred parameters and the call has unnamed starargs
|
||||
_ = lambda *args: f(*[3, 4])
|
||||
|
||||
# lambda has parameters but not the call
|
||||
_ = lambda x: f()
|
||||
_ = lambda *x: f()
|
||||
_ = lambda **x: f()
|
||||
|
||||
# lambda parameters and call args are not the same length
|
||||
_ = lambda x, y: f(x)
|
||||
|
||||
# lambda parameters and call args are not in the same order
|
||||
_ = lambda x, y: f(y, x)
|
||||
|
||||
# lambda parameters and call args are not the same
|
||||
_ = lambda x: f(y)
|
||||
|
||||
# the call uses the lambda parameters
|
||||
_ = lambda x: x(x)
|
||||
_ = lambda x, y: x(x, y)
|
||||
_ = lambda x: z(lambda y: x + y)(x)
|
||||
|
||||
# lambda uses an additional keyword
|
||||
_ = lambda *args: f(*args, y=1)
|
||||
_ = lambda *args: f(*args, y=x)
|
||||
@@ -1,56 +0,0 @@
|
||||
import threading
|
||||
from threading import Lock, RLock, Condition, Semaphore, BoundedSemaphore
|
||||
|
||||
|
||||
with threading.Lock(): # [useless-with-lock]
|
||||
...
|
||||
|
||||
with Lock(): # [useless-with-lock]
|
||||
...
|
||||
|
||||
with threading.Lock() as this_shouldnt_matter: # [useless-with-lock]
|
||||
...
|
||||
|
||||
with threading.RLock(): # [useless-with-lock]
|
||||
...
|
||||
|
||||
with RLock(): # [useless-with-lock]
|
||||
...
|
||||
|
||||
with threading.Condition(): # [useless-with-lock]
|
||||
...
|
||||
|
||||
with Condition(): # [useless-with-lock]
|
||||
...
|
||||
|
||||
with threading.Semaphore(): # [useless-with-lock]
|
||||
...
|
||||
|
||||
with Semaphore(): # [useless-with-lock]
|
||||
...
|
||||
|
||||
with threading.BoundedSemaphore(): # [useless-with-lock]
|
||||
...
|
||||
|
||||
with BoundedSemaphore(): # [useless-with-lock]
|
||||
...
|
||||
|
||||
lock = threading.Lock()
|
||||
with lock: # this is ok
|
||||
...
|
||||
|
||||
rlock = threading.RLock()
|
||||
with rlock: # this is ok
|
||||
...
|
||||
|
||||
cond = threading.Condition()
|
||||
with cond: # this is ok
|
||||
...
|
||||
|
||||
sem = threading.Semaphore()
|
||||
with sem: # this is ok
|
||||
...
|
||||
|
||||
b_sem = threading.BoundedSemaphore()
|
||||
with b_sem: # this is ok
|
||||
...
|
||||
@@ -5,6 +5,4 @@ extend-exclude = [
|
||||
"migrations",
|
||||
"with_excluded_file/other_excluded_file.py",
|
||||
]
|
||||
|
||||
[tool.ruff.lint]
|
||||
per-file-ignores = { "__init__.py" = ["F401"] }
|
||||
|
||||
@@ -23,11 +23,3 @@ MyType = typing.NamedTuple("MyType", a=int, b=tuple[str, ...])
|
||||
MyType = typing.NamedTuple("MyType", [("a", int)], [("b", str)])
|
||||
MyType = typing.NamedTuple("MyType", [("a", int)], b=str)
|
||||
MyType = typing.NamedTuple(typename="MyType", a=int, b=str)
|
||||
|
||||
# Regression test for: https://github.com/astral-sh/ruff/issues/8402#issuecomment-1788787357
|
||||
S3File = NamedTuple(
|
||||
"S3File",
|
||||
[
|
||||
("dataHPK",* str),
|
||||
],
|
||||
)
|
||||
|
||||
@@ -80,14 +80,11 @@ from typing import cast
|
||||
# OK
|
||||
from a import b
|
||||
|
||||
# OK: `typing_extensions` contains backported improvements.
|
||||
# Ok: `typing_extensions` contains backported improvements.
|
||||
from typing_extensions import SupportsIndex
|
||||
|
||||
# OK: `typing_extensions` contains backported improvements.
|
||||
# Ok: `typing_extensions` contains backported improvements.
|
||||
from typing_extensions import NamedTuple
|
||||
|
||||
# OK: `typing_extensions` supports `frozen_default` (backported from 3.12).
|
||||
# Ok: `typing_extensions` supports `frozen_default` (backported from 3.12).
|
||||
from typing_extensions import dataclass_transform
|
||||
|
||||
# UP035
|
||||
from backports.strenum import StrEnum
|
||||
|
||||
@@ -202,16 +202,3 @@ if sys.version_info > (3,12):
|
||||
|
||||
if sys.version_info >= (3,12):
|
||||
print("py3")
|
||||
|
||||
# Slices on `sys.version_info` should be treated equivalently.
|
||||
if sys.version_info[:2] >= (3,0):
|
||||
print("py3")
|
||||
|
||||
if sys.version_info[:3] >= (3,0):
|
||||
print("py3")
|
||||
|
||||
if sys.version_info[:2] > (3,13):
|
||||
print("py3")
|
||||
|
||||
if sys.version_info[:3] > (3,13):
|
||||
print("py3")
|
||||
|
||||
@@ -1,126 +0,0 @@
|
||||
def foo():
|
||||
...
|
||||
|
||||
|
||||
def bar(x):
|
||||
...
|
||||
|
||||
|
||||
# Errors.
|
||||
|
||||
# FURB101
|
||||
with open("file.txt") as f:
|
||||
x = f.read()
|
||||
|
||||
# FURB101
|
||||
with open("file.txt", "rb") as f:
|
||||
x = f.read()
|
||||
|
||||
# FURB101
|
||||
with open("file.txt", mode="rb") as f:
|
||||
x = f.read()
|
||||
|
||||
# FURB101
|
||||
with open("file.txt", encoding="utf8") as f:
|
||||
x = f.read()
|
||||
|
||||
# FURB101
|
||||
with open("file.txt", errors="ignore") as f:
|
||||
x = f.read()
|
||||
|
||||
# FURB101
|
||||
with open("file.txt", errors="ignore", mode="rb") as f:
|
||||
x = f.read()
|
||||
|
||||
# FURB101
|
||||
with open("file.txt", mode="r") as f: # noqa: FURB120
|
||||
x = f.read()
|
||||
|
||||
# FURB101
|
||||
with open(foo(), "rb") as f:
|
||||
# The body of `with` is non-trivial, but the recommendation holds.
|
||||
bar("pre")
|
||||
bar(f.read())
|
||||
bar("post")
|
||||
print("Done")
|
||||
|
||||
# FURB101
|
||||
with open("a.txt") as a, open("b.txt", "rb") as b:
|
||||
x = a.read()
|
||||
y = b.read()
|
||||
|
||||
# FURB101
|
||||
with foo() as a, open("file.txt") as b, foo() as c:
|
||||
# We have other things in here, multiple with items, but
|
||||
# the user reads the whole file and that bit they can replace.
|
||||
bar(a)
|
||||
bar(bar(a + b.read()))
|
||||
bar(c)
|
||||
|
||||
|
||||
# Non-errors.
|
||||
|
||||
f2 = open("file2.txt")
|
||||
with open("file.txt") as f:
|
||||
x = f2.read()
|
||||
|
||||
with open("file.txt") as f:
|
||||
# Path.read_text() does not support size, so ignore this
|
||||
x = f.read(100)
|
||||
|
||||
# mode is dynamic
|
||||
with open("file.txt", foo()) as f:
|
||||
x = f.read()
|
||||
|
||||
# keyword mode is incorrect
|
||||
with open("file.txt", mode="a+") as f:
|
||||
x = f.read()
|
||||
|
||||
# enables line buffering, not supported in read_text()
|
||||
with open("file.txt", buffering=1) as f:
|
||||
x = f.read()
|
||||
|
||||
# force CRLF, not supported in read_text()
|
||||
with open("file.txt", newline="\r\n") as f:
|
||||
x = f.read()
|
||||
|
||||
# dont mistake "newline" for "mode"
|
||||
with open("file.txt", newline="b") as f:
|
||||
x = f.read()
|
||||
|
||||
# I guess we can possibly also report this case, but the question
|
||||
# is why the user would put "r+" here in the first place.
|
||||
with open("file.txt", "r+") as f:
|
||||
x = f.read()
|
||||
|
||||
# Even though we read the whole file, we do other things.
|
||||
with open("file.txt") as f:
|
||||
x = f.read()
|
||||
f.seek(0)
|
||||
x += f.read(100)
|
||||
|
||||
# This shouldn't error, since it could contain unsupported arguments, like `buffering`.
|
||||
with open(*filename) as f:
|
||||
x = f.read()
|
||||
|
||||
# This shouldn't error, since it could contain unsupported arguments, like `buffering`.
|
||||
with open(**kwargs) as f:
|
||||
x = f.read()
|
||||
|
||||
# This shouldn't error, since it could contain unsupported arguments, like `buffering`.
|
||||
with open("file.txt", **kwargs) as f:
|
||||
x = f.read()
|
||||
|
||||
# This shouldn't error, since it could contain unsupported arguments, like `buffering`.
|
||||
with open("file.txt", mode="r", **kwargs) as f:
|
||||
x = f.read()
|
||||
|
||||
# This could error (but doesn't), since it can't contain unsupported arguments, like
|
||||
# `buffering`.
|
||||
with open(*filename, mode="r") as f:
|
||||
x = f.read()
|
||||
|
||||
# This could error (but doesn't), since it can't contain unsupported arguments, like
|
||||
# `buffering`.
|
||||
with open(*filename, file="file.txt", mode="r") as f:
|
||||
x = f.read()
|
||||
@@ -31,4 +31,3 @@ print("", "foo", sep=",")
|
||||
print("foo", "", sep=",")
|
||||
print("foo", "", "bar", "", sep=",")
|
||||
print("", "", **kwargs)
|
||||
print(*args, sep=",")
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
foo: object
|
||||
|
||||
# Errors.
|
||||
|
||||
if isinstance(foo, type(None)):
|
||||
pass
|
||||
|
||||
if isinstance(foo, (type(None))):
|
||||
pass
|
||||
|
||||
if isinstance(foo, (type(None), type(None), type(None))):
|
||||
pass
|
||||
|
||||
if isinstance(foo, None | None):
|
||||
pass
|
||||
|
||||
if isinstance(foo, (None | None)):
|
||||
pass
|
||||
|
||||
if isinstance(foo, None | type(None)):
|
||||
pass
|
||||
|
||||
if isinstance(foo, (None | type(None))):
|
||||
pass
|
||||
|
||||
# A bit contrived, but is both technically valid and equivalent to the above.
|
||||
if isinstance(foo, (type(None) | ((((type(None))))) | ((None | type(None))))):
|
||||
pass
|
||||
|
||||
|
||||
# Okay.
|
||||
|
||||
if isinstance(foo, int):
|
||||
pass
|
||||
|
||||
if isinstance(foo, (int)):
|
||||
pass
|
||||
|
||||
if isinstance(foo, (int, str)):
|
||||
pass
|
||||
|
||||
if isinstance(foo, (int, type(None), str)):
|
||||
pass
|
||||
|
||||
# This is a TypeError, which the rule ignores.
|
||||
if isinstance(foo, None):
|
||||
pass
|
||||
|
||||
# This is also a TypeError, which the rule ignores.
|
||||
if isinstance(foo, (None,)):
|
||||
pass
|
||||
@@ -43,6 +43,3 @@ if 1 is {1}:
|
||||
|
||||
if "a" == "a":
|
||||
pass
|
||||
|
||||
if 1 in {*[1]}:
|
||||
pass
|
||||
|
||||
@@ -21,9 +21,6 @@ def f() -> None:
|
||||
# Invalid (but external)
|
||||
d = 1 # noqa: F841, V101
|
||||
|
||||
# Invalid (but external)
|
||||
d = 1 # noqa: V500
|
||||
|
||||
# fmt: off
|
||||
# Invalid - no space before #
|
||||
d = 1# noqa: E501
|
||||
|
||||
@@ -45,11 +45,3 @@ x = f"string { # And here's a comment with an unusual parenthesis: )
|
||||
# And here's a comment with a greek alpha: ∗
|
||||
foo # And here's a comment with an unusual punctuation mark: ᜵
|
||||
}"
|
||||
|
||||
# At runtime the attribute will be stored as Greek small letter mu instead of
|
||||
# micro sign because of PEP 3131's NFKC normalization
|
||||
class Labware:
|
||||
µL = 1.5
|
||||
|
||||
|
||||
assert getattr(Labware(), "µL") == 1.5
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
[tool.ruff]
|
||||
src = [".", "python_modules/*"]
|
||||
exclude = ["examples/excluded"]
|
||||
|
||||
[tool.ruff.lint]
|
||||
extend-select = ["I001"]
|
||||
extend-ignore = ["F841"]
|
||||
|
||||
@@ -10,7 +10,6 @@ pub(crate) fn bindings(checker: &mut Checker) {
|
||||
if !checker.any_enabled(&[
|
||||
Rule::InvalidAllFormat,
|
||||
Rule::InvalidAllObject,
|
||||
Rule::NonAsciiName,
|
||||
Rule::UnaliasedCollectionsAbcSetImport,
|
||||
Rule::UnconventionalImportAlias,
|
||||
Rule::UnusedVariable,
|
||||
@@ -50,11 +49,6 @@ pub(crate) fn bindings(checker: &mut Checker) {
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if checker.enabled(Rule::NonAsciiName) {
|
||||
if let Some(diagnostic) = pylint::rules::non_ascii_name(binding, checker.locator) {
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if checker.enabled(Rule::UnconventionalImportAlias) {
|
||||
if let Some(diagnostic) = flake8_import_conventions::rules::unconventional_import_alias(
|
||||
checker,
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
use ruff_python_ast::Expr;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::codes::Rule;
|
||||
use crate::rules::{flake8_pie, pylint};
|
||||
|
||||
/// Run lint rules over all deferred lambdas in the [`SemanticModel`].
|
||||
pub(crate) fn deferred_lambdas(checker: &mut Checker) {
|
||||
while !checker.deferred.lambdas.is_empty() {
|
||||
let lambdas = std::mem::take(&mut checker.deferred.lambdas);
|
||||
for snapshot in lambdas {
|
||||
checker.semantic.restore(snapshot);
|
||||
|
||||
let Some(Expr::Lambda(lambda)) = checker.semantic.current_expression() else {
|
||||
unreachable!("Expected Expr::Lambda");
|
||||
};
|
||||
|
||||
if checker.enabled(Rule::UnnecessaryLambda) {
|
||||
pylint::rules::unnecessary_lambda(checker, lambda);
|
||||
}
|
||||
if checker.enabled(Rule::ReimplementedListBuiltin) {
|
||||
flake8_pie::rules::reimplemented_list_builtin(checker, lambda);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -170,7 +170,7 @@ pub(crate) fn definitions(checker: &mut Checker) {
|
||||
expr.start(),
|
||||
));
|
||||
|
||||
if expr.implicit_concatenated {
|
||||
if pydocstyle::helpers::should_ignore_docstring(expr) {
|
||||
#[allow(deprecated)]
|
||||
let location = checker.locator.compute_source_location(expr.start());
|
||||
warn_user!(
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use ruff_python_ast::{self as ast, Arguments, Expr, ExprContext, Operator};
|
||||
use ruff_python_ast::{self as ast, Arguments, Constant, Expr, ExprContext, Operator};
|
||||
use ruff_python_literal::cformat::{CFormatError, CFormatErrorType};
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
@@ -158,9 +158,6 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::NumpyDeprecatedFunction) {
|
||||
numpy::rules::deprecated_function(checker, expr);
|
||||
}
|
||||
if checker.enabled(Rule::Numpy2Deprecation) {
|
||||
numpy::rules::numpy_2_0_deprecation(checker, expr);
|
||||
}
|
||||
if checker.enabled(Rule::CollectionsNamedTuple) {
|
||||
flake8_pyi::rules::collections_named_tuple(checker, expr);
|
||||
}
|
||||
@@ -317,9 +314,6 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::NumpyDeprecatedFunction) {
|
||||
numpy::rules::deprecated_function(checker, expr);
|
||||
}
|
||||
if checker.enabled(Rule::Numpy2Deprecation) {
|
||||
numpy::rules::numpy_2_0_deprecation(checker, expr);
|
||||
}
|
||||
if checker.enabled(Rule::DeprecatedMockImport) {
|
||||
pyupgrade::rules::deprecated_mock_attribute(checker, expr);
|
||||
}
|
||||
@@ -369,18 +363,20 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
]) {
|
||||
if let Expr::Attribute(ast::ExprAttribute { value, attr, .. }) = func.as_ref() {
|
||||
let attr = attr.as_str();
|
||||
if let Expr::StringLiteral(ast::ExprStringLiteral { value: string, .. }) =
|
||||
value.as_ref()
|
||||
if let Expr::Constant(ast::ExprConstant {
|
||||
value: Constant::Str(val),
|
||||
..
|
||||
}) = value.as_ref()
|
||||
{
|
||||
if attr == "join" {
|
||||
// "...".join(...) call
|
||||
if checker.enabled(Rule::StaticJoinToFString) {
|
||||
flynt::rules::static_join_to_fstring(checker, expr, string);
|
||||
flynt::rules::static_join_to_fstring(checker, expr, val);
|
||||
}
|
||||
} else if attr == "format" {
|
||||
// "...".format(...) call
|
||||
let location = expr.range();
|
||||
match pyflakes::format::FormatSummary::try_from(string.as_ref()) {
|
||||
match pyflakes::format::FormatSummary::try_from(val.as_ref()) {
|
||||
Err(e) => {
|
||||
if checker.enabled(Rule::StringDotFormatInvalidFormat) {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
@@ -425,7 +421,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
|
||||
if checker.enabled(Rule::BadStringFormatCharacter) {
|
||||
pylint::rules::bad_string_format_character::call(
|
||||
checker, string, location,
|
||||
checker, val, location,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -753,9 +749,6 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::SysExitAlias) {
|
||||
pylint::rules::sys_exit_alias(checker, func);
|
||||
}
|
||||
if checker.enabled(Rule::BadOpenMode) {
|
||||
pylint::rules::bad_open_mode(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::BadStrStripCall) {
|
||||
pylint::rules::bad_str_strip_call(checker, func, args);
|
||||
}
|
||||
@@ -915,9 +908,6 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::ExceptionWithoutExcInfo) {
|
||||
flake8_logging::rules::exception_without_exc_info(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::IsinstanceTypeNone) {
|
||||
refurb::rules::isinstance_type_none(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::ImplicitCwd) {
|
||||
refurb::rules::no_implicit_cwd(checker, call);
|
||||
}
|
||||
@@ -997,7 +987,11 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
right,
|
||||
range: _,
|
||||
}) => {
|
||||
if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) = left.as_ref() {
|
||||
if let Expr::Constant(ast::ExprConstant {
|
||||
value: Constant::Str(ast::StringConstant { value, .. }),
|
||||
..
|
||||
}) = left.as_ref()
|
||||
{
|
||||
if checker.any_enabled(&[
|
||||
Rule::PercentFormatInvalidFormat,
|
||||
Rule::PercentFormatExpectedMapping,
|
||||
@@ -1234,29 +1228,38 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
refurb::rules::single_item_membership_test(checker, expr, left, ops, comparators);
|
||||
}
|
||||
}
|
||||
Expr::NumberLiteral(_) => {
|
||||
Expr::Constant(ast::ExprConstant {
|
||||
value: Constant::Int(_) | Constant::Float(_) | Constant::Complex { .. },
|
||||
range: _,
|
||||
}) => {
|
||||
if checker.source_type.is_stub() && checker.enabled(Rule::NumericLiteralTooLong) {
|
||||
flake8_pyi::rules::numeric_literal_too_long(checker, expr);
|
||||
}
|
||||
}
|
||||
Expr::BytesLiteral(_) => {
|
||||
Expr::Constant(ast::ExprConstant {
|
||||
value: Constant::Bytes(_),
|
||||
range: _,
|
||||
}) => {
|
||||
if checker.source_type.is_stub() && checker.enabled(Rule::StringOrBytesTooLong) {
|
||||
flake8_pyi::rules::string_or_bytes_too_long(checker, expr);
|
||||
}
|
||||
}
|
||||
Expr::StringLiteral(string) => {
|
||||
Expr::Constant(ast::ExprConstant {
|
||||
value: Constant::Str(value),
|
||||
range: _,
|
||||
}) => {
|
||||
if checker.enabled(Rule::HardcodedBindAllInterfaces) {
|
||||
if let Some(diagnostic) =
|
||||
flake8_bandit::rules::hardcoded_bind_all_interfaces(string)
|
||||
flake8_bandit::rules::hardcoded_bind_all_interfaces(value, expr.range())
|
||||
{
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if checker.enabled(Rule::HardcodedTempFile) {
|
||||
flake8_bandit::rules::hardcoded_tmp_directory(checker, string);
|
||||
flake8_bandit::rules::hardcoded_tmp_directory(checker, expr, value);
|
||||
}
|
||||
if checker.enabled(Rule::UnicodeKindPrefix) {
|
||||
pyupgrade::rules::unicode_kind_prefix(checker, string);
|
||||
pyupgrade::rules::unicode_kind_prefix(checker, expr, value.unicode);
|
||||
}
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::StringOrBytesTooLong) {
|
||||
@@ -1264,17 +1267,23 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::Lambda(
|
||||
lambda @ ast::ExprLambda {
|
||||
parameters: _,
|
||||
body: _,
|
||||
range: _,
|
||||
},
|
||||
) => {
|
||||
if checker.enabled(Rule::ReimplementedListBuiltin) {
|
||||
flake8_pie::rules::reimplemented_list_builtin(checker, lambda);
|
||||
}
|
||||
}
|
||||
Expr::IfExp(ast::ExprIfExp {
|
||||
test,
|
||||
body,
|
||||
orelse,
|
||||
range: _,
|
||||
}) => {
|
||||
if checker.enabled(Rule::IfElseBlockInsteadOfDictGet) {
|
||||
flake8_simplify::rules::if_exp_instead_of_dict_get(
|
||||
checker, expr, test, body, orelse,
|
||||
);
|
||||
}
|
||||
if checker.enabled(Rule::IfExprWithTrueFalse) {
|
||||
flake8_simplify::rules::if_expr_with_true_false(checker, expr, test, body, orelse);
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
pub(super) use bindings::bindings;
|
||||
pub(super) use comprehension::comprehension;
|
||||
pub(super) use deferred_for_loops::deferred_for_loops;
|
||||
pub(super) use deferred_lambdas::deferred_lambdas;
|
||||
pub(super) use deferred_scopes::deferred_scopes;
|
||||
pub(super) use definitions::definitions;
|
||||
pub(super) use except_handler::except_handler;
|
||||
@@ -16,7 +15,6 @@ pub(super) use unresolved_references::unresolved_references;
|
||||
mod bindings;
|
||||
mod comprehension;
|
||||
mod deferred_for_loops;
|
||||
mod deferred_lambdas;
|
||||
mod deferred_scopes;
|
||||
mod definitions;
|
||||
mod except_handler;
|
||||
|
||||
@@ -12,8 +12,8 @@ use crate::rules::{
|
||||
airflow, flake8_bandit, flake8_boolean_trap, flake8_bugbear, flake8_builtins, flake8_debugger,
|
||||
flake8_django, flake8_errmsg, flake8_import_conventions, flake8_pie, flake8_pyi,
|
||||
flake8_pytest_style, flake8_raise, flake8_return, flake8_simplify, flake8_slots,
|
||||
flake8_tidy_imports, flake8_trio, flake8_type_checking, mccabe, pandas_vet, pep8_naming,
|
||||
perflint, pycodestyle, pyflakes, pygrep_hooks, pylint, pyupgrade, refurb, ruff, tryceratops,
|
||||
flake8_tidy_imports, flake8_type_checking, mccabe, pandas_vet, pep8_naming, perflint,
|
||||
pycodestyle, pyflakes, pygrep_hooks, pylint, pyupgrade, refurb, ruff, tryceratops,
|
||||
};
|
||||
use crate::settings::types::PythonVersion;
|
||||
|
||||
@@ -21,9 +21,6 @@ use crate::settings::types::PythonVersion;
|
||||
pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
match stmt {
|
||||
Stmt::Global(ast::StmtGlobal { names, range: _ }) => {
|
||||
if checker.enabled(Rule::GlobalAtModuleLevel) {
|
||||
pylint::rules::global_at_module_level(checker, stmt);
|
||||
}
|
||||
if checker.enabled(Rule::AmbiguousVariableName) {
|
||||
checker.diagnostics.extend(names.iter().filter_map(|name| {
|
||||
pycodestyle::rules::ambiguous_variable_name(name, name.range())
|
||||
@@ -530,9 +527,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::ModuleImportNotAtTopOfFile) {
|
||||
pycodestyle::rules::module_import_not_at_top_of_file(checker, stmt);
|
||||
}
|
||||
if checker.enabled(Rule::ImportOutsideTopLevel) {
|
||||
pylint::rules::import_outside_top_level(checker, stmt);
|
||||
}
|
||||
if checker.enabled(Rule::GlobalStatement) {
|
||||
for name in names {
|
||||
if let Some(asname) = name.asname.as_ref() {
|
||||
@@ -550,9 +544,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
}
|
||||
|
||||
for alias in names {
|
||||
if checker.enabled(Rule::NonAsciiImportName) {
|
||||
pylint::rules::non_ascii_module_import(checker, alias);
|
||||
}
|
||||
if let Some(asname) = &alias.asname {
|
||||
if checker.enabled(Rule::BuiltinVariableShadowing) {
|
||||
flake8_builtins::rules::builtin_variable_shadowing(
|
||||
@@ -704,14 +695,11 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
range: _,
|
||||
},
|
||||
) => {
|
||||
let level = *level;
|
||||
let module = module.as_deref();
|
||||
let level = *level;
|
||||
if checker.enabled(Rule::ModuleImportNotAtTopOfFile) {
|
||||
pycodestyle::rules::module_import_not_at_top_of_file(checker, stmt);
|
||||
}
|
||||
if checker.enabled(Rule::ImportOutsideTopLevel) {
|
||||
pylint::rules::import_outside_top_level(checker, stmt);
|
||||
}
|
||||
if checker.enabled(Rule::GlobalStatement) {
|
||||
for name in names {
|
||||
if let Some(asname) = name.asname.as_ref() {
|
||||
@@ -721,11 +709,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
}
|
||||
}
|
||||
}
|
||||
if checker.enabled(Rule::NonAsciiImportName) {
|
||||
for alias in names {
|
||||
pylint::rules::non_ascii_module_import(checker, alias);
|
||||
}
|
||||
}
|
||||
if checker.enabled(Rule::UnnecessaryFutureImport) {
|
||||
if checker.settings.target_version >= PythonVersion::Py37 {
|
||||
if let Some("__future__") = module {
|
||||
@@ -1058,16 +1041,16 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
flake8_simplify::rules::if_with_same_arms(checker, checker.locator, if_);
|
||||
}
|
||||
if checker.enabled(Rule::NeedlessBool) {
|
||||
flake8_simplify::rules::needless_bool(checker, if_);
|
||||
flake8_simplify::rules::needless_bool(checker, stmt);
|
||||
}
|
||||
if checker.enabled(Rule::IfElseBlockInsteadOfDictLookup) {
|
||||
flake8_simplify::rules::if_else_block_instead_of_dict_lookup(checker, if_);
|
||||
flake8_simplify::rules::manual_dict_lookup(checker, if_);
|
||||
}
|
||||
if checker.enabled(Rule::IfElseBlockInsteadOfIfExp) {
|
||||
flake8_simplify::rules::if_else_block_instead_of_if_exp(checker, if_);
|
||||
flake8_simplify::rules::use_ternary_operator(checker, stmt);
|
||||
}
|
||||
if checker.enabled(Rule::IfElseBlockInsteadOfDictGet) {
|
||||
flake8_simplify::rules::if_else_block_instead_of_dict_get(checker, if_);
|
||||
flake8_simplify::rules::use_dict_get_with_default(checker, if_);
|
||||
}
|
||||
if checker.enabled(Rule::TypeCheckWithoutTypeError) {
|
||||
tryceratops::rules::type_check_without_type_error(
|
||||
@@ -1189,15 +1172,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::RedefinedLoopName) {
|
||||
pylint::rules::redefined_loop_name(checker, stmt);
|
||||
}
|
||||
if checker.enabled(Rule::ReadWholeFile) {
|
||||
refurb::rules::read_whole_file(checker, with_stmt);
|
||||
}
|
||||
if checker.enabled(Rule::UselessWithLock) {
|
||||
pylint::rules::useless_with_lock(checker, with_stmt);
|
||||
}
|
||||
if checker.enabled(Rule::TrioTimeoutWithoutAwait) {
|
||||
flake8_trio::rules::timeout_without_await(checker, with_stmt, items);
|
||||
}
|
||||
}
|
||||
Stmt::While(ast::StmtWhile { body, orelse, .. }) => {
|
||||
if checker.enabled(Rule::FunctionUsesLoopVariable) {
|
||||
@@ -1337,7 +1311,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
}
|
||||
}
|
||||
Stmt::Assign(assign @ ast::StmtAssign { targets, value, .. }) => {
|
||||
checker.enabled(Rule::NonAsciiName);
|
||||
if checker.enabled(Rule::LambdaAssignment) {
|
||||
if let [target] = &targets[..] {
|
||||
pycodestyle::rules::lambda_assignment(checker, target, value, None, stmt);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use ruff_python_ast::Expr;
|
||||
use ruff_python_ast::{Expr, TypeParam};
|
||||
use ruff_python_semantic::{ScopeId, Snapshot};
|
||||
use ruff_text_size::TextRange;
|
||||
|
||||
@@ -10,8 +10,8 @@ pub(crate) struct Deferred<'a> {
|
||||
pub(crate) scopes: Vec<ScopeId>,
|
||||
pub(crate) string_type_definitions: Vec<(TextRange, &'a str, Snapshot)>,
|
||||
pub(crate) future_type_definitions: Vec<(&'a Expr, Snapshot)>,
|
||||
pub(crate) type_param_definitions: Vec<(&'a Expr, Snapshot)>,
|
||||
pub(crate) type_param_definitions: Vec<(&'a TypeParam, Snapshot)>,
|
||||
pub(crate) functions: Vec<Snapshot>,
|
||||
pub(crate) lambdas: Vec<Snapshot>,
|
||||
pub(crate) lambdas: Vec<(&'a Expr, Snapshot)>,
|
||||
pub(crate) for_loops: Vec<Snapshot>,
|
||||
}
|
||||
|
||||
@@ -31,8 +31,9 @@ use std::path::Path;
|
||||
use itertools::Itertools;
|
||||
use log::debug;
|
||||
use ruff_python_ast::{
|
||||
self as ast, Arguments, Comprehension, ElifElseClause, ExceptHandler, Expr, ExprContext,
|
||||
Keyword, MatchCase, Parameter, ParameterWithDefault, Parameters, Pattern, Stmt, Suite, UnaryOp,
|
||||
self as ast, Arguments, Comprehension, Constant, ElifElseClause, ExceptHandler, Expr,
|
||||
ExprContext, Keyword, MatchCase, Parameter, ParameterWithDefault, Parameters, Pattern, Stmt,
|
||||
Suite, UnaryOp,
|
||||
};
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
|
||||
@@ -51,8 +52,8 @@ use ruff_python_parser::typing::{parse_type_annotation, AnnotationKind};
|
||||
use ruff_python_semantic::analyze::{typing, visibility};
|
||||
use ruff_python_semantic::{
|
||||
BindingFlags, BindingId, BindingKind, Exceptions, Export, FromImport, Globals, Import, Module,
|
||||
ModuleKind, NodeId, ScopeId, ScopeKind, SemanticModel, SemanticModelFlags, Snapshot,
|
||||
StarImport, SubmoduleImport,
|
||||
ModuleKind, NodeId, ScopeId, ScopeKind, SemanticModel, SemanticModelFlags, StarImport,
|
||||
SubmoduleImport,
|
||||
};
|
||||
use ruff_python_stdlib::builtins::{BUILTINS, MAGIC_GLOBALS};
|
||||
use ruff_source_file::Locator;
|
||||
@@ -581,9 +582,9 @@ where
|
||||
if let Some(type_params) = type_params {
|
||||
self.visit_type_params(type_params);
|
||||
}
|
||||
self.deferred
|
||||
.type_param_definitions
|
||||
.push((value, self.semantic.snapshot()));
|
||||
// The value in a `type` alias has annotation semantics, in that it's never
|
||||
// evaluated at runtime.
|
||||
self.visit_annotation(value);
|
||||
self.semantic.pop_scope();
|
||||
self.visit_expr(name);
|
||||
}
|
||||
@@ -786,7 +787,11 @@ where
|
||||
&& self.semantic.in_type_definition()
|
||||
&& self.semantic.future_annotations()
|
||||
{
|
||||
if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) = expr {
|
||||
if let Expr::Constant(ast::ExprConstant {
|
||||
value: Constant::Str(value),
|
||||
..
|
||||
}) = expr
|
||||
{
|
||||
self.deferred.string_type_definitions.push((
|
||||
expr.range(),
|
||||
value,
|
||||
@@ -897,7 +902,7 @@ where
|
||||
}
|
||||
|
||||
self.semantic.push_scope(ScopeKind::Lambda(lambda));
|
||||
self.deferred.lambdas.push(self.semantic.snapshot());
|
||||
self.deferred.lambdas.push((expr, self.semantic.snapshot()));
|
||||
}
|
||||
Expr::IfExp(ast::ExprIfExp {
|
||||
test,
|
||||
@@ -1181,7 +1186,10 @@ where
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => {
|
||||
Expr::Constant(ast::ExprConstant {
|
||||
value: Constant::Str(value),
|
||||
range: _,
|
||||
}) => {
|
||||
if self.semantic.in_type_definition()
|
||||
&& !self.semantic.in_literal()
|
||||
&& !self.semantic.in_f_string()
|
||||
@@ -1381,14 +1389,9 @@ where
|
||||
}
|
||||
}
|
||||
// Step 2: Traversal
|
||||
if let ast::TypeParam::TypeVar(ast::TypeParamTypeVar {
|
||||
bound: Some(bound), ..
|
||||
}) = type_param
|
||||
{
|
||||
self.deferred
|
||||
.type_param_definitions
|
||||
.push((bound, self.semantic.snapshot()));
|
||||
}
|
||||
self.deferred
|
||||
.type_param_definitions
|
||||
.push((type_param, self.semantic.snapshot()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1763,9 +1766,12 @@ impl<'a> Checker<'a> {
|
||||
for (type_param, snapshot) in type_params {
|
||||
self.semantic.restore(snapshot);
|
||||
|
||||
self.semantic.flags |=
|
||||
SemanticModelFlags::TYPE_PARAM_DEFINITION | SemanticModelFlags::TYPE_DEFINITION;
|
||||
self.visit_expr(type_param);
|
||||
if let ast::TypeParam::TypeVar(ast::TypeParamTypeVar {
|
||||
bound: Some(bound), ..
|
||||
}) = type_param
|
||||
{
|
||||
self.visit_annotation(bound);
|
||||
}
|
||||
}
|
||||
}
|
||||
self.semantic.restore(snapshot);
|
||||
@@ -1826,15 +1832,15 @@ impl<'a> Checker<'a> {
|
||||
for snapshot in deferred_functions {
|
||||
self.semantic.restore(snapshot);
|
||||
|
||||
let Stmt::FunctionDef(ast::StmtFunctionDef {
|
||||
if let Stmt::FunctionDef(ast::StmtFunctionDef {
|
||||
body, parameters, ..
|
||||
}) = self.semantic.current_statement()
|
||||
else {
|
||||
{
|
||||
self.visit_parameters(parameters);
|
||||
self.visit_body(body);
|
||||
} else {
|
||||
unreachable!("Expected Stmt::FunctionDef")
|
||||
};
|
||||
|
||||
self.visit_parameters(parameters);
|
||||
self.visit_body(body);
|
||||
}
|
||||
}
|
||||
}
|
||||
self.semantic.restore(snapshot);
|
||||
@@ -1842,31 +1848,26 @@ impl<'a> Checker<'a> {
|
||||
|
||||
fn visit_deferred_lambdas(&mut self) {
|
||||
let snapshot = self.semantic.snapshot();
|
||||
let mut deferred: Vec<Snapshot> = Vec::with_capacity(self.deferred.lambdas.len());
|
||||
while !self.deferred.lambdas.is_empty() {
|
||||
let lambdas = std::mem::take(&mut self.deferred.lambdas);
|
||||
for snapshot in lambdas {
|
||||
for (expr, snapshot) in lambdas {
|
||||
self.semantic.restore(snapshot);
|
||||
|
||||
let Some(Expr::Lambda(ast::ExprLambda {
|
||||
if let Expr::Lambda(ast::ExprLambda {
|
||||
parameters,
|
||||
body,
|
||||
range: _,
|
||||
})) = self.semantic.current_expression()
|
||||
else {
|
||||
}) = expr
|
||||
{
|
||||
if let Some(parameters) = parameters {
|
||||
self.visit_parameters(parameters);
|
||||
}
|
||||
self.visit_expr(body);
|
||||
} else {
|
||||
unreachable!("Expected Expr::Lambda");
|
||||
};
|
||||
|
||||
if let Some(parameters) = parameters {
|
||||
self.visit_parameters(parameters);
|
||||
}
|
||||
self.visit_expr(body);
|
||||
|
||||
deferred.push(snapshot);
|
||||
}
|
||||
}
|
||||
// Reset the deferred lambdas, so we can analyze them later on.
|
||||
self.deferred.lambdas = deferred;
|
||||
self.semantic.restore(snapshot);
|
||||
}
|
||||
|
||||
@@ -1986,7 +1987,6 @@ pub(crate) fn check_ast(
|
||||
checker.visit_exports();
|
||||
|
||||
// Check docstrings, bindings, and unresolved references.
|
||||
analyze::deferred_lambdas(&mut checker);
|
||||
analyze::deferred_for_loops(&mut checker);
|
||||
analyze::definitions(&mut checker);
|
||||
analyze::bindings(&mut checker);
|
||||
|
||||
@@ -128,10 +128,7 @@ pub(crate) fn check_noqa(
|
||||
}
|
||||
|
||||
if line.matches.iter().any(|match_| *match_ == code)
|
||||
|| settings
|
||||
.external
|
||||
.iter()
|
||||
.any(|external| code.starts_with(external))
|
||||
|| settings.external.contains(code)
|
||||
{
|
||||
valid_codes.push(code);
|
||||
} else {
|
||||
|
||||
@@ -95,7 +95,6 @@ mod tests {
|
||||
|
||||
use crate::line_width::LineLength;
|
||||
use crate::registry::Rule;
|
||||
use crate::rules::pycodestyle;
|
||||
use crate::settings::LinterSettings;
|
||||
|
||||
use super::check_physical_lines;
|
||||
@@ -115,10 +114,7 @@ mod tests {
|
||||
&indexer,
|
||||
&[],
|
||||
&LinterSettings {
|
||||
pycodestyle: pycodestyle::settings::Settings {
|
||||
max_line_length: line_length,
|
||||
..pycodestyle::settings::Settings::default()
|
||||
},
|
||||
line_length,
|
||||
..LinterSettings::for_rule(Rule::LineTooLong)
|
||||
},
|
||||
)
|
||||
|
||||
@@ -211,9 +211,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pylint, "C0205") => (RuleGroup::Stable, rules::pylint::rules::SingleStringSlots),
|
||||
(Pylint, "C0208") => (RuleGroup::Stable, rules::pylint::rules::IterationOverSet),
|
||||
(Pylint, "C0414") => (RuleGroup::Stable, rules::pylint::rules::UselessImportAlias),
|
||||
(Pylint, "C0415") => (RuleGroup::Preview, rules::pylint::rules::ImportOutsideTopLevel),
|
||||
(Pylint, "C2401") => (RuleGroup::Preview, rules::pylint::rules::NonAsciiName),
|
||||
(Pylint, "C2403") => (RuleGroup::Preview, rules::pylint::rules::NonAsciiImportName),
|
||||
#[allow(deprecated)]
|
||||
(Pylint, "C1901") => (RuleGroup::Nursery, rules::pylint::rules::CompareToEmptyString),
|
||||
(Pylint, "C3002") => (RuleGroup::Stable, rules::pylint::rules::UnnecessaryDirectLambdaCall),
|
||||
@@ -261,24 +258,20 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pylint, "R6201") => (RuleGroup::Preview, rules::pylint::rules::LiteralMembership),
|
||||
#[allow(deprecated)]
|
||||
(Pylint, "R6301") => (RuleGroup::Nursery, rules::pylint::rules::NoSelfUse),
|
||||
(Pylint, "W0108") => (RuleGroup::Preview, rules::pylint::rules::UnnecessaryLambda),
|
||||
(Pylint, "W0120") => (RuleGroup::Stable, rules::pylint::rules::UselessElseOnLoop),
|
||||
(Pylint, "W0127") => (RuleGroup::Stable, rules::pylint::rules::SelfAssigningVariable),
|
||||
(Pylint, "W0129") => (RuleGroup::Stable, rules::pylint::rules::AssertOnStringLiteral),
|
||||
(Pylint, "W0131") => (RuleGroup::Stable, rules::pylint::rules::NamedExprWithoutContext),
|
||||
(Pylint, "W0406") => (RuleGroup::Stable, rules::pylint::rules::ImportSelf),
|
||||
(Pylint, "W0602") => (RuleGroup::Stable, rules::pylint::rules::GlobalVariableNotAssigned),
|
||||
(Pylint, "W0604") => (RuleGroup::Preview, rules::pylint::rules::GlobalAtModuleLevel),
|
||||
(Pylint, "W0603") => (RuleGroup::Stable, rules::pylint::rules::GlobalStatement),
|
||||
(Pylint, "W0711") => (RuleGroup::Stable, rules::pylint::rules::BinaryOpException),
|
||||
(Pylint, "W1501") => (RuleGroup::Preview, rules::pylint::rules::BadOpenMode),
|
||||
(Pylint, "W1508") => (RuleGroup::Stable, rules::pylint::rules::InvalidEnvvarDefault),
|
||||
(Pylint, "W1509") => (RuleGroup::Stable, rules::pylint::rules::SubprocessPopenPreexecFn),
|
||||
(Pylint, "W1510") => (RuleGroup::Stable, rules::pylint::rules::SubprocessRunWithoutCheck),
|
||||
(Pylint, "W1514") => (RuleGroup::Preview, rules::pylint::rules::UnspecifiedEncoding),
|
||||
#[allow(deprecated)]
|
||||
(Pylint, "W1641") => (RuleGroup::Nursery, rules::pylint::rules::EqWithoutHash),
|
||||
(Pylint, "W2101") => (RuleGroup::Preview, rules::pylint::rules::UselessWithLock),
|
||||
(Pylint, "R0904") => (RuleGroup::Preview, rules::pylint::rules::TooManyPublicMethods),
|
||||
(Pylint, "W2901") => (RuleGroup::Stable, rules::pylint::rules::RedefinedLoopName),
|
||||
#[allow(deprecated)]
|
||||
@@ -290,9 +283,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8Async, "101") => (RuleGroup::Stable, rules::flake8_async::rules::OpenSleepOrSubprocessInAsyncFunction),
|
||||
(Flake8Async, "102") => (RuleGroup::Stable, rules::flake8_async::rules::BlockingOsCallInAsyncFunction),
|
||||
|
||||
// flake8-trio
|
||||
(Flake8Trio, "100") => (RuleGroup::Preview, rules::flake8_trio::rules::TrioTimeoutWithoutAwait),
|
||||
|
||||
// flake8-builtins
|
||||
(Flake8Builtins, "001") => (RuleGroup::Stable, rules::flake8_builtins::rules::BuiltinVariableShadowing),
|
||||
(Flake8Builtins, "002") => (RuleGroup::Stable, rules::flake8_builtins::rules::BuiltinArgumentShadowing),
|
||||
@@ -859,7 +849,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Numpy, "001") => (RuleGroup::Stable, rules::numpy::rules::NumpyDeprecatedTypeAlias),
|
||||
(Numpy, "002") => (RuleGroup::Stable, rules::numpy::rules::NumpyLegacyRandom),
|
||||
(Numpy, "003") => (RuleGroup::Stable, rules::numpy::rules::NumpyDeprecatedFunction),
|
||||
(Numpy, "201") => (RuleGroup::Preview, rules::numpy::rules::Numpy2Deprecation),
|
||||
|
||||
// ruff
|
||||
(Ruff, "001") => (RuleGroup::Stable, rules::ruff::rules::AmbiguousUnicodeCharacterString),
|
||||
@@ -931,7 +920,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8Slots, "002") => (RuleGroup::Stable, rules::flake8_slots::rules::NoSlotsInNamedtupleSubclass),
|
||||
|
||||
// refurb
|
||||
(Refurb, "101") => (RuleGroup::Preview, rules::refurb::rules::ReadWholeFile),
|
||||
(Refurb, "105") => (RuleGroup::Preview, rules::refurb::rules::PrintEmptyString),
|
||||
#[allow(deprecated)]
|
||||
(Refurb, "113") => (RuleGroup::Nursery, rules::refurb::rules::RepeatedAppend),
|
||||
@@ -942,7 +930,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Refurb, "140") => (RuleGroup::Preview, rules::refurb::rules::ReimplementedStarmap),
|
||||
(Refurb, "145") => (RuleGroup::Preview, rules::refurb::rules::SliceCopy),
|
||||
(Refurb, "148") => (RuleGroup::Preview, rules::refurb::rules::UnnecessaryEnumerate),
|
||||
(Refurb, "168") => (RuleGroup::Preview, rules::refurb::rules::IsinstanceTypeNone),
|
||||
(Refurb, "171") => (RuleGroup::Preview, rules::refurb::rules::SingleItemMembershipTest),
|
||||
(Refurb, "177") => (RuleGroup::Preview, rules::refurb::rules::ImplicitCwd),
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@

use std::iter::FusedIterator;

use ruff_python_ast::{self as ast, Stmt, Suite};
use ruff_python_ast::{self as ast, Constant, Expr, Stmt, Suite};
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::Tok;
use ruff_text_size::{Ranged, TextSize};
@@ -69,15 +69,15 @@ struct StringLinesVisitor<'a> {

impl StatementVisitor<'_> for StringLinesVisitor<'_> {
fn visit_stmt(&mut self, stmt: &Stmt) {
if let Stmt::Expr(ast::StmtExpr {
value: expr,
range: _,
}) = stmt
{
if expr.is_string_literal_expr() {
if let Stmt::Expr(ast::StmtExpr { value, range: _ }) = stmt {
if let Expr::Constant(ast::ExprConstant {
value: Constant::Str(..),
..
}) = value.as_ref()
{
for line in UniversalNewlineIterator::with_offset(
self.locator.slice(expr.as_ref()),
expr.start(),
self.locator.slice(value.as_ref()),
value.start(),
) {
self.string_lines.push(line.start());
}
