Compare commits

1 commit

69fc2caa31 Add CI job to auto-update pre-commit dependencies weekly (Zanie, 2023-11-01 12:06:14 -05:00)
1859 changed files with 39093 additions and 100721 deletions


@@ -1,3 +1,37 @@
[alias]
dev = "run --package ruff_dev --bin ruff_dev"
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
[target.'cfg(all())']
rustflags = [
# CLIPPY LINT SETTINGS
# This is a workaround to configure lints for the entire workspace, pending the ability to configure this via TOML.
# See: `https://github.com/rust-lang/cargo/issues/5034`
# `https://github.com/EmbarkStudios/rust-ecosystem/issues/22#issuecomment-947011395`
"-Dunsafe_code",
"-Wclippy::pedantic",
# Allowed pedantic lints
"-Wclippy::char_lit_as_u8",
"-Aclippy::collapsible_else_if",
"-Aclippy::collapsible_if",
"-Aclippy::implicit_hasher",
"-Aclippy::match_same_arms",
"-Aclippy::missing_errors_doc",
"-Aclippy::missing_panics_doc",
"-Aclippy::module_name_repetitions",
"-Aclippy::must_use_candidate",
"-Aclippy::similar_names",
"-Aclippy::too_many_lines",
# Disallowed restriction lints
"-Wclippy::print_stdout",
"-Wclippy::print_stderr",
"-Wclippy::dbg_macro",
"-Wclippy::empty_drop",
"-Wclippy::empty_structs_with_brackets",
"-Wclippy::exit",
"-Wclippy::get_unwrap",
"-Wclippy::rc_buffer",
"-Wclippy::rc_mutex",
"-Wclippy::rest_pat_in_fully_bound_structs",
"-Wunreachable_pub"
]
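The same lint configuration can be exercised ad hoc from the command line; a minimal sketch mirroring the flags above (illustrative only, not part of this diff):

```shell
# Deny unsafe code, opt into the pedantic group, and re-allow a couple of the
# pedantic lints the workspace suppresses above.
cargo clippy --workspace --all-targets -- \
    -D unsafe_code \
    -W clippy::pedantic \
    -A clippy::module_name_repetitions \
    -A clippy::too_many_lines
```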

.gitattributes

@@ -3,8 +3,5 @@
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
ruff.schema.json linguist-generated=true text=auto eol=lf
*.md.snap linguist-language=Markdown

.github/release.yml (new file)

@@ -0,0 +1,29 @@
# https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes#configuring-automatically-generated-release-notes
changelog:
exclude:
labels:
- internal
- documentation
categories:
- title: Breaking Changes
labels:
- breaking
- title: Rules
labels:
- rule
- title: Settings
labels:
- configuration
- cli
- title: Bug Fixes
labels:
- bug
- title: Formatter
labels:
- formatter
- title: Preview
labels:
- preview
- title: Other Changes
labels:
- "*"
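The categories above are driven entirely by pull request labels; a hedged sketch of labeling a PR so its entry lands under "Rules" (the PR number is hypothetical):

```shell
# Hypothetical PR number; the label name matches the `rule` entry above.
gh pr edit 1234 --add-label rule
```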


@@ -23,13 +23,8 @@ jobs:
name: "Determine changes"
runs-on: ubuntu-latest
outputs:
# Flag that is raised when any code that affects linter is changed
linter: ${{ steps.changed.outputs.linter_any_changed }}
# Flag that is raised when any code that affects formatter is changed
formatter: ${{ steps.changed.outputs.formatter_any_changed }}
# Flag that is raised when any code is changed
# This is a superset of the linter and formatter
code: ${{ steps.changed.outputs.code_any_changed }}
steps:
- uses: actions/checkout@v4
with:
@@ -48,7 +43,6 @@ jobs:
- "!crates/ruff_dev/**"
- "!crates/ruff_shrinking/**"
- scripts/*
- python/**
- .github/workflows/ci.yaml
formatter:
@@ -64,15 +58,8 @@ jobs:
- crates/ruff_python_parser/**
- crates/ruff_dev/**
- scripts/*
- python/**
- .github/workflows/ci.yaml
code:
- "**/*"
- "!**/*.md"
- "!docs/**"
- "!assets/**"
cargo-fmt:
name: "cargo fmt"
runs-on: ubuntu-latest
@@ -85,8 +72,6 @@ jobs:
cargo-clippy:
name: "cargo clippy"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -101,8 +86,6 @@ jobs:
cargo-test-linux:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo test (linux)"
steps:
- uses: actions/checkout@v4
@@ -127,8 +110,6 @@ jobs:
cargo-test-windows:
runs-on: windows-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo test (windows)"
steps:
- uses: actions/checkout@v4
@@ -146,8 +127,6 @@ jobs:
cargo-test-wasm:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo test (wasm)"
steps:
- uses: actions/checkout@v4
@@ -167,8 +146,6 @@ jobs:
cargo-fuzz:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo fuzz"
steps:
- uses: actions/checkout@v4
@@ -186,8 +163,6 @@ jobs:
scripts:
name: "test scripts"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -209,10 +184,7 @@ jobs:
- cargo-test-linux
- determine_changes
# Only runs on pull requests, since that is the only way we can find the base version for comparison.
# Ecosystem check needs linter and/or formatter changes.
if: github.event_name == 'pull_request' && ${{
needs.determine_changes.outputs.code == 'true'
}}
if: github.event_name == 'pull_request'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
@@ -237,7 +209,7 @@ jobs:
run: |
pip install ./python/ruff-ecosystem
- name: Run `ruff check` stable ecosystem check
- name: Run `ruff check` ecosystem check
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
@@ -246,30 +218,13 @@ jobs:
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check
cat ecosystem-result-check-stable > $GITHUB_STEP_SUMMARY
echo "### Linter (stable)" > ecosystem-result
cat ecosystem-result-check-stable >> ecosystem-result
cat ecosystem-result-check > $GITHUB_STEP_SUMMARY
cat ecosystem-result-check > ecosystem-result
echo "" >> ecosystem-result
- name: Run `ruff check` preview ecosystem check
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
cat ecosystem-result-check-preview > $GITHUB_STEP_SUMMARY
echo "### Linter (preview)" >> ecosystem-result
cat ecosystem-result-check-preview >> ecosystem-result
echo "" >> ecosystem-result
- name: Run `ruff format` stable ecosystem check
- name: Run `ruff format` ecosystem check
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
@@ -278,28 +233,10 @@ jobs:
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format
cat ecosystem-result-format-stable > $GITHUB_STEP_SUMMARY
echo "### Formatter (stable)" >> ecosystem-result
cat ecosystem-result-format-stable >> ecosystem-result
echo "" >> ecosystem-result
- name: Run `ruff format` preview ecosystem check
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
cat ecosystem-result-format-preview > $GITHUB_STEP_SUMMARY
echo "### Formatter (preview)" >> ecosystem-result
cat ecosystem-result-format-preview >> ecosystem-result
echo "" >> ecosystem-result
cat ecosystem-result-format > $GITHUB_STEP_SUMMARY
cat ecosystem-result-format >> ecosystem-result
- name: Export pull request number
run: |
@@ -320,8 +257,6 @@ jobs:
cargo-udeps:
name: "cargo udeps"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
- name: "Install nightly Rust toolchain"
@@ -418,7 +353,7 @@ jobs:
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
run: mkdocs build --strict -f mkdocs.generated.yml
check-formatter-instability-and-black-similarity:
name: "formatter instabilities and black similarity"
@@ -441,10 +376,7 @@ jobs:
check-ruff-lsp:
name: "test ruff-lsp"
runs-on: ubuntu-latest
needs:
- cargo-test-linux
- determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
needs: cargo-test-linux
steps:
- uses: extractions/setup-just@v1
env:
@@ -482,8 +414,6 @@ jobs:
benchmarks:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- name: "Checkout Branch"
uses: actions/checkout@v4
@@ -502,7 +432,7 @@ jobs:
run: cargo codspeed build --features codspeed -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@v2
uses: CodSpeedHQ/action@v1
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}
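Several of the ecosystem steps above pair `set -eo pipefail` with `tee`; a quick standalone illustration of why (plain bash, independent of the workflow):

```shell
# Without pipefail, a pipeline's exit status is that of the last command,
# so a failing check piped into `tee` still reports success.
false | tee /dev/null; echo $?   # prints 0

set -o pipefail
false | tee /dev/null; echo $?   # prints 1, so the failure is no longer hidden
```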


@@ -44,7 +44,7 @@ jobs:
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
run: mkdocs build --strict -f mkdocs.generated.yml
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.3.2


@@ -1,4 +1,4 @@
name: Ecosystem check comment
name: PR Check Comment
on:
workflow_run:
@@ -18,13 +18,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: dawidd6/action-download-artifact@v2
name: Download pull request number
name: Download PR Number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
- name: Parse pull request number
- name: Extract PR Number
id: pr-number
run: |
if [[ -f pr-number ]]
@@ -33,7 +33,7 @@ jobs:
fi
- uses: dawidd6/action-download-artifact@v2
name: "Download ecosystem results"
name: "Download Ecosystem Result"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
with:
@@ -44,15 +44,13 @@ jobs:
workflow_conclusion: completed
if_no_artifact_found: ignore
- name: Generate comment content
- name: Generate Comment
id: generate-comment
if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
run: |
# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment ecosystem -->' >> comment.txt
echo '## PR Check Results' >> comment.txt
echo '## `ruff-ecosystem` results' >> comment.txt
echo "### Ecosystem" >> comment.txt
cat pr/ecosystem/ecosystem-result >> comment.txt
echo "" >> comment.txt
@@ -60,14 +58,14 @@ jobs:
cat comment.txt >> $GITHUB_OUTPUT
echo 'EOF' >> $GITHUB_OUTPUT
- name: Find existing comment
- name: Find Comment
uses: peter-evans/find-comment@v2
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment ecosystem -->"
body-includes: PR Check Results
- name: Create or update comment
if: steps.find-comment.outcome == 'success'

.github/workflows/pre-commit.yaml (new file)

@@ -0,0 +1,36 @@
# Until Dependabot support is released https://github.com/dependabot/dependabot-core/issues/1524
name: Pre-commit update
on:
# every week on monday
schedule:
- cron: "0 0 * * 1"
workflow_dispatch:
permissions:
pull-requests: write
jobs:
upgrade:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Configure Git
run: |
git config user.name "$GITHUB_ACTOR"
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
- name: Run autoupdate
run: |
pre-commit autoupdate
- name: Commit and push
run: |
git add ".pre-commit-config.yaml"
git commit -m "Upgrade pre-commit dependencies"
git push origin upgrade/pre-commit
- name: Open pull request
run: |
gh pr create --fill
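The scheduled job above just runs `pre-commit autoupdate` and opens a pull request with the result; the same update can be previewed locally:

```shell
pip install pre-commit
pre-commit autoupdate             # bumps the `rev:` pins in .pre-commit-config.yaml
git diff .pre-commit-config.yaml  # review the bumped revisions before committing
```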


@@ -48,6 +48,7 @@ jobs:
args: --out dist
- name: "Test sdist"
run: |
rustup default $(cat rust-toolchain)
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
ruff --help
python -m ruff --help
@@ -86,7 +87,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz
ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -125,7 +126,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz
ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -177,7 +178,7 @@ jobs:
- name: "Archive binary"
shell: bash
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -224,7 +225,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -291,7 +292,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -343,7 +344,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -399,7 +400,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -516,62 +517,6 @@ jobs:
files: binaries/*
tag_name: v${{ inputs.tag }}
docker-publish:
# This action doesn't need to wait on any other task; it's easy to re-tag if something failed, and we're validating
# the tag here also
name: Push Docker image ghcr.io/astral-sh/ruff
runs-on: ubuntu-latest
environment:
name: release
permissions:
# For the docker push
packages: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/astral-sh/ruff
- name: Check tag consistency
# Unlike validate-tag we don't check if the commit is on the main branch, but it seems good enough since we can
# change docker tags
if: ${{ inputs.tag }}
run: |
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${{ inputs.tag }}" != "${version}" ]; then
echo "The input tag does not match the version from pyproject.toml:" >&2
echo "${{ inputs.tag }}" >&2
echo "${version}" >&2
exit 1
else
echo "Releasing ${version}"
fi
- name: "Build and push Docker image"
uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm64
# Reuse the builder
cache-from: type=gha
cache-to: type=gha,mode=max
push: ${{ inputs.tag != '' }}
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ inputs.tag || 'dry-run' }}
labels: ${{ steps.meta.outputs.labels }}
# After the release has been published, we update downstream repositories
# This is separate because if this fails the release is still fine, we just need to do some manual workflow triggers
update-dependents:
@@ -580,7 +525,7 @@ jobs:
needs: publish-release
steps:
- name: "Update pre-commit mirror"
uses: actions/github-script@v7
uses: actions/github-script@v6
with:
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
script: |


@@ -13,12 +13,12 @@ exclude: |
repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.15
rev: v0.12.1
hooks:
- id: validate-pyproject
- repo: https://github.com/executablebooks/mdformat
rev: 0.7.17
rev: 0.7.16
hooks:
- id: mdformat
additional_dependencies:
@@ -26,22 +26,16 @@ repos:
- mdformat-admon
exclude: |
(?x)^(
docs/formatter/black\.md
| docs/\w+\.md
docs/formatter/black.md
)$
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.37.0
rev: v0.33.0
hooks:
- id: markdownlint-fix
exclude: |
(?x)^(
docs/formatter/black\.md
| docs/\w+\.md
)$
- repo: https://github.com/crate-ci/typos
rev: v1.16.22
rev: v1.14.12
hooks:
- id: typos
@@ -53,13 +47,10 @@ repos:
language: system
types: [rust]
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.4
hooks:
- id: ruff-format
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
name: ruff
entry: cargo run --bin ruff -- check --no-cache --force-exclude --fix --exit-non-zero-on-fix
language: system
types_or: [python, pyi]
require_serial: true
exclude: |
@@ -68,9 +59,15 @@ repos:
crates/ruff_python_formatter/resources/.*
)$
# Black
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black
# Prettier
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.0.3
rev: v3.0.0
hooks:
- id: prettier
types: [yaml]


@@ -1,282 +1,5 @@
# Changelog
## 0.1.7
### Preview features
- Implement multiline dictionary and list hugging for preview style ([#8293](https://github.com/astral-sh/ruff/pull/8293))
- Implement the `fix_power_op_line_length` preview style ([#8947](https://github.com/astral-sh/ruff/pull/8947))
- Use Python version to determine typing rewrite safety ([#8919](https://github.com/astral-sh/ruff/pull/8919))
- \[`flake8-annotations`\] Enable auto-return-type involving `Optional` and `Union` annotations ([#8885](https://github.com/astral-sh/ruff/pull/8885))
- \[`flake8-bandit`\] Implement `django-raw-sql` (`S611`) ([#8651](https://github.com/astral-sh/ruff/pull/8651))
- \[`flake8-bandit`\] Implement `tarfile-unsafe-members` (`S202`) ([#8829](https://github.com/astral-sh/ruff/pull/8829))
- \[`flake8-pyi`\] Implement fix for `unnecessary-literal-union` (`PYI030`) ([#7934](https://github.com/astral-sh/ruff/pull/7934))
- \[`flake8-simplify`\] Extend `dict-get-with-none-default` (`SIM910`) to non-literals ([#8762](https://github.com/astral-sh/ruff/pull/8762))
- \[`pylint`\] - add `unnecessary-list-index-lookup` (`PLR1736`) + autofix ([#7999](https://github.com/astral-sh/ruff/pull/7999))
- \[`pylint`\] - implement R0202 and R0203 with autofixes ([#8335](https://github.com/astral-sh/ruff/pull/8335))
- \[`pylint`\] Implement `repeated-keyword` (`PLe1132`) ([#8706](https://github.com/astral-sh/ruff/pull/8706))
- \[`pylint`\] Implement `too-many-positional` (`PLR0917`) ([#8995](https://github.com/astral-sh/ruff/pull/8995))
- \[`pylint`\] Implement `unnecessary-dict-index-lookup` (`PLR1733`) ([#8036](https://github.com/astral-sh/ruff/pull/8036))
- \[`refurb`\] Implement `redundant-log-base` (`FURB163`) ([#8842](https://github.com/astral-sh/ruff/pull/8842))
### Rule changes
- \[`flake8-boolean-trap`\] Allow booleans in `@override` methods ([#8882](https://github.com/astral-sh/ruff/pull/8882))
- \[`flake8-bugbear`\] Avoid `B015`,`B018` for last expression in a cell ([#8815](https://github.com/astral-sh/ruff/pull/8815))
- \[`flake8-pie`\] Allow ellipses for enum values in stub files ([#8825](https://github.com/astral-sh/ruff/pull/8825))
- \[`flake8-pyi`\] Check PEP 695 type aliases for `snake-case-type-alias` and `t-suffixed-type-alias` ([#8966](https://github.com/astral-sh/ruff/pull/8966))
- \[`flake8-pyi`\] Check for kwarg and vararg `NoReturn` type annotations ([#8948](https://github.com/astral-sh/ruff/pull/8948))
- \[`flake8-simplify`\] Omit select context managers from `SIM117` ([#8801](https://github.com/astral-sh/ruff/pull/8801))
- \[`pep8-naming`\] Allow Django model loads in `non-lowercase-variable-in-function` (`N806`) ([#8917](https://github.com/astral-sh/ruff/pull/8917))
- \[`pycodestyle`\] Avoid `E703` for last expression in a cell ([#8821](https://github.com/astral-sh/ruff/pull/8821))
- \[`pycodestyle`\] Update `E402` to work at cell level for notebooks ([#8872](https://github.com/astral-sh/ruff/pull/8872))
- \[`pydocstyle`\] Avoid `D100` for Jupyter Notebooks ([#8816](https://github.com/astral-sh/ruff/pull/8816))
- \[`pylint`\] Implement fix for `unspecified-encoding` (`PLW1514`) ([#8928](https://github.com/astral-sh/ruff/pull/8928))
### Formatter
- Avoid unstable formatting in ellipsis-only body with trailing comment ([#8984](https://github.com/astral-sh/ruff/pull/8984))
- Inline trailing comments for type alias similar to assignments ([#8941](https://github.com/astral-sh/ruff/pull/8941))
- Insert trailing comma when function breaks with single argument ([#8921](https://github.com/astral-sh/ruff/pull/8921))
### CLI
- Update `ruff check` and `ruff format` to default to the current directory ([#8791](https://github.com/astral-sh/ruff/pull/8791))
- Stop at the first resolved parent configuration ([#8864](https://github.com/astral-sh/ruff/pull/8864))
### Configuration
- \[`pylint`\] Default `max-positional-args` to `max-args` ([#8998](https://github.com/astral-sh/ruff/pull/8998))
- \[`pylint`\] Add `allow-dunder-method-names` setting for `bad-dunder-method-name` (`PLW3201`) ([#8812](https://github.com/astral-sh/ruff/pull/8812))
- \[`isort`\] Add support for `from-first` setting ([#8663](https://github.com/astral-sh/ruff/pull/8663))
- \[`isort`\] Add support for `length-sort` settings ([#8841](https://github.com/astral-sh/ruff/pull/8841))
### Bug fixes
- Add support for `@functools.singledispatch` ([#8934](https://github.com/astral-sh/ruff/pull/8934))
- Avoid off-by-one error in stripping noqa following multi-byte char ([#8979](https://github.com/astral-sh/ruff/pull/8979))
- Avoid off-by-one error in with-item named expressions ([#8915](https://github.com/astral-sh/ruff/pull/8915))
- Avoid syntax error via invalid ur string prefix ([#8971](https://github.com/astral-sh/ruff/pull/8971))
- Avoid underflow in `get_model` matching ([#8965](https://github.com/astral-sh/ruff/pull/8965))
- Avoid unnecessary index diagnostics when value is modified ([#8970](https://github.com/astral-sh/ruff/pull/8970))
- Convert over-indentation rule to use number of characters ([#8983](https://github.com/astral-sh/ruff/pull/8983))
- Detect implicit returns in auto-return-types ([#8952](https://github.com/astral-sh/ruff/pull/8952))
- Fix start >= end error in over-indentation ([#8982](https://github.com/astral-sh/ruff/pull/8982))
- Ignore `@overload` and `@override` methods for too-many-arguments checks ([#8954](https://github.com/astral-sh/ruff/pull/8954))
- Lexer start of line is false only for `Mode::Expression` ([#8880](https://github.com/astral-sh/ruff/pull/8880))
- Mark `pydantic_settings.BaseSettings` as having default copy semantics ([#8793](https://github.com/astral-sh/ruff/pull/8793))
- Respect dictionary unpacking in `NamedTuple` assignments ([#8810](https://github.com/astral-sh/ruff/pull/8810))
- Respect local subclasses in `flake8-type-checking` ([#8768](https://github.com/astral-sh/ruff/pull/8768))
- Support type alias statements in simple statement positions ([#8916](https://github.com/astral-sh/ruff/pull/8916))
- \[`flake8-annotations`\] Avoid filtering out un-representable types in return annotation ([#8881](https://github.com/astral-sh/ruff/pull/8881))
- \[`flake8-pie`\] Retain extra ellipses in protocols and abstract methods ([#8769](https://github.com/astral-sh/ruff/pull/8769))
- \[`flake8-pyi`\] Respect local enum subclasses in `simple-defaults` (`PYI052`) ([#8767](https://github.com/astral-sh/ruff/pull/8767))
- \[`flake8-trio`\] Use correct range for `TRIO115` fix ([#8933](https://github.com/astral-sh/ruff/pull/8933))
- \[`flake8-trio`\] Use full arguments range for zero-sleep-call ([#8936](https://github.com/astral-sh/ruff/pull/8936))
- \[`isort`\] fix: mark `__main__` as first-party import ([#8805](https://github.com/astral-sh/ruff/pull/8805))
- \[`pep8-naming`\] Avoid `N806` errors for type alias statements ([#8785](https://github.com/astral-sh/ruff/pull/8785))
- \[`perflint`\] Avoid `PERF101` if there's an append in loop body ([#8809](https://github.com/astral-sh/ruff/pull/8809))
- \[`pycodestyle`\] Allow space-before-colon after end-of-slice ([#8838](https://github.com/astral-sh/ruff/pull/8838))
- \[`pydocstyle`\] Avoid non-character breaks in `over-indentation` (`D208`) ([#8866](https://github.com/astral-sh/ruff/pull/8866))
- \[`pydocstyle`\] Ignore underlines when determining docstring logical lines ([#8929](https://github.com/astral-sh/ruff/pull/8929))
- \[`pylint`\] Extend `self-assigning-variable` to multi-target assignments ([#8839](https://github.com/astral-sh/ruff/pull/8839))
- \[`tryceratops`\] Avoid repeated triggers in nested `tryceratops` diagnostics ([#8772](https://github.com/astral-sh/ruff/pull/8772))
### Documentation
- Add advice for fixing RUF008 when mutability is not desired ([#8853](https://github.com/astral-sh/ruff/pull/8853))
- Added the command to run ruff using pkgx to the installation.md ([#8955](https://github.com/astral-sh/ruff/pull/8955))
- Document fix safety for flake8-comprehensions and some pyupgrade rules ([#8918](https://github.com/astral-sh/ruff/pull/8918))
- Fix doc formatting for zero-sleep-call ([#8937](https://github.com/astral-sh/ruff/pull/8937))
- Remove duplicate imports from os-stat documentation ([#8930](https://github.com/astral-sh/ruff/pull/8930))
- Replace generated reference to MkDocs ([#8806](https://github.com/astral-sh/ruff/pull/8806))
- Update Arch Linux package URL in installation.md ([#8802](https://github.com/astral-sh/ruff/pull/8802))
- \[`flake8-pyi`\] Fix error in `t-suffixed-type-alias` (`PYI043`) example ([#8963](https://github.com/astral-sh/ruff/pull/8963))
- \[`flake8-pyi`\] Improve motivation for `custom-type-var-return-type` (`PYI019`) ([#8766](https://github.com/astral-sh/ruff/pull/8766))
## 0.1.6
### Preview features
- \[`flake8-boolean-trap`\] Extend `boolean-type-hint-positional-argument` (`FBT001`) to include booleans in unions ([#7501](https://github.com/astral-sh/ruff/pull/7501))
- \[`flake8-pie`\] Extend `reimplemented-list-builtin` (`PIE807`) to `dict` reimplementations ([#8608](https://github.com/astral-sh/ruff/pull/8608))
- \[`flake8-pie`\] Extend `unnecessary-pass` (`PIE790`) to include ellipses (`...`) ([#8641](https://github.com/astral-sh/ruff/pull/8641))
- \[`flake8-pie`\] Implement fix for `unnecessary-spread` (`PIE800`) ([#8668](https://github.com/astral-sh/ruff/pull/8668))
- \[`flake8-quotes`\] Implement `unnecessary-escaped-quote` (`Q004`) ([#8630](https://github.com/astral-sh/ruff/pull/8630))
- \[`pycodestyle`\] Implement fix for `multiple-spaces-after-keyword` (`E271`) and `multiple-spaces-before-keyword` (`E272`) ([#8622](https://github.com/astral-sh/ruff/pull/8622))
- \[`pycodestyle`\] Implement fix for `multiple-spaces-after-operator` (`E222`) and `multiple-spaces-before-operator` (`E221`) ([#8623](https://github.com/astral-sh/ruff/pull/8623))
- \[`pyflakes`\] Extend `is-literal` (`F632`) to include comparisons against mutable initializers ([#8607](https://github.com/astral-sh/ruff/pull/8607))
- \[`pylint`\] Implement `redefined-argument-from-local` (`PLR1704`) ([#8159](https://github.com/astral-sh/ruff/pull/8159))
- \[`pylint`\] Implement fix for `unnecessary-lambda` (`PLW0108`) ([#8621](https://github.com/astral-sh/ruff/pull/8621))
- \[`refurb`\] Implement `if-expr-min-max` (`FURB136`) ([#8664](https://github.com/astral-sh/ruff/pull/8664))
- \[`refurb`\] Implement `math-constant` (`FURB152`) ([#8727](https://github.com/astral-sh/ruff/pull/8727))
### Rule changes
- \[`flake8-annotations`\] Add autotyping-like return type inference for annotation rules ([#8643](https://github.com/astral-sh/ruff/pull/8643))
- \[`flake8-future-annotations`\] Implement fix for `future-required-type-annotation` (`FA102`) ([#8711](https://github.com/astral-sh/ruff/pull/8711))
- \[`flake8-implicit-namespace-package`\] Avoid missing namespace violations in scripts with shebangs ([#8710](https://github.com/astral-sh/ruff/pull/8710))
- \[`pydocstyle`\] Update `over-indentation` (`D208`) to preserve indentation offsets when fixing overindented lines ([#8699](https://github.com/astral-sh/ruff/pull/8699))
- \[`pyupgrade`\] Refine `timeout-error-alias` (`UP041`) to remove false positives ([#8587](https://github.com/astral-sh/ruff/pull/8587))
### Formatter
- Fix instability in `await` formatting with fluent style ([#8676](https://github.com/astral-sh/ruff/pull/8676))
- Compare formatted and unformatted ASTs during formatter tests ([#8624](https://github.com/astral-sh/ruff/pull/8624))
- Preserve trailing semicolon for Notebooks ([#8590](https://github.com/astral-sh/ruff/pull/8590))
### CLI
- Improve debug printing for resolving origin of config settings ([#8729](https://github.com/astral-sh/ruff/pull/8729))
- Write unchanged, excluded files to stdout when read via stdin ([#8596](https://github.com/astral-sh/ruff/pull/8596))
### Configuration
- \[`isort`\] Support disabling sections with `no-sections = true` ([#8657](https://github.com/astral-sh/ruff/pull/8657))
- \[`pep8-naming`\] Support local and dynamic class- and static-method decorators ([#8592](https://github.com/astral-sh/ruff/pull/8592))
- \[`pydocstyle`\] Allow overriding pydocstyle convention rules ([#8586](https://github.com/astral-sh/ruff/pull/8586))
### Bug fixes
- Avoid syntax error via importing `trio.lowlevel` ([#8730](https://github.com/astral-sh/ruff/pull/8730))
- Omit unrolled augmented assignments in `PIE794` ([#8634](https://github.com/astral-sh/ruff/pull/8634))
- Slice source code instead of generating it for `EM` fixes ([#7746](https://github.com/astral-sh/ruff/pull/7746))
- Allow whitespace around colon in slices for `whitespace-before-punctuation` (`E203`) ([#8654](https://github.com/astral-sh/ruff/pull/8654))
- Use function range for `no-self-use` ([#8637](https://github.com/astral-sh/ruff/pull/8637))
- F-strings doesn't contain bytes literal for `PLW0129` ([#8675](https://github.com/astral-sh/ruff/pull/8675))
- Improve detection of `TYPE_CHECKING` blocks imported from `typing_extensions` or `_typeshed` ([#8429](https://github.com/astral-sh/ruff/pull/8429))
- Treat display as a builtin in IPython ([#8707](https://github.com/astral-sh/ruff/pull/8707))
- Avoid `FURB113` autofix if comments are present ([#8494](https://github.com/astral-sh/ruff/pull/8494))
- Consider the new f-string tokens for `flake8-commas` ([#8582](https://github.com/astral-sh/ruff/pull/8582))
- Remove erroneous bad-dunder-name reference ([#8742](https://github.com/astral-sh/ruff/pull/8742))
- Avoid recommending Self usages in metaclasses ([#8639](https://github.com/astral-sh/ruff/pull/8639))
- Detect runtime-evaluated base classes defined in the current file ([#8572](https://github.com/astral-sh/ruff/pull/8572))
- Avoid inserting trailing commas within f-strings ([#8574](https://github.com/astral-sh/ruff/pull/8574))
- Remove incorrect deprecation label for stdout and stderr ([#8743](https://github.com/astral-sh/ruff/pull/8743))
- Fix unnecessary parentheses in UP007 fix ([#8610](https://github.com/astral-sh/ruff/pull/8610))
- Remove repeated and erroneous scoped settings headers in docs ([#8670](https://github.com/astral-sh/ruff/pull/8670))
- Trim trailing empty strings when converting to f-strings ([#8712](https://github.com/astral-sh/ruff/pull/8712))
- Fix ordering for `force-sort-within-sections` ([#8665](https://github.com/astral-sh/ruff/pull/8665))
- Run unicode prefix rule over tokens ([#8709](https://github.com/astral-sh/ruff/pull/8709))
- Update UP032 to unescape curly braces in literal parts of converted strings ([#8697](https://github.com/astral-sh/ruff/pull/8697))
- List all ipython builtins ([#8719](https://github.com/astral-sh/ruff/pull/8719))
### Documentation
- Document conventions in the FAQ ([#8638](https://github.com/astral-sh/ruff/pull/8638))
- Redirect from rule codes to rule pages in docs ([#8636](https://github.com/astral-sh/ruff/pull/8636))
- Fix permalink to convention setting ([#8575](https://github.com/astral-sh/ruff/pull/8575))
## 0.1.5
### Preview features
- \[`flake8-bandit`\] Implement `mako-templates` (`S702`) ([#8533](https://github.com/astral-sh/ruff/pull/8533))
- \[`flake8-trio`\] Implement `TRIO105` ([#8490](https://github.com/astral-sh/ruff/pull/8490))
- \[`flake8-trio`\] Implement `TRIO109` ([#8534](https://github.com/astral-sh/ruff/pull/8534))
- \[`flake8-trio`\] Implement `TRIO110` ([#8537](https://github.com/astral-sh/ruff/pull/8537))
- \[`flake8-trio`\] Implement `TRIO115` ([#8486](https://github.com/astral-sh/ruff/pull/8486))
- \[`refurb`\] Implement `type-none-comparison` (`FURB169`) ([#8487](https://github.com/astral-sh/ruff/pull/8487))
- Flag all comparisons against builtin types in `E721` ([#8491](https://github.com/astral-sh/ruff/pull/8491))
- Make `SIM118` fix as safe when the expression is a known dictionary ([#8525](https://github.com/astral-sh/ruff/pull/8525))
### Formatter
- Fix multiline lambda expression statement formatting ([#8466](https://github.com/astral-sh/ruff/pull/8466))
### CLI
- Add hidden `--extension` to override inference of source type from file extension ([#8373](https://github.com/astral-sh/ruff/pull/8373))
### Configuration
- Account for selector specificity when merging `extend_unsafe_fixes` and `override extend_safe_fixes` ([#8444](https://github.com/astral-sh/ruff/pull/8444))
- Add support for disabling cache with `RUFF_NO_CACHE` environment variable ([#8538](https://github.com/astral-sh/ruff/pull/8538))
### Bug fixes
- \[`E721`\] Flag comparisons to `memoryview` ([#8485](https://github.com/astral-sh/ruff/pull/8485))
- Allow collapsed-ellipsis bodies in other statements ([#8499](https://github.com/astral-sh/ruff/pull/8499))
- Avoid `D301` autofix for `u` prefixed strings ([#8495](https://github.com/astral-sh/ruff/pull/8495))
- Only flag `flake8-trio` rules when `trio` import is present ([#8550](https://github.com/astral-sh/ruff/pull/8550))
- Reject more syntactically invalid Python programs ([#8524](https://github.com/astral-sh/ruff/pull/8524))
- Avoid raising `TRIO115` violations for `trio.sleep(...)` calls with non-number values ([#8532](https://github.com/astral-sh/ruff/pull/8532))
- Fix `F841` false negative on assignment to multiple variables ([#8489](https://github.com/astral-sh/ruff/pull/8489))
### Documentation
- Fix link to isort `known-first-party` ([#8562](https://github.com/astral-sh/ruff/pull/8562))
- Add notes on fix safety to a few rules ([#8500](https://github.com/astral-sh/ruff/pull/8500))
- Add missing toml config tabs ([#8512](https://github.com/astral-sh/ruff/pull/8512))
- Add instructions for configuration of Emacs ([#8488](https://github.com/astral-sh/ruff/pull/8488))
- Improve detail link contrast in dark mode ([#8548](https://github.com/astral-sh/ruff/pull/8548))
- Fix typo in example ([#8506](https://github.com/astral-sh/ruff/pull/8506))
- Added tabs for configuration files in the documentation ([#8480](https://github.com/astral-sh/ruff/pull/8480))
- Recommend `project.requires-python` over `target-version` ([#8513](https://github.com/astral-sh/ruff/pull/8513))
- Add singleton escape hatch to `B008` documentation ([#8501](https://github.com/astral-sh/ruff/pull/8501))
- Fix tab configuration docs ([#8502](https://github.com/astral-sh/ruff/pull/8502))
## 0.1.4
### Preview features
- \[`flake8-trio`\] Implement `timeout-without-await` (`TRIO001`) ([#8439](https://github.com/astral-sh/ruff/pull/8439))
- \[`numpy`\] Implement NumPy 2.0 migration rule (`NPY200`) ([#7702](https://github.com/astral-sh/ruff/pull/7702))
- \[`pylint`\] Implement `bad-open-mode` (`W1501`) ([#8294](https://github.com/astral-sh/ruff/pull/8294))
- \[`pylint`\] Implement `import-outside-toplevel` (`C0415`) rule ([#5180](https://github.com/astral-sh/ruff/pull/5180))
- \[`pylint`\] Implement `useless-with-lock` (`W2101`) ([#8321](https://github.com/astral-sh/ruff/pull/8321))
- \[`pyupgrade`\] Implement `timeout-error-alias` (`UP041`) ([#8476](https://github.com/astral-sh/ruff/pull/8476))
- \[`refurb`\] Implement `isinstance-type-none` (`FURB168`) ([#8308](https://github.com/astral-sh/ruff/pull/8308))
- Detect confusable Unicode-to-Unicode units in `RUF001`, `RUF002`, and `RUF003` ([#4430](https://github.com/astral-sh/ruff/pull/4430))
- Add newline after module docstrings in preview style ([#8283](https://github.com/astral-sh/ruff/pull/8283))
### Formatter
- Add a note on line-too-long to the formatter docs ([#8314](https://github.com/astral-sh/ruff/pull/8314))
- Preserve trailing statement semicolons when using `fmt: skip` ([#8273](https://github.com/astral-sh/ruff/pull/8273))
- Preserve trailing semicolons when using `fmt: off` ([#8275](https://github.com/astral-sh/ruff/pull/8275))
- Avoid duplicating linter-formatter compatibility warnings ([#8292](https://github.com/astral-sh/ruff/pull/8292))
- Avoid inserting a newline after function docstrings ([#8375](https://github.com/astral-sh/ruff/pull/8375))
- Insert newline between docstring and following own line comment ([#8216](https://github.com/astral-sh/ruff/pull/8216))
- Split tuples in return positions by comma first ([#8280](https://github.com/astral-sh/ruff/pull/8280))
- Avoid treating byte strings as docstrings ([#8350](https://github.com/astral-sh/ruff/pull/8350))
- Add `--line-length` option to `format` command ([#8363](https://github.com/astral-sh/ruff/pull/8363))
- Avoid parenthesizing unsplittable because of comments ([#8431](https://github.com/astral-sh/ruff/pull/8431))
### CLI
- Add `--output-format` to `ruff rule` and `ruff linter` ([#8203](https://github.com/astral-sh/ruff/pull/8203))
### Bug fixes
- Respect `--force-exclude` in `lint.exclude` and `format.exclude` ([#8393](https://github.com/astral-sh/ruff/pull/8393))
- Respect `--extend-per-file-ignores` on the CLI ([#8329](https://github.com/astral-sh/ruff/pull/8329))
- Extend `bad-dunder-method-name` to permit `__index__` ([#8300](https://github.com/astral-sh/ruff/pull/8300))
- Fix panic with 8 in octal escape ([#8356](https://github.com/astral-sh/ruff/pull/8356))
- Avoid raising `D300` when both triple quote styles are present ([#8462](https://github.com/astral-sh/ruff/pull/8462))
- Consider unterminated f-strings in `FStringRanges` ([#8154](https://github.com/astral-sh/ruff/pull/8154))
- Avoid including literal `shell=True` for truthy, non-`True` diagnostics ([#8359](https://github.com/astral-sh/ruff/pull/8359))
- Avoid triggering single-element test for starred expressions ([#8433](https://github.com/astral-sh/ruff/pull/8433))
- Detect and ignore Jupyter automagics ([#8398](https://github.com/astral-sh/ruff/pull/8398))
- Fix invalid E231 error with f-strings ([#8369](https://github.com/astral-sh/ruff/pull/8369))
- Avoid triggering `NamedTuple` rewrite with starred annotation ([#8434](https://github.com/astral-sh/ruff/pull/8434))
- Avoid un-setting bracket flag in logical lines ([#8380](https://github.com/astral-sh/ruff/pull/8380))
- Place 'r' prefix before 'f' for raw format strings ([#8464](https://github.com/astral-sh/ruff/pull/8464))
- Remove trailing periods from NumPy 2.0 code actions ([#8475](https://github.com/astral-sh/ruff/pull/8475))
- Fix bug where `PLE1307` was raised when formatting `%c` with characters ([#8407](https://github.com/astral-sh/ruff/pull/8407))
- Remove unicode flag from comparable ([#8440](https://github.com/astral-sh/ruff/pull/8440))
- Improve B015 message ([#8295](https://github.com/astral-sh/ruff/pull/8295))
- Use `fixedOverflowWidgets` for playground popover ([#8458](https://github.com/astral-sh/ruff/pull/8458))
- Mark `byte_bounds` as a non-backwards-compatible NumPy 2.0 change ([#8474](https://github.com/astral-sh/ruff/pull/8474))
### Internals
- Add a dedicated cache directory per Ruff version ([#8333](https://github.com/astral-sh/ruff/pull/8333))
- Allow selective caching for `--fix` and `--diff` ([#8316](https://github.com/astral-sh/ruff/pull/8316))
- Improve performance of comment parsing ([#8193](https://github.com/astral-sh/ruff/pull/8193))
- Improve performance of string parsing ([#8227](https://github.com/astral-sh/ruff/pull/8227))
- Use a dedicated sort key for isort import sorting ([#7963](https://github.com/astral-sh/ruff/pull/7963))
## 0.1.3
This release includes a variety of improvements to the Ruff formatter, removing several known and


@@ -72,7 +72,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
<charlie.r.marsh@gmail.com>.
charlie.r.marsh@gmail.com.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the


@@ -295,7 +295,7 @@ To preview any changes to the documentation locally:
```shell
# For contributors.
mkdocs serve -f mkdocs.public.yml
mkdocs serve -f mkdocs.generated.yml
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
mkdocs serve -f mkdocs.insiders.yml
@@ -315,18 +315,9 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
### Creating a new release
We use an experimental in-house tool for managing releases.
1. Install `rooster`: `pip install git+https://github.com/zanieb/rooster@main`
1. Run `rooster release`; this command will:
- Generate a changelog entry in `CHANGELOG.md`
- Update versions in `pyproject.toml` and `Cargo.toml`
- Update references to versions in the `README.md` and documentation
1. The changelog should then be editorialized for consistency
- Often labels will be missing from pull requests; they will need to be manually organized into the proper section
- Changes should be edited to be user-facing descriptions, avoiding internal details
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
1. Create a pull request with the changelog and version updates
1. Update the version with `rg 0.0.269 --files-with-matches | xargs sed -i 's/0.0.269/0.0.270/g'`
1. Update `BREAKING_CHANGES.md`
1. Create a PR with the version and `BREAKING_CHANGES.md` updated
1. Merge the PR
1. Run the release workflow with the version number (without the leading `v`) as input. Make sure
main has your merged PR as the last commit
@@ -339,26 +330,23 @@ We use an experimental in-house tool for managing releases.
1. Attach artifacts to draft GitHub release
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
downstream jobs manually if needed.
1. Publish the GitHub release
1. Open the draft release in the GitHub release section
1. Copy the changelog for the release into the GitHub release
- See previous releases for formatting of section headers
1. Generate the contributor list with `rooster contributors` and add to the release notes
1. Create release notes in GitHub UI and promote from draft.
1. If needed, [update the schemastore](https://github.com/charliermarsh/ruff/blob/main/scripts/update_schemastore.py)
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
## Ecosystem CI
GitHub Actions will run your changes against a number of real-world projects from GitHub and
report on any linter or formatter differences. You can also run those checks locally via:
report on any diagnostic differences. You can also run those checks locally via:
```shell
pip install -e ./python/ruff-ecosystem
ruff-ecosystem check ruff "./target/debug/ruff"
ruff-ecosystem format ruff "./target/debug/ruff"
python scripts/check_ecosystem.py path/to/your/ruff path/to/older/ruff
```
See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
You can also run the Ecosystem CI check in a Docker container across a larger set of projects by
downloading the [`known-github-tomls.json`](https://github.com/akx/ruff-usage-aggregate/blob/master/data/known-github-tomls.jsonl)
as `github_search.jsonl` and following the instructions in [scripts/Dockerfile.ecosystem](https://github.com/astral-sh/ruff/blob/main/scripts/Dockerfile.ecosystem).
Note that this check will take a while to run.
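A hedged sketch of the container-based run described above (the image name is arbitrary, and the exact build arguments are defined by `scripts/Dockerfile.ecosystem` itself):

```shell
# Fetch the project list under the expected name, then build and run the
# ecosystem image from the repository root.
curl -L -o github_search.jsonl \
    https://raw.githubusercontent.com/akx/ruff-usage-aggregate/master/data/known-github-tomls.jsonl
docker build -t ruff-ecosystem-check -f scripts/Dockerfile.ecosystem .
docker run --rm ruff-ecosystem-check
```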
## Benchmarking and Profiling

Cargo.lock (generated)

@@ -64,9 +64,9 @@ checksum = "c7021ce4924a3f25f802b2cccd1af585e39ea1a363a1aa2e72afe54b67a3a7a7"
[[package]]
name = "annotate-snippets"
version = "0.9.2"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccaf7e9dfbb6ab22c82e473cd1a8a7bd313c19a5b7e40970f3d89ef5a5c9e81e"
checksum = "c3b9d411ecbaf79885c6df4d75fff75858d5995ff25385657a28af47e82f9c36"
dependencies = [
"unicode-width",
"yansi-term",
@@ -210,9 +210,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.4.1"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
[[package]]
name = "bstr"
@@ -278,7 +278,9 @@ checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38"
dependencies = [
"android-tzdata",
"iana-time-zone",
"js-sys",
"num-traits",
"wasm-bindgen",
"windows-targets 0.48.5",
]
@@ -381,7 +383,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -405,9 +407,9 @@ dependencies = [
[[package]]
name = "codspeed"
version = "2.3.3"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eb4ab4dcb6554eb4f590fb16f99d3b102ab76f5f56554c9a5340518b32c499b"
checksum = "d680ccd1eedd2dd7c7a3649a78c7d06e0f16b191b30d81cc58e7bc906488d344"
dependencies = [
"colored",
"libc",
@@ -416,9 +418,9 @@ dependencies = [
[[package]]
name = "codspeed-criterion-compat"
version = "2.3.3"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc07a3d3f7e0c8961d0ffdee149d39b231bafdcdc3d978dc5ad790c615f55f3f"
checksum = "58b48b6c8e890d7d4ad0ed85e9ab4949bf7023198c006000ef6338ba84cf5b71"
dependencies = [
"codspeed",
"colored",
@@ -444,9 +446,9 @@ dependencies = [
[[package]]
name = "configparser"
version = "3.0.3"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0e56e414a2a52ab2a104f85cd40933c2fbc278b83637facf646ecf451b49237"
checksum = "5458d9d1a587efaf5091602c59d299696a3877a439c8f6d461a2d3cce11df87a"
[[package]]
name = "console"
@@ -606,7 +608,7 @@ dependencies = [
"proc-macro2",
"quote",
"strsim",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -617,7 +619,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5"
dependencies = [
"darling_core",
"quote",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -808,7 +810,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.1.7"
version = "0.1.3"
dependencies = [
"anyhow",
"clap",
@@ -827,7 +829,7 @@ dependencies = [
"serde_json",
"strum",
"strum_macros",
"toml 0.7.8",
"toml",
]
[[package]]
@@ -848,21 +850,18 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "form_urlencoded"
version = "1.2.1"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
dependencies = [
"percent-encoding",
]
[[package]]
name = "fs-err"
version = "2.11.0"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41"
dependencies = [
"autocfg",
]
checksum = "0845fa252299212f0389d64ba26f34fa32cfe41588355f21ed507c59a0f64541"
[[package]]
name = "fsevent-sys"
@@ -903,15 +902,15 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "globset"
version = "0.4.14"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1"
checksum = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d"
dependencies = [
"aho-corasick",
"bstr",
"fnv",
"log",
"regex-automata 0.4.3",
"regex-syntax 0.8.2",
"regex",
]
[[package]]
@@ -928,9 +927,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
version = "0.14.2"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156"
checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
[[package]]
name = "heck"
@@ -987,9 +986,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "idna"
version = "0.5.0"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"
dependencies = [
"unicode-bidi",
"unicode-normalization",
@@ -1034,12 +1033,12 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.1.0"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
dependencies = [
"equivalent",
"hashbrown 0.14.2",
"hashbrown 0.14.0",
"serde",
]
@@ -1130,7 +1129,7 @@ dependencies = [
"pmutil 0.6.1",
"proc-macro2",
"quote",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -1170,9 +1169,9 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
[[package]]
name = "js-sys"
version = "0.3.66"
version = "0.3.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca"
checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a"
dependencies = [
"wasm-bindgen",
]
@@ -1441,7 +1440,7 @@ version = "6.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d"
dependencies = [
"bitflags 2.4.1",
"bitflags 2.4.0",
"crossbeam-channel",
"filetime",
"fsevent-sys",
@@ -1622,9 +1621,9 @@ dependencies = [
[[package]]
name = "percent-encoding"
version = "2.3.1"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
[[package]]
name = "petgraph"
@@ -1708,7 +1707,7 @@ checksum = "52a40bc70c2c58040d2d8b167ba9a5ff59fc9dab7ad44771cfde3dcfde7a09c6"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -1793,46 +1792,45 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.70"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b"
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [
"unicode-ident",
]
[[package]]
name = "pyproject-toml"
version = "0.8.1"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46d4a5e69187f23a29f8aa0ea57491d104ba541bc55f76552c2a74962aa20e04"
checksum = "0774c13ff0b8b7ebb4791c050c497aefcfe3f6a222c0829c7017161ed38391ff"
dependencies = [
"indexmap",
"pep440_rs",
"pep508_rs",
"serde",
"toml 0.8.2",
"toml",
]
[[package]]
name = "quick-junit"
version = "0.3.5"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b9599bffc2cd7511355996e0cfd979266b2cfa3f3ff5247d07a3a6e1ded6158"
checksum = "6bf780b59d590c25f8c59b44c124166a2a93587868b619fb8f5b47fb15e9ed6d"
dependencies = [
"chrono",
"indexmap",
"nextest-workspace-hack",
"quick-xml",
"strip-ansi-escapes",
"thiserror",
"uuid",
]
[[package]]
name = "quick-xml"
version = "0.31.0"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33"
checksum = "81b9228215d82c7b61490fec1de287136b5de6f5700f6e58ea9ad61a7964ca51"
dependencies = [
"memchr",
]
@@ -2062,14 +2060,14 @@ dependencies = [
[[package]]
name = "ruff_cli"
version = "0.1.7"
version = "0.1.3"
dependencies = [
"annotate-snippets 0.9.2",
"annotate-snippets 0.9.1",
"anyhow",
"argfile",
"assert_cmd",
"bincode",
"bitflags 2.4.1",
"bitflags 2.4.0",
"cachedir",
"chrono",
"clap",
@@ -2154,7 +2152,7 @@ dependencies = [
"strum",
"strum_macros",
"tempfile",
"toml 0.7.8",
"toml",
"tracing",
"tracing-indicatif",
"tracing-subscriber",
@@ -2198,12 +2196,12 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.1.7"
version = "0.1.3"
dependencies = [
"aho-corasick",
"annotate-snippets 0.9.2",
"annotate-snippets 0.9.1",
"anyhow",
"bitflags 2.4.1",
"bitflags 2.4.0",
"chrono",
"clap",
"colored",
@@ -2254,7 +2252,7 @@ dependencies = [
"tempfile",
"test-case",
"thiserror",
"toml 0.7.8",
"toml",
"typed-arena",
"unicode-width",
"unicode_names2",
@@ -2269,7 +2267,7 @@ dependencies = [
"proc-macro2",
"quote",
"ruff_python_trivia",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -2295,7 +2293,7 @@ dependencies = [
name = "ruff_python_ast"
version = "0.0.0"
dependencies = [
"bitflags 2.4.1",
"bitflags 2.4.0",
"insta",
"is-macro",
"itertools 0.11.0",
@@ -2327,14 +2325,13 @@ name = "ruff_python_formatter"
version = "0.0.0"
dependencies = [
"anyhow",
"bitflags 2.4.1",
"bitflags 2.4.0",
"clap",
"countme",
"insta",
"itertools 0.11.0",
"memchr",
"once_cell",
"regex",
"ruff_cache",
"ruff_formatter",
"ruff_macros",
@@ -2372,7 +2369,7 @@ dependencies = [
name = "ruff_python_literal"
version = "0.0.0"
dependencies = [
"bitflags 2.4.1",
"bitflags 2.4.0",
"hexf-parse",
"is-macro",
"itertools 0.11.0",
@@ -2386,7 +2383,7 @@ name = "ruff_python_parser"
version = "0.0.0"
dependencies = [
"anyhow",
"bitflags 2.4.1",
"bitflags 2.4.0",
"insta",
"is-macro",
"itertools 0.11.0",
@@ -2416,7 +2413,7 @@ dependencies = [
name = "ruff_python_semantic"
version = "0.0.0"
dependencies = [
"bitflags 2.4.1",
"bitflags 2.4.0",
"is-macro",
"ruff_index",
"ruff_python_ast",
@@ -2450,7 +2447,7 @@ dependencies = [
[[package]]
name = "ruff_shrinking"
version = "0.1.7"
version = "0.1.3"
dependencies = [
"anyhow",
"clap",
@@ -2541,7 +2538,7 @@ dependencies = [
"shellexpand",
"strum",
"tempfile",
"toml 0.7.8",
"toml",
]
[[package]]
@@ -2566,7 +2563,7 @@ version = "0.38.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3"
dependencies = [
"bitflags 2.4.1",
"bitflags 2.4.0",
"errno",
"libc",
"linux-raw-sys",
@@ -2618,9 +2615,9 @@ dependencies = [
[[package]]
name = "schemars"
version = "0.8.16"
version = "0.8.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45a28f4c49489add4ce10783f7911893516f15afe45d015608d41faca6bc4d29"
checksum = "1f7b0ce13155372a76ee2e1c5ffba1fe61ede73fbea5630d61eee6fac4929c0c"
dependencies = [
"dyn-clone",
"schemars_derive",
@@ -2630,9 +2627,9 @@ dependencies = [
[[package]]
name = "schemars_derive"
version = "0.8.16"
version = "0.8.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c767fd6fa65d9ccf9cf026122c1b555f2ef9a4f0cea69da4d7dbc3e258d30967"
checksum = "e85e2a16b12bdb763244c69ab79363d71db2b4b918a2def53f80b02e0574b13c"
dependencies = [
"proc-macro2",
"quote",
@@ -2685,9 +2682,9 @@ dependencies = [
[[package]]
name = "serde-wasm-bindgen"
version = "0.6.1"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17ba92964781421b6cef36bf0d7da26d201e96d84e1b10e7ae6ed416e516906d"
checksum = "30c9933e5689bd420dc6c87b7a1835701810cbc10cd86a26e4da45b73e6b1d78"
dependencies = [
"js-sys",
"serde",
@@ -2702,7 +2699,7 @@ checksum = "67c5609f394e5c2bd7fc51efda478004ea80ef42fee983d5c67a65e34f32c0e3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -2718,9 +2715,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.108"
version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65"
dependencies = [
"itoa",
"ryu",
@@ -2764,7 +2761,7 @@ dependencies = [
"darling",
"proc-macro2",
"quote",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -2805,9 +2802,9 @@ checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d"
[[package]]
name = "smallvec"
version = "1.11.2"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970"
checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a"
[[package]]
name = "spin"
@@ -2834,15 +2831,6 @@ dependencies = [
"precomputed-hash",
]
[[package]]
name = "strip-ansi-escapes"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55ff8ef943b384c414f54aefa961dd2bd853add74ec75e7ac74cf91dba62bcfa"
dependencies = [
"vte",
]
[[package]]
name = "strsim"
version = "0.10.0"
@@ -2868,7 +2856,7 @@ dependencies = [
"proc-macro2",
"quote",
"rustversion",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -2884,9 +2872,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.39"
version = "2.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b"
dependencies = [
"proc-macro2",
"quote",
@@ -2973,7 +2961,7 @@ dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -2985,7 +2973,7 @@ dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
"syn 2.0.39",
"syn 2.0.38",
"test-case-core",
]
@@ -3006,7 +2994,7 @@ checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -3098,19 +3086,7 @@ dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit 0.19.15",
]
[[package]]
name = "toml"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "185d8ab0dfbb35cf1399a6344d8484209c088f75f8f68230da55d48d95d43e3d"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit 0.20.2",
"toml_edit",
]
[[package]]
@@ -3135,19 +3111,6 @@ dependencies = [
"winnow",
]
[[package]]
name = "toml_edit"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338"
dependencies = [
"indexmap",
"serde",
"serde_spanned",
"toml_datetime",
"winnow",
]
[[package]]
name = "tracing"
version = "0.1.40"
@@ -3168,7 +3131,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -3195,20 +3158,20 @@ dependencies = [
[[package]]
name = "tracing-log"
version = "0.2.0"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
dependencies = [
"lazy_static",
"log",
"once_cell",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.18"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
dependencies = [
"matchers",
"nu-ansi-term",
@@ -3334,9 +3297,9 @@ checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]]
name = "ureq"
version = "2.9.1"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97"
checksum = "f5ccd538d4a604753ebc2f17cd9946e89b77bf87f6a8e2309667c6f2e87855e3"
dependencies = [
"base64",
"flate2",
@@ -3350,9 +3313,9 @@ dependencies = [
[[package]]
name = "url"
version = "2.5.0"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5"
dependencies = [
"form_urlencoded",
"idna",
@@ -3368,9 +3331,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "uuid"
version = "1.6.1"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560"
checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc"
dependencies = [
"getrandom",
"rand",
@@ -3380,13 +3343,13 @@ dependencies = [
[[package]]
name = "uuid-macro-internal"
version = "1.6.1"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f49e7f3f3db8040a100710a11932239fd30697115e2ba4107080d8252939845e"
checksum = "3d8c6bba9b149ee82950daefc9623b32bb1dacbfb1890e352f6b887bd582adaf"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
"syn 2.0.38",
]
[[package]]
@@ -3461,9 +3424,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-bindgen"
version = "0.2.89"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e"
checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342"
dependencies = [
"cfg-if",
"wasm-bindgen-macro",
@@ -3471,24 +3434,24 @@ dependencies = [
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.89"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826"
checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd"
dependencies = [
"bumpalo",
"log",
"once_cell",
"proc-macro2",
"quote",
"syn 2.0.39",
"syn 2.0.38",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.38"
version = "0.4.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9afec9963e3d0994cac82455b2b3502b81a7f40f9a0d32181f7528d9f4b43e02"
checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03"
dependencies = [
"cfg-if",
"js-sys",
@@ -3498,9 +3461,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.89"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2"
checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -3508,28 +3471,28 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.89"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283"
checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
"syn 2.0.38",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.89"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f"
checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"
[[package]]
name = "wasm-bindgen-test"
version = "0.3.38"
version = "0.3.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6433b7c56db97397842c46b67e11873eda263170afeb3a2dc74a7cb370fee0d"
checksum = "6e6e302a7ea94f83a6d09e78e7dc7d9ca7b186bc2829c24a22d0753efd680671"
dependencies = [
"console_error_panic_hook",
"js-sys",
@@ -3541,13 +3504,12 @@ dependencies = [
[[package]]
name = "wasm-bindgen-test-macro"
version = "0.3.38"
version = "0.3.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "493fcbab756bb764fa37e6bee8cec2dd709eb4273d06d0c282a5e74275ded735"
checksum = "ecb993dd8c836930ed130e020e77d9b2e65dd0fbab1b67c790b0f5d80b11a575"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
]
[[package]]

View File

@@ -9,17 +9,17 @@ homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
license = "MIT2"
license = "MIT"
[workspace.dependencies]
anyhow = { version = "1.0.69" }
bitflags = { version = "2.4.1" }
bitflags = { version = "2.3.1" }
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
clap = { version = "4.4.7", features = ["derive"] }
colored = { version = "2.0.0" }
filetime = { version = "0.2.20" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globset = { version = "0.4.10" }
ignore = { version = "0.4.20" }
insta = { version = "1.34.0", features = ["filters", "glob"] }
is-macro = { version = "0.3.0" }
@@ -29,64 +29,32 @@ log = { version = "0.4.17" }
memchr = { version = "2.6.4" }
once_cell = { version = "1.17.1" }
path-absolutize = { version = "3.1.1" }
proc-macro2 = { version = "1.0.70" }
proc-macro2 = { version = "1.0.69" }
quote = { version = "1.0.23" }
regex = { version = "1.10.2" }
rustc-hash = { version = "1.1.0" }
schemars = { version = "0.8.16" }
schemars = { version = "0.8.15" }
serde = { version = "1.0.190", features = ["derive"] }
serde_json = { version = "1.0.108" }
serde_json = { version = "1.0.107" }
shellexpand = { version = "3.0.0" }
similar = { version = "2.3.0", features = ["inline"] }
smallvec = { version = "1.11.2" }
smallvec = { version = "1.11.1" }
static_assertions = "1.1.0"
strum = { version = "0.25.0", features = ["strum_macros"] }
strum_macros = { version = "0.25.3" }
syn = { version = "2.0.39" }
syn = { version = "2.0.38" }
test-case = { version = "3.2.1" }
thiserror = { version = "1.0.50" }
toml = { version = "0.7.8" }
tracing = { version = "0.1.40" }
tracing-indicatif = { version = "0.3.4" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
unicode-ident = { version = "1.0.12" }
unicode_names2 = { version = "1.2.0" }
unicode-width = { version = "0.1.11" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
uuid = { version = "1.5.0", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
wsl = { version = "0.1.0" }
[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
# Allowed pedantic lints
char_lit_as_u8 = "allow"
collapsible_else_if = "allow"
collapsible_if = "allow"
implicit_hasher = "allow"
match_same_arms = "allow"
missing_errors_doc = "allow"
missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
too_many_lines = "allow"
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
print_stdout = "warn"
print_stderr = "warn"
dbg_macro = "warn"
empty_drop = "warn"
empty_structs_with_brackets = "warn"
exit = "warn"
get_unwrap = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
[profile.release]
lto = "fat"
codegen-units = 1

View File

@@ -1,38 +0,0 @@
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME
RUN apt update && apt install -y build-essential curl python3-venv
# Setup zig as cross compiling linker
RUN python3 -m venv $HOME/.venv
RUN .venv/bin/pip install cargo-zigbuild
ENV PATH="$HOME/.venv/bin:$PATH"
# Install rust
ARG TARGETPLATFORM
RUN case "$TARGETPLATFORM" in \
"linux/arm64") echo "aarch64-unknown-linux-musl" > rust_target.txt ;; \
"linux/amd64") echo "x86_64-unknown-linux-musl" > rust_target.txt ;; \
*) exit 1 ;; \
esac
# Update rustup whenever we bump the rust version
COPY rust-toolchain.toml rust-toolchain.toml
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
ENV PATH="$HOME/.cargo/bin:$PATH"
# Installs the correct toolchain version from rust-toolchain.toml and then the musl target
RUN rustup target add $(cat rust_target.txt)
# Build
COPY crates crates
COPY Cargo.toml Cargo.toml
COPY Cargo.lock Cargo.lock
RUN cargo zigbuild --bin ruff --target $(cat rust_target.txt) --release
RUN cp target/$(cat rust_target.txt)/release/ruff /ruff
# TODO: Optimize binary size, with a version that also works when cross compiling
# RUN strip --strip-all /ruff
FROM scratch
COPY --from=build /ruff /ruff
WORKDIR /io
ENTRYPOINT ["/ruff"]

25
LICENSE
View File

@@ -1269,31 +1269,6 @@ are:
SOFTWARE.
"""
- flake8-trio, licensed as follows:
"""
MIT License
Copyright (c) 2022 Zac Hatfield-Dodds
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
- Pyright, licensed as follows:
"""
MIT License

View File

@@ -54,7 +54,7 @@ Ruff is extremely actively developed and used in major open-source projects like
- [Pandas](https://github.com/pandas-dev/pandas)
- [SciPy](https://github.com/scipy/scipy)
...and [many more](#whos-using-ruff).
...and many more.
Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
@@ -148,14 +148,14 @@ ruff format @arguments.txt # Format using an input file, treating its
Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff-pre-commit`](https://github.com/astral-sh/ruff-pre-commit):
```yaml
# Run the Ruff linter.
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.7
rev: v0.1.3
hooks:
# Run the linter.
# Run the Ruff linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
# Run the Ruff formatter.
- id: ruff-format
```
@@ -314,7 +314,6 @@ quality tools, including:
- [flake8-super](https://pypi.org/project/flake8-super/)
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
- [flake8-todos](https://pypi.org/project/flake8-todos/)
- [flake8-trio](https://pypi.org/project/flake8-trio/)
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/astral-sh/ruff/issues/2102))
@@ -377,8 +376,8 @@ Ruff is used by a number of major open-source projects and companies, including:
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
- [Apache Airflow](https://github.com/apache/airflow)
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Babel](https://github.com/python-babel/babel)
- [Bokeh](https://github.com/bokeh/bokeh)
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- [DVC](https://github.com/iterative/dvc)
@@ -389,16 +388,15 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Gradio](https://github.com/gradio-app/gradio)
- [Great Expectations](https://github.com/great-expectations/great_expectations)
- [HTTPX](https://github.com/encode/httpx)
- [Hatch](https://github.com/pypa/hatch)
- [Home Assistant](https://github.com/home-assistant/core)
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
[Datasets](https://github.com/huggingface/datasets),
[Diffusers](https://github.com/huggingface/diffusers))
- [Hatch](https://github.com/pypa/hatch)
- [Home Assistant](https://github.com/home-assistant/core)
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [LangChain](https://github.com/hwchase17/langchain)
- [Litestar](https://litestar.dev/)
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
- Matrix ([Synapse](https://github.com/matrix-org/synapse))
- [MegaLinter](https://github.com/oxsecurity/megalinter)
@@ -417,27 +415,24 @@ Ruff is used by a number of major open-source projects and companies, including:
- [PDM](https://github.com/pdm-project/pdm)
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
- [Pandas](https://github.com/pandas-dev/pandas)
- [Pillow](https://github.com/python-pillow/Pillow)
- [Poetry](https://github.com/python-poetry/poetry)
- [Polars](https://github.com/pola-rs/polars)
- [PostHog](https://github.com/PostHog/posthog)
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
- [PyTorch](https://github.com/pytorch/pytorch)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
- [Reflex](https://github.com/reflex-dev/reflex)
- [River](https://github.com/online-ml/river)
- [Rippling](https://rippling.com)
- [Robyn](https://github.com/sansyrox/robyn)
- [Saleor](https://github.com/saleor/saleor)
- Scale AI ([Launch SDK](https://github.com/scaleapi/launch-python-client))
- [SciPy](https://github.com/scipy/scipy)
- Snowflake ([SnowCLI](https://github.com/Snowflake-Labs/snowcli))
- [Saleor](https://github.com/saleor/saleor)
- [SciPy](https://github.com/scipy/scipy)
- [Sphinx](https://github.com/sphinx-doc/sphinx)
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
- [Starlette](https://github.com/encode/starlette)
- [Litestar](https://litestar.dev/)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Vega-Altair](https://github.com/altair-viz/altair)
- WordPress ([Openverse](https://github.com/WordPress/openverse))

View File

@@ -1,6 +1,5 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = ["resources", "snapshots"]
[default.extend-words]
hel = "hel"

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.1.7"
version = "0.1.3"
description = """
Convert Flake8 configuration files to Ruff configuration files.
"""
@@ -19,7 +19,7 @@ ruff_workspace = { path = "../ruff_workspace" }
anyhow = { workspace = true }
clap = { workspace = true }
colored = { workspace = true }
configparser = { version = "3.0.3" }
configparser = { version = "3.0.2" }
itertools = { workspace = true }
log = { workspace = true }
once_cell = { workspace = true }
@@ -34,6 +34,3 @@ toml = { workspace = true }
[dev-dependencies]
pretty_assertions = "1.3.0"
[lints]
workspace = true

View File

@@ -34,10 +34,10 @@ harness = false
once_cell.workspace = true
serde.workspace = true
serde_json.workspace = true
url = "2.5.0"
ureq = "2.9.1"
url = "2.3.1"
ureq = "2.8.0"
criterion = { version = "0.5.1", default-features = false }
codspeed-criterion-compat = { version="2.3.3", default-features = false, optional = true}
codspeed-criterion-compat = { version="2.3.0", default-features = false, optional = true}
[dev-dependencies]
ruff_linter.path = "../ruff_linter"
@@ -46,9 +46,6 @@ ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_python_index = { path = "../ruff_python_index" }
ruff_python_parser = { path = "../ruff_python_parser" }
[lints]
workspace = true
[features]
codspeed = ["codspeed-criterion-compat"]

View File

@@ -4,7 +4,7 @@ use ruff_benchmark::criterion::{
criterion_group, criterion_main, BenchmarkId, Criterion, Throughput,
};
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_python_formatter::{format_module_ast, PreviewMode, PyFormatOptions};
use ruff_python_formatter::{format_module_ast, PyFormatOptions};
use ruff_python_index::CommentRangesBuilder;
use ruff_python_parser::lexer::lex;
use ruff_python_parser::{parse_tokens, Mode};
@@ -69,8 +69,7 @@ fn benchmark_formatter(criterion: &mut Criterion) {
.expect("Input to be a valid python program");
b.iter(|| {
let options = PyFormatOptions::from_extension(Path::new(case.name()))
.with_preview(PreviewMode::Enabled);
let options = PyFormatOptions::from_extension(Path::new(case.name()));
let formatted =
format_module_ast(&module, &comment_ranges, case.code(), options)
.expect("Formatting to succeed");

View File

@@ -3,9 +3,7 @@ use ruff_benchmark::criterion::{
};
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_linter::linter::lint_only;
use ruff_linter::rule_selector::PreviewOptions;
use ruff_linter::settings::rule_table::RuleTable;
use ruff_linter::settings::types::PreviewMode;
use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::SourceKind;
use ruff_linter::{registry::Rule, RuleSelector};
@@ -80,21 +78,12 @@ fn benchmark_default_rules(criterion: &mut Criterion) {
benchmark_linter(group, &LinterSettings::default());
}
/// Disables IO based rules because they are a source of flakiness
fn disable_io_rules(rules: &mut RuleTable) {
fn benchmark_all_rules(criterion: &mut Criterion) {
let mut rules: RuleTable = RuleSelector::All.all_rules().collect();
// Disable IO based rules because it is a source of flakiness
rules.disable(Rule::ShebangMissingExecutableFile);
rules.disable(Rule::ShebangNotExecutable);
}
fn benchmark_all_rules(criterion: &mut Criterion) {
let mut rules: RuleTable = RuleSelector::All
.rules(&PreviewOptions {
mode: PreviewMode::Disabled,
require_explicit: false,
})
.collect();
disable_io_rules(&mut rules);
let settings = LinterSettings {
rules,
@@ -105,22 +94,6 @@ fn benchmark_all_rules(criterion: &mut Criterion) {
benchmark_linter(group, &settings);
}
fn benchmark_preview_rules(criterion: &mut Criterion) {
let mut rules: RuleTable = RuleSelector::All.all_rules().collect();
disable_io_rules(&mut rules);
let settings = LinterSettings {
rules,
preview: PreviewMode::Enabled,
..LinterSettings::default()
};
let group = criterion.benchmark_group("linter/all-with-preview-rules");
benchmark_linter(group, &settings);
}
criterion_group!(default_rules, benchmark_default_rules);
criterion_group!(all_rules, benchmark_all_rules);
criterion_group!(preview_rules, benchmark_preview_rules);
criterion_main!(default_rules, all_rules, preview_rules);
criterion_main!(default_rules, all_rules);
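
The hunk above factors the flakiness workaround into a `disable_io_rules` helper and registers an extra `all-with-preview-rules` benchmark group. For readers unfamiliar with the wiring, here is a minimal sketch of the same `criterion_group!`/`criterion_main!` pattern, assuming the `criterion` crate is used directly (the benches above go through the `ruff_benchmark::criterion` re-exports) and with a made-up benchmark function:

```rust
use criterion::{criterion_group, criterion_main, Criterion};

// Hypothetical benchmark: each `criterion_group!` bundles benchmark functions,
// and `criterion_main!` lists every group that should run.
fn benchmark_example(c: &mut Criterion) {
    c.bench_function("example", |b| b.iter(|| (0..1_000u64).sum::<u64>()));
}

criterion_group!(example_group, benchmark_example);
criterion_main!(example_group);
```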

View File

@@ -20,6 +20,3 @@ seahash = "4.1.0"
[dev-dependencies]
ruff_macros = { path = "../ruff_macros" }
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_cli"
version = "0.1.7"
version = "0.1.3"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -28,7 +28,7 @@ ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_workspace = { path = "../ruff_workspace" }
ruff_text_size = { path = "../ruff_text_size" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
annotate-snippets = { version = "0.9.1", features = ["color"] }
anyhow = { workspace = true }
argfile = { version = "0.1.6" }
bincode = { version = "1.3.3" }
@@ -69,13 +69,10 @@ insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { version = "0.4.0" }
tempfile = "3.8.1"
test-case = { workspace = true }
ureq = { version = "2.9.1", features = [] }
ureq = { version = "2.8.0", features = [] }
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = "0.1.39"
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
tikv-jemallocator = "0.5.0"
[lints]
workspace = true

View File

@@ -1,2 +0,0 @@
[tool.ruff]
select = []

View File

@@ -1,2 +0,0 @@
[tool.ruff]
include = ["a.py", "subdirectory/c.py"]

View File

@@ -1,413 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "4f8ce941-1492-4d4e-8ab5-70d733fe891a",
"metadata": {},
"outputs": [],
"source": [
"%config ZMQInteractiveShell.ast_node_interactivity=\"last_expr_or_assign\""
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "721ec705-0c65-4bfb-9809-7ed8bc534186",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Assignment statement without a semicolon\n",
"x = 1"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "de50e495-17e5-41cc-94bd-565757555d7e",
"metadata": {},
"outputs": [],
"source": [
"# Assignment statement with a semicolon\n",
"x = 1;\n",
"x = 1;"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "39e31201-23da-44eb-8684-41bba3663991",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"2"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Augmented assignment without a semicolon\n",
"x += 1"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "6b73d3dd-c73a-4697-9e97-e109a6c1fbab",
"metadata": {},
"outputs": [],
"source": [
"# Augmented assignment without a semicolon\n",
"x += 1;\n",
"x += 1; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "2a3e5b86-aa5b-46ba-b9c6-0386d876f58c",
"metadata": {},
"outputs": [],
"source": [
"# Multiple assignment without a semicolon\n",
"x = y = 1"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "07f89e51-9357-4cfb-8fc5-76fb75e35949",
"metadata": {},
"outputs": [],
"source": [
"# Multiple assignment with a semicolon\n",
"x = y = 1;\n",
"x = y = 1;"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "c22b539d-473e-48f8-a236-625e58c47a00",
"metadata": {},
"outputs": [],
"source": [
"# Tuple unpacking without a semicolon\n",
"x, y = 1, 2"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "12c87940-a0d5-403b-a81c-7507eb06dc7e",
"metadata": {},
"outputs": [],
"source": [
"# Tuple unpacking with a semicolon (irrelevant)\n",
"x, y = 1, 2;\n",
"x, y = 1, 2; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "5a768c76-6bc4-470c-b37e-8cc14bc6caf4",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Annotated assignment statement without a semicolon\n",
"x: int = 1"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "21bfda82-1a9a-4ba1-9078-74ac480804b5",
"metadata": {},
"outputs": [],
"source": [
"# Annotated assignment statement without a semicolon\n",
"x: int = 1;\n",
"x: int = 1; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "09929999-ff29-4d10-ad2b-e665af15812d",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Assignment expression without a semicolon\n",
"(x := 1)"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "32a83217-1bad-4f61-855e-ffcdb119c763",
"metadata": {},
"outputs": [],
"source": [
"# Assignment expression with a semicolon\n",
"(x := 1);\n",
"(x := 1); # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "61b81865-277e-4964-b03e-eb78f1f318eb",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x = 1\n",
"# Expression without a semicolon\n",
"x"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "974c29be-67e1-4000-95fa-6ca118a63bad",
"metadata": {},
"outputs": [],
"source": [
"x = 1\n",
"# Expression with a semicolon\n",
"x;"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "cfeb1757-46d6-4f13-969f-a283b6d0304f",
"metadata": {},
"outputs": [],
"source": [
"class Point:\n",
" def __init__(self, x, y):\n",
" self.x = x\n",
" self.y = y\n",
"\n",
"\n",
"p = Point(0, 0);"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "2ee7f1a5-ccfe-4004-bfa4-ef834a58da97",
"metadata": {},
"outputs": [],
"source": [
"# Assignment statement where the left is an attribute access doesn't\n",
"# print the value.\n",
"p.x = 1;"
]
},
{
"cell_type": "code",
"execution_count": 18,
"id": "3e49370a-048b-474d-aa0a-3d1d4a73ad37",
"metadata": {},
"outputs": [],
"source": [
"data = {}\n",
"\n",
"# Neither does the subscript node\n",
"data[\"foo\"] = 1;"
]
},
{
"cell_type": "code",
"execution_count": 19,
"id": "d594bdd3-eaa9-41ef-8cda-cf01bc273b2d",
"metadata": {},
"outputs": [],
"source": [
"if (x := 1):\n",
" # It should be the top level statement\n",
" x"
]
},
{
"cell_type": "code",
"execution_count": 20,
"id": "e532f0cf-80c7-42b7-8226-6002fcf74fb6",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Parentheses with comments\n",
"(\n",
" x := 1 # comment\n",
") # comment"
]
},
{
"cell_type": "code",
"execution_count": 21,
"id": "473c5d62-871b-46ed-8a34-27095243f462",
"metadata": {},
"outputs": [],
"source": [
"# Parentheses with comments\n",
"(\n",
" x := 1 # comment\n",
"); # comment"
]
},
{
"cell_type": "code",
"execution_count": 22,
"id": "8c3c2361-f49f-45fe-bbe3-7e27410a8a86",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'Hello world!'"
]
},
"execution_count": 22,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\"\"\"Hello world!\"\"\""
]
},
{
"cell_type": "code",
"execution_count": 23,
"id": "23dbe9b5-3f68-4890-ab2d-ab0dbfd0712a",
"metadata": {},
"outputs": [],
"source": [
"\"\"\"Hello world!\"\"\"; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 24,
"id": "3ce33108-d95d-4c70-83d1-0d4fd36a2951",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'x = 1'"
]
},
"execution_count": 24,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x = 1\n",
"f\"x = {x}\""
]
},
{
"cell_type": "code",
"execution_count": 25,
"id": "654a4a67-de43-4684-824a-9451c67db48f",
"metadata": {},
"outputs": [],
"source": [
"x = 1\n",
"f\"x = {x}\";\n",
"f\"x = {x}\"; # comment\n",
"# comment"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python (ruff-playground)",
"language": "python",
"name": "ruff-playground"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -8,8 +8,8 @@ use ruff_linter::line_width::LineLength;
use ruff_linter::logging::LogLevel;
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::{
ExtensionPair, FilePattern, PatternPrefixPair, PerFileIgnore, PreviewMode, PythonVersion,
SerializationFormat, UnsafeFixes,
FilePattern, PatternPrefixPair, PerFileIgnore, PreviewMode, PythonVersion, SerializationFormat,
UnsafeFixes,
};
use ruff_linter::{RuleParser, RuleSelector, RuleSelectorParser};
use ruff_workspace::configuration::{Configuration, RuleSelection};
@@ -88,7 +88,6 @@ pub enum Command {
#[allow(clippy::struct_excessive_bools)]
pub struct CheckCommand {
/// List of files or directories to check.
#[clap(help = "List of files or directories to check [default: .]")]
pub files: Vec<PathBuf>,
/// Apply fixes to resolve lint violations.
/// Use `--no-fix` to disable or `--unsafe-fixes` to include unsafe fixes.
@@ -279,7 +278,7 @@ pub struct CheckCommand {
#[arg(long, help_heading = "Rule configuration", hide = true)]
pub dummy_variable_rgx: Option<Regex>,
/// Disable cache reads.
#[arg(short, long, env = "RUFF_NO_CACHE", help_heading = "Miscellaneous")]
#[arg(short, long, help_heading = "Miscellaneous")]
pub no_cache: bool,
/// Ignore all configuration files.
#[arg(long, conflicts_with = "config", help_heading = "Miscellaneous")]
@@ -352,9 +351,6 @@ pub struct CheckCommand {
conflicts_with = "watch",
)]
pub show_settings: bool,
/// List of mappings from file extension to language (one of ["python", "ipynb", "pyi"]).
#[arg(long, value_delimiter = ',', hide = true)]
pub extension: Option<Vec<ExtensionPair>>,
/// Dev-only argument to show fixes
#[arg(long, hide = true)]
pub ecosystem_ci: bool,
@@ -364,7 +360,6 @@ pub struct CheckCommand {
#[allow(clippy::struct_excessive_bools)]
pub struct FormatCommand {
/// List of files or directories to format.
#[clap(help = "List of files or directories to format [default: .]")]
pub files: Vec<PathBuf>,
/// Avoid writing any formatted files back; instead, exit with a non-zero status code if any
/// files would have been modified, and zero otherwise.
@@ -379,7 +374,7 @@ pub struct FormatCommand {
pub config: Option<PathBuf>,
/// Disable cache reads.
#[arg(short, long, env = "RUFF_NO_CACHE", help_heading = "Miscellaneous")]
#[arg(short, long, help_heading = "Miscellaneous")]
pub no_cache: bool,
/// Path to the cache directory.
#[arg(long, env = "RUFF_CACHE_DIR", help_heading = "Miscellaneous")]
@@ -414,9 +409,6 @@ pub struct FormatCommand {
force_exclude: bool,
#[clap(long, overrides_with("force_exclude"), hide = true)]
no_force_exclude: bool,
/// Set the line-length.
#[arg(long, help_heading = "Format configuration")]
pub line_length: Option<LineLength>,
/// Ignore all configuration files.
#[arg(long, conflicts_with = "config", help_heading = "Miscellaneous")]
pub isolated: bool,
@@ -540,7 +532,6 @@ impl CheckCommand {
force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
output_format: self.output_format,
show_fixes: resolve_bool_arg(self.show_fixes, self.no_show_fixes),
extension: self.extension,
},
)
}
@@ -561,7 +552,6 @@ impl FormatCommand {
stdin_filename: self.stdin_filename,
},
CliOverrides {
line_length: self.line_length,
respect_gitignore: resolve_bool_arg(
self.respect_gitignore,
self.no_respect_gitignore,
@@ -653,7 +643,6 @@ pub struct CliOverrides {
pub force_exclude: Option<bool>,
pub output_format: Option<SerializationFormat>,
pub show_fixes: Option<bool>,
pub extension: Option<Vec<ExtensionPair>>,
}
impl ConfigurationTransformer for CliOverrides {
@@ -738,9 +727,6 @@ impl ConfigurationTransformer for CliOverrides {
if let Some(target_version) = &self.target_version {
config.target_version = Some(*target_version);
}
if let Some(extension) = &self.extension {
config.lint.extension = Some(extension.clone().into_iter().collect());
}
config
}
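
The hidden `--extension` flag added above takes `ExtensionPair` values that map a file extension to a language (`python`, `ipynb`, or `pyi`). As a rough sketch of the `ext=language` syntax such a pair would parse, here is a small, self-contained helper; it is hypothetical and not the real `ExtensionPair` implementation:

```rust
/// Sketch only: split an `ext=language` argument such as `ipy=ipynb`.
/// The real `ExtensionPair` type lives in `ruff_linter::settings::types`
/// and may differ in detail.
fn parse_extension_pair(value: &str) -> Result<(String, String), String> {
    match value.split_once('=') {
        Some((ext, language)) if !ext.is_empty() && !language.is_empty() => {
            Ok((ext.to_string(), language.to_string()))
        }
        _ => Err(format!("expected `ext=language`, got `{value}`")),
    }
}

fn main() {
    // `--extension ipy=ipynb` would map `.ipy` files to the notebook language.
    assert_eq!(
        parse_extension_pair("ipy=ipynb"),
        Ok(("ipy".to_string(), "ipynb".to_string()))
    );
    assert!(parse_extension_pair("ipy").is_err());
}
```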

View File

@@ -30,7 +30,6 @@ use crate::diagnostics::Diagnostics;
use crate::panic::catch_unwind;
/// Run the linter over a collection of files.
#[allow(clippy::too_many_arguments)]
pub(crate) fn check(
files: &[PathBuf],
pyproject_config: &PyprojectConfig,
@@ -185,7 +184,6 @@ pub(crate) fn check(
/// Wraps [`lint_path`](crate::diagnostics::lint_path) in a [`catch_unwind`](std::panic::catch_unwind) and emits
/// a diagnostic if the linting the file panics.
#[allow(clippy::too_many_arguments)]
fn lint_path(
path: &Path,
package: Option<&Path>,
@@ -202,12 +200,12 @@ fn lint_path(
match result {
Ok(inner) => inner,
Err(error) => {
let message = r"This indicates a bug in Ruff. If you could open an issue at:
let message = r#"This indicates a bug in Ruff. If you could open an issue at:
https://github.com/astral-sh/ruff/issues/new?title=%5BLinter%20panic%5D
...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
";
"#;
error!(
"{}{}{} {message}\n{error}",

View File

@@ -8,7 +8,7 @@ use ruff_workspace::resolver::{match_exclusion, python_file_at_path, PyprojectCo
use crate::args::CliOverrides;
use crate::diagnostics::{lint_stdin, Diagnostics};
use crate::stdin::{parrot_stdin, read_from_stdin};
use crate::stdin::read_from_stdin;
/// Run the linter over a single file, read from `stdin`.
pub(crate) fn check_stdin(
@@ -21,9 +21,6 @@ pub(crate) fn check_stdin(
if pyproject_config.settings.file_resolver.force_exclude {
if let Some(filename) = filename {
if !python_file_at_path(filename, pyproject_config, overrides)? {
if fix_mode.is_apply() {
parrot_stdin()?;
}
return Ok(Diagnostics::default());
}
@@ -32,17 +29,14 @@ pub(crate) fn check_stdin(
.file_name()
.is_some_and(|name| match_exclusion(filename, name, &lint_settings.exclude))
{
if fix_mode.is_apply() {
parrot_stdin()?;
}
return Ok(Diagnostics::default());
}
}
}
let stdin = read_from_stdin()?;
let package_root = filename.and_then(Path::parent).and_then(|path| {
packaging::detect_package_root(path, &pyproject_config.settings.linter.namespace_packages)
});
let stdin = read_from_stdin()?;
let mut diagnostics = lint_stdin(
filename,
package_root,

View File

@@ -34,7 +34,7 @@ use crate::args::{CliOverrides, FormatArguments};
use crate::cache::{Cache, FileCacheKey, PackageCacheMap, PackageCaches};
use crate::panic::{catch_unwind, PanicError};
use crate::resolve::resolve;
use crate::{resolve_default_files, ExitStatus};
use crate::ExitStatus;
#[derive(Debug, Copy, Clone, is_macro::Is)]
pub(crate) enum FormatMode {
@@ -60,7 +60,7 @@ impl FormatMode {
/// Format a set of files, and return the exit status.
pub(crate) fn format(
cli: FormatArguments,
cli: &FormatArguments,
overrides: &CliOverrides,
log_level: LogLevel,
) -> Result<ExitStatus> {
@@ -70,9 +70,8 @@ pub(crate) fn format(
overrides,
cli.stdin_filename.as_deref(),
)?;
let mode = FormatMode::from_cli(&cli);
let files = resolve_default_files(cli.files, false);
let (paths, resolver) = python_files_in_path(&files, &pyproject_config, overrides)?;
let mode = FormatMode::from_cli(cli);
let (paths, resolver) = python_files_in_path(&cli.files, &pyproject_config, overrides)?;
if paths.is_empty() {
warn_user_once!("No Python files found under the given path(s)");
@@ -661,12 +660,12 @@ impl Display for FormatCommandError {
}
}
Self::Panic(path, err) => {
let message = r"This indicates a bug in Ruff. If you could open an issue at:
let message = r#"This indicates a bug in Ruff. If you could open an issue at:
https://github.com/astral-sh/ruff/issues/new?title=%5BFormatter%20panic%5D
...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
";
"#;
if let Some(path) = path {
write!(
f,

View File

@@ -15,7 +15,7 @@ use crate::commands::format::{
FormatResult, FormattedSource,
};
use crate::resolve::resolve;
use crate::stdin::{parrot_stdin, read_from_stdin};
use crate::stdin::read_from_stdin;
use crate::ExitStatus;
/// Run the formatter over a single file, read from `stdin`.
@@ -34,9 +34,6 @@ pub(crate) fn format_stdin(cli: &FormatArguments, overrides: &CliOverrides) -> R
if pyproject_config.settings.file_resolver.force_exclude {
if let Some(filename) = cli.stdin_filename.as_deref() {
if !python_file_at_path(filename, &pyproject_config, overrides)? {
if mode.is_write() {
parrot_stdin()?;
}
return Ok(ExitStatus::Success);
}
@@ -45,9 +42,6 @@ pub(crate) fn format_stdin(cli: &FormatArguments, overrides: &CliOverrides) -> R
.file_name()
.is_some_and(|name| match_exclusion(filename, name, &format_settings.exclude))
{
if mode.is_write() {
parrot_stdin()?;
}
return Ok(ExitStatus::Success);
}
}
@@ -56,9 +50,6 @@ pub(crate) fn format_stdin(cli: &FormatArguments, overrides: &CliOverrides) -> R
let path = cli.stdin_filename.as_deref();
let SourceType::Python(source_type) = path.map(SourceType::from).unwrap_or_default() else {
if mode.is_write() {
parrot_stdin()?;
}
return Ok(ExitStatus::Success);
};

View File

@@ -63,7 +63,7 @@ fn format_rule_text(rule: Rule) -> String {
if rule.is_preview() || rule.is_nursery() {
output.push_str(
r"This rule is in preview and is not stable. The `--preview` flag is required for use.",
r#"This rule is in preview and is not stable. The `--preview` flag is required for use."#,
);
output.push('\n');
output.push('\n');

View File

@@ -17,13 +17,13 @@ use ruff_linter::logging::DisplayParseError;
use ruff_linter::message::Message;
use ruff_linter::pyproject_toml::lint_pyproject_toml;
use ruff_linter::registry::AsRule;
use ruff_linter::settings::types::{ExtensionMapping, UnsafeFixes};
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::{fs, IOError, SyntaxError};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::imports::ImportMap;
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_python_ast::{SourceType, TomlSourceType};
use ruff_source_file::{LineIndex, SourceCode, SourceFileBuilder};
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::Settings;
@@ -177,11 +177,6 @@ impl AddAssign for FixMap {
}
}
fn override_source_type(path: Option<&Path>, extension: &ExtensionMapping) -> Option<PySourceType> {
let ext = path?.extension()?.to_str()?;
extension.get(ext).map(PySourceType::from)
}
/// Lint the source code at the given `Path`.
pub(crate) fn lint_path(
path: &Path,
@@ -226,35 +221,31 @@ pub(crate) fn lint_path(
debug!("Checking: {}", path.display());
let source_type = match override_source_type(Some(path), &settings.extension) {
Some(source_type) => source_type,
None => match SourceType::from(path) {
SourceType::Toml(TomlSourceType::Pyproject) => {
let messages = if settings
.rules
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_pyproject_toml())
{
let contents = match std::fs::read_to_string(path).map_err(SourceError::from) {
Ok(contents) => contents,
Err(err) => {
return Ok(Diagnostics::from_source_error(&err, Some(path), settings));
}
};
let source_file =
SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
lint_pyproject_toml(source_file, settings)
} else {
vec![]
let source_type = match SourceType::from(path) {
SourceType::Toml(TomlSourceType::Pyproject) => {
let messages = if settings
.rules
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_pyproject_toml())
{
let contents = match std::fs::read_to_string(path).map_err(SourceError::from) {
Ok(contents) => contents,
Err(err) => {
return Ok(Diagnostics::from_source_error(&err, Some(path), settings));
}
};
return Ok(Diagnostics {
messages,
..Diagnostics::default()
});
}
SourceType::Toml(_) => return Ok(Diagnostics::default()),
SourceType::Python(source_type) => source_type,
},
let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
lint_pyproject_toml(source_file, settings)
} else {
vec![]
};
return Ok(Diagnostics {
messages,
..Diagnostics::default()
});
}
SourceType::Toml(_) => return Ok(Diagnostics::default()),
SourceType::Python(source_type) => source_type,
};
// Extract the sources from the file.
@@ -379,15 +370,8 @@ pub(crate) fn lint_stdin(
fix_mode: flags::FixMode,
) -> Result<Diagnostics> {
// TODO(charlie): Support `pyproject.toml`.
let source_type = if let Some(source_type) =
override_source_type(path, &settings.linter.extension)
{
source_type
} else {
let SourceType::Python(source_type) = path.map(SourceType::from).unwrap_or_default() else {
return Ok(Diagnostics::default());
};
source_type
let SourceType::Python(source_type) = path.map(SourceType::from).unwrap_or_default() else {
return Ok(Diagnostics::default());
};
// Extract the sources from the file.
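
The new `override_source_type` helper above consults the `--extension` mapping before falling back to path-based detection. Below is a minimal standalone sketch of that lookup order, using a plain `HashMap` and string labels in place of the real `ExtensionMapping` and `PySourceType` types (both stand-ins are assumptions for illustration):

```rust
use std::collections::HashMap;
use std::path::Path;

/// Sketch: prefer a user-supplied extension mapping, then fall back to the
/// file extension itself, roughly mirroring `SourceType::from(path)`.
fn source_type_for(path: &Path, overrides: &HashMap<String, String>) -> Option<String> {
    let ext = path.extension()?.to_str()?;
    if let Some(language) = overrides.get(ext) {
        return Some(language.clone());
    }
    match ext {
        "py" => Some("python".to_string()),
        "pyi" => Some("pyi".to_string()),
        "ipynb" => Some("ipynb".to_string()),
        _ => None,
    }
}

fn main() {
    let overrides = HashMap::from([("ipy".to_string(), "ipynb".to_string())]);
    // The override wins for `.ipy`; `.py` falls back to plain detection.
    assert_eq!(
        source_type_for(Path::new("notebook.ipy"), &overrides),
        Some("ipynb".to_string())
    );
    assert_eq!(
        source_type_for(Path::new("module.py"), &overrides),
        Some("python".to_string())
    );
}
```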

View File

@@ -101,19 +101,6 @@ fn is_stdin(files: &[PathBuf], stdin_filename: Option<&Path>) -> bool {
file == Path::new("-")
}
/// Returns the default set of files if none are provided, otherwise returns the provided files.
fn resolve_default_files(files: Vec<PathBuf>, is_stdin: bool) -> Vec<PathBuf> {
if files.is_empty() {
if is_stdin {
vec![Path::new("-").to_path_buf()]
} else {
vec![Path::new(".").to_path_buf()]
}
} else {
files
}
}
/// Get the actual value of the `format` desired from either `output_format`
/// or `format`, and warn the user if they're using the deprecated form.
fn resolve_help_output_format(output_format: HelpFormat, format: Option<HelpFormat>) -> HelpFormat {
@@ -209,7 +196,7 @@ fn format(args: FormatCommand, log_level: LogLevel) -> Result<ExitStatus> {
if is_stdin(&cli.files, cli.stdin_filename.as_deref()) {
commands::format_stdin::format_stdin(&cli, &overrides)
} else {
commands::format::format(cli, &overrides, log_level)
commands::format::format(&cli, &overrides, log_level)
}
}
@@ -235,15 +222,17 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
};
let stderr_writer = Box::new(BufWriter::new(io::stderr()));
let is_stdin = is_stdin(&cli.files, cli.stdin_filename.as_deref());
let files = resolve_default_files(cli.files, is_stdin);
if cli.show_settings {
commands::show_settings::show_settings(&files, &pyproject_config, &overrides, &mut writer)?;
commands::show_settings::show_settings(
&cli.files,
&pyproject_config,
&overrides,
&mut writer,
)?;
return Ok(ExitStatus::Success);
}
if cli.show_files {
commands::show_files::show_files(&files, &pyproject_config, &overrides, &mut writer)?;
commands::show_files::show_files(&cli.files, &pyproject_config, &overrides, &mut writer)?;
return Ok(ExitStatus::Success);
}
@@ -306,7 +295,8 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
if !fix_mode.is_generate() {
warn_user!("--fix is incompatible with --add-noqa.");
}
let modifications = commands::add_noqa::add_noqa(&files, &pyproject_config, &overrides)?;
let modifications =
commands::add_noqa::add_noqa(&cli.files, &pyproject_config, &overrides)?;
if modifications > 0 && log_level >= LogLevel::Default {
let s = if modifications == 1 { "" } else { "s" };
#[allow(clippy::print_stderr)]
@@ -333,7 +323,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
// Configure the file watcher.
let (tx, rx) = channel();
let mut watcher = recommended_watcher(tx)?;
for file in &files {
for file in &cli.files {
watcher.watch(file, RecursiveMode::Recursive)?;
}
if let Some(file) = pyproject_config.path.as_ref() {
@@ -345,7 +335,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
printer.write_to_user("Starting linter in watch mode...\n");
let messages = commands::check::check(
&files,
&cli.files,
&pyproject_config,
&overrides,
cache.into(),
@@ -378,7 +368,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
printer.write_to_user("File change detected...\n");
let messages = commands::check::check(
&files,
&cli.files,
&pyproject_config,
&overrides,
cache.into(),
@@ -392,6 +382,8 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
}
}
} else {
let is_stdin = is_stdin(&cli.files, cli.stdin_filename.as_deref());
// Generate lint violations.
let diagnostics = if is_stdin {
commands::check_stdin::check_stdin(
@@ -403,7 +395,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
)?
} else {
commands::check::check(
&files,
&cli.files,
&pyproject_config,
&overrides,
cache.into(),
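
Several hunks above swap direct uses of `cli.files` for a resolved file list, so invoking `ruff check` or `ruff format` with no paths falls back to the current directory (or `-` when reading from stdin). A self-contained sketch of that fallback, mirroring the `resolve_default_files` helper shown earlier in this diff:

```rust
use std::path::{Path, PathBuf};

/// Sketch of the default-files fallback: with no user-supplied paths, lint or
/// format either stdin (`-`) or the current directory (`.`); otherwise keep
/// the paths as given.
fn resolve_default_files(files: Vec<PathBuf>, is_stdin: bool) -> Vec<PathBuf> {
    if files.is_empty() {
        if is_stdin {
            vec![Path::new("-").to_path_buf()]
        } else {
            vec![Path::new(".").to_path_buf()]
        }
    } else {
        files
    }
}

fn main() {
    // `ruff check` with no arguments behaves like `ruff check .`
    assert_eq!(resolve_default_files(vec![], false), vec![PathBuf::from(".")]);
    // Explicit paths are passed through untouched.
    assert_eq!(
        resolve_default_files(vec![PathBuf::from("src")], false),
        vec![PathBuf::from("src")]
    );
}
```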

View File

@@ -43,7 +43,7 @@ pub fn resolve(
{
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, overrides)?;
debug!(
"Using user-specified configuration file at: {}",
"Using user specified pyproject.toml at {}",
pyproject.display()
);
return Ok(PyprojectConfig::new(
@@ -63,10 +63,7 @@ pub fn resolve(
.as_ref()
.unwrap_or(&path_dedot::CWD.as_path()),
)? {
debug!(
"Using configuration file (via parent) at: {}",
pyproject.display()
);
debug!("Using pyproject.toml (parent) at {}", pyproject.display());
let settings = resolve_root_settings(&pyproject, Relativity::Parent, overrides)?;
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
@@ -80,10 +77,7 @@ pub fn resolve(
// end up the "closest" `pyproject.toml` file for every Python file later on, so
// these act as the "default" settings.)
if let Some(pyproject) = pyproject::find_user_settings_toml() {
debug!(
"Using configuration file (via cwd) at: {}",
pyproject.display()
);
debug!("Using pyproject.toml (cwd) at {}", pyproject.display());
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, overrides)?;
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,

View File

@@ -1,5 +1,5 @@
use std::io;
use std::io::{Read, Write};
use std::io::Read;
/// Read a string from `stdin`.
pub(crate) fn read_from_stdin() -> Result<String, io::Error> {
@@ -7,11 +7,3 @@ pub(crate) fn read_from_stdin() -> Result<String, io::Error> {
io::stdin().lock().read_to_string(&mut buffer)?;
Ok(buffer)
}
/// Read bytes from `stdin` and write them to `stdout`.
pub(crate) fn parrot_stdin() -> Result<(), io::Error> {
let mut buffer = String::new();
io::stdin().lock().read_to_string(&mut buffer)?;
io::stdout().write_all(buffer.as_bytes())?;
Ok(())
}
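
`parrot_stdin` above is what lets excluded stdin input pass through unchanged when `--fix` or `format` writes results back (see the `check_stdin` and `format_stdin` hunks earlier), so piping a file through Ruff never swallows it. A standalone sketch of the same echo pattern, runnable outside the crate:

```rust
use std::io::{self, Read, Write};

/// Sketch of the "parrot" pattern: read all of stdin and write it straight
/// back to stdout, so callers that pipe through the tool still get their input.
fn parrot_stdin() -> io::Result<()> {
    let mut buffer = String::new();
    io::stdin().lock().read_to_string(&mut buffer)?;
    io::stdout().write_all(buffer.as_bytes())?;
    Ok(())
}

fn main() -> io::Result<()> {
    // `echo "x = 1" | <this binary>` prints `x = 1` unchanged.
    parrot_stdin()
}
```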

View File

@@ -43,53 +43,6 @@ if condition:
"###);
}
#[test]
fn default_files() -> Result<()> {
let tempdir = TempDir::new()?;
fs::write(
tempdir.path().join("foo.py"),
r#"
foo = "needs formatting"
"#,
)?;
fs::write(
tempdir.path().join("bar.py"),
r#"
bar = "needs formatting"
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--no-cache", "--check"]).current_dir(tempdir.path()), @r###"
success: false
exit_code: 1
----- stdout -----
Would reformat: bar.py
Would reformat: foo.py
2 files would be reformatted
----- stderr -----
"###);
Ok(())
}
#[test]
fn format_warn_stdin_filename_with_files() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "foo.py"])
.arg("foo.py")
.pass_stdin("foo = 1"), @r###"
success: true
exit_code: 0
----- stdout -----
foo = 1
----- stderr -----
warning: Ignoring file foo.py in favor of standard input.
"###);
}
#[test]
fn format_options() -> Result<()> {
let tempdir = TempDir::new()?;
@@ -367,11 +320,6 @@ if __name__ == '__main__':
exit_code: 0
----- stdout -----
from test import say_hy
if __name__ == '__main__':
say_hy("dear Ruff contributor")
----- stderr -----
"###);
Ok(())
@@ -442,9 +390,9 @@ fn deprecated_options() -> Result<()> {
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r"
r#"
tab-size = 2
",
"#,
)?;
insta::with_settings!({filters => vec![
@@ -454,10 +402,10 @@ tab-size = 2
.args(["format", "--config"])
.arg(&ruff_toml)
.arg("-")
.pass_stdin(r"
.pass_stdin(r#"
if True:
pass
"), @r###"
"#), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -490,9 +438,9 @@ format = "json"
.args(["check", "--select", "F401", "--no-cache", "--config"])
.arg(&ruff_toml)
.arg("-")
.pass_stdin(r"
.pass_stdin(r#"
import os
"), @r###"
"#), @r###"
success: false
exit_code: 2
----- stdout -----
@@ -865,432 +813,3 @@ fn test_diff_stdin_formatted() {
----- stderr -----
"###);
}
#[test]
fn test_notebook_trailing_semicolon() {
let fixtures = Path::new("resources").join("test").join("fixtures");
let unformatted = fs::read(fixtures.join("trailing_semicolon.ipynb")).unwrap();
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "test.ipynb"])
.arg("-")
.pass_stdin(unformatted), @r###"
success: true
exit_code: 0
----- stdout -----
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "4f8ce941-1492-4d4e-8ab5-70d733fe891a",
"metadata": {},
"outputs": [],
"source": [
"%config ZMQInteractiveShell.ast_node_interactivity=\"last_expr_or_assign\""
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "721ec705-0c65-4bfb-9809-7ed8bc534186",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Assignment statement without a semicolon\n",
"x = 1"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "de50e495-17e5-41cc-94bd-565757555d7e",
"metadata": {},
"outputs": [],
"source": [
"# Assignment statement with a semicolon\n",
"x = 1\n",
"x = 1;"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "39e31201-23da-44eb-8684-41bba3663991",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"2"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Augmented assignment without a semicolon\n",
"x += 1"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "6b73d3dd-c73a-4697-9e97-e109a6c1fbab",
"metadata": {},
"outputs": [],
"source": [
"# Augmented assignment without a semicolon\n",
"x += 1\n",
"x += 1; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "2a3e5b86-aa5b-46ba-b9c6-0386d876f58c",
"metadata": {},
"outputs": [],
"source": [
"# Multiple assignment without a semicolon\n",
"x = y = 1"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "07f89e51-9357-4cfb-8fc5-76fb75e35949",
"metadata": {},
"outputs": [],
"source": [
"# Multiple assignment with a semicolon\n",
"x = y = 1\n",
"x = y = 1"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "c22b539d-473e-48f8-a236-625e58c47a00",
"metadata": {},
"outputs": [],
"source": [
"# Tuple unpacking without a semicolon\n",
"x, y = 1, 2"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "12c87940-a0d5-403b-a81c-7507eb06dc7e",
"metadata": {},
"outputs": [],
"source": [
"# Tuple unpacking with a semicolon (irrelevant)\n",
"x, y = 1, 2\n",
"x, y = 1, 2 # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "5a768c76-6bc4-470c-b37e-8cc14bc6caf4",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Annotated assignment statement without a semicolon\n",
"x: int = 1"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "21bfda82-1a9a-4ba1-9078-74ac480804b5",
"metadata": {},
"outputs": [],
"source": [
"# Annotated assignment statement without a semicolon\n",
"x: int = 1\n",
"x: int = 1; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "09929999-ff29-4d10-ad2b-e665af15812d",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Assignment expression without a semicolon\n",
"(x := 1)"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "32a83217-1bad-4f61-855e-ffcdb119c763",
"metadata": {},
"outputs": [],
"source": [
"# Assignment expression with a semicolon\n",
"(x := 1)\n",
"(x := 1); # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "61b81865-277e-4964-b03e-eb78f1f318eb",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x = 1\n",
"# Expression without a semicolon\n",
"x"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "974c29be-67e1-4000-95fa-6ca118a63bad",
"metadata": {},
"outputs": [],
"source": [
"x = 1\n",
"# Expression with a semicolon\n",
"x;"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "cfeb1757-46d6-4f13-969f-a283b6d0304f",
"metadata": {},
"outputs": [],
"source": [
"class Point:\n",
" def __init__(self, x, y):\n",
" self.x = x\n",
" self.y = y\n",
"\n",
"\n",
"p = Point(0, 0);"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "2ee7f1a5-ccfe-4004-bfa4-ef834a58da97",
"metadata": {},
"outputs": [],
"source": [
"# Assignment statement where the left is an attribute access doesn't\n",
"# print the value.\n",
"p.x = 1"
]
},
{
"cell_type": "code",
"execution_count": 18,
"id": "3e49370a-048b-474d-aa0a-3d1d4a73ad37",
"metadata": {},
"outputs": [],
"source": [
"data = {}\n",
"\n",
"# Neither does the subscript node\n",
"data[\"foo\"] = 1"
]
},
{
"cell_type": "code",
"execution_count": 19,
"id": "d594bdd3-eaa9-41ef-8cda-cf01bc273b2d",
"metadata": {},
"outputs": [],
"source": [
"if x := 1:\n",
" # It should be the top level statement\n",
" x"
]
},
{
"cell_type": "code",
"execution_count": 20,
"id": "e532f0cf-80c7-42b7-8226-6002fcf74fb6",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Parentheses with comments\n",
"(\n",
" x := 1 # comment\n",
") # comment"
]
},
{
"cell_type": "code",
"execution_count": 21,
"id": "473c5d62-871b-46ed-8a34-27095243f462",
"metadata": {},
"outputs": [],
"source": [
"# Parentheses with comments\n",
"(\n",
" x := 1 # comment\n",
"); # comment"
]
},
{
"cell_type": "code",
"execution_count": 22,
"id": "8c3c2361-f49f-45fe-bbe3-7e27410a8a86",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'Hello world!'"
]
},
"execution_count": 22,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\"\"\"Hello world!\"\"\""
]
},
{
"cell_type": "code",
"execution_count": 23,
"id": "23dbe9b5-3f68-4890-ab2d-ab0dbfd0712a",
"metadata": {},
"outputs": [],
"source": [
"\"\"\"Hello world!\"\"\"; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 24,
"id": "3ce33108-d95d-4c70-83d1-0d4fd36a2951",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'x = 1'"
]
},
"execution_count": 24,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x = 1\n",
"f\"x = {x}\""
]
},
{
"cell_type": "code",
"execution_count": 25,
"id": "654a4a67-de43-4684-824a-9451c67db48f",
"metadata": {},
"outputs": [],
"source": [
"x = 1\n",
"f\"x = {x}\"\n",
"f\"x = {x}\"; # comment\n",
"# comment"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python (ruff-playground)",
"language": "python",
"name": "ruff-playground"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
----- stderr -----
"###);
}

File diff suppressed because it is too large

View File

@@ -396,43 +396,3 @@ if __name__ == "__main__":
"###);
Ok(())
}
/// Regression test for [#8858](https://github.com/astral-sh/ruff/issues/8858)
#[test]
fn parent_configuration_override() -> Result<()> {
let tempdir = TempDir::new()?;
let root_ruff = tempdir.path().join("ruff.toml");
fs::write(
root_ruff,
r#"
[lint]
select = ["ALL"]
"#,
)?;
let sub_dir = tempdir.path().join("subdirectory");
fs::create_dir(&sub_dir)?;
let subdirectory_ruff = sub_dir.join("ruff.toml");
fs::write(
subdirectory_ruff,
r#"
[lint]
ignore = ["D203", "D212"]
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(sub_dir)
.arg("check")
.args(STDIN_BASE_OPTIONS)
, @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
warning: No Python files found under the given path(s)
"###);
Ok(())
}

View File

@@ -1,101 +0,0 @@
#![cfg(not(target_family = "wasm"))]
use std::path::Path;
use std::process::Command;
use std::str;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
const BIN_NAME: &str = "ruff";
#[cfg(not(target_os = "windows"))]
const TEST_FILTERS: &[(&str, &str)] = &[(".*/resources/test/fixtures/", "[BASEPATH]/")];
#[cfg(target_os = "windows")]
const TEST_FILTERS: &[(&str, &str)] = &[
(r".*\\resources\\test\\fixtures\\", "[BASEPATH]\\"),
(r"\\", "/"),
];
#[test]
fn check_project_include_defaults() {
// Defaults to checking the current working directory
//
// The test directory includes:
// - A pyproject.toml which specifies an include
// - A nested pyproject.toml which has a Ruff section
//
// The nested project should be checked in full instead of respecting the parent includes
insta::with_settings!({
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
success: true
exit_code: 0
----- stdout -----
[BASEPATH]/include-test/a.py
[BASEPATH]/include-test/nested-project/e.py
[BASEPATH]/include-test/nested-project/pyproject.toml
[BASEPATH]/include-test/subdirectory/c.py
----- stderr -----
"###);
});
}
#[test]
fn check_project_respects_direct_paths() {
// Given a direct path not included in the project `includes`, it should be checked
insta::with_settings!({
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files", "b.py"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
success: true
exit_code: 0
----- stdout -----
[BASEPATH]/include-test/b.py
----- stderr -----
"###);
});
}
#[test]
fn check_project_respects_subdirectory_includes() {
// Given a direct path to a subdirectory, the include should be respected
insta::with_settings!({
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files", "subdirectory"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
success: true
exit_code: 0
----- stdout -----
[BASEPATH]/include-test/subdirectory/c.py
----- stderr -----
"###);
});
}
#[test]
fn check_project_from_project_subdirectory_respects_includes() {
// Run from a project subdirectory, the include specified in the parent directory should be respected
insta::with_settings!({
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test/subdirectory")), @r###"
success: true
exit_code: 0
----- stdout -----
[BASEPATH]/include-test/subdirectory/c.py
----- stderr -----
"###);
});
}

View File

@@ -48,12 +48,9 @@ tracing-indicatif = { workspace = true }
tracing-subscriber = { workspace = true, features = ["env-filter"] }
imara-diff = "0.1.5"
[dev-dependencies]
indoc = "2.0.4"
[features]
# Turn off rayon for profiling
singlethreaded = []
[lints]
workspace = true
[dev-dependencies]
indoc = "2.0.4"

View File

@@ -49,7 +49,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
if rule.is_preview() || rule.is_nursery() {
output.push_str(
r"This rule is unstable and in [preview](../preview.md). The `--preview` flag is required for use.",
r#"This rule is unstable and in [preview](../preview.md). The `--preview` flag is required for use."#,
);
output.push('\n');
output.push('\n');

View File

@@ -3,7 +3,6 @@
//! Used for <https://docs.astral.sh/ruff/settings/>.
use std::fmt::Write;
use ruff_python_trivia::textwrap;
use ruff_workspace::options::Options;
use ruff_workspace::options_base::{OptionField, OptionSet, OptionsMetadata, Visit};
@@ -126,87 +125,22 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parent_set:
output.push('\n');
output.push_str(&format!("**Type**: `{}`\n", field.value_type));
output.push('\n');
output.push_str("**Example usage**:\n\n");
output.push_str(&format_tab(
"pyproject.toml",
&format_header(field.scope, parent_set, ConfigurationFile::PyprojectToml),
field.example,
));
output.push_str(&format_tab(
"ruff.toml",
&format_header(field.scope, parent_set, ConfigurationFile::RuffToml),
field.example,
output.push_str(&format!(
"**Example usage**:\n\n```toml\n[tool.ruff{}]\n{}\n```\n",
if let Some(set_name) = parent_set.name() {
if set_name == "format" {
String::from(".format")
} else {
format!(".lint.{set_name}")
}
} else {
String::new()
},
field.example
));
output.push('\n');
}
fn format_tab(tab_name: &str, header: &str, content: &str) -> String {
format!(
"=== \"{}\"\n\n ```toml\n {}\n{}\n ```\n",
tab_name,
header,
textwrap::indent(content, " ")
)
}
/// Format the TOML header for the example usage for a given option.
///
/// For example: `[tool.ruff.format]` or `[tool.ruff.lint.isort]`.
fn format_header(
scope: Option<&str>,
parent_set: &Set,
configuration: ConfigurationFile,
) -> String {
match configuration {
ConfigurationFile::PyprojectToml => {
let mut header = if let Some(set_name) = parent_set.name() {
if set_name == "format" {
String::from("tool.ruff.format")
} else {
format!("tool.ruff.lint.{set_name}")
}
} else {
"tool.ruff".to_string()
};
if let Some(scope) = scope {
if !header.is_empty() {
header.push('.');
}
header.push_str(scope);
}
format!("[{header}]")
}
ConfigurationFile::RuffToml => {
let mut header = if let Some(set_name) = parent_set.name() {
if set_name == "format" {
String::from("format")
} else {
format!("lint.{set_name}")
}
} else {
String::new()
};
if let Some(scope) = scope {
if !header.is_empty() {
header.push('.');
}
header.push_str(scope);
}
if header.is_empty() {
String::new()
} else {
format!("[{header}]")
}
}
}
}
#[derive(Debug, Copy, Clone)]
enum ConfigurationFile {
PyprojectToml,
RuffToml,
}
#[derive(Default)]
struct CollectOptionsVisitor {
groups: Vec<(String, OptionSet)>,
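For context, the tab-based generator removed in this hunk emits each option's example under two headers, one per configuration file. A minimal sketch of that header scheme (the helper name is made up; only the header strings follow the code above):

fn example_headers(set_name: &str) -> (String, String) {
    // The same option group renders differently depending on the target file:
    // `pyproject.toml` nests everything under `tool.ruff`, `ruff.toml` does not.
    let pyproject = format!("[tool.ruff.lint.{set_name}]");
    let ruff_toml = format!("[lint.{set_name}]");
    (pyproject, ruff_toml)
}

fn main() {
    let (pyproject, ruff_toml) = example_headers("isort");
    assert_eq!(pyproject, "[tool.ruff.lint.isort]");
    assert_eq!(ruff_toml, "[lint.isort]");
}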

View File

@@ -1,34 +1,30 @@
//! Print the AST for a given Python file.
#![allow(clippy::print_stdout, clippy::print_stderr)]
use std::fs;
use std::path::PathBuf;
use anyhow::Result;
use ruff_linter::source_kind::SourceKind;
use ruff_python_ast::PySourceType;
use ruff_python_parser::{parse, AsMode};
use ruff_python_parser::{parse, Mode};
#[derive(clap::Args)]
pub(crate) struct Args {
/// Python file for which to generate the AST.
#[arg(required = true)]
file: PathBuf,
/// Run in Jupyter mode i.e., allow line magics.
#[arg(long)]
jupyter: bool,
}
pub(crate) fn main(args: &Args) -> Result<()> {
let source_type = PySourceType::from(&args.file);
let source_kind = SourceKind::from_path(&args.file, source_type)?.ok_or_else(|| {
anyhow::anyhow!(
"Could not determine source kind for file: {}",
args.file.display()
)
})?;
let python_ast = parse(
source_kind.source_code(),
source_type.as_mode(),
&args.file.to_string_lossy(),
)?;
let contents = fs::read_to_string(&args.file)?;
let mode = if args.jupyter {
Mode::Ipython
} else {
Mode::Module
};
let python_ast = parse(&contents, mode, &args.file.to_string_lossy())?;
println!("{python_ast:#?}");
Ok(())
}
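A minimal sketch of calling the parser as in the hunk above (assuming the `ruff_python_parser` and `anyhow` crates as used in this tree):

use anyhow::Result;
use ruff_python_parser::{parse, Mode};

fn main() -> Result<()> {
    // Parse a plain module; `Mode::Ipython` would additionally allow line magics.
    let source = "x = 1\nprint(x)\n";
    let python_ast = parse(source, Mode::Module, "<example>")?;
    println!("{python_ast:#?}");
    Ok(())
}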

View File

@@ -1,30 +1,30 @@
//! Print the token stream for a given Python file.
#![allow(clippy::print_stdout, clippy::print_stderr)]
use std::fs;
use std::path::PathBuf;
use anyhow::Result;
use ruff_linter::source_kind::SourceKind;
use ruff_python_ast::PySourceType;
use ruff_python_parser::{lexer, AsMode};
use ruff_python_parser::{lexer, Mode};
#[derive(clap::Args)]
pub(crate) struct Args {
/// Python file for which to generate the AST.
#[arg(required = true)]
file: PathBuf,
/// Run in Jupyter mode i.e., allow line magics (`%`, `!`, `?`, `/`, `,`, `;`).
#[arg(long)]
jupyter: bool,
}
pub(crate) fn main(args: &Args) -> Result<()> {
let source_type = PySourceType::from(&args.file);
let source_kind = SourceKind::from_path(&args.file, source_type)?.ok_or_else(|| {
anyhow::anyhow!(
"Could not determine source kind for file: {}",
args.file.display()
)
})?;
for (tok, range) in lexer::lex(source_kind.source_code(), source_type.as_mode()).flatten() {
let contents = fs::read_to_string(&args.file)?;
let mode = if args.jupyter {
Mode::Ipython
} else {
Mode::Module
};
for (tok, range) in lexer::lex(&contents, mode).flatten() {
println!(
"{start:#?} {tok:#?} {end:#?}",
start = range.start(),
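Similarly, a minimal sketch of the token-printing loop in this hunk (same assumption about the `ruff_python_parser` crate):

use ruff_python_parser::{lexer, Mode};

fn main() {
    // Lex a small module and print each token with its source range.
    for (tok, range) in lexer::lex("x = 1", Mode::Module).flatten() {
        println!("{start:#?} {tok:#?} {end:#?}", start = range.start(), end = range.end());
    }
}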

View File

@@ -12,7 +12,7 @@ use crate::edit::Edit;
pub enum Applicability {
/// The fix is unsafe and should only be displayed for manual application by the user.
/// The fix is likely to be incorrect or the resulting code may have invalid syntax.
DisplayOnly,
Display,
/// The fix is unsafe and should only be applied with user opt-in.
/// The fix may be what the user intended, but it is uncertain; the resulting code will have valid syntax.
@@ -87,46 +87,22 @@ impl Fix {
}
}
/// Create a new [`Fix`] that should only [display](Applicability::DisplayOnly) and not apply from an [`Edit`] element.
pub fn display_only_edit(edit: Edit) -> Self {
/// Create a new [`Fix`] that should only [display](Applicability::Display) and not apply from an [`Edit`] element.
pub fn display_edit(edit: Edit) -> Self {
Self {
edits: vec![edit],
applicability: Applicability::DisplayOnly,
applicability: Applicability::Display,
isolation_level: IsolationLevel::default(),
}
}
/// Create a new [`Fix`] that should only [display](Applicability::DisplayOnly) and not apply from multiple [`Edit`] elements.
pub fn display_only_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
/// Create a new [`Fix`] that should only [display](Applicability::Display) and not apply from multiple [`Edit`] elements.
pub fn display_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
let mut edits: Vec<Edit> = std::iter::once(edit).chain(rest).collect();
edits.sort_by_key(|edit| (edit.start(), edit.end()));
Self {
edits,
applicability: Applicability::DisplayOnly,
isolation_level: IsolationLevel::default(),
}
}
/// Create a new [`Fix`] with the specified [`Applicability`] to apply an [`Edit`] element.
pub fn applicable_edit(edit: Edit, applicability: Applicability) -> Self {
Self {
edits: vec![edit],
applicability,
isolation_level: IsolationLevel::default(),
}
}
/// Create a new [`Fix`] with the specified [`Applicability`] to apply multiple [`Edit`] elements.
pub fn applicable_edits(
edit: Edit,
rest: impl IntoIterator<Item = Edit>,
applicability: Applicability,
) -> Self {
let mut edits: Vec<Edit> = std::iter::once(edit).chain(rest).collect();
edits.sort_by_key(|edit| (edit.start(), edit.end()));
Self {
edits,
applicability,
applicability: Applicability::Display,
isolation_level: IsolationLevel::default(),
}
}
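A rough usage sketch of the constructors in this hunk (assuming `Edit`, `Fix`, and `Applicability` are re-exported from the `ruff_diagnostics` crate, as suggested by the `use crate::edit::Edit` import above; the helper name is made up and the `Edit` values are left to the caller):

use ruff_diagnostics::{Applicability, Edit, Fix};

// Bundle two pre-built edits into one fix that is surfaced to the user but
// never applied automatically.
fn display_only_fix(first: Edit, second: Edit) -> Fix {
    Fix::applicable_edits(first, [second], Applicability::DisplayOnly)
}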

View File

@@ -29,6 +29,3 @@ insta = { workspace = true }
[features]
serde = ["dep:serde", "ruff_text_size/serde"]
schemars = ["dep:schemars", "ruff_text_size/schemars"]
[lints]
workspace = true

View File

@@ -1,9 +1,23 @@
use super::{Buffer, Format, Formatter};
use crate::FormatResult;
use std::ffi::c_void;
use std::marker::PhantomData;
/// A convenience wrapper for representing a formattable argument.
/// Mono-morphed type to format an object. Used by the [`crate::format`!], [`crate::format_args`!], and
/// [`crate::write`!] macros.
///
/// This struct is similar to a dynamic dispatch (using `dyn Format`) because it stores a pointer to the value.
/// However, it doesn't store the pointer to `dyn Format`'s vtable; instead, it statically resolves the function
/// pointer of `Format::format` and stores it in `formatter`.
pub struct Argument<'fmt, Context> {
value: &'fmt dyn Format<Context>,
/// The value to format stored as a raw pointer where `lifetime` stores the value's lifetime.
value: *const c_void,
/// Stores the lifetime of the value. To get the most out of our dear borrow checker.
lifetime: PhantomData<&'fmt ()>,
/// The function pointer to `value`'s `Format::format` method
formatter: fn(*const c_void, &mut Formatter<'_, Context>) -> FormatResult<()>,
}
impl<Context> Clone for Argument<'_, Context> {
@@ -14,19 +28,32 @@ impl<Context> Clone for Argument<'_, Context> {
impl<Context> Copy for Argument<'_, Context> {}
impl<'fmt, Context> Argument<'fmt, Context> {
/// Called by the [ruff_formatter::format_args] macro.
/// Called by the [ruff_formatter::format_args] macro. Creates a mono-morphed value for formatting
/// an object.
#[doc(hidden)]
#[inline]
pub fn new<F: Format<Context>>(value: &'fmt F) -> Self {
Self { value }
#[inline]
fn formatter<F: Format<Context>, Context>(
ptr: *const c_void,
fmt: &mut Formatter<Context>,
) -> FormatResult<()> {
// SAFETY: Safe because the 'fmt lifetime is captured by the 'lifetime' field.
#[allow(unsafe_code)]
F::fmt(unsafe { &*ptr.cast::<F>() }, fmt)
}
Self {
value: (value as *const F).cast::<std::ffi::c_void>(),
lifetime: PhantomData,
formatter: formatter::<F, Context>,
}
}
/// Formats the value stored by this argument using the given formatter.
#[inline]
// Seems to only be triggered on wasm32 and looks like a false positive?
#[allow(clippy::trivially_copy_pass_by_ref)]
pub(super) fn format(&self, f: &mut Formatter<Context>) -> FormatResult<()> {
self.value.fmt(f)
(self.formatter)(self.value, f)
}
}
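The doc comments in this hunk describe a hand-rolled alternative to `dyn` dispatch: erase the value to a raw pointer and pair it with a statically resolved function pointer. A self-contained sketch of that pattern with toy names (not ruff's actual types):

use std::ffi::c_void;
use std::marker::PhantomData;

trait Describe {
    fn describe(&self) -> String;
}

/// Type-erased reference: a raw pointer to the value plus a monomorphized
/// function pointer that knows how to call `Describe::describe` on it.
struct Erased<'a> {
    value: *const c_void,
    // Mirrors the `lifetime` field above: ties the raw pointer to the borrow.
    _lifetime: PhantomData<&'a ()>,
    call: fn(*const c_void) -> String,
}

impl<'a> Erased<'a> {
    fn new<T: Describe>(value: &'a T) -> Self {
        fn call<T: Describe>(ptr: *const c_void) -> String {
            // SAFETY: `ptr` was created from an `&T` whose lifetime is tracked
            // by the `PhantomData` field, so it is still valid here.
            unsafe { (*ptr.cast::<T>()).describe() }
        }
        Self {
            value: (value as *const T).cast::<c_void>(),
            _lifetime: PhantomData,
            call: call::<T>,
        }
    }

    fn describe(&self) -> String {
        (self.call)(self.value)
    }
}

struct Point;

impl Describe for Point {
    fn describe(&self) -> String {
        "a point".to_string()
    }
}

fn main() {
    let point = Point;
    let erased = Erased::new(&point);
    assert_eq!(erased.describe(), "a point");
}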

View File

@@ -2555,17 +2555,17 @@ pub struct BestFitting<'a, Context> {
}
impl<'a, Context> BestFitting<'a, Context> {
/// Creates a new best fitting IR with the given variants.
///
/// Callers are required to ensure that the number of variants given
/// is at least 2.
/// Creates a new best fitting IR with the given variants. The method itself isn't unsafe,
/// but it is marked `unsafe` to discourage direct use, because the printer will panic if
/// the slice doesn't contain at least the least and most expanded variants.
///
/// If you're looking for a way to create a `BestFitting` object, use the `best_fitting![least_expanded, most_expanded]` macro.
///
/// # Panics
///
/// When the slice contains less than two variants.
pub fn from_arguments_unchecked(variants: Arguments<'a, Context>) -> Self {
/// ## Safety
/// The slice must contain at least two variants.
#[allow(unsafe_code)]
pub unsafe fn from_arguments_unchecked(variants: Arguments<'a, Context>) -> Self {
assert!(
variants.0.len() >= 2,
"Requires at least the least expanded and most expanded variants"
@@ -2696,12 +2696,14 @@ impl<Context> Format<Context> for BestFitting<'_, Context> {
buffer.write_element(FormatElement::Tag(EndBestFittingEntry));
}
// OK because the constructor guarantees that there are always at
// least two variants.
let variants = BestFittingVariants::from_vec_unchecked(buffer.into_vec());
let element = FormatElement::BestFitting {
variants,
mode: self.mode,
// SAFETY: The constructor guarantees that there are always at least two variants. It's, therefore,
// safe to call into the unsafe `from_vec_unchecked` function
#[allow(unsafe_code)]
let element = unsafe {
FormatElement::BestFitting {
variants: BestFittingVariants::from_vec_unchecked(buffer.into_vec()),
mode: self.mode,
}
};
f.write_element(element);

View File

@@ -332,14 +332,17 @@ pub enum BestFittingMode {
pub struct BestFittingVariants(Box<[FormatElement]>);
impl BestFittingVariants {
/// Creates a new best fitting IR with the given variants.
///
/// Callers are required to ensure that the number of variants given
/// is at least 2 when using `most_expanded` or `most_flat`.
/// Creates a new best fitting IR with the given variants. The method itself isn't unsafe,
/// but it is marked `unsafe` to discourage direct use, because the printer will panic if
/// the slice doesn't contain at least the least and most expanded variants.
///
/// If you're looking for a way to create a `BestFitting` object, use the `best_fitting![least_expanded, most_expanded]` macro.
///
/// ## Safety
/// The slice must contain at least two variants.
#[doc(hidden)]
pub fn from_vec_unchecked(variants: Vec<FormatElement>) -> Self {
#[allow(unsafe_code)]
pub unsafe fn from_vec_unchecked(variants: Vec<FormatElement>) -> Self {
debug_assert!(
variants
.iter()
@@ -348,23 +351,12 @@ impl BestFittingVariants {
>= 2,
"Requires at least the least expanded and most expanded variants"
);
Self(variants.into_boxed_slice())
}
/// Returns the most expanded variant
///
/// # Panics
///
/// When the number of variants is less than two.
pub fn most_expanded(&self) -> &[FormatElement] {
assert!(
self.as_slice()
.iter()
.filter(|element| matches!(element, FormatElement::Tag(Tag::StartBestFittingEntry)))
.count()
>= 2,
"Requires at least the least expanded and most expanded variants"
);
self.into_iter().last().unwrap()
}
@@ -373,19 +365,7 @@ impl BestFittingVariants {
}
/// Returns the least expanded variant
///
/// # Panics
///
/// When the number of variants is less than two.
pub fn most_flat(&self) -> &[FormatElement] {
assert!(
self.as_slice()
.iter()
.filter(|element| matches!(element, FormatElement::Tag(Tag::StartBestFittingEntry)))
.count()
>= 2,
"Requires at least the least expanded and most expanded variants"
);
self.into_iter().next().unwrap()
}
}

View File

@@ -329,8 +329,10 @@ macro_rules! format {
#[macro_export]
macro_rules! best_fitting {
($least_expanded:expr, $($tail:expr),+ $(,)?) => {{
// OK because the macro syntax requires at least two variants.
$crate::BestFitting::from_arguments_unchecked($crate::format_args!($least_expanded, $($tail),+))
#[allow(unsafe_code)]
unsafe {
$crate::BestFitting::from_arguments_unchecked($crate::format_args!($least_expanded, $($tail),+))
}
}}
}
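The macro is the caller-facing side of the invariant discussed above: its grammar requires at least two expressions, so the unchecked constructor's precondition always holds when called through it. A toy sketch of the same pattern, independent of ruff's types:

/// Toy stand-ins for the real types: the constructor asserts the "at least two
/// variants" precondition at runtime, while the macro's grammar guarantees it
/// syntactically (a single-argument call does not even parse).
struct Variants(Vec<&'static str>);

impl Variants {
    fn from_vec_unchecked(variants: Vec<&'static str>) -> Self {
        assert!(
            variants.len() >= 2,
            "Requires at least the least expanded and most expanded variants"
        );
        Self(variants)
    }
}

macro_rules! variants {
    ($least:expr, $($rest:expr),+ $(,)?) => {
        Variants::from_vec_unchecked(vec![$least, $($rest),+])
    };
}

fn main() {
    let v = variants!("most flat", "most expanded");
    assert_eq!(v.0.len(), 2);
    // `variants!("only one")` is rejected by the macro grammar itself, so the
    // assert in `from_vec_unchecked` can never fire through this entry point.
}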

View File

@@ -1711,14 +1711,14 @@ mod tests {
));
assert_eq!(
"a
r#"a
b
c
d
d
c
b
a",
a"#,
formatted.as_code()
);
}
@@ -2047,10 +2047,10 @@ two lines`,
assert_eq!(
printed.as_code(),
"Group with id-2
r#"Group with id-2
Group with id-1 does not fit on the line because it exceeds the line width of 80 characters by
Group 2 fits
Group 1 breaks"
Group 1 breaks"#
);
}

View File

@@ -17,6 +17,3 @@ ruff_macros = { path = "../ruff_macros" }
[dev-dependencies]
static_assertions = "1.1.0"
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.1.7"
version = "0.1.3"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -30,7 +30,7 @@ ruff_source_file = { path = "../ruff_source_file", features = ["serde"] }
ruff_text_size = { path = "../ruff_text_size" }
aho-corasick = { version = "1.1.2" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
annotate-snippets = { version = "0.9.1", features = ["color"] }
anyhow = { workspace = true }
bitflags = { workspace = true }
chrono = { workspace = true }
@@ -53,8 +53,8 @@ path-absolutize = { workspace = true, features = [
] }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.3.12", features = ["serde"] }
pyproject-toml = { version = "0.8.1" }
quick-junit = { version = "0.3.5" }
pyproject-toml = { version = "0.8.0" }
quick-junit = { version = "0.3.2" }
regex = { workspace = true }
result-like = { version = "0.4.6" }
rustc-hash = { workspace = true }
@@ -86,6 +86,3 @@ default = []
schemars = ["dep:schemars"]
# Enables the UnreachableCode rule
unreachable-code = []
[lints]
workspace = true

View File

@@ -0,0 +1,3 @@
# fixtures
Fixture files used for snapshot testing.

View File

@@ -39,18 +39,3 @@ def func():
for i in range(1110):
if True:
break
# TODO(charlie): The `pass` here does not get properly redirected to the top of the
# loop, unlike below.
def func():
for i in range(5):
pass
else:
return 1
def func():
for i in range(5):
pass
else:
return 1
x = 1

View File

@@ -129,11 +129,3 @@ def func():
print("Grass is green")
case Color.BLUE:
print("I'm feeling the blues :(")
def func(point):
match point:
case (0, 0):
print("Origin")
case foo:
raise ValueError("oops")

View File

@@ -1,184 +0,0 @@
def func():
return 1
def func():
return 1.5
def func(x: int):
if x > 0:
return 1
else:
return 1.5
def func():
return True
def func(x: int):
if x > 0:
return None
else:
return
def func(x: int):
return 1 or 2.5 if x > 0 else 1.5 or "str"
def func(x: int):
return 1 + 2.5 if x > 0 else 1.5 or "str"
def func(x: int):
if not x:
return None
return {"foo": 1}
def func(x: int):
return {"foo": 1}
def func(x: int):
if not x:
return 1
else:
return True
def func(x: int):
if not x:
return 1
else:
return None
def func(x: int):
if not x:
return 1
elif x > 5:
return "str"
else:
return None
def func(x: int):
if x:
return 1
def func():
x = 1
def func(x: int):
if x > 0:
return 1
def func(x: int):
match x:
case [1, 2, 3]:
return 1
case 4 as y:
return "foo"
def func(x: int):
for i in range(5):
if i > 0:
return 1
def func(x: int):
for i in range(5):
if i > 0:
return 1
else:
return 4
def func(x: int):
for i in range(5):
if i > 0:
break
else:
return 4
def func(x: int):
try:
pass
except:
return 1
def func(x: int):
try:
pass
except:
return 1
finally:
return 2
def func(x: int):
try:
pass
except:
return 1
else:
return 2
def func(x: int):
try:
return 1
except:
return 2
else:
pass
def func(x: int):
while x > 0:
break
return 1
import abc
from abc import abstractmethod
class Foo(abc.ABC):
@abstractmethod
def method(self):
pass
@abc.abstractmethod
def method(self):
"""Docstring."""
@abc.abstractmethod
def method(self):
...
@staticmethod
@abstractmethod
def method():
pass
@classmethod
@abstractmethod
def method(cls):
pass
@abstractmethod
def method(self):
if self.x > 0:
return 1
else:
return 1.5

View File

@@ -1,65 +0,0 @@
import sys
import tarfile
import tempfile
def unsafe_archive_handler(filename):
tar = tarfile.open(filename)
tar.extractall(path=tempfile.mkdtemp())
tar.close()
def managed_members_archive_handler(filename):
tar = tarfile.open(filename)
tar.extractall(path=tempfile.mkdtemp(), members=members_filter(tar))
tar.close()
def list_members_archive_handler(filename):
tar = tarfile.open(filename)
tar.extractall(path=tempfile.mkdtemp(), members=[])
tar.close()
def provided_members_archive_handler(filename):
tar = tarfile.open(filename)
tarfile.extractall(path=tempfile.mkdtemp(), members=tar)
tar.close()
def filter_data(filename):
tar = tarfile.open(filename)
tarfile.extractall(path=tempfile.mkdtemp(), filter="data")
tar.close()
def filter_fully_trusted(filename):
tar = tarfile.open(filename)
tarfile.extractall(path=tempfile.mkdtemp(), filter="fully_trusted")
tar.close()
def filter_tar(filename):
tar = tarfile.open(filename)
tarfile.extractall(path=tempfile.mkdtemp(), filter="tar")
tar.close()
def members_filter(tarfile):
result = []
for member in tarfile.getmembers():
if '../' in member.name:
print('Member name container directory traversal sequence')
continue
elif (member.issym() or member.islnk()) and ('../' in member.linkname):
print('Symlink to external resource')
continue
result.append(member)
return result
if __name__ == "__main__":
if len(sys.argv) > 1:
filename = sys.argv[1]
unsafe_archive_handler(filename)
managed_members_archive_handler(filename)

View File

@@ -1,13 +0,0 @@
from django.db.models.expressions import RawSQL
from django.contrib.auth.models import User
User.objects.annotate(val=RawSQL('secure', []))
User.objects.annotate(val=RawSQL('%secure' % 'nos', []))
User.objects.annotate(val=RawSQL('{}secure'.format('no'), []))
raw = '"username") AS "val" FROM "auth_user" WHERE "username"="admin" --'
User.objects.annotate(val=RawSQL(raw, []))
raw = '"username") AS "val" FROM "auth_user"' \
' WHERE "username"="admin" OR 1=%s --'
User.objects.annotate(val=RawSQL(raw, [0]))
User.objects.annotate(val=RawSQL(sql='{}secure'.format('no'), params=[]))
User.objects.annotate(val=RawSQL(params=[], sql='{}secure'.format('no')))

View File

@@ -1,9 +0,0 @@
from mako.template import Template
from mako import template
import mako
Template("hello")
mako.template.Template("hern")
template.Template("hern")

View File

@@ -91,26 +91,3 @@ class Registry:
def foo(self) -> None:
object.__setattr__(self, "flag", True)
from typing import Optional, Union
def func(x: Union[list, Optional[int | str | float | bool]]):
pass
def func(x: bool | str):
pass
def func(x: int | str):
pass
from typing import override
@override
def func(x: bool):
pass

View File

@@ -1,149 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "33faf7ad-a3fd-4ac4-a0c3-52e507ed49df",
"metadata": {},
"outputs": [],
"source": [
"x = 1"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "481fb4bf-c1b9-47da-927f-3cfdfe4b49ec",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Simple case\n",
"x == 1"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "2f0c65a5-0a0e-4080-afce-5a8ed0d706df",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Only skip the last expression\n",
"x == 1 # B018\n",
"x == 1"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "5a3fd75d-26d9-44f7-b013-1684aabfd0ae",
"metadata": {},
"outputs": [],
"source": [
"# Nested expressions isn't relevant\n",
"if True:\n",
" x == 1"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "e00e1afa-b76c-4774-be2a-7223641579e4",
"metadata": {},
"outputs": [],
"source": [
"# Semicolons shouldn't affect the output\n",
"x == 1;"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "05eab5b9-e2ba-4954-8ef3-b035a79573fe",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Semicolons with multiple expressions\n",
"x == 1; x == 1"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "9cbbddc5-83fc-4fdb-81ab-53a3912ae898",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Comments, newlines and whitespace\n",
"x == 1 # comment\n",
"\n",
"# another comment"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python (ruff-playground)",
"language": "python",
"name": "ruff-playground"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -1,149 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "33faf7ad-a3fd-4ac4-a0c3-52e507ed49df",
"metadata": {},
"outputs": [],
"source": [
"x = 1"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "481fb4bf-c1b9-47da-927f-3cfdfe4b49ec",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Simple case\n",
"x"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "2f0c65a5-0a0e-4080-afce-5a8ed0d706df",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Only skip the last expression\n",
"x # B018\n",
"x"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "5a3fd75d-26d9-44f7-b013-1684aabfd0ae",
"metadata": {},
"outputs": [],
"source": [
"# Nested expressions isn't relevant\n",
"if True:\n",
" x"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "e00e1afa-b76c-4774-be2a-7223641579e4",
"metadata": {},
"outputs": [],
"source": [
"# Semicolons shouldn't affect the output\n",
"x;"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "05eab5b9-e2ba-4954-8ef3-b035a79573fe",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Semicolons with multiple expressions\n",
"x; x"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "9cbbddc5-83fc-4fdb-81ab-53a3912ae898",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Comments, newlines and whitespace\n",
"x # comment\n",
"\n",
"# another comment"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python (ruff-playground)",
"language": "python",
"name": "ruff-playground"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -639,18 +639,3 @@ foo = namedtuple(
:20
],
)
# F-strings
kwargs.pop("remove", f"this {trailing_comma}",)
raise Exception(
"first", extra=f"Add trailing comma here ->"
)
assert False, f"<- This is not a trailing comma"
f"""This is a test. {
"Another sentence."
if True else
"Don't add a trailing comma here ->"
}"""

View File

@@ -58,33 +58,3 @@ def f_fix_indentation_check(foo):
# Report these, but don't fix them
if foo: raise RuntimeError("This is an example exception")
if foo: x = 1; raise RuntimeError("This is an example exception")
def f_triple_quoted_string():
raise RuntimeError(f"""This is an {"example"} exception""")
def f_multi_line_string():
raise RuntimeError(
"first"
"second"
)
def f_multi_line_string2():
raise RuntimeError(
"This is an {example} exception".format(
example="example"
)
)
def f_multi_line_string2():
raise RuntimeError(
(
"This is an "
"{example} exception"
).format(
example="example"
)
)

View File

@@ -148,62 +148,3 @@ for i in range(10):
for i in range(10):
pass # comment
pass
def foo():
print("foo")
...
def foo():
"""A docstring."""
print("foo")
...
for i in range(10):
...
...
for i in range(10):
...
...
for i in range(10):
... # comment
...
for i in range(10):
...
pass
from typing import Protocol
class Repro(Protocol):
def func(self) -> str:
"""Docstring"""
...
def impl(self) -> str:
"""Docstring"""
return self.func()
import abc
class Repro:
@abc.abstractmethod
def func(self) -> str:
"""Docstring"""
...
def impl(self) -> str:
"""Docstring"""
return self.func()
def stub(self) -> str:
"""Docstring"""
...

View File

@@ -38,15 +38,3 @@ class User:
foo: bool = BooleanField()
# ...
bar = StringField() # PIE794
class Person:
name = "Foo"
name = name + " Bar"
name = "Bar" # PIE794
class Person:
name: str = "Foo"
name: str = name + " Bar"
name: str = "Bar" # PIE794

View File

@@ -64,9 +64,3 @@ class FakeEnum10(enum.Enum):
A = enum.auto()
B = enum.auto()
C = enum.auto()
class FakeEnum10(enum.Enum):
A = ...
B = ... # PIE796
C = ... # PIE796

View File

@@ -1,7 +0,0 @@
import enum
class FakeEnum1(enum.Enum):
A = ...
B = ...
C = ...

View File

@@ -1,21 +1,9 @@
{"foo": 1, **{"bar": 1}} # PIE800
{**{"bar": 10}, "a": "b"} # PIE800
foo({**foo, **{"bar": True}}) # PIE800
{**foo, **{"bar": 10}} # PIE800
{ # PIE800
"a": "b",
# Preserve
**{
# all
"bar": 10, # the
# comments
},
}
{**foo, **buzz, **{bar: 10}} # PIE800
{**foo, "bar": True } # OK

View File

@@ -10,6 +10,7 @@ Foo.objects.create(**{**bar}) # PIE804
foo(**{})
foo(**{**data, "foo": "buzz"})
foo(**buzz)
foo(**{"bar-foo": True})
@@ -19,5 +20,3 @@ foo(**{buzz: True})
foo(**{"": True})
foo(**{f"buzz__{bar}": True})
abc(**{"for": 3})
foo(**{},)

View File

@@ -1,37 +1,27 @@
@dataclass
class Foo:
foo: List[str] = field(default_factory=lambda: []) # PIE807
bar: Dict[str, int] = field(default_factory=lambda: {}) # PIE807
class FooTable(BaseTable):
foo = fields.ListField(default=lambda: []) # PIE807
bar = fields.ListField(default=lambda: {}) # PIE807
bar = fields.ListField(default=lambda: []) # PIE807
class FooTable(BaseTable):
foo = fields.ListField(lambda: []) # PIE807
bar = fields.ListField(default=lambda: {}) # PIE807
bar = fields.ListField(lambda: []) # PIE807
@dataclass
class Foo:
foo: List[str] = field(default_factory=list)
bar: Dict[str, int] = field(default_factory=dict)
class FooTable(BaseTable):
foo = fields.ListField(list)
bar = fields.ListField(dict)
bar = fields.ListField(list)
lambda *args, **kwargs: []
lambda *args, **kwargs: {}
lambda *args: []
lambda *args: {}
lambda **kwargs: []
lambda **kwargs: {}
lambda: {**unwrap}

View File

@@ -36,54 +36,3 @@ field10: (Literal[1] | str) | Literal[2] # Error
# Should emit for union in generic parent type.
field11: dict[Literal[1] | Literal[2], str] # Error
# Should emit for unions with more than two cases
field12: Literal[1] | Literal[2] | Literal[3] # Error
field13: Literal[1] | Literal[2] | Literal[3] | Literal[4] # Error
# Should emit for unions with more than two cases, even if not directly adjacent
field14: Literal[1] | Literal[2] | str | Literal[3] # Error
# Should emit for unions with mixed literal internal types
field15: Literal[1] | Literal["foo"] | Literal[True] # Error
# Shouldn't emit for duplicate field types with same value; covered by Y016
field16: Literal[1] | Literal[1] # OK
# Shouldn't emit if in new parent type
field17: Literal[1] | dict[Literal[2], str] # OK
# Shouldn't emit if not in a union parent
field18: dict[Literal[1], Literal[2]] # OK
# Should respect name of literal type used
field19: typing.Literal[1] | typing.Literal[2] # Error
# Should emit in cases with newlines
field20: typing.Union[
Literal[
1 # test
],
Literal[2],
] # Error, newline and comment will not be emitted in message
# Should handle multiple unions with multiple members
field21: Literal[1, 2] | Literal[3, 4] # Error
# Should emit in cases with `typing.Union` instead of `|`
field22: typing.Union[Literal[1], Literal[2]] # Error
# Should emit in cases with `typing_extensions.Literal`
field23: typing_extensions.Literal[1] | typing_extensions.Literal[2] # Error
# Should emit in cases with nested `typing.Union`
field24: typing.Union[Literal[1], typing.Union[Literal[2], str]] # Error
# Should emit in cases with mixed `typing.Union` and `|`
field25: typing.Union[Literal[1], Literal[2] | str] # Error
# Should emit only once in cases with multiple nested `typing.Union`
field24: typing.Union[Literal[1], typing.Union[Literal[2], typing.Union[Literal[3], Literal[4]]]] # Error
# Should use the first literal subscript attribute when fixing
field25: typing.Union[typing_extensions.Literal[1], typing.Union[Literal[2], typing.Union[Literal[3], Literal[4]]], str] # Error

View File

@@ -84,6 +84,3 @@ field25: typing.Union[Literal[1], Literal[2] | str] # Error
# Should emit only once in cases with multiple nested `typing.Union`
field24: typing.Union[Literal[1], typing.Union[Literal[2], typing.Union[Literal[3], Literal[4]]]] # Error
# Should use the first literal subscript attribute when fixing
field25: typing.Union[typing_extensions.Literal[1], typing.Union[Literal[2], typing.Union[Literal[3], Literal[4]]], str] # Error

View File

@@ -3,11 +3,9 @@
import abc
import builtins
import collections.abc
import enum
import typing
from abc import ABCMeta, abstractmethod
from abc import abstractmethod
from collections.abc import AsyncIterable, AsyncIterator, Iterable, Iterator
from enum import EnumMeta
from typing import Any, overload
import typing_extensions
@@ -201,31 +199,6 @@ class AsyncIteratorReturningAsyncIterable:
... # Y045 "__aiter__" methods should return an AsyncIterator, not an AsyncIterable
class MetaclassInWhichSelfCannotBeUsed(type):
def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed: ...
def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed: ...
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed) -> MetaclassInWhichSelfCannotBeUsed: ...
class MetaclassInWhichSelfCannotBeUsed2(EnumMeta):
def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed2: ...
def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed2: ...
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed2: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed2) -> MetaclassInWhichSelfCannotBeUsed2: ...
class MetaclassInWhichSelfCannotBeUsed3(enum.EnumType):
def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed3: ...
def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed3: ...
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed3: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed3) -> MetaclassInWhichSelfCannotBeUsed3: ...
class MetaclassInWhichSelfCannotBeUsed4(ABCMeta):
def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed4: ...
def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed4: ...
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed4: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed4) -> MetaclassInWhichSelfCannotBeUsed4: ...
class Abstract(Iterator[str]):
@abstractmethod
def __iter__(self) -> Iterator[str]:

View File

@@ -3,11 +3,9 @@
import abc
import builtins
import collections.abc
import enum
import typing
from abc import ABCMeta, abstractmethod
from abc import abstractmethod
from collections.abc import AsyncIterable, AsyncIterator, Iterable, Iterator
from enum import EnumMeta
from typing import Any, overload
import typing_extensions
@@ -154,30 +152,6 @@ class AsyncIteratorReturningAsyncIterable:
str
]: ... # Y045 "__aiter__" methods should return an AsyncIterator, not an AsyncIterable
class MetaclassInWhichSelfCannotBeUsed(type):
def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed: ...
def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed: ...
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed) -> MetaclassInWhichSelfCannotBeUsed: ...
class MetaclassInWhichSelfCannotBeUsed2(EnumMeta):
def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed2: ...
def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed2: ...
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed2: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed2) -> MetaclassInWhichSelfCannotBeUsed2: ...
class MetaclassInWhichSelfCannotBeUsed3(enum.EnumType):
def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed3: ...
def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed3: ...
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed3: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed3) -> MetaclassInWhichSelfCannotBeUsed3: ...
class MetaclassInWhichSelfCannotBeUsed4(ABCMeta):
def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed4: ...
def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed4: ...
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed4: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed4) -> MetaclassInWhichSelfCannotBeUsed4: ...
class Abstract(Iterator[str]):
@abstractmethod
def __iter__(self) -> Iterator[str]: ...

View File

@@ -22,7 +22,3 @@ Snake_case_alias: TypeAlias = int | float # PYI042, since not camel case
# check that this edge case doesn't crash
_: TypeAlias = str | int
# PEP 695
type foo_bar = int | str
type FooBar = int | str

View File

@@ -22,7 +22,3 @@ Snake_case_alias: TypeAlias = int | float # PYI042, since not camel case
# check that this edge case doesn't crash
_: TypeAlias = str | int
# PEP 695
type foo_bar = int | str
type FooBar = int | str

View File

@@ -21,7 +21,3 @@ _PrivateAliasS2: TypeAlias = Annotated[str, "also okay"]
# check that this edge case doesn't crash
_: TypeAlias = str | int
# PEP 695
type _FooT = str | int
type Foo = str | int

View File

@@ -21,7 +21,3 @@ _PrivateAliasS2: TypeAlias = Annotated[str, "also okay"]
# check that this edge case doesn't crash
_: TypeAlias = str | int
# PEP 695
type _FooT = str | int
type Foo = str | int

View File

@@ -10,14 +10,3 @@ def foo_no_return_typing_extensions(
def foo_no_return_kwarg(arg: int, *, arg2: NoReturn): ... # Error: PYI050
def foo_no_return_pos_only(arg: int, /, arg2: NoReturn): ... # Error: PYI050
def foo_never(arg: Never): ...
def foo_args(*args: NoReturn): ... # Error: PYI050
def foo_kwargs(**kwargs: NoReturn): ... # Error: PYI050
def foo_args_kwargs(*args: NoReturn, **kwargs: NoReturn): ... # Error: PYI050
def foo_int_args(*args: int): ...
def foo_int_kwargs(**kwargs: int): ...
def foo_int_args_kwargs(*args: int, **kwargs: int): ...
def foo_int_args_no_return(*args: int, **kwargs: NoReturn): ... # Error: PYI050
def foo_int_kwargs_no_return(*args: NoReturn, **kwargs: int): ... # Error: PYI050
def foo_args_never(*args: Never): ...
def foo_kwargs_never(**kwargs: Never): ...
def foo_args_kwargs_never(*args: Never, **kwargs: Never): ...

View File

@@ -91,17 +91,3 @@ field27 = list[str]
field28 = builtins.str
field29 = str
field30 = str | bytes | None
# We shouldn't emit Y052 for `enum` subclasses.
from enum import Enum
class Foo(Enum):
FOO = 0
BAR = 1
class Bar(Foo):
BAZ = 2
BOP = 3
class Bop:
WIZ = 4

View File

@@ -98,17 +98,3 @@ field27 = list[str]
field28 = builtins.str
field29 = str
field30 = str | bytes | None
# We shouldn't emit Y052 for `enum` subclasses.
from enum import Enum
class Foo(Enum):
FOO = 0
BAR = 1
class Bar(Foo):
BAZ = 2
BOP = 3
class Bop:
WIZ = 4

View File

@@ -1,45 +0,0 @@
this_should_raise_Q004 = 'This is a \"string\"'
this_should_raise_Q004 = 'This is \\ a \\\"string\"'
this_is_fine = '"This" is a \"string\"'
this_is_fine = "This is a 'string'"
this_is_fine = "\"This\" is a 'string'"
this_is_fine = r'This is a \"string\"'
this_is_fine = R'This is a \"string\"'
this_should_raise_Q004 = (
'This is a'
'\"string\"'
)
# Same as above, but with f-strings
f'This is a \"string\"' # Q004
f'This is \\ a \\\"string\"' # Q004
f'"This" is a \"string\"'
f"This is a 'string'"
f"\"This\" is a 'string'"
fr'This is a \"string\"'
fR'This is a \"string\"'
this_should_raise_Q004 = (
f'This is a'
f'\"string\"' # Q004
)
# Nested f-strings (Python 3.12+)
#
# The first one is interesting because the fix for it is valid pre 3.12:
#
# f"'foo' {'nested'}"
#
# but as the actual string itself is invalid pre 3.12, we don't catch it.
f'\"foo\" {'nested'}' # Q004
f'\"foo\" {f'nested'}' # Q004
f'\"foo\" {f'\"nested\"'} \"\"' # Q004
f'normal {f'nested'} normal'
f'\"normal\" {f'nested'} normal' # Q004
f'\"normal\" {f'nested'} "double quotes"'
f'\"normal\" {f'\"nested\" {'other'} normal'} "double quotes"' # Q004
f'\"normal\" {f'\"nested\" {'other'} "double quotes"'} normal' # Q004
# Make sure we do not unescape quotes
this_is_fine = 'This is an \\"escaped\\" quote'
this_should_raise_Q004 = 'This is an \\\"escaped\\\" quote with an extra backslash'

View File

@@ -1,43 +0,0 @@
this_should_raise_Q004 = "This is a \'string\'"
this_should_raise_Q004 = "'This' is a \'string\'"
this_is_fine = 'This is a "string"'
this_is_fine = '\'This\' is a "string"'
this_is_fine = r"This is a \'string\'"
this_is_fine = R"This is a \'string\'"
this_should_raise_Q004 = (
"This is a"
"\'string\'"
)
# Same as above, but with f-strings
f"This is a \'string\'" # Q004
f"'This' is a \'string\'" # Q004
f'This is a "string"'
f'\'This\' is a "string"'
fr"This is a \'string\'"
fR"This is a \'string\'"
this_should_raise_Q004 = (
f"This is a"
f"\'string\'" # Q004
)
# Nested f-strings (Python 3.12+)
#
# The first one is interesting because the fix for it is valid pre 3.12:
#
# f'"foo" {"nested"}'
#
# but as the actual string itself is invalid pre 3.12, we don't catch it.
f"\'foo\' {"foo"}" # Q004
f"\'foo\' {f"foo"}" # Q004
f"\'foo\' {f"\'foo\'"} \'\'" # Q004
f"normal {f"nested"} normal"
f"\'normal\' {f"nested"} normal" # Q004
f"\'normal\' {f"nested"} 'single quotes'"
f"\'normal\' {f"\'nested\' {"other"} normal"} 'single quotes'" # Q004
f"\'normal\' {f"\'nested\' {"other"} 'single quotes'"} normal" # Q004
# Make sure we do not unescape quotes
this_is_fine = "This is an \\'escaped\\' quote"
this_should_raise_Q004 = "This is an \\\'escaped\\\' quote with an extra backslash"

View File

@@ -82,14 +82,3 @@ raise IndexError();
# RSE102
raise Foo()
# OK
raise ctypes.WinError()
def func():
pass
# OK
raise func()

View File

@@ -134,32 +134,3 @@ with A() as a:
f" something { my_dict["key"] } something else "
f"foo {f"bar {x}"} baz"
# Allow cascading for some statements.
import anyio
import asyncio
import trio
async with asyncio.timeout(1):
async with A() as a:
pass
async with A():
async with asyncio.timeout(1):
pass
async with asyncio.timeout(1):
async with asyncio.timeout_at(1):
async with anyio.CancelScope():
async with anyio.fail_after(1):
async with anyio.move_on_after(1):
async with trio.fail_after(1):
async with trio.fail_at(1):
async with trio.move_on_after(1):
async with trio.move_on_at(1):
pass
# Do not suppress combination if a context manager is already combined with another.
async with asyncio.timeout(1), A():
async with B():
pass

View File

@@ -1,5 +1,3 @@
obj = {}
key in obj.keys() # SIM118
key not in obj.keys() # SIM118

View File

@@ -25,11 +25,3 @@ a = {}.get(key, None)
# SIM910
({}).get(key, None)
# SIM910
ages = {"Tom": 23, "Maria": 23, "Dog": 11}
age = ages.get("Cat", None)
# OK
ages = ["Tom", "Maria", "Dog"]
age = ages.get("Cat", None)

View File

@@ -1,18 +0,0 @@
import trio
async def foo():
with trio.fail_after():
...
async def foo():
with trio.fail_at():
await ...
async def foo():
with trio.move_on_after():
...
async def foo():
with trio.move_at():
await ...

View File

@@ -1,64 +0,0 @@
import trio
async def func() -> None:
trio.run(foo) # OK, not async
# OK
await trio.aclose_forcefully(foo)
await trio.open_file(foo)
await trio.open_ssl_over_tcp_listeners(foo, foo)
await trio.open_ssl_over_tcp_stream(foo, foo)
await trio.open_tcp_listeners(foo)
await trio.open_tcp_stream(foo, foo)
await trio.open_unix_socket(foo)
await trio.run_process(foo)
await trio.sleep(5)
await trio.sleep_until(5)
await trio.lowlevel.cancel_shielded_checkpoint()
await trio.lowlevel.checkpoint()
await trio.lowlevel.checkpoint_if_cancelled()
await trio.lowlevel.open_process(foo)
await trio.lowlevel.permanently_detach_coroutine_object(foo)
await trio.lowlevel.reattach_detached_coroutine_object(foo, foo)
await trio.lowlevel.temporarily_detach_coroutine_object(foo)
await trio.lowlevel.wait_readable(foo)
await trio.lowlevel.wait_task_rescheduled(foo)
await trio.lowlevel.wait_writable(foo)
# TRIO105
trio.aclose_forcefully(foo)
trio.open_file(foo)
trio.open_ssl_over_tcp_listeners(foo, foo)
trio.open_ssl_over_tcp_stream(foo, foo)
trio.open_tcp_listeners(foo)
trio.open_tcp_stream(foo, foo)
trio.open_unix_socket(foo)
trio.run_process(foo)
trio.serve_listeners(foo, foo)
trio.serve_ssl_over_tcp(foo, foo, foo)
trio.serve_tcp(foo, foo)
trio.sleep(foo)
trio.sleep_forever()
trio.sleep_until(foo)
trio.lowlevel.cancel_shielded_checkpoint()
trio.lowlevel.checkpoint()
trio.lowlevel.checkpoint_if_cancelled()
trio.lowlevel.open_process()
trio.lowlevel.permanently_detach_coroutine_object(foo)
trio.lowlevel.reattach_detached_coroutine_object(foo, foo)
trio.lowlevel.temporarily_detach_coroutine_object(foo)
trio.lowlevel.wait_readable(foo)
trio.lowlevel.wait_task_rescheduled(foo)
trio.lowlevel.wait_writable(foo)
async with await trio.open_file(foo): # Ok
pass
async with trio.open_file(foo): # TRIO105
pass
def func() -> None:
# TRIO105 (without fix)
trio.open_file(foo)

View File

@@ -1,13 +0,0 @@
import trio
async def func():
...
async def func(timeout):
...
async def func(timeout=10):
...

Some files were not shown because too many files have changed in this diff