Compare commits


1 Commit

Author: konstin
SHA1: 8d54302e05
Message: Import cases dir from black tests
Date: 2023-11-10 13:45:30 +01:00
1175 changed files with 57681 additions and 66239 deletions


@@ -1,3 +1,37 @@
[alias]
dev = "run --package ruff_dev --bin ruff_dev"
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
[target.'cfg(all())']
rustflags = [
# CLIPPY LINT SETTINGS
# This is a workaround to configure lints for the entire workspace, pending the ability to configure this via TOML.
# See: `https://github.com/rust-lang/cargo/issues/5034`
# `https://github.com/EmbarkStudios/rust-ecosystem/issues/22#issuecomment-947011395`
"-Dunsafe_code",
"-Wclippy::pedantic",
# Allowed pedantic lints
"-Wclippy::char_lit_as_u8",
"-Aclippy::collapsible_else_if",
"-Aclippy::collapsible_if",
"-Aclippy::implicit_hasher",
"-Aclippy::match_same_arms",
"-Aclippy::missing_errors_doc",
"-Aclippy::missing_panics_doc",
"-Aclippy::module_name_repetitions",
"-Aclippy::must_use_candidate",
"-Aclippy::similar_names",
"-Aclippy::too_many_lines",
# Disallowed restriction lints
"-Wclippy::print_stdout",
"-Wclippy::print_stderr",
"-Wclippy::dbg_macro",
"-Wclippy::empty_drop",
"-Wclippy::empty_structs_with_brackets",
"-Wclippy::exit",
"-Wclippy::get_unwrap",
"-Wclippy::rc_buffer",
"-Wclippy::rc_mutex",
"-Wclippy::rest_pat_in_fully_bound_structs",
"-Wunreachable_pub"
]

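The comment above describes the rustflags approach as a stopgap until lints can be configured via TOML; the Cargo.toml hunk later in this compare moves the same settings into Cargo's native `[workspace.lints]` tables. A minimal sketch of that layout, with lint names and levels taken from the hunks in this compare (illustrative, not the exact manifest):

```toml
# Workspace-wide lint levels declared in the root Cargo.toml,
# replacing the rustflags workaround in .cargo/config.toml.
[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
# Allowed pedantic lints
collapsible_else_if = "allow"
too_many_lines = "allow"
# Disallowed restriction lints
print_stdout = "warn"
dbg_macro = "warn"
```

Each member crate then opts in with a two-line stanza, as several crate manifests in this compare do:

```toml
[lints]
workspace = true
```
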
3
.gitattributes vendored

@@ -3,8 +3,5 @@
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
ruff.schema.json linguist-generated=true text=auto eol=lf
*.md.snap linguist-language=Markdown


@@ -23,13 +23,8 @@ jobs:
name: "Determine changes"
runs-on: ubuntu-latest
outputs:
# Flag that is raised when any code that affects linter is changed
linter: ${{ steps.changed.outputs.linter_any_changed }}
# Flag that is raised when any code that affects formatter is changed
formatter: ${{ steps.changed.outputs.formatter_any_changed }}
# Flag that is raised when any code is changed
# This is a superset of the linter and formatter
code: ${{ steps.changed.outputs.code_any_changed }}
steps:
- uses: actions/checkout@v4
with:
@@ -48,7 +43,6 @@ jobs:
- "!crates/ruff_dev/**"
- "!crates/ruff_shrinking/**"
- scripts/*
- python/**
- .github/workflows/ci.yaml
formatter:
@@ -64,15 +58,8 @@ jobs:
- crates/ruff_python_parser/**
- crates/ruff_dev/**
- scripts/*
- python/**
- .github/workflows/ci.yaml
code:
- "**/*"
- "!**/*.md"
- "!docs/**"
- "!assets/**"
cargo-fmt:
name: "cargo fmt"
runs-on: ubuntu-latest
@@ -85,8 +72,6 @@ jobs:
cargo-clippy:
name: "cargo clippy"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -101,8 +86,6 @@ jobs:
cargo-test-linux:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo test (linux)"
steps:
- uses: actions/checkout@v4
@@ -127,8 +110,6 @@ jobs:
cargo-test-windows:
runs-on: windows-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo test (windows)"
steps:
- uses: actions/checkout@v4
@@ -146,8 +127,6 @@ jobs:
cargo-test-wasm:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo test (wasm)"
steps:
- uses: actions/checkout@v4
@@ -167,8 +146,6 @@ jobs:
cargo-fuzz:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo fuzz"
steps:
- uses: actions/checkout@v4
@@ -186,8 +163,6 @@ jobs:
scripts:
name: "test scripts"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
@@ -211,7 +186,8 @@ jobs:
# Only runs on pull requests, since that is the only way we can find the base version for comparison.
# Ecosystem check needs linter and/or formatter changes.
if: github.event_name == 'pull_request' && ${{
needs.determine_changes.outputs.code == 'true'
needs.determine_changes.outputs.linter == 'true' ||
needs.determine_changes.outputs.formatter == 'true'
}}
steps:
- uses: actions/checkout@v4
@@ -320,8 +296,6 @@ jobs:
cargo-udeps:
name: "cargo udeps"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
- name: "Install nightly Rust toolchain"
@@ -418,7 +392,7 @@ jobs:
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
run: mkdocs build --strict -f mkdocs.generated.yml
check-formatter-instability-and-black-similarity:
name: "formatter instabilities and black similarity"
@@ -441,10 +415,7 @@ jobs:
check-ruff-lsp:
name: "test ruff-lsp"
runs-on: ubuntu-latest
needs:
- cargo-test-linux
- determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
needs: cargo-test-linux
steps:
- uses: extractions/setup-just@v1
env:
@@ -482,8 +453,6 @@ jobs:
benchmarks:
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- name: "Checkout Branch"
uses: actions/checkout@v4
@@ -502,7 +471,7 @@ jobs:
run: cargo codspeed build --features codspeed -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@v2
uses: CodSpeedHQ/action@v1
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}


@@ -44,7 +44,7 @@ jobs:
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
run: mkdocs build --strict -f mkdocs.generated.yml
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.3.2


@@ -86,7 +86,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz
ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -125,7 +125,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz
ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -177,7 +177,7 @@ jobs:
- name: "Archive binary"
shell: bash
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -224,7 +224,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -291,7 +291,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -343,7 +343,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -399,7 +399,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
@@ -516,62 +516,6 @@ jobs:
files: binaries/*
tag_name: v${{ inputs.tag }}
docker-publish:
# This action doesn't need to wait on any other task, it's easy to re-tag if something failed and we're validating
# the tag here also
name: Push Docker image ghcr.io/astral-sh/ruff
runs-on: ubuntu-latest
environment:
name: release
permissions:
# For the docker push
packages: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/astral-sh/ruff
- name: Check tag consistency
# Unlike validate-tag we don't check if the commit is on the main branch, but it seems good enough since we can
# change docker tags
if: ${{ inputs.tag }}
run: |
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${{ inputs.tag }}" != "${version}" ]; then
echo "The input tag does not match the version from pyproject.toml:" >&2
echo "${{ inputs.tag }}" >&2
echo "${version}" >&2
exit 1
else
echo "Releasing ${version}"
fi
- name: "Build and push Docker image"
uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm64
# Reuse the builder
cache-from: type=gha
cache-to: type=gha,mode=max
push: ${{ inputs.tag != '' }}
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ inputs.tag || 'dry-run' }}
labels: ${{ steps.meta.outputs.labels }}
# After the release has been published, we update downstream repositories
# This is separate because if this fails the release is still fine, we just need to do some manual workflow triggers
update-dependents:
@@ -580,7 +524,7 @@ jobs:
needs: publish-release
steps:
- name: "Update pre-commit mirror"
uses: actions/github-script@v7
uses: actions/github-script@v6
with:
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
script: |


@@ -1,172 +1,5 @@
# Changelog
## 0.1.7
### Preview features
- Implement multiline dictionary and list hugging for preview style ([#8293](https://github.com/astral-sh/ruff/pull/8293))
- Implement the `fix_power_op_line_length` preview style ([#8947](https://github.com/astral-sh/ruff/pull/8947))
- Use Python version to determine typing rewrite safety ([#8919](https://github.com/astral-sh/ruff/pull/8919))
- \[`flake8-annotations`\] Enable auto-return-type involving `Optional` and `Union` annotations ([#8885](https://github.com/astral-sh/ruff/pull/8885))
- \[`flake8-bandit`\] Implement `django-raw-sql` (`S611`) ([#8651](https://github.com/astral-sh/ruff/pull/8651))
- \[`flake8-bandit`\] Implement `tarfile-unsafe-members` (`S202`) ([#8829](https://github.com/astral-sh/ruff/pull/8829))
- \[`flake8-pyi`\] Implement fix for `unnecessary-literal-union` (`PYI030`) ([#7934](https://github.com/astral-sh/ruff/pull/7934))
- \[`flake8-simplify`\] Extend `dict-get-with-none-default` (`SIM910`) to non-literals ([#8762](https://github.com/astral-sh/ruff/pull/8762))
- \[`pylint`\] - add `unnecessary-list-index-lookup` (`PLR1736`) + autofix ([#7999](https://github.com/astral-sh/ruff/pull/7999))
- \[`pylint`\] - implement R0202 and R0203 with autofixes ([#8335](https://github.com/astral-sh/ruff/pull/8335))
- \[`pylint`\] Implement `repeated-keyword` (`PLE1132`) ([#8706](https://github.com/astral-sh/ruff/pull/8706))
- \[`pylint`\] Implement `too-many-positional` (`PLR0917`) ([#8995](https://github.com/astral-sh/ruff/pull/8995))
- \[`pylint`\] Implement `unnecessary-dict-index-lookup` (`PLR1733`) ([#8036](https://github.com/astral-sh/ruff/pull/8036))
- \[`refurb`\] Implement `redundant-log-base` (`FURB163`) ([#8842](https://github.com/astral-sh/ruff/pull/8842))
### Rule changes
- \[`flake8-boolean-trap`\] Allow booleans in `@override` methods ([#8882](https://github.com/astral-sh/ruff/pull/8882))
- \[`flake8-bugbear`\] Avoid `B015`,`B018` for last expression in a cell ([#8815](https://github.com/astral-sh/ruff/pull/8815))
- \[`flake8-pie`\] Allow ellipses for enum values in stub files ([#8825](https://github.com/astral-sh/ruff/pull/8825))
- \[`flake8-pyi`\] Check PEP 695 type aliases for `snake-case-type-alias` and `t-suffixed-type-alias` ([#8966](https://github.com/astral-sh/ruff/pull/8966))
- \[`flake8-pyi`\] Check for kwarg and vararg `NoReturn` type annotations ([#8948](https://github.com/astral-sh/ruff/pull/8948))
- \[`flake8-simplify`\] Omit select context managers from `SIM117` ([#8801](https://github.com/astral-sh/ruff/pull/8801))
- \[`pep8-naming`\] Allow Django model loads in `non-lowercase-variable-in-function` (`N806`) ([#8917](https://github.com/astral-sh/ruff/pull/8917))
- \[`pycodestyle`\] Avoid `E703` for last expression in a cell ([#8821](https://github.com/astral-sh/ruff/pull/8821))
- \[`pycodestyle`\] Update `E402` to work at cell level for notebooks ([#8872](https://github.com/astral-sh/ruff/pull/8872))
- \[`pydocstyle`\] Avoid `D100` for Jupyter Notebooks ([#8816](https://github.com/astral-sh/ruff/pull/8816))
- \[`pylint`\] Implement fix for `unspecified-encoding` (`PLW1514`) ([#8928](https://github.com/astral-sh/ruff/pull/8928))
### Formatter
- Avoid unstable formatting in ellipsis-only body with trailing comment ([#8984](https://github.com/astral-sh/ruff/pull/8984))
- Inline trailing comments for type alias similar to assignments ([#8941](https://github.com/astral-sh/ruff/pull/8941))
- Insert trailing comma when function breaks with single argument ([#8921](https://github.com/astral-sh/ruff/pull/8921))
### CLI
- Update `ruff check` and `ruff format` to default to the current directory ([#8791](https://github.com/astral-sh/ruff/pull/8791))
- Stop at the first resolved parent configuration ([#8864](https://github.com/astral-sh/ruff/pull/8864))
### Configuration
- \[`pylint`\] Default `max-positional-args` to `max-args` ([#8998](https://github.com/astral-sh/ruff/pull/8998))
- \[`pylint`\] Add `allow-dunder-method-names` setting for `bad-dunder-method-name` (`PLW3201`) ([#8812](https://github.com/astral-sh/ruff/pull/8812))
- \[`isort`\] Add support for `from-first` setting ([#8663](https://github.com/astral-sh/ruff/pull/8663))
- \[`isort`\] Add support for `length-sort` settings ([#8841](https://github.com/astral-sh/ruff/pull/8841))
### Bug fixes
- Add support for `@functools.singledispatch` ([#8934](https://github.com/astral-sh/ruff/pull/8934))
- Avoid off-by-one error in stripping noqa following multi-byte char ([#8979](https://github.com/astral-sh/ruff/pull/8979))
- Avoid off-by-one error in with-item named expressions ([#8915](https://github.com/astral-sh/ruff/pull/8915))
- Avoid syntax error via invalid ur string prefix ([#8971](https://github.com/astral-sh/ruff/pull/8971))
- Avoid underflow in `get_model` matching ([#8965](https://github.com/astral-sh/ruff/pull/8965))
- Avoid unnecessary index diagnostics when value is modified ([#8970](https://github.com/astral-sh/ruff/pull/8970))
- Convert over-indentation rule to use number of characters ([#8983](https://github.com/astral-sh/ruff/pull/8983))
- Detect implicit returns in auto-return-types ([#8952](https://github.com/astral-sh/ruff/pull/8952))
- Fix start >= end error in over-indentation ([#8982](https://github.com/astral-sh/ruff/pull/8982))
- Ignore `@overload` and `@override` methods for too-many-arguments checks ([#8954](https://github.com/astral-sh/ruff/pull/8954))
- Lexer start of line is false only for `Mode::Expression` ([#8880](https://github.com/astral-sh/ruff/pull/8880))
- Mark `pydantic_settings.BaseSettings` as having default copy semantics ([#8793](https://github.com/astral-sh/ruff/pull/8793))
- Respect dictionary unpacking in `NamedTuple` assignments ([#8810](https://github.com/astral-sh/ruff/pull/8810))
- Respect local subclasses in `flake8-type-checking` ([#8768](https://github.com/astral-sh/ruff/pull/8768))
- Support type alias statements in simple statement positions ([#8916](https://github.com/astral-sh/ruff/pull/8916))
- \[`flake8-annotations`\] Avoid filtering out un-representable types in return annotation ([#8881](https://github.com/astral-sh/ruff/pull/8881))
- \[`flake8-pie`\] Retain extra ellipses in protocols and abstract methods ([#8769](https://github.com/astral-sh/ruff/pull/8769))
- \[`flake8-pyi`\] Respect local enum subclasses in `simple-defaults` (`PYI052`) ([#8767](https://github.com/astral-sh/ruff/pull/8767))
- \[`flake8-trio`\] Use correct range for `TRIO115` fix ([#8933](https://github.com/astral-sh/ruff/pull/8933))
- \[`flake8-trio`\] Use full arguments range for zero-sleep-call ([#8936](https://github.com/astral-sh/ruff/pull/8936))
- \[`isort`\] fix: mark `__main__` as first-party import ([#8805](https://github.com/astral-sh/ruff/pull/8805))
- \[`pep8-naming`\] Avoid `N806` errors for type alias statements ([#8785](https://github.com/astral-sh/ruff/pull/8785))
- \[`perflint`\] Avoid `PERF101` if there's an append in loop body ([#8809](https://github.com/astral-sh/ruff/pull/8809))
- \[`pycodestyle`\] Allow space-before-colon after end-of-slice ([#8838](https://github.com/astral-sh/ruff/pull/8838))
- \[`pydocstyle`\] Avoid non-character breaks in `over-indentation` (`D208`) ([#8866](https://github.com/astral-sh/ruff/pull/8866))
- \[`pydocstyle`\] Ignore underlines when determining docstring logical lines ([#8929](https://github.com/astral-sh/ruff/pull/8929))
- \[`pylint`\] Extend `self-assigning-variable` to multi-target assignments ([#8839](https://github.com/astral-sh/ruff/pull/8839))
- \[`tryceratops`\] Avoid repeated triggers in nested `tryceratops` diagnostics ([#8772](https://github.com/astral-sh/ruff/pull/8772))
### Documentation
- Add advice for fixing RUF008 when mutability is not desired ([#8853](https://github.com/astral-sh/ruff/pull/8853))
- Added the command to run ruff using pkgx to the installation.md ([#8955](https://github.com/astral-sh/ruff/pull/8955))
- Document fix safety for flake8-comprehensions and some pyupgrade rules ([#8918](https://github.com/astral-sh/ruff/pull/8918))
- Fix doc formatting for zero-sleep-call ([#8937](https://github.com/astral-sh/ruff/pull/8937))
- Remove duplicate imports from os-stat documentation ([#8930](https://github.com/astral-sh/ruff/pull/8930))
- Replace generated reference to MkDocs ([#8806](https://github.com/astral-sh/ruff/pull/8806))
- Update Arch Linux package URL in installation.md ([#8802](https://github.com/astral-sh/ruff/pull/8802))
- \[`flake8-pyi`\] Fix error in `t-suffixed-type-alias` (`PYI043`) example ([#8963](https://github.com/astral-sh/ruff/pull/8963))
- \[`flake8-pyi`\] Improve motivation for `custom-type-var-return-type` (`PYI019`) ([#8766](https://github.com/astral-sh/ruff/pull/8766))
## 0.1.6
### Preview features
- \[`flake8-boolean-trap`\] Extend `boolean-type-hint-positional-argument` (`FBT001`) to include booleans in unions ([#7501](https://github.com/astral-sh/ruff/pull/7501))
- \[`flake8-pie`\] Extend `reimplemented-list-builtin` (`PIE807`) to `dict` reimplementations ([#8608](https://github.com/astral-sh/ruff/pull/8608))
- \[`flake8-pie`\] Extend `unnecessary-pass` (`PIE790`) to include ellipses (`...`) ([#8641](https://github.com/astral-sh/ruff/pull/8641))
- \[`flake8-pie`\] Implement fix for `unnecessary-spread` (`PIE800`) ([#8668](https://github.com/astral-sh/ruff/pull/8668))
- \[`flake8-quotes`\] Implement `unnecessary-escaped-quote` (`Q004`) ([#8630](https://github.com/astral-sh/ruff/pull/8630))
- \[`pycodestyle`\] Implement fix for `multiple-spaces-after-keyword` (`E271`) and `multiple-spaces-before-keyword` (`E272`) ([#8622](https://github.com/astral-sh/ruff/pull/8622))
- \[`pycodestyle`\] Implement fix for `multiple-spaces-after-operator` (`E222`) and `multiple-spaces-before-operator` (`E221`) ([#8623](https://github.com/astral-sh/ruff/pull/8623))
- \[`pyflakes`\] Extend `is-literal` (`F632`) to include comparisons against mutable initializers ([#8607](https://github.com/astral-sh/ruff/pull/8607))
- \[`pylint`\] Implement `redefined-argument-from-local` (`PLR1704`) ([#8159](https://github.com/astral-sh/ruff/pull/8159))
- \[`pylint`\] Implement fix for `unnecessary-lambda` (`PLW0108`) ([#8621](https://github.com/astral-sh/ruff/pull/8621))
- \[`refurb`\] Implement `if-expr-min-max` (`FURB136`) ([#8664](https://github.com/astral-sh/ruff/pull/8664))
- \[`refurb`\] Implement `math-constant` (`FURB152`) ([#8727](https://github.com/astral-sh/ruff/pull/8727))
### Rule changes
- \[`flake8-annotations`\] Add autotyping-like return type inference for annotation rules ([#8643](https://github.com/astral-sh/ruff/pull/8643))
- \[`flake8-future-annotations`\] Implement fix for `future-required-type-annotation` (`FA102`) ([#8711](https://github.com/astral-sh/ruff/pull/8711))
- \[`flake8-implicit-namespace-package`\] Avoid missing namespace violations in scripts with shebangs ([#8710](https://github.com/astral-sh/ruff/pull/8710))
- \[`pydocstyle`\] Update `over-indentation` (`D208`) to preserve indentation offsets when fixing overindented lines ([#8699](https://github.com/astral-sh/ruff/pull/8699))
- \[`pyupgrade`\] Refine `timeout-error-alias` (`UP041`) to remove false positives ([#8587](https://github.com/astral-sh/ruff/pull/8587))
### Formatter
- Fix instability in `await` formatting with fluent style ([#8676](https://github.com/astral-sh/ruff/pull/8676))
- Compare formatted and unformatted ASTs during formatter tests ([#8624](https://github.com/astral-sh/ruff/pull/8624))
- Preserve trailing semicolon for Notebooks ([#8590](https://github.com/astral-sh/ruff/pull/8590))
### CLI
- Improve debug printing for resolving origin of config settings ([#8729](https://github.com/astral-sh/ruff/pull/8729))
- Write unchanged, excluded files to stdout when read via stdin ([#8596](https://github.com/astral-sh/ruff/pull/8596))
### Configuration
- \[`isort`\] Support disabling sections with `no-sections = true` ([#8657](https://github.com/astral-sh/ruff/pull/8657))
- \[`pep8-naming`\] Support local and dynamic class- and static-method decorators ([#8592](https://github.com/astral-sh/ruff/pull/8592))
- \[`pydocstyle`\] Allow overriding pydocstyle convention rules ([#8586](https://github.com/astral-sh/ruff/pull/8586))
### Bug fixes
- Avoid syntax error via importing `trio.lowlevel` ([#8730](https://github.com/astral-sh/ruff/pull/8730))
- Omit unrolled augmented assignments in `PIE794` ([#8634](https://github.com/astral-sh/ruff/pull/8634))
- Slice source code instead of generating it for `EM` fixes ([#7746](https://github.com/astral-sh/ruff/pull/7746))
- Allow whitespace around colon in slices for `whitespace-before-punctuation` (`E203`) ([#8654](https://github.com/astral-sh/ruff/pull/8654))
- Use function range for `no-self-use` ([#8637](https://github.com/astral-sh/ruff/pull/8637))
- F-strings doesn't contain bytes literal for `PLW0129` ([#8675](https://github.com/astral-sh/ruff/pull/8675))
- Improve detection of `TYPE_CHECKING` blocks imported from `typing_extensions` or `_typeshed` ([#8429](https://github.com/astral-sh/ruff/pull/8429))
- Treat display as a builtin in IPython ([#8707](https://github.com/astral-sh/ruff/pull/8707))
- Avoid `FURB113` autofix if comments are present ([#8494](https://github.com/astral-sh/ruff/pull/8494))
- Consider the new f-string tokens for `flake8-commas` ([#8582](https://github.com/astral-sh/ruff/pull/8582))
- Remove erroneous bad-dunder-name reference ([#8742](https://github.com/astral-sh/ruff/pull/8742))
- Avoid recommending Self usages in metaclasses ([#8639](https://github.com/astral-sh/ruff/pull/8639))
- Detect runtime-evaluated base classes defined in the current file ([#8572](https://github.com/astral-sh/ruff/pull/8572))
- Avoid inserting trailing commas within f-strings ([#8574](https://github.com/astral-sh/ruff/pull/8574))
- Remove incorrect deprecation label for stdout and stderr ([#8743](https://github.com/astral-sh/ruff/pull/8743))
- Fix unnecessary parentheses in UP007 fix ([#8610](https://github.com/astral-sh/ruff/pull/8610))
- Remove repeated and erroneous scoped settings headers in docs ([#8670](https://github.com/astral-sh/ruff/pull/8670))
- Trim trailing empty strings when converting to f-strings ([#8712](https://github.com/astral-sh/ruff/pull/8712))
- Fix ordering for `force-sort-within-sections` ([#8665](https://github.com/astral-sh/ruff/pull/8665))
- Run unicode prefix rule over tokens ([#8709](https://github.com/astral-sh/ruff/pull/8709))
- Update UP032 to unescape curly braces in literal parts of converted strings ([#8697](https://github.com/astral-sh/ruff/pull/8697))
- List all ipython builtins ([#8719](https://github.com/astral-sh/ruff/pull/8719))
### Documentation
- Document conventions in the FAQ ([#8638](https://github.com/astral-sh/ruff/pull/8638))
- Redirect from rule codes to rule pages in docs ([#8636](https://github.com/astral-sh/ruff/pull/8636))
- Fix permalink to convention setting ([#8575](https://github.com/astral-sh/ruff/pull/8575))
## 0.1.5
### Preview features


@@ -295,7 +295,7 @@ To preview any changes to the documentation locally:
```shell
# For contributors.
mkdocs serve -f mkdocs.public.yml
mkdocs serve -f mkdocs.generated.yml
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
mkdocs serve -f mkdocs.insiders.yml

206
Cargo.lock generated

@@ -64,9 +64,9 @@ checksum = "c7021ce4924a3f25f802b2cccd1af585e39ea1a363a1aa2e72afe54b67a3a7a7"
[[package]]
name = "annotate-snippets"
version = "0.9.2"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccaf7e9dfbb6ab22c82e473cd1a8a7bd313c19a5b7e40970f3d89ef5a5c9e81e"
checksum = "c3b9d411ecbaf79885c6df4d75fff75858d5995ff25385657a28af47e82f9c36"
dependencies = [
"unicode-width",
"yansi-term",
@@ -278,7 +278,9 @@ checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38"
dependencies = [
"android-tzdata",
"iana-time-zone",
"js-sys",
"num-traits",
"wasm-bindgen",
"windows-targets 0.48.5",
]
@@ -405,9 +407,9 @@ dependencies = [
[[package]]
name = "codspeed"
version = "2.3.3"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eb4ab4dcb6554eb4f590fb16f99d3b102ab76f5f56554c9a5340518b32c499b"
checksum = "918b13a0f1a32460ab3bd5debd56b5a27a7071fa5ff5dfeb3a5cf291a85b174b"
dependencies = [
"colored",
"libc",
@@ -416,9 +418,9 @@ dependencies = [
[[package]]
name = "codspeed-criterion-compat"
version = "2.3.3"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc07a3d3f7e0c8961d0ffdee149d39b231bafdcdc3d978dc5ad790c615f55f3f"
checksum = "c683c7fef2b873fbbdf4062782914c652309951244bf0bd362fe608b7d6f901c"
dependencies = [
"codspeed",
"colored",
@@ -444,9 +446,9 @@ dependencies = [
[[package]]
name = "configparser"
version = "3.0.3"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0e56e414a2a52ab2a104f85cd40933c2fbc278b83637facf646ecf451b49237"
checksum = "5458d9d1a587efaf5091602c59d299696a3877a439c8f6d461a2d3cce11df87a"
[[package]]
name = "console"
@@ -808,7 +810,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.1.7"
version = "0.1.5"
dependencies = [
"anyhow",
"clap",
@@ -827,7 +829,7 @@ dependencies = [
"serde_json",
"strum",
"strum_macros",
"toml 0.7.8",
"toml",
]
[[package]]
@@ -848,21 +850,18 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "form_urlencoded"
version = "1.2.1"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
dependencies = [
"percent-encoding",
]
[[package]]
name = "fs-err"
version = "2.11.0"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41"
dependencies = [
"autocfg",
]
checksum = "0845fa252299212f0389d64ba26f34fa32cfe41588355f21ed507c59a0f64541"
[[package]]
name = "fsevent-sys"
@@ -903,15 +902,15 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "globset"
version = "0.4.14"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1"
checksum = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d"
dependencies = [
"aho-corasick",
"bstr",
"fnv",
"log",
"regex-automata 0.4.3",
"regex-syntax 0.8.2",
"regex",
]
[[package]]
@@ -928,9 +927,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
version = "0.14.2"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156"
checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
[[package]]
name = "heck"
@@ -987,9 +986,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "idna"
version = "0.5.0"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"
dependencies = [
"unicode-bidi",
"unicode-normalization",
@@ -1034,12 +1033,12 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.1.0"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
dependencies = [
"equivalent",
"hashbrown 0.14.2",
"hashbrown 0.14.0",
"serde",
]
@@ -1170,9 +1169,9 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
[[package]]
name = "js-sys"
version = "0.3.66"
version = "0.3.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca"
checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a"
dependencies = [
"wasm-bindgen",
]
@@ -1622,9 +1621,9 @@ dependencies = [
[[package]]
name = "percent-encoding"
version = "2.3.1"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
[[package]]
name = "petgraph"
@@ -1793,46 +1792,45 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.70"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b"
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [
"unicode-ident",
]
[[package]]
name = "pyproject-toml"
version = "0.8.1"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46d4a5e69187f23a29f8aa0ea57491d104ba541bc55f76552c2a74962aa20e04"
checksum = "0774c13ff0b8b7ebb4791c050c497aefcfe3f6a222c0829c7017161ed38391ff"
dependencies = [
"indexmap",
"pep440_rs",
"pep508_rs",
"serde",
"toml 0.8.2",
"toml",
]
[[package]]
name = "quick-junit"
version = "0.3.5"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b9599bffc2cd7511355996e0cfd979266b2cfa3f3ff5247d07a3a6e1ded6158"
checksum = "6bf780b59d590c25f8c59b44c124166a2a93587868b619fb8f5b47fb15e9ed6d"
dependencies = [
"chrono",
"indexmap",
"nextest-workspace-hack",
"quick-xml",
"strip-ansi-escapes",
"thiserror",
"uuid",
]
[[package]]
name = "quick-xml"
version = "0.31.0"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33"
checksum = "81b9228215d82c7b61490fec1de287136b5de6f5700f6e58ea9ad61a7964ca51"
dependencies = [
"memchr",
]
@@ -2062,9 +2060,9 @@ dependencies = [
[[package]]
name = "ruff_cli"
version = "0.1.7"
version = "0.1.5"
dependencies = [
"annotate-snippets 0.9.2",
"annotate-snippets 0.9.1",
"anyhow",
"argfile",
"assert_cmd",
@@ -2154,7 +2152,7 @@ dependencies = [
"strum",
"strum_macros",
"tempfile",
"toml 0.7.8",
"toml",
"tracing",
"tracing-indicatif",
"tracing-subscriber",
@@ -2198,10 +2196,10 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.1.7"
version = "0.1.5"
dependencies = [
"aho-corasick",
"annotate-snippets 0.9.2",
"annotate-snippets 0.9.1",
"anyhow",
"bitflags 2.4.1",
"chrono",
@@ -2254,7 +2252,7 @@ dependencies = [
"tempfile",
"test-case",
"thiserror",
"toml 0.7.8",
"toml",
"typed-arena",
"unicode-width",
"unicode_names2",
@@ -2334,7 +2332,6 @@ dependencies = [
"itertools 0.11.0",
"memchr",
"once_cell",
"regex",
"ruff_cache",
"ruff_formatter",
"ruff_macros",
@@ -2450,7 +2447,7 @@ dependencies = [
[[package]]
name = "ruff_shrinking"
version = "0.1.7"
version = "0.1.5"
dependencies = [
"anyhow",
"clap",
@@ -2541,7 +2538,7 @@ dependencies = [
"shellexpand",
"strum",
"tempfile",
"toml 0.7.8",
"toml",
]
[[package]]
@@ -2618,9 +2615,9 @@ dependencies = [
[[package]]
name = "schemars"
version = "0.8.16"
version = "0.8.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45a28f4c49489add4ce10783f7911893516f15afe45d015608d41faca6bc4d29"
checksum = "1f7b0ce13155372a76ee2e1c5ffba1fe61ede73fbea5630d61eee6fac4929c0c"
dependencies = [
"dyn-clone",
"schemars_derive",
@@ -2630,9 +2627,9 @@ dependencies = [
[[package]]
name = "schemars_derive"
version = "0.8.16"
version = "0.8.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c767fd6fa65d9ccf9cf026122c1b555f2ef9a4f0cea69da4d7dbc3e258d30967"
checksum = "e85e2a16b12bdb763244c69ab79363d71db2b4b918a2def53f80b02e0574b13c"
dependencies = [
"proc-macro2",
"quote",
@@ -2805,9 +2802,9 @@ checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d"
[[package]]
name = "smallvec"
version = "1.11.2"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970"
checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a"
[[package]]
name = "spin"
@@ -2834,15 +2831,6 @@ dependencies = [
"precomputed-hash",
]
[[package]]
name = "strip-ansi-escapes"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55ff8ef943b384c414f54aefa961dd2bd853add74ec75e7ac74cf91dba62bcfa"
dependencies = [
"vte",
]
[[package]]
name = "strsim"
version = "0.10.0"
@@ -3098,19 +3086,7 @@ dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit 0.19.15",
]
[[package]]
name = "toml"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "185d8ab0dfbb35cf1399a6344d8484209c088f75f8f68230da55d48d95d43e3d"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit 0.20.2",
"toml_edit",
]
[[package]]
@@ -3135,19 +3111,6 @@ dependencies = [
"winnow",
]
[[package]]
name = "toml_edit"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338"
dependencies = [
"indexmap",
"serde",
"serde_spanned",
"toml_datetime",
"winnow",
]
[[package]]
name = "tracing"
version = "0.1.40"
@@ -3195,20 +3158,20 @@ dependencies = [
[[package]]
name = "tracing-log"
version = "0.2.0"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
dependencies = [
"lazy_static",
"log",
"once_cell",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.18"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
dependencies = [
"matchers",
"nu-ansi-term",
@@ -3334,9 +3297,9 @@ checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]]
name = "ureq"
version = "2.9.1"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97"
checksum = "f5ccd538d4a604753ebc2f17cd9946e89b77bf87f6a8e2309667c6f2e87855e3"
dependencies = [
"base64",
"flate2",
@@ -3350,9 +3313,9 @@ dependencies = [
[[package]]
name = "url"
version = "2.5.0"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5"
dependencies = [
"form_urlencoded",
"idna",
@@ -3368,9 +3331,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "uuid"
version = "1.6.1"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560"
checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc"
dependencies = [
"getrandom",
"rand",
@@ -3380,9 +3343,9 @@ dependencies = [
[[package]]
name = "uuid-macro-internal"
version = "1.6.1"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f49e7f3f3db8040a100710a11932239fd30697115e2ba4107080d8252939845e"
checksum = "3d8c6bba9b149ee82950daefc9623b32bb1dacbfb1890e352f6b887bd582adaf"
dependencies = [
"proc-macro2",
"quote",
@@ -3461,9 +3424,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-bindgen"
version = "0.2.89"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e"
checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342"
dependencies = [
"cfg-if",
"wasm-bindgen-macro",
@@ -3471,9 +3434,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.89"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826"
checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd"
dependencies = [
"bumpalo",
"log",
@@ -3486,9 +3449,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.38"
version = "0.4.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9afec9963e3d0994cac82455b2b3502b81a7f40f9a0d32181f7528d9f4b43e02"
checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03"
dependencies = [
"cfg-if",
"js-sys",
@@ -3498,9 +3461,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.89"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2"
checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -3508,9 +3471,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.89"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283"
checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
dependencies = [
"proc-macro2",
"quote",
@@ -3521,15 +3484,15 @@ dependencies = [
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.89"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f"
checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"
[[package]]
name = "wasm-bindgen-test"
version = "0.3.38"
version = "0.3.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6433b7c56db97397842c46b67e11873eda263170afeb3a2dc74a7cb370fee0d"
checksum = "6e6e302a7ea94f83a6d09e78e7dc7d9ca7b186bc2829c24a22d0753efd680671"
dependencies = [
"console_error_panic_hook",
"js-sys",
@@ -3541,13 +3504,12 @@ dependencies = [
[[package]]
name = "wasm-bindgen-test-macro"
version = "0.3.38"
version = "0.3.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "493fcbab756bb764fa37e6bee8cec2dd709eb4273d06d0c282a5e74275ded735"
checksum = "ecb993dd8c836930ed130e020e77d9b2e65dd0fbab1b67c790b0f5d80b11a575"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
]
[[package]]


@@ -9,7 +9,7 @@ homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
license = "MIT2"
license = "MIT"
[workspace.dependencies]
anyhow = { version = "1.0.69" }
@@ -19,7 +19,7 @@ clap = { version = "4.4.7", features = ["derive"] }
colored = { version = "2.0.0" }
filetime = { version = "0.2.20" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globset = { version = "0.4.10" }
ignore = { version = "0.4.20" }
insta = { version = "1.34.0", features = ["filters", "glob"] }
is-macro = { version = "0.3.0" }
@@ -29,16 +29,16 @@ log = { version = "0.4.17" }
memchr = { version = "2.6.4" }
once_cell = { version = "1.17.1" }
path-absolutize = { version = "3.1.1" }
proc-macro2 = { version = "1.0.70" }
proc-macro2 = { version = "1.0.69" }
quote = { version = "1.0.23" }
regex = { version = "1.10.2" }
rustc-hash = { version = "1.1.0" }
schemars = { version = "0.8.16" }
schemars = { version = "0.8.15" }
serde = { version = "1.0.190", features = ["derive"] }
serde_json = { version = "1.0.108" }
shellexpand = { version = "3.0.0" }
similar = { version = "2.3.0", features = ["inline"] }
smallvec = { version = "1.11.2" }
smallvec = { version = "1.11.1" }
static_assertions = "1.1.0"
strum = { version = "0.25.0", features = ["strum_macros"] }
strum_macros = { version = "0.25.3" }
@@ -48,45 +48,13 @@ thiserror = { version = "1.0.50" }
toml = { version = "0.7.8" }
tracing = { version = "0.1.40" }
tracing-indicatif = { version = "0.3.4" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
unicode-ident = { version = "1.0.12" }
unicode_names2 = { version = "1.2.0" }
unicode-width = { version = "0.1.11" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
uuid = { version = "1.5.0", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
wsl = { version = "0.1.0" }
[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
# Allowed pedantic lints
char_lit_as_u8 = "allow"
collapsible_else_if = "allow"
collapsible_if = "allow"
implicit_hasher = "allow"
match_same_arms = "allow"
missing_errors_doc = "allow"
missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
too_many_lines = "allow"
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
print_stdout = "warn"
print_stderr = "warn"
dbg_macro = "warn"
empty_drop = "warn"
empty_structs_with_brackets = "warn"
exit = "warn"
get_unwrap = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
[profile.release]
lto = "fat"
codegen-units = 1


@@ -1,38 +0,0 @@
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME
RUN apt update && apt install -y build-essential curl python3-venv
# Setup zig as cross compiling linker
RUN python3 -m venv $HOME/.venv
RUN .venv/bin/pip install cargo-zigbuild
ENV PATH="$HOME/.venv/bin:$PATH"
# Install rust
ARG TARGETPLATFORM
RUN case "$TARGETPLATFORM" in \
"linux/arm64") echo "aarch64-unknown-linux-musl" > rust_target.txt ;; \
"linux/amd64") echo "x86_64-unknown-linux-musl" > rust_target.txt ;; \
*) exit 1 ;; \
esac
# Update rustup whenever we bump the rust version
COPY rust-toolchain.toml rust-toolchain.toml
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
ENV PATH="$HOME/.cargo/bin:$PATH"
# Installs the correct toolchain version from rust-toolchain.toml and then the musl target
RUN rustup target add $(cat rust_target.txt)
# Build
COPY crates crates
COPY Cargo.toml Cargo.toml
COPY Cargo.lock Cargo.lock
RUN cargo zigbuild --bin ruff --target $(cat rust_target.txt) --release
RUN cp target/$(cat rust_target.txt)/release/ruff /ruff
# TODO: Optimize binary size, with a version that also works when cross compiling
# RUN strip --strip-all /ruff
FROM scratch
COPY --from=build /ruff /ruff
WORKDIR /io
ENTRYPOINT ["/ruff"]


@@ -54,7 +54,7 @@ Ruff is extremely actively developed and used in major open-source projects like
- [Pandas](https://github.com/pandas-dev/pandas)
- [SciPy](https://github.com/scipy/scipy)
...and [many more](#whos-using-ruff).
...and many more.
Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
@@ -150,7 +150,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.7
rev: v0.1.5
hooks:
# Run the linter.
- id: ruff
@@ -377,8 +377,8 @@ Ruff is used by a number of major open-source projects and companies, including:
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
- [Apache Airflow](https://github.com/apache/airflow)
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Babel](https://github.com/python-babel/babel)
- [Bokeh](https://github.com/bokeh/bokeh)
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- [DVC](https://github.com/iterative/dvc)
@@ -389,16 +389,15 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Gradio](https://github.com/gradio-app/gradio)
- [Great Expectations](https://github.com/great-expectations/great_expectations)
- [HTTPX](https://github.com/encode/httpx)
- [Hatch](https://github.com/pypa/hatch)
- [Home Assistant](https://github.com/home-assistant/core)
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
[Datasets](https://github.com/huggingface/datasets),
[Diffusers](https://github.com/huggingface/diffusers))
- [Hatch](https://github.com/pypa/hatch)
- [Home Assistant](https://github.com/home-assistant/core)
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [LangChain](https://github.com/hwchase17/langchain)
- [Litestar](https://litestar.dev/)
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
- Matrix ([Synapse](https://github.com/matrix-org/synapse))
- [MegaLinter](https://github.com/oxsecurity/megalinter)
@@ -423,21 +422,20 @@ Ruff is used by a number of major open-source projects and companies, including:
- [PostHog](https://github.com/PostHog/posthog)
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
- [PyTorch](https://github.com/pytorch/pytorch)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
- [Reflex](https://github.com/reflex-dev/reflex)
- [River](https://github.com/online-ml/river)
- [Rippling](https://rippling.com)
- [Robyn](https://github.com/sansyrox/robyn)
- [Saleor](https://github.com/saleor/saleor)
- Scale AI ([Launch SDK](https://github.com/scaleapi/launch-python-client))
- [SciPy](https://github.com/scipy/scipy)
- Snowflake ([SnowCLI](https://github.com/Snowflake-Labs/snowcli))
- [Saleor](https://github.com/saleor/saleor)
- [SciPy](https://github.com/scipy/scipy)
- [Sphinx](https://github.com/sphinx-doc/sphinx)
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
- [Starlette](https://github.com/encode/starlette)
- [Litestar](https://litestar.dev/)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Vega-Altair](https://github.com/altair-viz/altair)
- WordPress ([Openverse](https://github.com/WordPress/openverse))


@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.1.7"
version = "0.1.5"
description = """
Convert Flake8 configuration files to Ruff configuration files.
"""
@@ -19,7 +19,7 @@ ruff_workspace = { path = "../ruff_workspace" }
anyhow = { workspace = true }
clap = { workspace = true }
colored = { workspace = true }
configparser = { version = "3.0.3" }
configparser = { version = "3.0.2" }
itertools = { workspace = true }
log = { workspace = true }
once_cell = { workspace = true }
@@ -34,6 +34,3 @@ toml = { workspace = true }
[dev-dependencies]
pretty_assertions = "1.3.0"
[lints]
workspace = true


@@ -34,10 +34,10 @@ harness = false
once_cell.workspace = true
serde.workspace = true
serde_json.workspace = true
url = "2.5.0"
ureq = "2.9.1"
url = "2.3.1"
ureq = "2.8.0"
criterion = { version = "0.5.1", default-features = false }
codspeed-criterion-compat = { version="2.3.3", default-features = false, optional = true}
codspeed-criterion-compat = { version="2.3.1", default-features = false, optional = true}
[dev-dependencies]
ruff_linter.path = "../ruff_linter"
@@ -46,9 +46,6 @@ ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_python_index = { path = "../ruff_python_index" }
ruff_python_parser = { path = "../ruff_python_parser" }
[lints]
workspace = true
[features]
codspeed = ["codspeed-criterion-compat"]


@@ -4,7 +4,7 @@ use ruff_benchmark::criterion::{
criterion_group, criterion_main, BenchmarkId, Criterion, Throughput,
};
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_python_formatter::{format_module_ast, PreviewMode, PyFormatOptions};
use ruff_python_formatter::{format_module_ast, PyFormatOptions};
use ruff_python_index::CommentRangesBuilder;
use ruff_python_parser::lexer::lex;
use ruff_python_parser::{parse_tokens, Mode};
@@ -69,8 +69,7 @@ fn benchmark_formatter(criterion: &mut Criterion) {
.expect("Input to be a valid python program");
b.iter(|| {
let options = PyFormatOptions::from_extension(Path::new(case.name()))
.with_preview(PreviewMode::Enabled);
let options = PyFormatOptions::from_extension(Path::new(case.name()));
let formatted =
format_module_ast(&module, &comment_ranges, case.code(), options)
.expect("Formatting to succeed");


@@ -3,9 +3,7 @@ use ruff_benchmark::criterion::{
};
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_linter::linter::lint_only;
use ruff_linter::rule_selector::PreviewOptions;
use ruff_linter::settings::rule_table::RuleTable;
use ruff_linter::settings::types::PreviewMode;
use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::SourceKind;
use ruff_linter::{registry::Rule, RuleSelector};
@@ -80,21 +78,12 @@ fn benchmark_default_rules(criterion: &mut Criterion) {
benchmark_linter(group, &LinterSettings::default());
}
/// Disables IO based rules because they are a source of flakiness
fn disable_io_rules(rules: &mut RuleTable) {
fn benchmark_all_rules(criterion: &mut Criterion) {
let mut rules: RuleTable = RuleSelector::All.all_rules().collect();
// Disable IO based rules because it is a source of flakiness
rules.disable(Rule::ShebangMissingExecutableFile);
rules.disable(Rule::ShebangNotExecutable);
}
fn benchmark_all_rules(criterion: &mut Criterion) {
let mut rules: RuleTable = RuleSelector::All
.rules(&PreviewOptions {
mode: PreviewMode::Disabled,
require_explicit: false,
})
.collect();
disable_io_rules(&mut rules);
let settings = LinterSettings {
rules,
@@ -105,22 +94,6 @@ fn benchmark_all_rules(criterion: &mut Criterion) {
benchmark_linter(group, &settings);
}
fn benchmark_preview_rules(criterion: &mut Criterion) {
let mut rules: RuleTable = RuleSelector::All.all_rules().collect();
disable_io_rules(&mut rules);
let settings = LinterSettings {
rules,
preview: PreviewMode::Enabled,
..LinterSettings::default()
};
let group = criterion.benchmark_group("linter/all-with-preview-rules");
benchmark_linter(group, &settings);
}
criterion_group!(default_rules, benchmark_default_rules);
criterion_group!(all_rules, benchmark_all_rules);
criterion_group!(preview_rules, benchmark_preview_rules);
criterion_main!(default_rules, all_rules, preview_rules);
criterion_main!(default_rules, all_rules);


@@ -20,6 +20,3 @@ seahash = "4.1.0"
[dev-dependencies]
ruff_macros = { path = "../ruff_macros" }
[lints]
workspace = true


@@ -1,6 +1,6 @@
[package]
name = "ruff_cli"
version = "0.1.7"
version = "0.1.5"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -28,7 +28,7 @@ ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_workspace = { path = "../ruff_workspace" }
ruff_text_size = { path = "../ruff_text_size" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
annotate-snippets = { version = "0.9.1", features = ["color"] }
anyhow = { workspace = true }
argfile = { version = "0.1.6" }
bincode = { version = "1.3.3" }
@@ -69,13 +69,10 @@ insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { version = "0.4.0" }
tempfile = "3.8.1"
test-case = { workspace = true }
ureq = { version = "2.9.1", features = [] }
ureq = { version = "2.8.0", features = [] }
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = "0.1.39"
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
tikv-jemallocator = "0.5.0"
[lints]
workspace = true


@@ -1,2 +0,0 @@
[tool.ruff]
select = []


@@ -1,2 +0,0 @@
[tool.ruff]
include = ["a.py", "subdirectory/c.py"]


@@ -1,413 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "4f8ce941-1492-4d4e-8ab5-70d733fe891a",
"metadata": {},
"outputs": [],
"source": [
"%config ZMQInteractiveShell.ast_node_interactivity=\"last_expr_or_assign\""
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "721ec705-0c65-4bfb-9809-7ed8bc534186",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Assignment statement without a semicolon\n",
"x = 1"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "de50e495-17e5-41cc-94bd-565757555d7e",
"metadata": {},
"outputs": [],
"source": [
"# Assignment statement with a semicolon\n",
"x = 1;\n",
"x = 1;"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "39e31201-23da-44eb-8684-41bba3663991",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"2"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Augmented assignment without a semicolon\n",
"x += 1"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "6b73d3dd-c73a-4697-9e97-e109a6c1fbab",
"metadata": {},
"outputs": [],
"source": [
"# Augmented assignment without a semicolon\n",
"x += 1;\n",
"x += 1; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "2a3e5b86-aa5b-46ba-b9c6-0386d876f58c",
"metadata": {},
"outputs": [],
"source": [
"# Multiple assignment without a semicolon\n",
"x = y = 1"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "07f89e51-9357-4cfb-8fc5-76fb75e35949",
"metadata": {},
"outputs": [],
"source": [
"# Multiple assignment with a semicolon\n",
"x = y = 1;\n",
"x = y = 1;"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "c22b539d-473e-48f8-a236-625e58c47a00",
"metadata": {},
"outputs": [],
"source": [
"# Tuple unpacking without a semicolon\n",
"x, y = 1, 2"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "12c87940-a0d5-403b-a81c-7507eb06dc7e",
"metadata": {},
"outputs": [],
"source": [
"# Tuple unpacking with a semicolon (irrelevant)\n",
"x, y = 1, 2;\n",
"x, y = 1, 2; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "5a768c76-6bc4-470c-b37e-8cc14bc6caf4",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Annotated assignment statement without a semicolon\n",
"x: int = 1"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "21bfda82-1a9a-4ba1-9078-74ac480804b5",
"metadata": {},
"outputs": [],
"source": [
"# Annotated assignment statement without a semicolon\n",
"x: int = 1;\n",
"x: int = 1; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "09929999-ff29-4d10-ad2b-e665af15812d",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Assignment expression without a semicolon\n",
"(x := 1)"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "32a83217-1bad-4f61-855e-ffcdb119c763",
"metadata": {},
"outputs": [],
"source": [
"# Assignment expression with a semicolon\n",
"(x := 1);\n",
"(x := 1); # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "61b81865-277e-4964-b03e-eb78f1f318eb",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x = 1\n",
"# Expression without a semicolon\n",
"x"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "974c29be-67e1-4000-95fa-6ca118a63bad",
"metadata": {},
"outputs": [],
"source": [
"x = 1\n",
"# Expression with a semicolon\n",
"x;"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "cfeb1757-46d6-4f13-969f-a283b6d0304f",
"metadata": {},
"outputs": [],
"source": [
"class Point:\n",
" def __init__(self, x, y):\n",
" self.x = x\n",
" self.y = y\n",
"\n",
"\n",
"p = Point(0, 0);"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "2ee7f1a5-ccfe-4004-bfa4-ef834a58da97",
"metadata": {},
"outputs": [],
"source": [
"# Assignment statement where the left is an attribute access doesn't\n",
"# print the value.\n",
"p.x = 1;"
]
},
{
"cell_type": "code",
"execution_count": 18,
"id": "3e49370a-048b-474d-aa0a-3d1d4a73ad37",
"metadata": {},
"outputs": [],
"source": [
"data = {}\n",
"\n",
"# Neither does the subscript node\n",
"data[\"foo\"] = 1;"
]
},
{
"cell_type": "code",
"execution_count": 19,
"id": "d594bdd3-eaa9-41ef-8cda-cf01bc273b2d",
"metadata": {},
"outputs": [],
"source": [
"if (x := 1):\n",
" # It should be the top level statement\n",
" x"
]
},
{
"cell_type": "code",
"execution_count": 20,
"id": "e532f0cf-80c7-42b7-8226-6002fcf74fb6",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Parentheses with comments\n",
"(\n",
" x := 1 # comment\n",
") # comment"
]
},
{
"cell_type": "code",
"execution_count": 21,
"id": "473c5d62-871b-46ed-8a34-27095243f462",
"metadata": {},
"outputs": [],
"source": [
"# Parentheses with comments\n",
"(\n",
" x := 1 # comment\n",
"); # comment"
]
},
{
"cell_type": "code",
"execution_count": 22,
"id": "8c3c2361-f49f-45fe-bbe3-7e27410a8a86",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'Hello world!'"
]
},
"execution_count": 22,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\"\"\"Hello world!\"\"\""
]
},
{
"cell_type": "code",
"execution_count": 23,
"id": "23dbe9b5-3f68-4890-ab2d-ab0dbfd0712a",
"metadata": {},
"outputs": [],
"source": [
"\"\"\"Hello world!\"\"\"; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 24,
"id": "3ce33108-d95d-4c70-83d1-0d4fd36a2951",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'x = 1'"
]
},
"execution_count": 24,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x = 1\n",
"f\"x = {x}\""
]
},
{
"cell_type": "code",
"execution_count": 25,
"id": "654a4a67-de43-4684-824a-9451c67db48f",
"metadata": {},
"outputs": [],
"source": [
"x = 1\n",
"f\"x = {x}\";\n",
"f\"x = {x}\"; # comment\n",
"# comment"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python (ruff-playground)",
"language": "python",
"name": "ruff-playground"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -88,7 +88,6 @@ pub enum Command {
#[allow(clippy::struct_excessive_bools)]
pub struct CheckCommand {
/// List of files or directories to check.
#[clap(help = "List of files or directories to check [default: .]")]
pub files: Vec<PathBuf>,
/// Apply fixes to resolve lint violations.
/// Use `--no-fix` to disable or `--unsafe-fixes` to include unsafe fixes.
@@ -364,7 +363,6 @@ pub struct CheckCommand {
#[allow(clippy::struct_excessive_bools)]
pub struct FormatCommand {
/// List of files or directories to format.
#[clap(help = "List of files or directories to format [default: .]")]
pub files: Vec<PathBuf>,
/// Avoid writing any formatted files back; instead, exit with a non-zero status code if any
/// files would have been modified, and zero otherwise.

View File

@@ -202,12 +202,12 @@ fn lint_path(
match result {
Ok(inner) => inner,
Err(error) => {
let message = r"This indicates a bug in Ruff. If you could open an issue at:
let message = r#"This indicates a bug in Ruff. If you could open an issue at:
https://github.com/astral-sh/ruff/issues/new?title=%5BLinter%20panic%5D
...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
";
"#;
error!(
"{}{}{} {message}\n{error}",

View File

@@ -34,7 +34,7 @@ use crate::args::{CliOverrides, FormatArguments};
use crate::cache::{Cache, FileCacheKey, PackageCacheMap, PackageCaches};
use crate::panic::{catch_unwind, PanicError};
use crate::resolve::resolve;
use crate::{resolve_default_files, ExitStatus};
use crate::ExitStatus;
#[derive(Debug, Copy, Clone, is_macro::Is)]
pub(crate) enum FormatMode {
@@ -60,7 +60,7 @@ impl FormatMode {
/// Format a set of files, and return the exit status.
pub(crate) fn format(
cli: FormatArguments,
cli: &FormatArguments,
overrides: &CliOverrides,
log_level: LogLevel,
) -> Result<ExitStatus> {
@@ -70,9 +70,8 @@ pub(crate) fn format(
overrides,
cli.stdin_filename.as_deref(),
)?;
let mode = FormatMode::from_cli(&cli);
let files = resolve_default_files(cli.files, false);
let (paths, resolver) = python_files_in_path(&files, &pyproject_config, overrides)?;
let mode = FormatMode::from_cli(cli);
let (paths, resolver) = python_files_in_path(&cli.files, &pyproject_config, overrides)?;
if paths.is_empty() {
warn_user_once!("No Python files found under the given path(s)");
@@ -661,12 +660,12 @@ impl Display for FormatCommandError {
}
}
Self::Panic(path, err) => {
let message = r"This indicates a bug in Ruff. If you could open an issue at:
let message = r#"This indicates a bug in Ruff. If you could open an issue at:
https://github.com/astral-sh/ruff/issues/new?title=%5BFormatter%20panic%5D
...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
";
"#;
if let Some(path) = path {
write!(
f,

View File

@@ -63,7 +63,7 @@ fn format_rule_text(rule: Rule) -> String {
if rule.is_preview() || rule.is_nursery() {
output.push_str(
r"This rule is in preview and is not stable. The `--preview` flag is required for use.",
r#"This rule is in preview and is not stable. The `--preview` flag is required for use."#,
);
output.push('\n');
output.push('\n');

View File

@@ -101,19 +101,6 @@ fn is_stdin(files: &[PathBuf], stdin_filename: Option<&Path>) -> bool {
file == Path::new("-")
}
/// Returns the default set of files if none are provided, otherwise returns the provided `files`.
fn resolve_default_files(files: Vec<PathBuf>, is_stdin: bool) -> Vec<PathBuf> {
if files.is_empty() {
if is_stdin {
vec![Path::new("-").to_path_buf()]
} else {
vec![Path::new(".").to_path_buf()]
}
} else {
files
}
}
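As a quick check of the fallback behavior documented above, here is a standalone copy of the helper with a few assertions (illustrative only; the real function is a private helper in this crate):

use std::path::{Path, PathBuf};

fn resolve_default_files(files: Vec<PathBuf>, is_stdin: bool) -> Vec<PathBuf> {
    if files.is_empty() {
        if is_stdin {
            vec![Path::new("-").to_path_buf()]
        } else {
            vec![Path::new(".").to_path_buf()]
        }
    } else {
        files
    }
}

fn main() {
    // No paths given: fall back to stdin ("-") or the current directory (".").
    assert_eq!(resolve_default_files(vec![], true), vec![PathBuf::from("-")]);
    assert_eq!(resolve_default_files(vec![], false), vec![PathBuf::from(".")]);
    // Explicit paths are passed through unchanged.
    assert_eq!(
        resolve_default_files(vec![PathBuf::from("a.py")], false),
        vec![PathBuf::from("a.py")]
    );
}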
/// Get the actual value of the `format` desired from either `output_format`
/// or `format`, and warn the user if they're using the deprecated form.
fn resolve_help_output_format(output_format: HelpFormat, format: Option<HelpFormat>) -> HelpFormat {
@@ -209,7 +196,7 @@ fn format(args: FormatCommand, log_level: LogLevel) -> Result<ExitStatus> {
if is_stdin(&cli.files, cli.stdin_filename.as_deref()) {
commands::format_stdin::format_stdin(&cli, &overrides)
} else {
commands::format::format(cli, &overrides, log_level)
commands::format::format(&cli, &overrides, log_level)
}
}
@@ -235,15 +222,17 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
};
let stderr_writer = Box::new(BufWriter::new(io::stderr()));
let is_stdin = is_stdin(&cli.files, cli.stdin_filename.as_deref());
let files = resolve_default_files(cli.files, is_stdin);
if cli.show_settings {
commands::show_settings::show_settings(&files, &pyproject_config, &overrides, &mut writer)?;
commands::show_settings::show_settings(
&cli.files,
&pyproject_config,
&overrides,
&mut writer,
)?;
return Ok(ExitStatus::Success);
}
if cli.show_files {
commands::show_files::show_files(&files, &pyproject_config, &overrides, &mut writer)?;
commands::show_files::show_files(&cli.files, &pyproject_config, &overrides, &mut writer)?;
return Ok(ExitStatus::Success);
}
@@ -306,7 +295,8 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
if !fix_mode.is_generate() {
warn_user!("--fix is incompatible with --add-noqa.");
}
let modifications = commands::add_noqa::add_noqa(&files, &pyproject_config, &overrides)?;
let modifications =
commands::add_noqa::add_noqa(&cli.files, &pyproject_config, &overrides)?;
if modifications > 0 && log_level >= LogLevel::Default {
let s = if modifications == 1 { "" } else { "s" };
#[allow(clippy::print_stderr)]
@@ -333,7 +323,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
// Configure the file watcher.
let (tx, rx) = channel();
let mut watcher = recommended_watcher(tx)?;
for file in &files {
for file in &cli.files {
watcher.watch(file, RecursiveMode::Recursive)?;
}
if let Some(file) = pyproject_config.path.as_ref() {
@@ -345,7 +335,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
printer.write_to_user("Starting linter in watch mode...\n");
let messages = commands::check::check(
&files,
&cli.files,
&pyproject_config,
&overrides,
cache.into(),
@@ -378,7 +368,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
printer.write_to_user("File change detected...\n");
let messages = commands::check::check(
&files,
&cli.files,
&pyproject_config,
&overrides,
cache.into(),
@@ -392,6 +382,8 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
}
}
} else {
let is_stdin = is_stdin(&cli.files, cli.stdin_filename.as_deref());
// Generate lint violations.
let diagnostics = if is_stdin {
commands::check_stdin::check_stdin(
@@ -403,7 +395,7 @@ pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
)?
} else {
commands::check::check(
&files,
&cli.files,
&pyproject_config,
&overrides,
cache.into(),

View File

@@ -43,7 +43,7 @@ pub fn resolve(
{
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, overrides)?;
debug!(
"Using user-specified configuration file at: {}",
"Using user specified pyproject.toml at {}",
pyproject.display()
);
return Ok(PyprojectConfig::new(
@@ -63,10 +63,7 @@ pub fn resolve(
.as_ref()
.unwrap_or(&path_dedot::CWD.as_path()),
)? {
debug!(
"Using configuration file (via parent) at: {}",
pyproject.display()
);
debug!("Using pyproject.toml (parent) at {}", pyproject.display());
let settings = resolve_root_settings(&pyproject, Relativity::Parent, overrides)?;
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
@@ -80,10 +77,7 @@ pub fn resolve(
// end up the "closest" `pyproject.toml` file for every Python file later on, so
// these act as the "default" settings.)
if let Some(pyproject) = pyproject::find_user_settings_toml() {
debug!(
"Using configuration file (via cwd) at: {}",
pyproject.display()
);
debug!("Using pyproject.toml (cwd) at {}", pyproject.display());
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, overrides)?;
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
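The debug messages above reflect a three-step resolution order. A rough, std-only sketch of that order (illustrative; the real logic lives in `ruff_workspace` and handles many more cases):

use std::path::{Path, PathBuf};

fn find_pyproject_upwards(start: &Path) -> Option<PathBuf> {
    // Walk up from `start` and return the closest pyproject.toml, if any.
    let mut dir = Some(start);
    while let Some(current) = dir {
        let candidate = current.join("pyproject.toml");
        if candidate.is_file() {
            return Some(candidate);
        }
        dir = current.parent();
    }
    None
}

fn resolve_config(explicit: Option<PathBuf>, cwd: &Path, user: Option<PathBuf>) -> Option<PathBuf> {
    // 1. A user-specified configuration file always wins.
    if explicit.is_some() {
        return explicit;
    }
    // 2. Otherwise, use the closest pyproject.toml above the working directory.
    if let Some(found) = find_pyproject_upwards(cwd) {
        return Some(found);
    }
    // 3. Fall back to user-level settings, if any.
    user
}

fn main() {
    let cwd = std::env::current_dir().expect("cwd");
    println!("{:?}", resolve_config(None, &cwd, None));
}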

View File

@@ -43,53 +43,6 @@ if condition:
"###);
}
#[test]
fn default_files() -> Result<()> {
let tempdir = TempDir::new()?;
fs::write(
tempdir.path().join("foo.py"),
r#"
foo = "needs formatting"
"#,
)?;
fs::write(
tempdir.path().join("bar.py"),
r#"
bar = "needs formatting"
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--no-cache", "--check"]).current_dir(tempdir.path()), @r###"
success: false
exit_code: 1
----- stdout -----
Would reformat: bar.py
Would reformat: foo.py
2 files would be reformatted
----- stderr -----
"###);
Ok(())
}
#[test]
fn format_warn_stdin_filename_with_files() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "foo.py"])
.arg("foo.py")
.pass_stdin("foo = 1"), @r###"
success: true
exit_code: 0
----- stdout -----
foo = 1
----- stderr -----
warning: Ignoring file foo.py in favor of standard input.
"###);
}
#[test]
fn format_options() -> Result<()> {
let tempdir = TempDir::new()?;
@@ -442,9 +395,9 @@ fn deprecated_options() -> Result<()> {
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r"
r#"
tab-size = 2
",
"#,
)?;
insta::with_settings!({filters => vec![
@@ -454,10 +407,10 @@ tab-size = 2
.args(["format", "--config"])
.arg(&ruff_toml)
.arg("-")
.pass_stdin(r"
.pass_stdin(r#"
if True:
pass
"), @r###"
"#), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -490,9 +443,9 @@ format = "json"
.args(["check", "--select", "F401", "--no-cache", "--config"])
.arg(&ruff_toml)
.arg("-")
.pass_stdin(r"
.pass_stdin(r#"
import os
"), @r###"
"#), @r###"
success: false
exit_code: 2
----- stdout -----
@@ -865,432 +818,3 @@ fn test_diff_stdin_formatted() {
----- stderr -----
"###);
}
#[test]
fn test_notebook_trailing_semicolon() {
let fixtures = Path::new("resources").join("test").join("fixtures");
let unformatted = fs::read(fixtures.join("trailing_semicolon.ipynb")).unwrap();
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--isolated", "--stdin-filename", "test.ipynb"])
.arg("-")
.pass_stdin(unformatted), @r###"
success: true
exit_code: 0
----- stdout -----
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "4f8ce941-1492-4d4e-8ab5-70d733fe891a",
"metadata": {},
"outputs": [],
"source": [
"%config ZMQInteractiveShell.ast_node_interactivity=\"last_expr_or_assign\""
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "721ec705-0c65-4bfb-9809-7ed8bc534186",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Assignment statement without a semicolon\n",
"x = 1"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "de50e495-17e5-41cc-94bd-565757555d7e",
"metadata": {},
"outputs": [],
"source": [
"# Assignment statement with a semicolon\n",
"x = 1\n",
"x = 1;"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "39e31201-23da-44eb-8684-41bba3663991",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"2"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Augmented assignment without a semicolon\n",
"x += 1"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "6b73d3dd-c73a-4697-9e97-e109a6c1fbab",
"metadata": {},
"outputs": [],
"source": [
"# Augmented assignment without a semicolon\n",
"x += 1\n",
"x += 1; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "2a3e5b86-aa5b-46ba-b9c6-0386d876f58c",
"metadata": {},
"outputs": [],
"source": [
"# Multiple assignment without a semicolon\n",
"x = y = 1"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "07f89e51-9357-4cfb-8fc5-76fb75e35949",
"metadata": {},
"outputs": [],
"source": [
"# Multiple assignment with a semicolon\n",
"x = y = 1\n",
"x = y = 1"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "c22b539d-473e-48f8-a236-625e58c47a00",
"metadata": {},
"outputs": [],
"source": [
"# Tuple unpacking without a semicolon\n",
"x, y = 1, 2"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "12c87940-a0d5-403b-a81c-7507eb06dc7e",
"metadata": {},
"outputs": [],
"source": [
"# Tuple unpacking with a semicolon (irrelevant)\n",
"x, y = 1, 2\n",
"x, y = 1, 2 # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "5a768c76-6bc4-470c-b37e-8cc14bc6caf4",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Annotated assignment statement without a semicolon\n",
"x: int = 1"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "21bfda82-1a9a-4ba1-9078-74ac480804b5",
"metadata": {},
"outputs": [],
"source": [
"# Annotated assignment statement without a semicolon\n",
"x: int = 1\n",
"x: int = 1; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "09929999-ff29-4d10-ad2b-e665af15812d",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Assignment expression without a semicolon\n",
"(x := 1)"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "32a83217-1bad-4f61-855e-ffcdb119c763",
"metadata": {},
"outputs": [],
"source": [
"# Assignment expression with a semicolon\n",
"(x := 1)\n",
"(x := 1); # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "61b81865-277e-4964-b03e-eb78f1f318eb",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x = 1\n",
"# Expression without a semicolon\n",
"x"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "974c29be-67e1-4000-95fa-6ca118a63bad",
"metadata": {},
"outputs": [],
"source": [
"x = 1\n",
"# Expression with a semicolon\n",
"x;"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "cfeb1757-46d6-4f13-969f-a283b6d0304f",
"metadata": {},
"outputs": [],
"source": [
"class Point:\n",
" def __init__(self, x, y):\n",
" self.x = x\n",
" self.y = y\n",
"\n",
"\n",
"p = Point(0, 0);"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "2ee7f1a5-ccfe-4004-bfa4-ef834a58da97",
"metadata": {},
"outputs": [],
"source": [
"# Assignment statement where the left is an attribute access doesn't\n",
"# print the value.\n",
"p.x = 1"
]
},
{
"cell_type": "code",
"execution_count": 18,
"id": "3e49370a-048b-474d-aa0a-3d1d4a73ad37",
"metadata": {},
"outputs": [],
"source": [
"data = {}\n",
"\n",
"# Neither does the subscript node\n",
"data[\"foo\"] = 1"
]
},
{
"cell_type": "code",
"execution_count": 19,
"id": "d594bdd3-eaa9-41ef-8cda-cf01bc273b2d",
"metadata": {},
"outputs": [],
"source": [
"if x := 1:\n",
" # It should be the top level statement\n",
" x"
]
},
{
"cell_type": "code",
"execution_count": 20,
"id": "e532f0cf-80c7-42b7-8226-6002fcf74fb6",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Parentheses with comments\n",
"(\n",
" x := 1 # comment\n",
") # comment"
]
},
{
"cell_type": "code",
"execution_count": 21,
"id": "473c5d62-871b-46ed-8a34-27095243f462",
"metadata": {},
"outputs": [],
"source": [
"# Parentheses with comments\n",
"(\n",
" x := 1 # comment\n",
"); # comment"
]
},
{
"cell_type": "code",
"execution_count": 22,
"id": "8c3c2361-f49f-45fe-bbe3-7e27410a8a86",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'Hello world!'"
]
},
"execution_count": 22,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\"\"\"Hello world!\"\"\""
]
},
{
"cell_type": "code",
"execution_count": 23,
"id": "23dbe9b5-3f68-4890-ab2d-ab0dbfd0712a",
"metadata": {},
"outputs": [],
"source": [
"\"\"\"Hello world!\"\"\"; # comment\n",
"# comment"
]
},
{
"cell_type": "code",
"execution_count": 24,
"id": "3ce33108-d95d-4c70-83d1-0d4fd36a2951",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'x = 1'"
]
},
"execution_count": 24,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x = 1\n",
"f\"x = {x}\""
]
},
{
"cell_type": "code",
"execution_count": 25,
"id": "654a4a67-de43-4684-824a-9451c67db48f",
"metadata": {},
"outputs": [],
"source": [
"x = 1\n",
"f\"x = {x}\"\n",
"f\"x = {x}\"; # comment\n",
"# comment"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python (ruff-playground)",
"language": "python",
"name": "ruff-playground"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
----- stderr -----
"###);
}

File diff suppressed because it is too large

View File

@@ -396,43 +396,3 @@ if __name__ == "__main__":
"###);
Ok(())
}
/// Regression test for [#8858](https://github.com/astral-sh/ruff/issues/8858)
#[test]
fn parent_configuration_override() -> Result<()> {
let tempdir = TempDir::new()?;
let root_ruff = tempdir.path().join("ruff.toml");
fs::write(
root_ruff,
r#"
[lint]
select = ["ALL"]
"#,
)?;
let sub_dir = tempdir.path().join("subdirectory");
fs::create_dir(&sub_dir)?;
let subdirectory_ruff = sub_dir.join("ruff.toml");
fs::write(
subdirectory_ruff,
r#"
[lint]
ignore = ["D203", "D212"]
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(sub_dir)
.arg("check")
.args(STDIN_BASE_OPTIONS)
, @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
warning: No Python files found under the given path(s)
"###);
Ok(())
}

View File

@@ -1,101 +0,0 @@
#![cfg(not(target_family = "wasm"))]
use std::path::Path;
use std::process::Command;
use std::str;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
const BIN_NAME: &str = "ruff";
#[cfg(not(target_os = "windows"))]
const TEST_FILTERS: &[(&str, &str)] = &[(".*/resources/test/fixtures/", "[BASEPATH]/")];
#[cfg(target_os = "windows")]
const TEST_FILTERS: &[(&str, &str)] = &[
(r".*\\resources\\test\\fixtures\\", "[BASEPATH]\\"),
(r"\\", "/"),
];
#[test]
fn check_project_include_defaults() {
// Defaults to checking the current working directory
//
// The test directory includes:
// - A pyproject.toml which specifies an include
// - A nested pyproject.toml which has a Ruff section
//
    // The nested project should be checked in full instead of respecting the parent includes
insta::with_settings!({
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
success: true
exit_code: 0
----- stdout -----
[BASEPATH]/include-test/a.py
[BASEPATH]/include-test/nested-project/e.py
[BASEPATH]/include-test/nested-project/pyproject.toml
[BASEPATH]/include-test/subdirectory/c.py
----- stderr -----
"###);
});
}
#[test]
fn check_project_respects_direct_paths() {
// Given a direct path not included in the project `includes`, it should be checked
insta::with_settings!({
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files", "b.py"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
success: true
exit_code: 0
----- stdout -----
[BASEPATH]/include-test/b.py
----- stderr -----
"###);
});
}
#[test]
fn check_project_respects_subdirectory_includes() {
// Given a direct path to a subdirectory, the include should be respected
insta::with_settings!({
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files", "subdirectory"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
success: true
exit_code: 0
----- stdout -----
[BASEPATH]/include-test/subdirectory/c.py
----- stderr -----
"###);
});
}
#[test]
fn check_project_from_project_subdirectory_respects_includes() {
    // When run from a project subdirectory, the include specified in the parent directory should be respected
insta::with_settings!({
filters => TEST_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test/subdirectory")), @r###"
success: true
exit_code: 0
----- stdout -----
[BASEPATH]/include-test/subdirectory/c.py
----- stderr -----
"###);
});
}

View File

@@ -48,12 +48,9 @@ tracing-indicatif = { workspace = true }
tracing-subscriber = { workspace = true, features = ["env-filter"] }
imara-diff = "0.1.5"
[dev-dependencies]
indoc = "2.0.4"
[features]
# Turn off rayon for profiling
singlethreaded = []
[lints]
workspace = true
[dev-dependencies]
indoc = "2.0.4"

View File

@@ -49,7 +49,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
if rule.is_preview() || rule.is_nursery() {
output.push_str(
r"This rule is unstable and in [preview](../preview.md). The `--preview` flag is required for use.",
r#"This rule is unstable and in [preview](../preview.md). The `--preview` flag is required for use."#,
);
output.push('\n');
output.push('\n');

View File

@@ -129,12 +129,12 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parent_set:
output.push_str("**Example usage**:\n\n");
output.push_str(&format_tab(
"pyproject.toml",
&format_header(field.scope, parent_set, ConfigurationFile::PyprojectToml),
&format_header(parent_set, ConfigurationFile::PyprojectToml),
field.example,
));
output.push_str(&format_tab(
"ruff.toml",
&format_header(field.scope, parent_set, ConfigurationFile::RuffToml),
&format_header(parent_set, ConfigurationFile::RuffToml),
field.example,
));
output.push('\n');
@@ -149,53 +149,23 @@ fn format_tab(tab_name: &str, header: &str, content: &str) -> String {
)
}
/// Format the TOML header for the example usage of a given option.
///
/// For example: `[tool.ruff.format]` or `[tool.ruff.lint.isort]`.
fn format_header(
scope: Option<&str>,
parent_set: &Set,
configuration: ConfigurationFile,
) -> String {
match configuration {
ConfigurationFile::PyprojectToml => {
let mut header = if let Some(set_name) = parent_set.name() {
if set_name == "format" {
String::from("tool.ruff.format")
} else {
format!("tool.ruff.lint.{set_name}")
}
} else {
"tool.ruff".to_string()
};
if let Some(scope) = scope {
if !header.is_empty() {
header.push('.');
}
header.push_str(scope);
}
format!("[{header}]")
fn format_header(parent_set: &Set, configuration: ConfigurationFile) -> String {
let fmt = if let Some(set_name) = parent_set.name() {
if set_name == "format" {
String::from(".format")
} else {
format!(".lint.{set_name}")
}
} else {
String::new()
};
match configuration {
ConfigurationFile::PyprojectToml => format!("[tool.ruff{fmt}]"),
ConfigurationFile::RuffToml => {
let mut header = if let Some(set_name) = parent_set.name() {
if set_name == "format" {
String::from("format")
} else {
format!("lint.{set_name}")
}
} else {
String::new()
};
if let Some(scope) = scope {
if !header.is_empty() {
header.push('.');
}
header.push_str(scope);
}
if header.is_empty() {
if fmt.is_empty() {
String::new()
} else {
format!("[{header}]")
format!("[{}]", fmt.strip_prefix('.').unwrap())
}
}
}
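A simplified, self-contained sketch of the header construction described above; the `scope` parameter and the two output flavors follow the diff, but the real `Set` and `ConfigurationFile` types are replaced with plain parameters for illustration:

fn format_header(scope: Option<&str>, set_name: Option<&str>, pyproject: bool) -> String {
    // Build the dotted path first, then wrap it in brackets (if non-empty).
    let mut header = if pyproject {
        match set_name {
            Some("format") => String::from("tool.ruff.format"),
            Some(name) => format!("tool.ruff.lint.{name}"),
            None => String::from("tool.ruff"),
        }
    } else {
        match set_name {
            Some("format") => String::from("format"),
            Some(name) => format!("lint.{name}"),
            None => String::new(),
        }
    };
    if let Some(scope) = scope {
        if !header.is_empty() {
            header.push('.');
        }
        header.push_str(scope);
    }
    if header.is_empty() {
        String::new()
    } else {
        format!("[{header}]")
    }
}

fn main() {
    assert_eq!(format_header(None, Some("isort"), true), "[tool.ruff.lint.isort]");
    assert_eq!(format_header(None, Some("format"), false), "[format]");
    assert_eq!(format_header(Some("flake8-annotations"), None, true), "[tool.ruff.flake8-annotations]");
}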

View File

@@ -1,34 +1,30 @@
//! Print the AST for a given Python file.
#![allow(clippy::print_stdout, clippy::print_stderr)]
use std::fs;
use std::path::PathBuf;
use anyhow::Result;
use ruff_linter::source_kind::SourceKind;
use ruff_python_ast::PySourceType;
use ruff_python_parser::{parse, AsMode};
use ruff_python_parser::{parse, Mode};
#[derive(clap::Args)]
pub(crate) struct Args {
/// Python file for which to generate the AST.
#[arg(required = true)]
file: PathBuf,
    /// Run in Jupyter mode, i.e., allow line magics.
#[arg(long)]
jupyter: bool,
}
pub(crate) fn main(args: &Args) -> Result<()> {
let source_type = PySourceType::from(&args.file);
let source_kind = SourceKind::from_path(&args.file, source_type)?.ok_or_else(|| {
anyhow::anyhow!(
"Could not determine source kind for file: {}",
args.file.display()
)
})?;
let python_ast = parse(
source_kind.source_code(),
source_type.as_mode(),
&args.file.to_string_lossy(),
)?;
let contents = fs::read_to_string(&args.file)?;
let mode = if args.jupyter {
Mode::Ipython
} else {
Mode::Module
};
let python_ast = parse(&contents, mode, &args.file.to_string_lossy())?;
println!("{python_ast:#?}");
Ok(())
}

View File

@@ -1,30 +1,30 @@
//! Print the token stream for a given Python file.
#![allow(clippy::print_stdout, clippy::print_stderr)]
use std::fs;
use std::path::PathBuf;
use anyhow::Result;
use ruff_linter::source_kind::SourceKind;
use ruff_python_ast::PySourceType;
use ruff_python_parser::{lexer, AsMode};
use ruff_python_parser::{lexer, Mode};
#[derive(clap::Args)]
pub(crate) struct Args {
/// Python file for which to generate the AST.
#[arg(required = true)]
file: PathBuf,
    /// Run in Jupyter mode, i.e., allow line magics (`%`, `!`, `?`, `/`, `,`, `;`).
#[arg(long)]
jupyter: bool,
}
pub(crate) fn main(args: &Args) -> Result<()> {
let source_type = PySourceType::from(&args.file);
let source_kind = SourceKind::from_path(&args.file, source_type)?.ok_or_else(|| {
anyhow::anyhow!(
"Could not determine source kind for file: {}",
args.file.display()
)
})?;
for (tok, range) in lexer::lex(source_kind.source_code(), source_type.as_mode()).flatten() {
let contents = fs::read_to_string(&args.file)?;
let mode = if args.jupyter {
Mode::Ipython
} else {
Mode::Module
};
for (tok, range) in lexer::lex(&contents, mode).flatten() {
println!(
"{start:#?} {tok:#?} {end:#?}",
start = range.start(),

View File

@@ -29,6 +29,3 @@ insta = { workspace = true }
[features]
serde = ["dep:serde", "ruff_text_size/serde"]
schemars = ["dep:schemars", "ruff_text_size/schemars"]
[lints]
workspace = true

View File

@@ -1,9 +1,23 @@
use super::{Buffer, Format, Formatter};
use crate::FormatResult;
use std::ffi::c_void;
use std::marker::PhantomData;
/// A convenience wrapper for representing a formattable argument.
/// Mono-morphed type to format an object. Used by the [`crate::format`!], [`crate::format_args`!], and
/// [`crate::write`!] macros.
///
/// This struct is similar to a dynamic dispatch (using `dyn Format`) because it stores a pointer to the value.
/// However, it doesn't store a pointer to `dyn Format`'s vtable; instead, it statically resolves the function
/// pointer of `Format::format` and stores it in `formatter`.
pub struct Argument<'fmt, Context> {
value: &'fmt dyn Format<Context>,
/// The value to format stored as a raw pointer where `lifetime` stores the value's lifetime.
value: *const c_void,
    /// Stores the lifetime of the value, so our dear borrow checker can still enforce it.
lifetime: PhantomData<&'fmt ()>,
/// The function pointer to `value`'s `Format::format` method
formatter: fn(*const c_void, &mut Formatter<'_, Context>) -> FormatResult<()>,
}
impl<Context> Clone for Argument<'_, Context> {
@@ -14,19 +28,32 @@ impl<Context> Clone for Argument<'_, Context> {
impl<Context> Copy for Argument<'_, Context> {}
impl<'fmt, Context> Argument<'fmt, Context> {
/// Called by the [ruff_formatter::format_args] macro.
/// Called by the [ruff_formatter::format_args] macro. Creates a mono-morphed value for formatting
/// an object.
#[doc(hidden)]
#[inline]
pub fn new<F: Format<Context>>(value: &'fmt F) -> Self {
Self { value }
#[inline]
fn formatter<F: Format<Context>, Context>(
ptr: *const c_void,
fmt: &mut Formatter<Context>,
) -> FormatResult<()> {
// SAFETY: Safe because the 'fmt lifetime is captured by the 'lifetime' field.
#[allow(unsafe_code)]
F::fmt(unsafe { &*ptr.cast::<F>() }, fmt)
}
Self {
value: (value as *const F).cast::<std::ffi::c_void>(),
lifetime: PhantomData,
formatter: formatter::<F, Context>,
}
}
/// Formats the value stored by this argument using the given formatter.
#[inline]
// Seems to only be triggered on wasm32 and looks like a false positive?
#[allow(clippy::trivially_copy_pass_by_ref)]
pub(super) fn format(&self, f: &mut Formatter<Context>) -> FormatResult<()> {
self.value.fmt(f)
(self.formatter)(self.value, f)
}
}
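The same erasure trick in a minimal, self-contained form (the `Render`/`Arg` names are stand-ins, not ruff_formatter's real types): a raw pointer to the value plus a monomorphized function pointer takes the place of a `dyn Trait` fat pointer.

use std::ffi::c_void;
use std::marker::PhantomData;

trait Render {
    fn render(&self, out: &mut String);
}

struct Arg<'a> {
    // Type-erased pointer to the value being formatted.
    value: *const c_void,
    // Tracks the borrow so the erased pointer cannot outlive the value.
    lifetime: PhantomData<&'a ()>,
    // Monomorphized entry point that knows the value's concrete type.
    call: fn(*const c_void, &mut String),
}

impl<'a> Arg<'a> {
    fn new<T: Render>(value: &'a T) -> Self {
        fn call<T: Render>(ptr: *const c_void, out: &mut String) {
            // SAFETY: `ptr` was created from a `&T` whose lifetime is tracked
            // by the `PhantomData<&'a ()>` on the owning `Arg`.
            unsafe { (*ptr.cast::<T>()).render(out) }
        }
        Self {
            value: (value as *const T).cast::<c_void>(),
            lifetime: PhantomData,
            call: call::<T>,
        }
    }

    fn render(&self, out: &mut String) {
        (self.call)(self.value, out)
    }
}

impl Render for i32 {
    fn render(&self, out: &mut String) {
        out.push_str(&self.to_string());
    }
}

fn main() {
    let x = 42;
    let arg = Arg::new(&x);
    let mut out = String::new();
    arg.render(&mut out);
    assert_eq!(out, "42");
}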

View File

@@ -2555,17 +2555,17 @@ pub struct BestFitting<'a, Context> {
}
impl<'a, Context> BestFitting<'a, Context> {
/// Creates a new best fitting IR with the given variants.
///
/// Callers are required to ensure that the number of variants given
/// is at least 2.
    /// Creates a new best fitting IR with the given variants. The method itself isn't unsafe,
    /// but it is marked `unsafe` to discourage people from using it, because the printer will panic if
    /// the slice doesn't contain at least the least and most expanded variants.
    ///
    /// If you're looking for a way to create a `BestFitting` object, use the `best_fitting![least_expanded, most_expanded]` macro.
///
/// # Panics
///
/// When the slice contains less than two variants.
pub fn from_arguments_unchecked(variants: Arguments<'a, Context>) -> Self {
/// ## Safety
/// The slice must contain at least two variants.
#[allow(unsafe_code)]
pub unsafe fn from_arguments_unchecked(variants: Arguments<'a, Context>) -> Self {
assert!(
variants.0.len() >= 2,
"Requires at least the least expanded and most expanded variants"
@@ -2696,12 +2696,14 @@ impl<Context> Format<Context> for BestFitting<'_, Context> {
buffer.write_element(FormatElement::Tag(EndBestFittingEntry));
}
// OK because the constructor guarantees that there are always at
// least two variants.
let variants = BestFittingVariants::from_vec_unchecked(buffer.into_vec());
let element = FormatElement::BestFitting {
variants,
mode: self.mode,
// SAFETY: The constructor guarantees that there are always at least two variants. It's, therefore,
// safe to call into the unsafe `from_vec_unchecked` function
#[allow(unsafe_code)]
let element = unsafe {
FormatElement::BestFitting {
variants: BestFittingVariants::from_vec_unchecked(buffer.into_vec()),
mode: self.mode,
}
};
f.write_element(element);

View File

@@ -332,14 +332,17 @@ pub enum BestFittingMode {
pub struct BestFittingVariants(Box<[FormatElement]>);
impl BestFittingVariants {
/// Creates a new best fitting IR with the given variants.
///
/// Callers are required to ensure that the number of variants given
    /// is at least 2 when using `most_expanded` or `most_flat`.
    /// Creates a new best fitting IR with the given variants. The method itself isn't unsafe,
    /// but it is marked `unsafe` to discourage people from using it, because the printer will panic if
    /// the slice doesn't contain at least the least and most expanded variants.
    ///
    /// If you're looking for a way to create a `BestFitting` object, use the `best_fitting![least_expanded, most_expanded]` macro.
///
/// ## Safety
/// The slice must contain at least two variants.
#[doc(hidden)]
pub fn from_vec_unchecked(variants: Vec<FormatElement>) -> Self {
#[allow(unsafe_code)]
pub unsafe fn from_vec_unchecked(variants: Vec<FormatElement>) -> Self {
debug_assert!(
variants
.iter()
@@ -348,23 +351,12 @@ impl BestFittingVariants {
>= 2,
"Requires at least the least expanded and most expanded variants"
);
Self(variants.into_boxed_slice())
}
/// Returns the most expanded variant
///
/// # Panics
///
/// When the number of variants is less than two.
pub fn most_expanded(&self) -> &[FormatElement] {
assert!(
self.as_slice()
.iter()
.filter(|element| matches!(element, FormatElement::Tag(Tag::StartBestFittingEntry)))
.count()
>= 2,
"Requires at least the least expanded and most expanded variants"
);
self.into_iter().last().unwrap()
}
@@ -373,19 +365,7 @@ impl BestFittingVariants {
}
/// Returns the least expanded variant
///
/// # Panics
///
/// When the number of variants is less than two.
pub fn most_flat(&self) -> &[FormatElement] {
assert!(
self.as_slice()
.iter()
.filter(|element| matches!(element, FormatElement::Tag(Tag::StartBestFittingEntry)))
.count()
>= 2,
"Requires at least the least expanded and most expanded variants"
);
self.into_iter().next().unwrap()
}
}
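A generic sketch of the "at least two variants" invariant discussed above: verify it once at construction so that accessors like `most_flat` and `most_expanded` can simply return the first and last entries (illustrative, not the actual `BestFittingVariants` implementation).

struct Variants<T>(Box<[T]>);

impl<T> Variants<T> {
    fn new(variants: Vec<T>) -> Self {
        // The invariant is checked once here instead of in every accessor.
        assert!(
            variants.len() >= 2,
            "Requires at least the least expanded and most expanded variants"
        );
        Self(variants.into_boxed_slice())
    }

    /// The least expanded (flattest) variant.
    fn most_flat(&self) -> &T {
        self.0.first().unwrap()
    }

    /// The most expanded variant.
    fn most_expanded(&self) -> &T {
        self.0.last().unwrap()
    }
}

fn main() {
    let variants = Variants::new(vec!["flat", "expanded"]);
    assert_eq!(*variants.most_flat(), "flat");
    assert_eq!(*variants.most_expanded(), "expanded");
}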

View File

@@ -329,8 +329,10 @@ macro_rules! format {
#[macro_export]
macro_rules! best_fitting {
($least_expanded:expr, $($tail:expr),+ $(,)?) => {{
// OK because the macro syntax requires at least two variants.
$crate::BestFitting::from_arguments_unchecked($crate::format_args!($least_expanded, $($tail),+))
#[allow(unsafe_code)]
unsafe {
$crate::BestFitting::from_arguments_unchecked($crate::format_args!($least_expanded, $($tail),+))
}
}}
}

View File

@@ -1711,14 +1711,14 @@ mod tests {
));
assert_eq!(
"a
r#"a
b
c
d
d
c
b
a",
a"#,
formatted.as_code()
);
}
@@ -2047,10 +2047,10 @@ two lines`,
assert_eq!(
printed.as_code(),
"Group with id-2
r#"Group with id-2
Group with id-1 does not fit on the line because it exceeds the line width of 80 characters by
Group 2 fits
Group 1 breaks"
Group 1 breaks"#
);
}

View File

@@ -17,6 +17,3 @@ ruff_macros = { path = "../ruff_macros" }
[dev-dependencies]
static_assertions = "1.1.0"
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.1.7"
version = "0.1.5"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -30,7 +30,7 @@ ruff_source_file = { path = "../ruff_source_file", features = ["serde"] }
ruff_text_size = { path = "../ruff_text_size" }
aho-corasick = { version = "1.1.2" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
annotate-snippets = { version = "0.9.1", features = ["color"] }
anyhow = { workspace = true }
bitflags = { workspace = true }
chrono = { workspace = true }
@@ -53,8 +53,8 @@ path-absolutize = { workspace = true, features = [
] }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.3.12", features = ["serde"] }
pyproject-toml = { version = "0.8.1" }
quick-junit = { version = "0.3.5" }
pyproject-toml = { version = "0.8.0" }
quick-junit = { version = "0.3.2" }
regex = { workspace = true }
result-like = { version = "0.4.6" }
rustc-hash = { workspace = true }
@@ -86,6 +86,3 @@ default = []
schemars = ["dep:schemars"]
# Enables the UnreachableCode rule
unreachable-code = []
[lints]
workspace = true

View File

@@ -0,0 +1,3 @@
# fixtures
Fixture files used for snapshot testing.

View File

@@ -39,18 +39,3 @@ def func():
for i in range(1110):
if True:
break
# TODO(charlie): The `pass` here does not get properly redirected to the top of the
# loop, unlike below.
def func():
for i in range(5):
pass
else:
return 1
def func():
for i in range(5):
pass
else:
return 1
x = 1

View File

@@ -129,11 +129,3 @@ def func():
print("Grass is green")
case Color.BLUE:
print("I'm feeling the blues :(")
def func(point):
match point:
case (0, 0):
print("Origin")
case foo:
raise ValueError("oops")

View File

@@ -1,184 +0,0 @@
def func():
return 1
def func():
return 1.5
def func(x: int):
if x > 0:
return 1
else:
return 1.5
def func():
return True
def func(x: int):
if x > 0:
return None
else:
return
def func(x: int):
return 1 or 2.5 if x > 0 else 1.5 or "str"
def func(x: int):
return 1 + 2.5 if x > 0 else 1.5 or "str"
def func(x: int):
if not x:
return None
return {"foo": 1}
def func(x: int):
return {"foo": 1}
def func(x: int):
if not x:
return 1
else:
return True
def func(x: int):
if not x:
return 1
else:
return None
def func(x: int):
if not x:
return 1
elif x > 5:
return "str"
else:
return None
def func(x: int):
if x:
return 1
def func():
x = 1
def func(x: int):
if x > 0:
return 1
def func(x: int):
match x:
case [1, 2, 3]:
return 1
case 4 as y:
return "foo"
def func(x: int):
for i in range(5):
if i > 0:
return 1
def func(x: int):
for i in range(5):
if i > 0:
return 1
else:
return 4
def func(x: int):
for i in range(5):
if i > 0:
break
else:
return 4
def func(x: int):
try:
pass
except:
return 1
def func(x: int):
try:
pass
except:
return 1
finally:
return 2
def func(x: int):
try:
pass
except:
return 1
else:
return 2
def func(x: int):
try:
return 1
except:
return 2
else:
pass
def func(x: int):
while x > 0:
break
return 1
import abc
from abc import abstractmethod
class Foo(abc.ABC):
@abstractmethod
def method(self):
pass
@abc.abstractmethod
def method(self):
"""Docstring."""
@abc.abstractmethod
def method(self):
...
@staticmethod
@abstractmethod
def method():
pass
@classmethod
@abstractmethod
def method(cls):
pass
@abstractmethod
def method(self):
if self.x > 0:
return 1
else:
return 1.5

View File

@@ -1,65 +0,0 @@
import sys
import tarfile
import tempfile
def unsafe_archive_handler(filename):
tar = tarfile.open(filename)
tar.extractall(path=tempfile.mkdtemp())
tar.close()
def managed_members_archive_handler(filename):
tar = tarfile.open(filename)
tar.extractall(path=tempfile.mkdtemp(), members=members_filter(tar))
tar.close()
def list_members_archive_handler(filename):
tar = tarfile.open(filename)
tar.extractall(path=tempfile.mkdtemp(), members=[])
tar.close()
def provided_members_archive_handler(filename):
tar = tarfile.open(filename)
tarfile.extractall(path=tempfile.mkdtemp(), members=tar)
tar.close()
def filter_data(filename):
tar = tarfile.open(filename)
tarfile.extractall(path=tempfile.mkdtemp(), filter="data")
tar.close()
def filter_fully_trusted(filename):
tar = tarfile.open(filename)
tarfile.extractall(path=tempfile.mkdtemp(), filter="fully_trusted")
tar.close()
def filter_tar(filename):
tar = tarfile.open(filename)
tarfile.extractall(path=tempfile.mkdtemp(), filter="tar")
tar.close()
def members_filter(tarfile):
result = []
for member in tarfile.getmembers():
if '../' in member.name:
print('Member name container directory traversal sequence')
continue
elif (member.issym() or member.islnk()) and ('../' in member.linkname):
print('Symlink to external resource')
continue
result.append(member)
return result
if __name__ == "__main__":
if len(sys.argv) > 1:
filename = sys.argv[1]
unsafe_archive_handler(filename)
managed_members_archive_handler(filename)

View File

@@ -1,13 +0,0 @@
from django.db.models.expressions import RawSQL
from django.contrib.auth.models import User
User.objects.annotate(val=RawSQL('secure', []))
User.objects.annotate(val=RawSQL('%secure' % 'nos', []))
User.objects.annotate(val=RawSQL('{}secure'.format('no'), []))
raw = '"username") AS "val" FROM "auth_user" WHERE "username"="admin" --'
User.objects.annotate(val=RawSQL(raw, []))
raw = '"username") AS "val" FROM "auth_user"' \
' WHERE "username"="admin" OR 1=%s --'
User.objects.annotate(val=RawSQL(raw, [0]))
User.objects.annotate(val=RawSQL(sql='{}secure'.format('no'), params=[]))
User.objects.annotate(val=RawSQL(params=[], sql='{}secure'.format('no')))

View File

@@ -91,26 +91,3 @@ class Registry:
def foo(self) -> None:
object.__setattr__(self, "flag", True)
from typing import Optional, Union
def func(x: Union[list, Optional[int | str | float | bool]]):
pass
def func(x: bool | str):
pass
def func(x: int | str):
pass
from typing import override
@override
def func(x: bool):
pass

View File

@@ -1,149 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "33faf7ad-a3fd-4ac4-a0c3-52e507ed49df",
"metadata": {},
"outputs": [],
"source": [
"x = 1"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "481fb4bf-c1b9-47da-927f-3cfdfe4b49ec",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Simple case\n",
"x == 1"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "2f0c65a5-0a0e-4080-afce-5a8ed0d706df",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Only skip the last expression\n",
"x == 1 # B018\n",
"x == 1"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "5a3fd75d-26d9-44f7-b013-1684aabfd0ae",
"metadata": {},
"outputs": [],
"source": [
"# Nested expressions isn't relevant\n",
"if True:\n",
" x == 1"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "e00e1afa-b76c-4774-be2a-7223641579e4",
"metadata": {},
"outputs": [],
"source": [
"# Semicolons shouldn't affect the output\n",
"x == 1;"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "05eab5b9-e2ba-4954-8ef3-b035a79573fe",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Semicolons with multiple expressions\n",
"x == 1; x == 1"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "9cbbddc5-83fc-4fdb-81ab-53a3912ae898",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Comments, newlines and whitespace\n",
"x == 1 # comment\n",
"\n",
"# another comment"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python (ruff-playground)",
"language": "python",
"name": "ruff-playground"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -1,149 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "33faf7ad-a3fd-4ac4-a0c3-52e507ed49df",
"metadata": {},
"outputs": [],
"source": [
"x = 1"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "481fb4bf-c1b9-47da-927f-3cfdfe4b49ec",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Simple case\n",
"x"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "2f0c65a5-0a0e-4080-afce-5a8ed0d706df",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Only skip the last expression\n",
"x # B018\n",
"x"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "5a3fd75d-26d9-44f7-b013-1684aabfd0ae",
"metadata": {},
"outputs": [],
"source": [
"# Nested expressions isn't relevant\n",
"if True:\n",
" x"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "e00e1afa-b76c-4774-be2a-7223641579e4",
"metadata": {},
"outputs": [],
"source": [
"# Semicolons shouldn't affect the output\n",
"x;"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "05eab5b9-e2ba-4954-8ef3-b035a79573fe",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Semicolons with multiple expressions\n",
"x; x"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "9cbbddc5-83fc-4fdb-81ab-53a3912ae898",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Comments, newlines and whitespace\n",
"x # comment\n",
"\n",
"# another comment"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python (ruff-playground)",
"language": "python",
"name": "ruff-playground"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -148,62 +148,3 @@ for i in range(10):
for i in range(10):
pass # comment
pass
def foo():
print("foo")
...
def foo():
"""A docstring."""
print("foo")
...
for i in range(10):
...
...
for i in range(10):
...
...
for i in range(10):
... # comment
...
for i in range(10):
...
pass
from typing import Protocol
class Repro(Protocol):
def func(self) -> str:
"""Docstring"""
...
def impl(self) -> str:
"""Docstring"""
return self.func()
import abc
class Repro:
@abc.abstractmethod
def func(self) -> str:
"""Docstring"""
...
def impl(self) -> str:
"""Docstring"""
return self.func()
def stub(self) -> str:
"""Docstring"""
...

View File

@@ -38,15 +38,3 @@ class User:
foo: bool = BooleanField()
# ...
bar = StringField() # PIE794
class Person:
name = "Foo"
name = name + " Bar"
name = "Bar" # PIE794
class Person:
name: str = "Foo"
name: str = name + " Bar"
name: str = "Bar" # PIE794

View File

@@ -64,9 +64,3 @@ class FakeEnum10(enum.Enum):
A = enum.auto()
B = enum.auto()
C = enum.auto()
class FakeEnum10(enum.Enum):
A = ...
B = ... # PIE796
C = ... # PIE796

View File

@@ -1,7 +0,0 @@
import enum
class FakeEnum1(enum.Enum):
A = ...
B = ...
C = ...

View File

@@ -1,21 +1,9 @@
{"foo": 1, **{"bar": 1}} # PIE800
{**{"bar": 10}, "a": "b"} # PIE800
foo({**foo, **{"bar": True}}) # PIE800
{**foo, **{"bar": 10}} # PIE800
{ # PIE800
"a": "b",
# Preserve
**{
# all
"bar": 10, # the
# comments
},
}
{**foo, **buzz, **{bar: 10}} # PIE800
{**foo, "bar": True } # OK

View File

@@ -10,6 +10,7 @@ Foo.objects.create(**{**bar}) # PIE804
foo(**{})
foo(**{**data, "foo": "buzz"})
foo(**buzz)
foo(**{"bar-foo": True})
@@ -19,5 +20,3 @@ foo(**{buzz: True})
foo(**{"": True})
foo(**{f"buzz__{bar}": True})
abc(**{"for": 3})
foo(**{},)

View File

@@ -1,37 +1,27 @@
@dataclass
class Foo:
foo: List[str] = field(default_factory=lambda: []) # PIE807
bar: Dict[str, int] = field(default_factory=lambda: {}) # PIE807
class FooTable(BaseTable):
foo = fields.ListField(default=lambda: []) # PIE807
bar = fields.ListField(default=lambda: {}) # PIE807
bar = fields.ListField(default=lambda: []) # PIE807
class FooTable(BaseTable):
foo = fields.ListField(lambda: []) # PIE807
bar = fields.ListField(default=lambda: {}) # PIE807
bar = fields.ListField(lambda: []) # PIE807
@dataclass
class Foo:
foo: List[str] = field(default_factory=list)
bar: Dict[str, int] = field(default_factory=dict)
class FooTable(BaseTable):
foo = fields.ListField(list)
bar = fields.ListField(dict)
bar = fields.ListField(list)
lambda *args, **kwargs: []
lambda *args, **kwargs: {}
lambda *args: []
lambda *args: {}
lambda **kwargs: []
lambda **kwargs: {}
lambda: {**unwrap}

View File

@@ -36,54 +36,3 @@ field10: (Literal[1] | str) | Literal[2] # Error
# Should emit for union in generic parent type.
field11: dict[Literal[1] | Literal[2], str] # Error
# Should emit for unions with more than two cases
field12: Literal[1] | Literal[2] | Literal[3] # Error
field13: Literal[1] | Literal[2] | Literal[3] | Literal[4] # Error
# Should emit for unions with more than two cases, even if not directly adjacent
field14: Literal[1] | Literal[2] | str | Literal[3] # Error
# Should emit for unions with mixed literal internal types
field15: Literal[1] | Literal["foo"] | Literal[True] # Error
# Shouldn't emit for duplicate field types with same value; covered by Y016
field16: Literal[1] | Literal[1] # OK
# Shouldn't emit if in new parent type
field17: Literal[1] | dict[Literal[2], str] # OK
# Shouldn't emit if not in a union parent
field18: dict[Literal[1], Literal[2]] # OK
# Should respect name of literal type used
field19: typing.Literal[1] | typing.Literal[2] # Error
# Should emit in cases with newlines
field20: typing.Union[
Literal[
1 # test
],
Literal[2],
] # Error, newline and comment will not be emitted in message
# Should handle multiple unions with multiple members
field21: Literal[1, 2] | Literal[3, 4] # Error
# Should emit in cases with `typing.Union` instead of `|`
field22: typing.Union[Literal[1], Literal[2]] # Error
# Should emit in cases with `typing_extensions.Literal`
field23: typing_extensions.Literal[1] | typing_extensions.Literal[2] # Error
# Should emit in cases with nested `typing.Union`
field24: typing.Union[Literal[1], typing.Union[Literal[2], str]] # Error
# Should emit in cases with mixed `typing.Union` and `|`
field25: typing.Union[Literal[1], Literal[2] | str] # Error
# Should emit only once in cases with multiple nested `typing.Union`
field24: typing.Union[Literal[1], typing.Union[Literal[2], typing.Union[Literal[3], Literal[4]]]] # Error
# Should use the first literal subscript attribute when fixing
field25: typing.Union[typing_extensions.Literal[1], typing.Union[Literal[2], typing.Union[Literal[3], Literal[4]]], str] # Error

View File

@@ -84,6 +84,3 @@ field25: typing.Union[Literal[1], Literal[2] | str] # Error
# Should emit only once in cases with multiple nested `typing.Union`
field24: typing.Union[Literal[1], typing.Union[Literal[2], typing.Union[Literal[3], Literal[4]]]] # Error
# Should use the first literal subscript attribute when fixing
field25: typing.Union[typing_extensions.Literal[1], typing.Union[Literal[2], typing.Union[Literal[3], Literal[4]]], str] # Error

View File

@@ -3,11 +3,9 @@
import abc
import builtins
import collections.abc
import enum
import typing
from abc import ABCMeta, abstractmethod
from abc import abstractmethod
from collections.abc import AsyncIterable, AsyncIterator, Iterable, Iterator
from enum import EnumMeta
from typing import Any, overload
import typing_extensions
@@ -201,31 +199,6 @@ class AsyncIteratorReturningAsyncIterable:
... # Y045 "__aiter__" methods should return an AsyncIterator, not an AsyncIterable
class MetaclassInWhichSelfCannotBeUsed(type):
def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed: ...
def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed: ...
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed) -> MetaclassInWhichSelfCannotBeUsed: ...
class MetaclassInWhichSelfCannotBeUsed2(EnumMeta):
def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed2: ...
def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed2: ...
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed2: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed2) -> MetaclassInWhichSelfCannotBeUsed2: ...
class MetaclassInWhichSelfCannotBeUsed3(enum.EnumType):
def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed3: ...
def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed3: ...
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed3: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed3) -> MetaclassInWhichSelfCannotBeUsed3: ...
class MetaclassInWhichSelfCannotBeUsed4(ABCMeta):
def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed4: ...
def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed4: ...
async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed4: ...
def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed4) -> MetaclassInWhichSelfCannotBeUsed4: ...
class Abstract(Iterator[str]):
@abstractmethod
def __iter__(self) -> Iterator[str]:

View File

@@ -3,11 +3,9 @@
import abc
import builtins
import collections.abc
import enum
import typing
from abc import ABCMeta, abstractmethod
from abc import abstractmethod
from collections.abc import AsyncIterable, AsyncIterator, Iterable, Iterator
from enum import EnumMeta
from typing import Any, overload
import typing_extensions
@@ -154,30 +152,6 @@ class AsyncIteratorReturningAsyncIterable:
        str
    ]: ... # Y045 "__aiter__" methods should return an AsyncIterator, not an AsyncIterable
class MetaclassInWhichSelfCannotBeUsed(type):
    def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed: ...
    def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed: ...
    async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed: ...
    def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed) -> MetaclassInWhichSelfCannotBeUsed: ...
class MetaclassInWhichSelfCannotBeUsed2(EnumMeta):
    def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed2: ...
    def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed2: ...
    async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed2: ...
    def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed2) -> MetaclassInWhichSelfCannotBeUsed2: ...
class MetaclassInWhichSelfCannotBeUsed3(enum.EnumType):
    def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed3: ...
    def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed3: ...
    async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed3: ...
    def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed3) -> MetaclassInWhichSelfCannotBeUsed3: ...
class MetaclassInWhichSelfCannotBeUsed4(ABCMeta):
    def __new__(cls) -> MetaclassInWhichSelfCannotBeUsed4: ...
    def __enter__(self) -> MetaclassInWhichSelfCannotBeUsed4: ...
    async def __aenter__(self) -> MetaclassInWhichSelfCannotBeUsed4: ...
    def __isub__(self, other: MetaclassInWhichSelfCannotBeUsed4) -> MetaclassInWhichSelfCannotBeUsed4: ...
class Abstract(Iterator[str]):
    @abstractmethod
    def __iter__(self) -> Iterator[str]: ...

View File

@@ -22,7 +22,3 @@ Snake_case_alias: TypeAlias = int | float # PYI042, since not camel case
# check that this edge case doesn't crash
_: TypeAlias = str | int
# PEP 695
type foo_bar = int | str
type FooBar = int | str

View File

@@ -22,7 +22,3 @@ Snake_case_alias: TypeAlias = int | float # PYI042, since not camel case
# check that this edge case doesn't crash
_: TypeAlias = str | int
# PEP 695
type foo_bar = int | str
type FooBar = int | str

View File

@@ -21,7 +21,3 @@ _PrivateAliasS2: TypeAlias = Annotated[str, "also okay"]
# check that this edge case doesn't crash
_: TypeAlias = str | int
# PEP 695
type _FooT = str | int
type Foo = str | int

View File

@@ -21,7 +21,3 @@ _PrivateAliasS2: TypeAlias = Annotated[str, "also okay"]
# check that this edge case doesn't crash
_: TypeAlias = str | int
# PEP 695
type _FooT = str | int
type Foo = str | int

View File

@@ -10,14 +10,3 @@ def foo_no_return_typing_extensions(
def foo_no_return_kwarg(arg: int, *, arg2: NoReturn): ... # Error: PYI050
def foo_no_return_pos_only(arg: int, /, arg2: NoReturn): ... # Error: PYI050
def foo_never(arg: Never): ...
def foo_args(*args: NoReturn): ... # Error: PYI050
def foo_kwargs(**kwargs: NoReturn): ... # Error: PYI050
def foo_args_kwargs(*args: NoReturn, **kwargs: NoReturn): ... # Error: PYI050
def foo_int_args(*args: int): ...
def foo_int_kwargs(**kwargs: int): ...
def foo_int_args_kwargs(*args: int, **kwargs: int): ...
def foo_int_args_no_return(*args: int, **kwargs: NoReturn): ... # Error: PYI050
def foo_int_kwargs_no_return(*args: NoReturn, **kwargs: int): ... # Error: PYI050
def foo_args_never(*args: Never): ...
def foo_kwargs_never(**kwargs: Never): ...
def foo_args_kwargs_never(*args: Never, **kwargs: Never): ...

View File

@@ -91,17 +91,3 @@ field27 = list[str]
field28 = builtins.str
field29 = str
field30 = str | bytes | None
# We shouldn't emit Y052 for `enum` subclasses.
from enum import Enum
class Foo(Enum):
    FOO = 0
    BAR = 1
class Bar(Foo):
    BAZ = 2
    BOP = 3
class Bop:
    WIZ = 4

View File

@@ -98,17 +98,3 @@ field27 = list[str]
field28 = builtins.str
field29 = str
field30 = str | bytes | None
# We shouldn't emit Y052 for `enum` subclasses.
from enum import Enum
class Foo(Enum):
    FOO = 0
    BAR = 1
class Bar(Foo):
    BAZ = 2
    BOP = 3
class Bop:
    WIZ = 4

View File

@@ -1,45 +0,0 @@
this_should_raise_Q004 = 'This is a \"string\"'
this_should_raise_Q004 = 'This is \\ a \\\"string\"'
this_is_fine = '"This" is a \"string\"'
this_is_fine = "This is a 'string'"
this_is_fine = "\"This\" is a 'string'"
this_is_fine = r'This is a \"string\"'
this_is_fine = R'This is a \"string\"'
this_should_raise_Q004 = (
    'This is a'
    '\"string\"'
)
# Same as above, but with f-strings
f'This is a \"string\"' # Q004
f'This is \\ a \\\"string\"' # Q004
f'"This" is a \"string\"'
f"This is a 'string'"
f"\"This\" is a 'string'"
fr'This is a \"string\"'
fR'This is a \"string\"'
this_should_raise_Q004 = (
    f'This is a'
    f'\"string\"' # Q004
)
# Nested f-strings (Python 3.12+)
#
# The first one is interesting because the fix for it is valid pre 3.12:
#
# f"'foo' {'nested'}"
#
# but as the actual string itself is invalid pre 3.12, we don't catch it.
f'\"foo\" {'nested'}' # Q004
f'\"foo\" {f'nested'}' # Q004
f'\"foo\" {f'\"nested\"'} \"\"' # Q004
f'normal {f'nested'} normal'
f'\"normal\" {f'nested'} normal' # Q004
f'\"normal\" {f'nested'} "double quotes"'
f'\"normal\" {f'\"nested\" {'other'} normal'} "double quotes"' # Q004
f'\"normal\" {f'\"nested\" {'other'} "double quotes"'} normal' # Q004
# Make sure we do not unescape quotes
this_is_fine = 'This is an \\"escaped\\" quote'
this_should_raise_Q004 = 'This is an \\\"escaped\\\" quote with an extra backslash'

View File

@@ -1,43 +0,0 @@
this_should_raise_Q004 = "This is a \'string\'"
this_should_raise_Q004 = "'This' is a \'string\'"
this_is_fine = 'This is a "string"'
this_is_fine = '\'This\' is a "string"'
this_is_fine = r"This is a \'string\'"
this_is_fine = R"This is a \'string\'"
this_should_raise_Q004 = (
    "This is a"
    "\'string\'"
)
# Same as above, but with f-strings
f"This is a \'string\'" # Q004
f"'This' is a \'string\'" # Q004
f'This is a "string"'
f'\'This\' is a "string"'
fr"This is a \'string\'"
fR"This is a \'string\'"
this_should_raise_Q004 = (
    f"This is a"
    f"\'string\'" # Q004
)
# Nested f-strings (Python 3.12+)
#
# The first one is interesting because the fix for it is valid pre 3.12:
#
# f'"foo" {"nested"}'
#
# but as the actual string itself is invalid pre 3.12, we don't catch it.
f"\'foo\' {"foo"}" # Q004
f"\'foo\' {f"foo"}" # Q004
f"\'foo\' {f"\'foo\'"} \'\'" # Q004
f"normal {f"nested"} normal"
f"\'normal\' {f"nested"} normal" # Q004
f"\'normal\' {f"nested"} 'single quotes'"
f"\'normal\' {f"\'nested\' {"other"} normal"} 'single quotes'" # Q004
f"\'normal\' {f"\'nested\' {"other"} 'single quotes'"} normal" # Q004
# Make sure we do not unescape quotes
this_is_fine = "This is an \\'escaped\\' quote"
this_should_raise_Q004 = "This is an \\\'escaped\\\' quote with an extra backslash"

View File

@@ -82,14 +82,3 @@ raise IndexError();
# RSE102
raise Foo()
# OK
raise ctypes.WinError()
def func():
    pass
# OK
raise func()

View File

@@ -134,32 +134,3 @@ with A() as a:
f" something { my_dict["key"] } something else "
f"foo {f"bar {x}"} baz"
# Allow cascading for some statements.
import anyio
import asyncio
import trio
async with asyncio.timeout(1):
    async with A() as a:
        pass
async with A():
    async with asyncio.timeout(1):
        pass
async with asyncio.timeout(1):
    async with asyncio.timeout_at(1):
        async with anyio.CancelScope():
            async with anyio.fail_after(1):
                async with anyio.move_on_after(1):
                    async with trio.fail_after(1):
                        async with trio.fail_at(1):
                            async with trio.move_on_after(1):
                                async with trio.move_on_at(1):
                                    pass
# Do not suppress combination if a context manager is already combined with another.
async with asyncio.timeout(1), A():
    async with B():
        pass

View File

@@ -25,11 +25,3 @@ a = {}.get(key, None)
# SIM910
({}).get(key, None)
# SIM910
ages = {"Tom": 23, "Maria": 23, "Dog": 11}
age = ages.get("Cat", None)
# OK
ages = ["Tom", "Maria", "Dog"]
age = ages.get("Cat", None)

View File

@@ -1,7 +1,8 @@
async def func():
import trio
from trio import sleep
import trio
from trio import sleep
async def func():
    await trio.sleep(0) # TRIO115
    await trio.sleep(1) # OK
    await trio.sleep(0, 1) # OK
@@ -20,16 +21,8 @@ async def func():
    trio.sleep(bar)
    trio.sleep(0) # TRIO115
def func():
    trio.run(trio.sleep(0)) # TRIO115
from trio import Event, sleep
def func():
    sleep(0) # TRIO115
async def func():
    await sleep(seconds=0) # TRIO115

View File

@@ -1,12 +1,11 @@
from __future__ import annotations
from pandas import DataFrame
from pydantic import BaseModel
from collections.abc import Sequence # TCH003
class Parent(BaseModel):
    ...
class MyBaseClass:
    pass
class Child(Parent):
    baz: DataFrame
class Foo(MyBaseClass):
    foo: Sequence

View File

@@ -1,34 +0,0 @@
"""Test module."""
from __future__ import annotations
from functools import singledispatch
from typing import TYPE_CHECKING
from numpy import asarray
from numpy.typing import ArrayLike
from scipy.sparse import spmatrix
from pandas import DataFrame
if TYPE_CHECKING:
    from numpy import ndarray
@singledispatch
def to_array_or_mat(a: ArrayLike | spmatrix) -> ndarray | spmatrix:
    """Convert arg to array or leaves it as sparse matrix."""
    msg = f"Unhandled type {type(a)}"
    raise NotImplementedError(msg)
@to_array_or_mat.register
def _(a: ArrayLike) -> ndarray:
    return asarray(a)
@to_array_or_mat.register
def _(a: spmatrix) -> spmatrix:
    return a
def _(a: DataFrame) -> DataFrame:
    return a

View File

@@ -1,2 +0,0 @@
import __future__
from __future__ import annotations

View File

@@ -1,5 +0,0 @@
import encodings
from datetime import timezone as tz
from datetime import timedelta
import datetime as dt
import datetime

View File

@@ -1,5 +0,0 @@
from __future__ import blah
from os import path
import os

View File

@@ -1,2 +0,0 @@
import __future__
from __future__ import annotations

View File

@@ -1,3 +0,0 @@
from mediuuuuuuuuuuum import a
from short import b
from loooooooooooooooooooooog import c

View File

@@ -1,11 +0,0 @@
from module1 import (
    loooooooooooooong,
    σηορτ,
    mediuuuuum,
    shoort,
    looooooooooooooong,
    μεδιυυυυυμ,
    short,
    mediuuuuuum,
    λοοοοοοοοοοοοοονγ,
)

View File

@@ -1,9 +0,0 @@
import loooooooooooooong
import mediuuuuuum
import short
import σηορτ
import shoort
import mediuuuuum
import λοοοοοοοοοοοοοονγ
import μεδιυυυυυμ
import looooooooooooooong

View File

@@ -1,6 +0,0 @@
import mediuuuuuum
import short
import looooooooooooooooong
from looooooooooooooong import a
from mediuuuum import c
from short import b

View File

@@ -1,4 +0,0 @@
import mediuuuuuumb
import short
import looooooooooooooooong
import mediuuuuuuma

View File

@@ -1,7 +0,0 @@
from ..looooooooooooooong import a
from ...mediuuuum import b
from .short import c
from ....short import c
from . import d
from .mediuuuum import a
from ......short import b

View File

@@ -1,3 +0,0 @@
from looooooooooooooong import a
from mediuuuum import *
from short import *

View File

@@ -1,11 +0,0 @@
import os
import __main__
import third_party
import first_party
os.a
third_party.a
__main__.a
first_party.a

View File

@@ -1,8 +0,0 @@
from __future__ import annotations
import django.settings
import os
import pytz
import sys
from . import local
from library import foo

View File

@@ -1,6 +1,6 @@
import collections
from collections import namedtuple
from typing import Type, TypeAlias, TypeVar, NewType, NamedTuple, TypedDict
from typing import TypeAlias, TypeVar, NewType, NamedTuple, TypedDict
GLOBAL: str = "foo"
@@ -25,8 +25,6 @@ def assign():
    IntOrStr: TypeAlias = int | str
    type MyInt = int
def aug_assign(rank, world_size):
    global CURRENT_PORT
@@ -40,18 +38,3 @@ def loop_assign():
    global CURRENT_PORT
    for CURRENT_PORT in range(5):
        pass
def model_assign() -> None:
    Bad = apps.get_model("zerver", "Stream") # N806
    Attachment = apps.get_model("zerver", "Attachment") # OK
    Recipient = apps.get_model("zerver", model_name="Recipient") # OK
    Address: Type = apps.get_model("zerver", "Address") # OK
    from django.utils.module_loading import import_string
    Bad = import_string("django.core.exceptions.ValidationError") # N806
    ValidationError = import_string("django.core.exceptions.ValidationError") # OK
    Bad = apps.get_model() # N806
    Bad = apps.get_model(model_name="Stream") # N806

View File

@@ -50,16 +50,3 @@ import itertools
for i in itertools.product(foo_int): # Ok
    pass
for i in list(foo_list): # Ok
    foo_list.append(i + 1)
for i in list(foo_list): # PERF101
    # Make sure we match the correct list
    other_list.append(i + 1)
for i in list(foo_tuple): # Ok
    foo_tuple.append(i + 1)
for i in list(foo_set): # Ok
    foo_set.append(i + 1)

View File

@@ -89,61 +89,3 @@ x = [ #
f"{ {'a': 1} }"
f"{[ { {'a': 1} } ]}"
f"normal { {f"{ { [1, 2] } }" } } normal"
#: Okay
ham[lower + offset : upper + offset]
#: Okay
ham[(lower + offset) : upper + offset]
#: E203:1:19
ham{lower + offset : upper + offset}
#: E203:1:19
ham[lower + offset : upper + offset]
#: Okay
release_lines = history_file_lines[history_file_lines.index('## Unreleased') + 1: -1]
#: Okay
release_lines = history_file_lines[history_file_lines.index('## Unreleased') + 1 : -1]
#: Okay
ham[1:9], ham[1:9:3], ham[:9:3], ham[1::3], ham[1:9:]
ham[lower:upper], ham[lower:upper:], ham[lower::step]
ham[lower+offset : upper+offset]
ham[: upper_fn(x) : step_fn(x)], ham[:: step_fn(x)]
ham[lower + offset : upper + offset]
#: E201:1:5
ham[ : upper]
#: Okay
ham[lower + offset :: upper + offset]
#: Okay
ham[(lower + offset) :: upper + offset]
#: Okay
ham[lower + offset::upper + offset]
#: E203:1:21
ham[lower + offset : : upper + offset]
#: E203:1:20
ham[lower + offset: :upper + offset]
#: E203:1:20
ham[{lower + offset : upper + offset} : upper + offset]
#: Okay
ham[upper:]
#: Okay
ham[upper :]
#: E202:1:12
ham[upper : ]
#: E203:1:10
ham[upper :]

Some files were not shown because too many files have changed in this diff.