Compare commits


1 Commit

Author | SHA1 | Message | Date
Zanie | 17e77fe515 | Change the Cargo.toml file | 2023-12-06 22:13:44 -06:00
688 changed files with 10953 additions and 37301 deletions


@@ -5,10 +5,6 @@ updates:
schedule:
interval: "weekly"
labels: ["internal"]
groups:
actions:
patterns:
- "*"
- package-ecosystem: "cargo"
directory: "/"


@@ -95,9 +95,9 @@ jobs:
rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: "Clippy"
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
run: cargo clippy --workspace --all-targets --all-features -- -D warnings
- name: "Clippy (wasm)"
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings
cargo-test-linux:
runs-on: ubuntu-latest
@@ -180,7 +180,7 @@ jobs:
- name: "Install cargo-fuzz"
uses: taiki-e/install-action@v2
with:
tool: cargo-fuzz@0.11.2
tool: cargo-fuzz@0.11
- run: cargo fuzz build -s none
scripts:
@@ -215,7 +215,7 @@ jobs:
}}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -226,7 +226,7 @@ jobs:
name: ruff
path: target/debug
- uses: dawidd6/action-download-artifact@v3
- uses: dawidd6/action-download-artifact@v2
name: Download baseline Ruff binary
with:
name: ruff
@@ -338,7 +338,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -362,7 +362,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Install Rust toolchain"
@@ -392,7 +392,7 @@ jobs:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.8.0
@@ -455,7 +455,7 @@ jobs:
with:
repository: "astral-sh/ruff-lsp"
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}


@@ -20,7 +20,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.8.0

.github/workflows/flake8-to-ruff.yaml (new file, 247 lines)

@@ -0,0 +1,247 @@
name: "[flake8-to-ruff] Release"
on: workflow_dispatch
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
PACKAGE_NAME: flake8-to-ruff
CRATE_NAME: flake8_to_ruff
PYTHON_VERSION: "3.11"
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
macos-x86_64:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --out dist --sdist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel - x86_64"
run: |
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
macos-universal:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels - universal2"
uses: PyO3/maturin-action@v1
with:
args: --release --target universal2-apple-darwin --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel - universal2"
run: |
pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
windows:
runs-on: windows-latest
strategy:
matrix:
target: [x64, x86]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.target }}
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
shell: bash
run: |
python -m pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
linux:
runs-on: ubuntu-latest
strategy:
matrix:
target: [x86_64, i686]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
if: matrix.target == 'x86_64'
run: |
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
linux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
target: [aarch64, armv7, s390x, ppc64le, ppc64]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- uses: uraimo/run-on-arch-action@v2
if: matrix.target != 'ppc64'
name: Install built wheel
with:
arch: ${{ matrix.target }}
distro: ubuntu20.04
githubToken: ${{ github.token }}
install: |
apt-get update
apt-get install -y --no-install-recommends python3 python3-pip
pip3 install -U pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
musllinux:
runs-on: ubuntu-latest
strategy:
matrix:
target:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
with:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add py3-pip
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
musllinux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
platform:
- target: aarch64-unknown-linux-musl
arch: aarch64
- target: armv7-unknown-linux-musleabihf
arch: armv7
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- uses: uraimo/run-on-arch-action@v2
name: Install built wheel
with:
arch: ${{ matrix.platform.arch }}
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add py3-pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
release:
name: Release
runs-on: ubuntu-latest
needs:
- macos-universal
- macos-x86_64
- windows
- linux
- linux-cross
- musllinux
- musllinux-cross
steps:
- uses: actions/download-artifact@v3
with:
name: wheels
- uses: actions/setup-python@v4
- name: "Publish to PyPi"
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.FLAKE8_TO_RUFF_TOKEN }}
run: |
pip install --upgrade twine
twine upload --skip-existing *


@@ -17,7 +17,7 @@ jobs:
comment:
runs-on: ubuntu-latest
steps:
- uses: dawidd6/action-download-artifact@v3
- uses: dawidd6/action-download-artifact@v2
name: Download pull request number
with:
name: pr-number
@@ -32,7 +32,7 @@ jobs:
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
fi
- uses: dawidd6/action-download-artifact@v3
- uses: dawidd6/action-download-artifact@v2
name: "Download ecosystem results"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number


@@ -36,7 +36,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -63,7 +63,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -73,7 +73,7 @@ jobs:
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel - x86_64"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
@@ -103,7 +103,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -112,7 +112,7 @@ jobs:
- name: "Build wheels - universal2"
uses: PyO3/maturin-action@v1
with:
args: --release --locked --target universal2-apple-darwin --out dist
args: --release --target universal2-apple-darwin --out dist
- name: "Test wheel - universal2"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
@@ -151,7 +151,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.platform.arch }}
@@ -161,7 +161,7 @@ jobs:
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel"
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
shell: bash
@@ -199,7 +199,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -210,7 +210,7 @@ jobs:
with:
target: ${{ matrix.target }}
manylinux: auto
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel"
if: ${{ startsWith(matrix.target, 'x86_64') }}
run: |
@@ -258,7 +258,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -269,7 +269,7 @@ jobs:
target: ${{ matrix.platform.target }}
manylinux: auto
docker-options: ${{ matrix.platform.maturin_docker_options }}
args: --release --locked --out dist
args: --release --out dist
- uses: uraimo/run-on-arch-action@v2
if: matrix.platform.arch != 'ppc64'
name: Test wheel
@@ -313,7 +313,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -324,7 +324,7 @@ jobs:
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
@@ -332,10 +332,10 @@ jobs:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add python3
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/ruff check --help
apk add py3-pip
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
@@ -369,7 +369,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -379,7 +379,7 @@ jobs:
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
args: --release --out dist
docker-options: ${{ matrix.platform.maturin_docker_options }}
- uses: uraimo/run-on-arch-action@v2
name: Test wheel
@@ -388,11 +388,10 @@ jobs:
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add python3
apk add py3-pip
run: |
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/ruff check --help
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff check --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:


@@ -1,42 +1,5 @@
# Breaking Changes
## 0.1.9
### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))
Ruff maintains a list of default exclusions, which now consists of the following patterns:
- `.bzr`
- `.direnv`
- `.eggs`
- `.git-rewrite`
- `.git`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `build`
- `dist`
- `node_modules`
- `site-packages`
- `venv`
Previously, the `site-packages` directory was not excluded by default. While `site-packages` tends
to be excluded anyway by virtue of the `.venv` exclusion, this may not be the case when using Ruff
from VS Code outside a virtual environment.
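
A minimal sketch of the effect (assuming `ruff` is on `PATH`; the override shown is illustrative):

```shell
# With 0.1.9+, files under site-packages are skipped by default:
ruff check .

# To lint them anyway, replace the default exclusion list, e.g. by
# setting `exclude = []` under `[tool.ruff]` in pyproject.toml, then re-run:
ruff check .
```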
## 0.1.0
### The deprecated `format` setting has been removed


@@ -1,185 +1,5 @@
# Changelog
## 0.1.11
### Preview features
- \[`pylint`\] Implement `super-without-brackets` (`W0245`) ([#9257](https://github.com/astral-sh/ruff/pull/9257))
### Bug fixes
- Check path string properly in `python -m ruff` invocations ([#9367](https://github.com/astral-sh/ruff/pull/9367))
### Documentation
- Tweak `relative-imports` message ([#9365](https://github.com/astral-sh/ruff/pull/9365))
- Add fix safety note for `yield-in-for-loop` ([#9364](https://github.com/astral-sh/ruff/pull/9364))
## 0.1.10
### Preview features
- Improve `dummy_implementations` preview style formatting ([#9240](https://github.com/astral-sh/ruff/pull/9240))
- Normalise Hex and unicode escape sequences in strings ([#9280](https://github.com/astral-sh/ruff/pull/9280))
- Parenthesize long type annotations in annotated assignments ([#9210](https://github.com/astral-sh/ruff/pull/9210))
- Parenthesize multi-context managers in `with` statements ([#9222](https://github.com/astral-sh/ruff/pull/9222))
- \[`flake8-pyi`\] Implement `generator-return-from-iter-method` (`PYI058`) ([#9313](https://github.com/astral-sh/ruff/pull/9313))
- \[`pylint`\] Implement `empty-comment` (`PLR2044`) ([#9174](https://github.com/astral-sh/ruff/pull/9174))
- \[`refurb`\] Implement `bit-count` (`FURB161`) ([#9265](https://github.com/astral-sh/ruff/pull/9265))
- \[`ruff`\] Add `never-union` rule to detect redundant `typing.NoReturn` and `typing.Never` ([#9217](https://github.com/astral-sh/ruff/pull/9217))
### CLI
- Add paths to TOML parse errors ([#9358](https://github.com/astral-sh/ruff/pull/9358))
- Add row and column numbers to formatter parse errors ([#9321](https://github.com/astral-sh/ruff/pull/9321))
- Improve responsiveness when invoked via Python ([#9315](https://github.com/astral-sh/ruff/pull/9315))
- Short rule messages should not end with a period ([#9345](https://github.com/astral-sh/ruff/pull/9345))
### Configuration
- Respect runtime-required decorators on functions ([#9317](https://github.com/astral-sh/ruff/pull/9317))
### Bug fixes
- Avoid `asyncio-dangling-task` for nonlocal and global bindings ([#9263](https://github.com/astral-sh/ruff/pull/9263))
- Escape trailing placeholders in rule documentation ([#9301](https://github.com/astral-sh/ruff/pull/9301))
- Fix continuation detection following multi-line strings ([#9332](https://github.com/astral-sh/ruff/pull/9332))
- Fix scoping for generators in named expressions in classes ([#9248](https://github.com/astral-sh/ruff/pull/9248))
- Port from obsolete wsl crate to is-wsl ([#9356](https://github.com/astral-sh/ruff/pull/9356))
- Remove special pre-visit for module docstrings ([#9261](https://github.com/astral-sh/ruff/pull/9261))
- Respect `__str__` definitions from super classes ([#9338](https://github.com/astral-sh/ruff/pull/9338))
- Respect `unused-noqa` via `per-file-ignores` ([#9300](https://github.com/astral-sh/ruff/pull/9300))
- Respect attribute chains when resolving builtin call paths ([#9309](https://github.com/astral-sh/ruff/pull/9309))
- Treat all `typing_extensions` members as typing aliases ([#9335](https://github.com/astral-sh/ruff/pull/9335))
- Use `Display` for formatter parse errors ([#9316](https://github.com/astral-sh/ruff/pull/9316))
- Wrap subscripted dicts in parens for f-string conversion ([#9238](https://github.com/astral-sh/ruff/pull/9238))
- \[`flake8-annotations`\] Avoid adding return types to stub methods ([#9277](https://github.com/astral-sh/ruff/pull/9277))
- \[`flake8-annotations`\] Respect mixed `return` and `raise` cases in return-type analysis ([#9310](https://github.com/astral-sh/ruff/pull/9310))
- \[`flake8-bandit`\] Don't report violations when `SafeLoader` is imported from `yaml.loader` (`S506`) ([#9299](https://github.com/astral-sh/ruff/pull/9299))
- \[`pylint`\] Avoid panic when comment is preceded by Unicode ([#9331](https://github.com/astral-sh/ruff/pull/9331))
- \[`pylint`\] Change `PLR0917` error message to match other `PLR09XX` messages ([#9308](https://github.com/astral-sh/ruff/pull/9308))
- \[`refurb`\] Avoid false positives for `math-constant` (`FURB152`) ([#9290](https://github.com/astral-sh/ruff/pull/9290))
### Documentation
- Expand target name for better rule documentation ([#9302](https://github.com/astral-sh/ruff/pull/9302))
- Fix typos found by codespell ([#9346](https://github.com/astral-sh/ruff/pull/9346))
- \[`perflint`\] Document `PERF102` fix un-safety ([#9351](https://github.com/astral-sh/ruff/pull/9351))
- \[`pyupgrade`\] Document `UP007` fix un-safety ([#9306](https://github.com/astral-sh/ruff/pull/9306))
## 0.1.9
### Breaking changes
- Add site-packages to default exclusions ([#9188](https://github.com/astral-sh/ruff/pull/9188))
### Preview features
- Fix: Avoid parenthesizing subscript targets and values ([#9209](https://github.com/astral-sh/ruff/pull/9209))
- \[`pylint`\] Implement `too-many-locals` (`PLR0914`) ([#9163](https://github.com/astral-sh/ruff/pull/9163))
- Implement `reimplemented_operator` (FURB118) ([#9171](https://github.com/astral-sh/ruff/pull/9171))
- Add a rule to detect string members in runtime-evaluated unions ([#9143](https://github.com/astral-sh/ruff/pull/9143))
- Implement `no_blank_line_before_class_docstring` preview style ([#9154](https://github.com/astral-sh/ruff/pull/9154))
### Rule changes
- `CONSTANT_CASE` variables are improperly flagged for yoda violation (`SIM300`) ([#9164](https://github.com/astral-sh/ruff/pull/9164))
- \[`flake8-pyi`\] Cover ParamSpecs and TypeVarTuples (`PYI018`) ([#9198](https://github.com/astral-sh/ruff/pull/9198))
- \[`flake8-bugbear`\] Add fix for `zip-without-explicit-strict` (`B905`) ([#9176](https://github.com/astral-sh/ruff/pull/9176))
- Add fix to automatically remove `print` and `pprint` statements (`T201`, `T203`) ([#9208](https://github.com/astral-sh/ruff/pull/9208))
- Prefer `Never` to `NoReturn` in auto-typing in Python >= 3.11 (`ANN201`) ([#9213](https://github.com/astral-sh/ruff/pull/9213))
### Formatter
- `can_omit_optional_parentheses`: Exit early for unparenthesized expressions ([#9125](https://github.com/astral-sh/ruff/pull/9125))
- Fix `dynamic` mode with doctests so that it doesn't exceed configured line width ([#9129](https://github.com/astral-sh/ruff/pull/9129))
- Fix `can_omit_optional_parentheses` for expressions with a right most fstring ([#9124](https://github.com/astral-sh/ruff/pull/9124))
- Add `target_version` to formatter options ([#9220](https://github.com/astral-sh/ruff/pull/9220))
### CLI
- Update `ruff format --check` to display message for already formatted files ([#9153](https://github.com/astral-sh/ruff/pull/9153))
### Bug fixes
- Reverse order of arguments for `operator.contains` ([#9192](https://github.com/astral-sh/ruff/pull/9192))
- Iterate over lambdas in deferred type annotations ([#9175](https://github.com/astral-sh/ruff/pull/9175))
- Fix panic in `D208` with multibyte indent ([#9147](https://github.com/astral-sh/ruff/pull/9147))
- Add support for `NoReturn` in auto-return-typing ([#9206](https://github.com/astral-sh/ruff/pull/9206))
- Allow removal of `typing` from `exempt-modules` ([#9214](https://github.com/astral-sh/ruff/pull/9214))
- Avoid `mutable-class-default` violations for Pydantic subclasses ([#9187](https://github.com/astral-sh/ruff/pull/9187))
- Fix dropped union expressions for piped non-types in `PYI055` autofix ([#9161](https://github.com/astral-sh/ruff/pull/9161))
- Enable annotation quoting for multi-line expressions ([#9142](https://github.com/astral-sh/ruff/pull/9142))
- Deduplicate edits when quoting annotations ([#9140](https://github.com/astral-sh/ruff/pull/9140))
- Prevent invalid utf8 indexing in cell magic detection ([#9146](https://github.com/astral-sh/ruff/pull/9146))
- Avoid nested quotations in auto-quoting fix ([#9168](https://github.com/astral-sh/ruff/pull/9168))
- Add base-class inheritance detection to flake8-django rules ([#9151](https://github.com/astral-sh/ruff/pull/9151))
- Avoid `asyncio-dangling-task` violations on shadowed bindings ([#9215](https://github.com/astral-sh/ruff/pull/9215))
### Documentation
- Fix blog post URL in changelog ([#9119](https://github.com/astral-sh/ruff/pull/9119))
- Add error suppression hint for multi-line strings ([#9205](https://github.com/astral-sh/ruff/pull/9205))
- Fix typo in SemanticModel.parent_expression docstring ([#9167](https://github.com/astral-sh/ruff/pull/9167))
- Document link between import sorting and formatter ([#9117](https://github.com/astral-sh/ruff/pull/9117))
## 0.1.8
This release includes opt-in support for formatting Python snippets within
docstrings via the `docstring-code-format` setting.
[Check out the blog post](https://astral.sh/blog/ruff-v0.1.8) for more details!
### Preview features
- Add `"preserve"` quote-style to mimic Black's skip-string-normalization ([#8822](https://github.com/astral-sh/ruff/pull/8822))
- Implement `prefer_splitting_right_hand_side_of_assignments` preview style ([#8943](https://github.com/astral-sh/ruff/pull/8943))
- \[`pycodestyle`\] Add fix for `unexpected-spaces-around-keyword-parameter-equals` ([#9072](https://github.com/astral-sh/ruff/pull/9072))
- \[`pycodestyle`\] Add fix for comment-related whitespace rules ([#9075](https://github.com/astral-sh/ruff/pull/9075))
- \[`pycodestyle`\] Allow `sys.path` modifications between imports ([#9047](https://github.com/astral-sh/ruff/pull/9047))
- \[`refurb`\] Implement `hashlib-digest-hex` (`FURB181`) ([#9077](https://github.com/astral-sh/ruff/pull/9077))
### Rule changes
- Allow `flake8-type-checking` rules to automatically quote runtime-evaluated references ([#6001](https://github.com/astral-sh/ruff/pull/6001))
- Allow transparent cell magics in Jupyter Notebooks ([#8911](https://github.com/astral-sh/ruff/pull/8911))
- \[`flake8-annotations`\] Avoid `ANN2xx` fixes for abstract methods with empty bodies ([#9034](https://github.com/astral-sh/ruff/pull/9034))
- \[`flake8-self`\] Ignore underscore references in type annotations ([#9036](https://github.com/astral-sh/ruff/pull/9036))
- \[`pep8-naming`\] Allow class names when `apps.get_model` is a non-string ([#9065](https://github.com/astral-sh/ruff/pull/9065))
- \[`pycodestyle`\] Allow `matplotlib.use` calls to intersperse imports ([#9094](https://github.com/astral-sh/ruff/pull/9094))
- \[`pyflakes`\] Support fixing unused assignments in tuples by renaming variables (`F841`) ([#9107](https://github.com/astral-sh/ruff/pull/9107))
- \[`pylint`\] Add fix for `subprocess-run-without-check` (`PLW1510`) ([#6708](https://github.com/astral-sh/ruff/pull/6708))
### Formatter
- Add `docstring-code-format` knob to enable docstring snippet formatting ([#8854](https://github.com/astral-sh/ruff/pull/8854))
- Use double quotes for all docstrings, including single-quoted docstrings ([#9020](https://github.com/astral-sh/ruff/pull/9020))
- Implement "dynamic" line width mode for docstring code formatting ([#9098](https://github.com/astral-sh/ruff/pull/9098))
- Support reformatting Markdown code blocks ([#9030](https://github.com/astral-sh/ruff/pull/9030))
- add support for formatting reStructuredText code snippets ([#9003](https://github.com/astral-sh/ruff/pull/9003))
- Avoid trailing comma for single-argument with positional separator ([#9076](https://github.com/astral-sh/ruff/pull/9076))
- Fix handling of trailing target comment ([#9051](https://github.com/astral-sh/ruff/pull/9051))
### CLI
- Hide unsafe fix suggestions when explicitly disabled ([#9095](https://github.com/astral-sh/ruff/pull/9095))
- Add SARIF support to `--output-format` ([#9078](https://github.com/astral-sh/ruff/pull/9078))
### Bug fixes
- Apply unnecessary index rule prior to enumerate rewrite ([#9012](https://github.com/astral-sh/ruff/pull/9012))
- \[`flake8-err-msg`\] Allow `EM` fixes even if `msg` variable is defined ([#9059](https://github.com/astral-sh/ruff/pull/9059))
- \[`flake8-pie`\] Prevent keyword arguments duplication ([#8450](https://github.com/astral-sh/ruff/pull/8450))
- \[`flake8-pie`\] Respect trailing comma in `unnecessary-dict-kwargs` (`PIE804`) ([#9015](https://github.com/astral-sh/ruff/pull/9015))
- \[`flake8-raise`\] Avoid removing parentheses on ctypes.WinError ([#9027](https://github.com/astral-sh/ruff/pull/9027))
- \[`isort`\] Avoid invalid combination of `force-sort-within-types` and `lines-between-types` ([#9041](https://github.com/astral-sh/ruff/pull/9041))
- \[`isort`\] Ensure that from-style imports are always ordered first in `__future__` ([#9039](https://github.com/astral-sh/ruff/pull/9039))
- \[`pycodestyle`\] Allow tab indentation before keyword ([#9099](https://github.com/astral-sh/ruff/pull/9099))
- \[`pylint`\] Ignore `@overrides` and `@overloads` for `too-many-positional` ([#9000](https://github.com/astral-sh/ruff/pull/9000))
- \[`pyupgrade`\] Enable `printf-string-formatting` fix with comments on right-hand side ([#9037](https://github.com/astral-sh/ruff/pull/9037))
- \[`refurb`\] Make `math-constant` (`FURB152`) rule more targeted ([#9054](https://github.com/astral-sh/ruff/pull/9054))
- \[`refurb`\] Support floating-point base in `redundant-log-base` (`FURB163`) ([#9100](https://github.com/astral-sh/ruff/pull/9100))
- \[`ruff`\] Detect `unused-asyncio-dangling-task` (`RUF006`) on unused assignments ([#9060](https://github.com/astral-sh/ruff/pull/9060))
## 0.1.7
### Preview features


@@ -326,18 +326,16 @@ We use an experimental in-house tool for managing releases.
- Often, labels will be missing from pull requests; these will need to be manually organized into the proper section
- Changes should be edited to be user-facing descriptions, avoiding internal details
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
1. Run `cargo check`. This should update the lock file with new versions.
1. Create a pull request with the changelog and version updates
1. Merge the PR
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yaml) with:
- The new version number (without starting `v`)
- The commit hash of the merged release pull request on `main`
1. Run the release workflow with the version number (without starting `v`) as input. Make sure
main has your merged PR as last commit
1. The release workflow will do the following:
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
uploaded anything, so you can restart after pushing a fix.
1. Upload to PyPI.
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/charliermarsh/ruff/issues/4468)).
1. Attach artifacts to draft GitHub release
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
downstream jobs manually if needed.
@@ -346,10 +344,7 @@ We use an experimental in-house tool for managing releases.
1. Copy the changelog for the release into the GitHub release
- See previous releases for formatting of section headers
1. Generate the contributor list with `rooster contributors` and add to the release notes
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
1. One can determine if an update is needed when
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
1. Once run successfully, you should follow the link in the output to create a PR.
1. If needed, [update the schemastore](https://github.com/charliermarsh/ruff/blob/main/scripts/update_schemastore.py)
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
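
A sketch of the schemastore check from the update-schemastore step above (the tag names are placeholders):

```shell
# A non-empty diff of the generated schema between the previous and new
# release tags means scripts/update_schemastore.py should be run.
git diff v0.1.8 v0.1.9 -- ruff.schema.json
```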
## Ecosystem CI
@@ -370,7 +365,7 @@ See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/pyt
We have several ways of benchmarking and profiling Ruff:
- Our main performance benchmark comparing Ruff with other tools on the CPython codebase
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Microbenchmarks which the linter or the formatter on individual files. There run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects
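
To run one of the microbenchmarks locally, an invocation along these lines should work (a sketch; the `linter` bench name matches the `cargo instruments` example later in this file):

```shell
# Run the linter microbenchmark from the ruff_benchmark crate.
cargo bench -p ruff_benchmark --bench linter
```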
### CPython Benchmark
@@ -561,10 +556,10 @@ examples.
#### Linux
Install `perf` and build `ruff_benchmark` with the `profiling` profile and then run it with perf
Install `perf` and build `ruff_benchmark` with the `release-debug` profile and then run it with perf
```shell
cargo bench -p ruff_benchmark --no-run --profile=profiling && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=profiling -- --profile-time=1
cargo bench -p ruff_benchmark --no-run --profile=release-debug && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=release-debug -- --profile-time=1
```
You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to
@@ -572,8 +567,8 @@ gather enough samples for a good flamegraph (change the 999, the sample rate, an
of checks, to your liking)
```shell
cargo build --bin ruff_dev --profile=profiling
perf record -g -F 999 target/profiling/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
cargo build --bin ruff_dev --profile=release-debug
perf record -g -F 999 target/release-debug/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
```
Then convert the recorded profile
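
The conversion command itself falls outside this hunk; a typical invocation (an assumption, with an illustrative output path) is:

```shell
# Convert the perf recording into a text profile for further analysis.
perf script -F +pid > /tmp/test.perf
```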
@@ -603,7 +598,7 @@ cargo install cargo-instruments
Then run the profiler with
```shell
cargo instruments -t time --bench linter --profile profiling -p ruff_benchmark -- --profile-time=1
cargo instruments -t time --bench linter --profile release-debug -p ruff_benchmark -- --profile-time=1
```
- `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc`

Cargo.lock (generated, 447 lines)

File diff suppressed because it is too large.


@@ -9,106 +9,51 @@ homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
license = "MIT"
license = "MIT2"
[workspace.dependencies]
aho-corasick = { version = "1.1.2" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.76" }
argfile = { version = "0.1.6" }
assert_cmd = { version = "2.0.8" }
bincode = { version = "1.3.3" }
anyhow = { version = "1.0.69" }
bitflags = { version = "2.4.1" }
cachedir = { version = "0.3.1" }
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
clap = { version = "4.4.12", features = ["derive"] }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "2.0.0" }
codspeed-criterion-compat = { version = "2.3.3", default-features = false }
colored = { version = "2.1.0" }
configparser = { version = "3.0.3" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version ="3.0.1"}
criterion = { version = "0.5.1", default-features = false }
dirs = { version = "5.0.0" }
drop_bomb = { version = "0.1.5" }
env_logger = { version ="0.10.1"}
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
fs-err = { version ="2.11.0"}
clap = { version = "4.4.7", features = ["derive"] }
colored = { version = "2.0.0" }
filetime = { version = "0.2.20" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
hexf-parse = { version ="0.2.1"}
ignore = { version = "0.4.21" }
imara-diff ={ version = "0.1.5"}
imperative = { version = "1.0.4" }
indicatif ={ version = "0.17.7"}
indoc ={ version = "2.0.4"}
ignore = { version = "0.4.20" }
insta = { version = "1.34.0", feature = ["filters", "glob"] }
insta-cmd = { version = "0.4.0" }
is-macro = { version = "0.3.4" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.12.0" }
js-sys = { version = "0.3.66" }
lalrpop-util = { version = "0.20.0", default-features = false }
lexical-parse-float = { version = "0.8.0", features = ["format"] }
is-macro = { version = "0.3.0" }
itertools = { version = "0.11.0" }
libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
memchr = { version = "2.6.4" }
mimalloc = { version ="0.1.39"}
natord = { version = "1.0.9" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
once_cell = { version = "1.17.1" }
path-absolutize = { version = "3.1.1" }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.4.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.73" }
pyproject-toml = { version = "0.8.1" }
quick-junit = { version = "0.3.5" }
proc-macro2 = { version = "1.0.70" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.8.0" }
regex = { version = "1.10.2" }
result-like = { version = "0.5.0" }
rustc-hash = { version = "1.1.0" }
schemars = { version = "0.8.16" }
seahash = { version ="4.1.0"}
semver = { version = "1.0.20" }
serde = { version = "1.0.193", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.3" }
serde_json = { version = "1.0.109" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.4.0", default-features = false, features = ["macros"] }
serde = { version = "1.0.190", features = ["derive"] }
serde_json = { version = "1.0.108" }
shellexpand = { version = "3.0.0" }
shlex = { version ="1.2.0"}
similar = { version = "2.3.0", features = ["inline"] }
smallvec = { version = "1.11.2" }
static_assertions = "1.1.0"
strum = { version = "0.25.0", features = ["strum_macros"] }
strum_macros = { version = "0.25.3" }
syn = { version = "2.0.40" }
tempfile = { version ="3.8.1"}
test-case = { version = "3.3.1" }
thiserror = { version = "1.0.51" }
tikv-jemallocator = { version ="0.5.0"}
toml = { version = "0.8.8" }
syn = { version = "2.0.39" }
test-case = { version = "3.2.1" }
thiserror = { version = "1.0.50" }
toml = { version = "0.7.8" }
tracing = { version = "0.1.40" }
tracing-indicatif = { version = "0.3.6" }
tracing-indicatif = { version = "0.3.4" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version ="0.9"}
unicode-ident = { version = "1.0.12" }
unicode_names2 = { version = "1.2.0" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.1" }
ureq = { version = "2.9.1" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.84" }
wasm-bindgen-test = { version = "0.3.39" }
wild = { version = "2" }
wsl = { version = "0.1.0" }
[workspace.lints.rust]
unsafe_code = "warn"
@@ -143,20 +88,7 @@ rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
[profile.release]
# Note that we set these explicitly, and these values
# were chosen based on a trade-off between compile times
# and runtime performance[1].
#
# [1]: https://github.com/astral-sh/ruff/pull/9031
lto = "thin"
codegen-units = 16
# Some crates don't change as much but benefit more from
# more expensive optimization passes, so we selectively
# decrease codegen-units in some cases.
[profile.release.package.ruff_python_parser]
codegen-units = 1
[profile.release.package.ruff_python_ast]
lto = "fat"
codegen-units = 1
[profile.dev.package.insta]
@@ -170,8 +102,8 @@ opt-level = 3
[profile.dev.package.ruff_python_parser]
opt-level = 1
# Use the `--profile profiling` flag to show symbols in release mode.
# e.g. `cargo build --profile profiling`
[profile.profiling]
# Use the `--profile release-debug` flag to show symbols in release mode.
# e.g. `cargo build --profile release-debug`
[profile.release-debug]
inherits = "release"
debug = 1


@@ -150,7 +150,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.11
rev: v0.1.7
hooks:
# Run the linter.
- id: ruff
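
To exercise the hook locally, something like the following should work (a sketch; assumes `pre-commit` is installed and the snippet above lives in `.pre-commit-config.yaml`):

```shell
pre-commit install
pre-commit run ruff --all-files
```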
@@ -194,25 +194,20 @@ exclude = [
".git",
".git-rewrite",
".hg",
".ipynb_checkpoints",
".mypy_cache",
".nox",
".pants.d",
".pyenv",
".pytest_cache",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
".vscode",
"__pypackages__",
"_build",
"buck-out",
"build",
"dist",
"node_modules",
"site-packages",
"venv",
]
@@ -386,7 +381,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- Benchling ([Refac](https://github.com/benchling/refac))
- [Bokeh](https://github.com/bokeh/bokeh)
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
- [DVC](https://github.com/iterative/dvc)
- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
@@ -433,7 +427,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [PyTorch](https://github.com/pytorch/pytorch)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
- [PyVista](https://github.com/pyvista/pyvista)
- [Reflex](https://github.com/reflex-dev/reflex)
- [River](https://github.com/online-ml/river)
- [Rippling](https://rippling.com)


@@ -0,0 +1,39 @@
[package]
name = "flake8-to-ruff"
version = "0.1.7"
description = """
Convert Flake8 configuration files to Ruff configuration files.
"""
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }
[dependencies]
ruff_linter = { path = "../ruff_linter", default-features = false }
ruff_workspace = { path = "../ruff_workspace" }
anyhow = { workspace = true }
clap = { workspace = true }
colored = { workspace = true }
configparser = { version = "3.0.3" }
itertools = { workspace = true }
log = { workspace = true }
once_cell = { workspace = true }
pep440_rs = { version = "0.3.12", features = ["serde"] }
regex = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
toml = { workspace = true }
[dev-dependencies]
pretty_assertions = "1.3.0"
[lints]
workspace = true


@@ -0,0 +1,99 @@
# flake8-to-ruff
Convert existing Flake8 configuration files (`setup.cfg`, `tox.ini`, or `.flake8`) for use with
[Ruff](https://github.com/astral-sh/ruff).
Generates a Ruff-compatible `pyproject.toml` section.
## Installation and Usage
### Installation
Available as [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) on PyPI:
```shell
pip install flake8-to-ruff
```
### Usage
To run `flake8-to-ruff`:
```shell
flake8-to-ruff path/to/setup.cfg
flake8-to-ruff path/to/tox.ini
flake8-to-ruff path/to/.flake8
```
`flake8-to-ruff` will print the relevant `pyproject.toml` sections to standard output, like so:
```toml
[tool.ruff]
exclude = [
'.svn',
'CVS',
'.bzr',
'.hg',
'.git',
'__pycache__',
'.tox',
'.idea',
'.mypy_cache',
'.venv',
'node_modules',
'_state_machine.py',
'test_fstring.py',
'bad_coding2.py',
'badsyntax_*.py',
]
select = [
'A',
'E',
'F',
'Q',
]
ignore = []
[tool.ruff.flake8-quotes]
inline-quotes = 'single'
[tool.ruff.pep8-naming]
ignore-names = [
'foo',
'bar',
]
```
### Plugins
`flake8-to-ruff` will attempt to infer any activated plugins based on the settings provided in your
configuration file.
For example, if your `.flake8` file includes a `docstring-convention` property, `flake8-to-ruff`
will enable the appropriate [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
checks.
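
For instance (a sketch, with illustrative file contents), a configuration that sets `docstring-convention` implies the `flake8-docstrings` plugin:

```shell
# Write a minimal .flake8 that implies the flake8-docstrings plugin.
cat > .flake8 <<'EOF'
[flake8]
docstring-convention = numpy
EOF

# flake8-to-ruff should infer the plugin and emit a corresponding
# [tool.ruff.pydocstyle] `convention` setting in its output.
flake8-to-ruff .flake8
```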
Alternatively, you can manually specify plugins on the command-line:
```shell
flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
```
## Limitations
1. Ruff only supports a subset of the Flake8 configuration options. `flake8-to-ruff` will warn on and
ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
configuration options that don't exist in Flake8.)
1. `flake8-to-ruff` will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
codes from unsupported plugins. (See the
[documentation](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8) for the complete
list of supported plugins.)
## License
MIT
## Contributing
Contributions are welcome and hugely appreciated. To get started, check out the
[contributing guidelines](https://github.com/astral-sh/ruff/blob/main/CONTRIBUTING.md).


@@ -0,0 +1,65 @@
[build-system]
requires = [
# The minimum setuptools version is specific to the PEP 517 backend,
# and may be stricter than the version required in `setup.cfg`
"setuptools>=40.6.0,!=60.9.0",
"wheel",
# Must be kept in sync with the `install_requirements` in `setup.cfg`
"cffi>=1.12; platform_python_implementation != 'PyPy'",
"setuptools-rust>=0.11.4",
]
build-backend = "setuptools.build_meta"
[tool.black]
line-length = 79
target-version = ["py36"]
[tool.pytest.ini_options]
addopts = "-r s --capture=no --strict-markers --benchmark-disable"
markers = [
"skip_fips: this test is not executed in FIPS mode",
"supported: parametrized test requiring only_if and skip_message",
]
[tool.mypy]
show_error_codes = true
check_untyped_defs = true
no_implicit_reexport = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_unused_configs = true
strict_equality = true
[[tool.mypy.overrides]]
module = [
"pretend"
]
ignore_missing_imports = true
[tool.coverage.run]
branch = true
relative_files = true
source = [
"cryptography",
"tests/",
]
[tool.coverage.paths]
source = [
"src/cryptography",
"*.tox/*/lib*/python*/site-packages/cryptography",
"*.tox\\*\\Lib\\site-packages\\cryptography",
"*.tox/pypy/site-packages/cryptography",
]
tests =[
"tests/",
"*tests\\",
]
[tool.coverage.report]
exclude_lines = [
"@abc.abstractmethod",
"@abc.abstractproperty",
"@typing.overload",
"if typing.TYPE_CHECKING",
]


@@ -0,0 +1,91 @@
[metadata]
name = cryptography
version = attr: cryptography.__version__
description = cryptography is a package which provides cryptographic recipes and primitives to Python developers.
long_description = file: README.rst
long_description_content_type = text/x-rst
license = BSD-3-Clause OR Apache-2.0
url = https://github.com/pyca/cryptography
author = The Python Cryptographic Authority and individual contributors
author_email = cryptography-dev@python.org
project_urls =
Documentation=https://cryptography.io/
Source=https://github.com/pyca/cryptography/
Issues=https://github.com/pyca/cryptography/issues
Changelog=https://cryptography.io/en/latest/changelog/
classifiers =
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: Apache Software License
License :: OSI Approved :: BSD License
Natural Language :: English
Operating System :: MacOS :: MacOS X
Operating System :: POSIX
Operating System :: POSIX :: BSD
Operating System :: POSIX :: Linux
Operating System :: Microsoft :: Windows
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: Implementation :: PyPy
Topic :: Security :: Cryptography
[options]
python_requires = >=3.6
include_package_data = True
zip_safe = False
package_dir =
=src
packages = find:
# `install_requires` must be kept in sync with `pyproject.toml`
install_requires =
cffi >=1.12
[options.packages.find]
where = src
exclude =
_cffi_src
_cffi_src.*
[options.extras_require]
test =
pytest>=6.2.0
pytest-benchmark
pytest-cov
pytest-subtests
pytest-xdist
pretend
iso8601
pytz
hypothesis>=1.11.4,!=3.79.2
docs =
sphinx >= 1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0
sphinx_rtd_theme
docstest =
pyenchant >= 1.6.11
twine >= 1.12.0
sphinxcontrib-spelling >= 4.0.1
sdist =
setuptools_rust >= 0.11.4
pep8test =
black
flake8
flake8-import-order
pep8-naming
# This extra is for OpenSSH private keys that use bcrypt KDF
# Versions: v3.1.3 - ignore_few_rounds, v3.1.5 - abi3
ssh =
bcrypt >= 3.1.5
[flake8]
ignore = E203,E211,W503,W504,N818
exclude = .tox,*.egg,.git,_build,.hypothesis
select = E,W,F,N,I
application-import-names = cryptography,cryptography_vectors,tests


@@ -0,0 +1,19 @@
[flake8]
# Ignore style and complexity
# E: style errors
# W: style warnings
# C: complexity
# D: docstring warnings (unused pydocstyle extension)
# F841: local variable assigned but never used
ignore = E, C, W, D, F841
builtins = c, get_config
exclude =
.cache,
.github,
docs,
jupyterhub/alembic*,
onbuild,
scripts,
share,
tools,
setup.py


@@ -0,0 +1,43 @@
[flake8]
# Exclude the grpc generated code
exclude = ./manim/grpc/gen/*
max-complexity = 15
max-line-length = 88
statistics = True
# Prevents some flake8-rst-docstrings errors
rst-roles = attr,class,func,meth,mod,obj,ref,doc,exc
rst-directives = manim, SEEALSO, seealso
docstring-convention=numpy
select = A,A00,B,B9,C4,C90,D,E,F,F,PT,RST,SIM,W
# General Compatibility
extend-ignore = E203, W503, D202, D212, D213, D404
# Misc
F401, F403, F405, F841, E501, E731, E402, F811, F821,
# Plug-in: flake8-builtins
A001, A002, A003,
# Plug-in: flake8-bugbear
B006, B007, B008, B009, B010, B903, B950,
# Plug-in: flake8-simplify
SIM105, SIM106, SIM119,
# Plug-in: flake8-comprehensions
C901
# Plug-in: flake8-pytest-style
PT001, PT004, PT006, PT011, PT018, PT022, PT023,
# Plug-in: flake8-docstrings
D100, D101, D102, D103, D104, D105, D106, D107,
D200, D202, D204, D205, D209,
D301,
D400, D401, D402, D403, D405, D406, D407, D409, D411, D412, D414,
# Plug-in: flake8-rst-docstrings
RST201, RST203, RST210, RST212, RST213, RST215,
RST301, RST303,


@@ -0,0 +1,36 @@
[flake8]
min_python_version = 3.7.0
max-line-length = 88
ban-relative-imports = true
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
format-greedy = 1
inline-quotes = double
enable-extensions = TC, TC1
type-checking-strict = true
eradicate-whitelist-extend = ^-.*;
extend-ignore =
# E203: Whitespace before ':' (pycqa/pycodestyle#373)
E203,
# SIM106: Handle error-cases first
SIM106,
# ANN101: Missing type annotation for self in method
ANN101,
# ANN102: Missing type annotation for cls in classmethod
ANN102,
# PIE781: assign-and-return
PIE781,
# PIE798 no-unnecessary-class: Consider using a module for namespacing instead
PIE798,
per-file-ignores =
# TC002: Move third-party import '...' into a type-checking block
__init__.py:TC002,
# ANN201: Missing return type annotation for public function
tests/test_*:ANN201
tests/**/test_*:ANN201
extend-exclude =
# Frozen and not subject to change in this repo:
get-poetry.py,
install-poetry.py,
# External to the project's coding standards:
tests/fixtures/*,
tests/**/fixtures/*,


@@ -0,0 +1,19 @@
[flake8]
max-line-length=120
docstring-convention=all
import-order-style=pycharm
application_import_names=bot,tests
exclude=.cache,.venv,.git,constants.py
extend-ignore=
B311,W503,E226,S311,T000,E731
# Missing Docstrings
D100,D104,D105,D107,
# Docstring Whitespace
D203,D212,D214,D215,
# Docstring Quotes
D301,D302,
# Docstring Content
D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
# Type Annotations
ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
per-file-ignores=tests/*:D,ANN


@@ -0,0 +1,6 @@
[flake8]
ignore = E203, E501, W503
per-file-ignores =
requests/__init__.py:E402, F401
requests/compat.py:E402, F401
tests/compat.py:F401


@@ -0,0 +1,34 @@
[project]
name = "flake8-to-ruff"
keywords = ["automation", "flake8", "pycodestyle", "pyflakes", "pylint", "clippy"]
classifiers = [
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3 :: Only",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
]
author = "Charlie Marsh"
author_email = "charlie.r.marsh@gmail.com"
description = "Convert existing Flake8 configuration to Ruff."
requires-python = ">=3.7"
[project.urls]
repository = "https://github.com/astral-sh/ruff#subdirectory=crates/flake8_to_ruff"
[build-system]
requires = ["maturin>=1.0,<2.0"]
build-backend = "maturin"
[tool.maturin]
bindings = "bin"
strip = true


@@ -0,0 +1,13 @@
//! Extract Black configuration settings from a pyproject.toml.
use ruff_linter::line_width::LineLength;
use ruff_linter::settings::types::PythonVersion;
use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Black {
#[serde(alias = "line-length", alias = "line_length")]
pub(crate) line_length: Option<LineLength>,
#[serde(alias = "target-version", alias = "target_version")]
pub(crate) target_version: Option<Vec<PythonVersion>>,
}


@@ -0,0 +1,687 @@
use std::collections::{HashMap, HashSet};
use std::str::FromStr;
use itertools::Itertools;
use ruff_linter::line_width::LineLength;
use ruff_linter::registry::Linter;
use ruff_linter::rule_selector::RuleSelector;
use ruff_linter::rules::flake8_pytest_style::types::{
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
};
use ruff_linter::rules::flake8_quotes::settings::Quote;
use ruff_linter::rules::flake8_tidy_imports::settings::Strictness;
use ruff_linter::rules::pydocstyle::settings::Convention;
use ruff_linter::settings::types::PythonVersion;
use ruff_linter::settings::DEFAULT_SELECTORS;
use ruff_linter::warn_user;
use ruff_workspace::options::{
Flake8AnnotationsOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, Flake8ErrMsgOptions,
Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, LintCommonOptions,
LintOptions, McCabeOptions, Options, Pep8NamingOptions, PydocstyleOptions,
};
use ruff_workspace::pyproject::Pyproject;
use super::external_config::ExternalConfig;
use super::plugin::Plugin;
use super::{parser, plugin};
pub(crate) fn convert(
config: &HashMap<String, HashMap<String, Option<String>>>,
external_config: &ExternalConfig,
plugins: Option<Vec<Plugin>>,
) -> Pyproject {
// Extract the Flake8 section.
let flake8 = config
.get("flake8")
.expect("Unable to find flake8 section in INI file");
// Extract all referenced rule code prefixes, to power plugin inference.
let mut referenced_codes: HashSet<RuleSelector> = HashSet::default();
for (key, value) in flake8 {
if let Some(value) = value {
match key.as_str() {
"select" | "ignore" | "extend-select" | "extend_select" | "extend-ignore"
| "extend_ignore" => {
referenced_codes.extend(parser::parse_prefix_codes(value.as_ref()));
}
"per-file-ignores" | "per_file_ignores" => {
if let Ok(per_file_ignores) =
parser::parse_files_to_codes_mapping(value.as_ref())
{
for (_, codes) in parser::collect_per_file_ignores(per_file_ignores) {
referenced_codes.extend(codes);
}
}
}
_ => {}
}
}
}
// Infer plugins, if not provided.
let plugins = plugins.unwrap_or_else(|| {
let from_options = plugin::infer_plugins_from_options(flake8);
if !from_options.is_empty() {
#[allow(clippy::print_stderr)]
{
eprintln!("Inferred plugins from settings: {from_options:#?}");
}
}
let from_codes = plugin::infer_plugins_from_codes(&referenced_codes);
if !from_codes.is_empty() {
#[allow(clippy::print_stderr)]
{
eprintln!("Inferred plugins from referenced codes: {from_codes:#?}");
}
}
from_options.into_iter().chain(from_codes).collect()
});
// Check if the user has specified a `select`. If not, we'll add our own
// default `select`, and populate it based on user plugins.
let mut select = flake8
.get("select")
.and_then(|value| {
value
.as_ref()
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
})
.unwrap_or_else(|| resolve_select(&plugins));
let mut ignore: HashSet<RuleSelector> = flake8
.get("ignore")
.and_then(|value| {
value
.as_ref()
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
})
.unwrap_or_default();
// Parse each supported option.
let mut options = Options::default();
let mut lint_options = LintCommonOptions::default();
let mut flake8_annotations = Flake8AnnotationsOptions::default();
let mut flake8_bugbear = Flake8BugbearOptions::default();
let mut flake8_builtins = Flake8BuiltinsOptions::default();
let mut flake8_errmsg = Flake8ErrMsgOptions::default();
let mut flake8_pytest_style = Flake8PytestStyleOptions::default();
let mut flake8_quotes = Flake8QuotesOptions::default();
let mut flake8_tidy_imports = Flake8TidyImportsOptions::default();
let mut mccabe = McCabeOptions::default();
let mut pep8_naming = Pep8NamingOptions::default();
let mut pydocstyle = PydocstyleOptions::default();
for (key, value) in flake8 {
if let Some(value) = value {
match key.as_str() {
// flake8
"builtins" => {
options.builtins = Some(parser::parse_strings(value.as_ref()));
}
"max-line-length" | "max_line_length" => match LineLength::from_str(value) {
Ok(line_length) => options.line_length = Some(line_length),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
"select" => {
// No-op (handled above).
select.extend(parser::parse_prefix_codes(value.as_ref()));
}
"ignore" => {
// No-op (handled above).
}
"extend-select" | "extend_select" => {
// Unlike Flake8, use a single explicit `select`.
select.extend(parser::parse_prefix_codes(value.as_ref()));
}
"extend-ignore" | "extend_ignore" => {
// Unlike Flake8, use a single explicit `ignore`.
ignore.extend(parser::parse_prefix_codes(value.as_ref()));
}
"exclude" => {
options.exclude = Some(parser::parse_strings(value.as_ref()));
}
"extend-exclude" | "extend_exclude" => {
options.extend_exclude = Some(parser::parse_strings(value.as_ref()));
}
"per-file-ignores" | "per_file_ignores" => {
match parser::parse_files_to_codes_mapping(value.as_ref()) {
Ok(per_file_ignores) => {
lint_options.per_file_ignores =
Some(parser::collect_per_file_ignores(per_file_ignores));
}
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-bugbear
"extend-immutable-calls" | "extend_immutable_calls" => {
flake8_bugbear.extend_immutable_calls =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-builtins
"builtins-ignorelist" | "builtins_ignorelist" => {
flake8_builtins.builtins_ignorelist =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-annotations
"suppress-none-returning" | "suppress_none_returning" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.suppress_none_returning = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"suppress-dummy-args" | "suppress_dummy_args" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.suppress_dummy_args = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"mypy-init-return" | "mypy_init_return" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.mypy_init_return = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"allow-star-arg-any" | "allow_star_arg_any" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.allow_star_arg_any = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-quotes
"quotes" | "inline-quotes" | "inline_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.inline_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.inline_quotes = Some(Quote::Double),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"multiline-quotes" | "multiline_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.multiline_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.multiline_quotes = Some(Quote::Double),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"docstring-quotes" | "docstring_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.docstring_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.docstring_quotes = Some(Quote::Double),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"avoid-escape" | "avoid_escape" => match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_quotes.avoid_escape = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
// pep8-naming
"ignore-names" | "ignore_names" => {
pep8_naming.ignore_names = Some(parser::parse_strings(value.as_ref()));
}
"classmethod-decorators" | "classmethod_decorators" => {
pep8_naming.classmethod_decorators =
Some(parser::parse_strings(value.as_ref()));
}
"staticmethod-decorators" | "staticmethod_decorators" => {
pep8_naming.staticmethod_decorators =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-tidy-imports
"ban-relative-imports" | "ban_relative_imports" => match value.trim() {
"true" => flake8_tidy_imports.ban_relative_imports = Some(Strictness::All),
"parents" => {
flake8_tidy_imports.ban_relative_imports = Some(Strictness::Parents);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
// flake8-docstrings
"docstring-convention" => match value.trim() {
"google" => pydocstyle.convention = Some(Convention::Google),
"numpy" => pydocstyle.convention = Some(Convention::Numpy),
"pep257" => pydocstyle.convention = Some(Convention::Pep257),
"all" => pydocstyle.convention = None,
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
// mccabe
"max-complexity" | "max_complexity" => match value.parse::<usize>() {
Ok(max_complexity) => mccabe.max_complexity = Some(max_complexity),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
// flake8-errmsg
"errmsg-max-string-length" | "errmsg_max_string_length" => {
match value.parse::<usize>() {
Ok(max_string_length) => {
flake8_errmsg.max_string_length = Some(max_string_length);
}
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-pytest-style
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_pytest_style.fixture_parentheses = Some(!bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
match value.trim() {
"csv" => {
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::Csv);
}
"tuple" => {
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::Tuple);
}
"list" => {
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::List);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
match value.trim() {
"tuple" => {
flake8_pytest_style.parametrize_values_type =
Some(ParametrizeValuesType::Tuple);
}
"list" => {
flake8_pytest_style.parametrize_values_type =
Some(ParametrizeValuesType::List);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
match value.trim() {
"tuple" => {
flake8_pytest_style.parametrize_values_row_type =
Some(ParametrizeValuesRowType::Tuple);
}
"list" => {
flake8_pytest_style.parametrize_values_row_type =
Some(ParametrizeValuesRowType::List);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
flake8_pytest_style.raises_require_match_for =
Some(parser::parse_strings(value.as_ref()));
}
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_pytest_style.mark_parentheses = Some(!bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// Unknown
_ => {
warn_user!("Skipping unsupported property: {}", key);
}
}
}
}
// Deduplicate and sort.
lint_options.select = Some(
select
.into_iter()
.sorted_by_key(RuleSelector::prefix_and_code)
.collect(),
);
lint_options.ignore = Some(
ignore
.into_iter()
.sorted_by_key(RuleSelector::prefix_and_code)
.collect(),
);
if flake8_annotations != Flake8AnnotationsOptions::default() {
lint_options.flake8_annotations = Some(flake8_annotations);
}
if flake8_bugbear != Flake8BugbearOptions::default() {
lint_options.flake8_bugbear = Some(flake8_bugbear);
}
if flake8_builtins != Flake8BuiltinsOptions::default() {
lint_options.flake8_builtins = Some(flake8_builtins);
}
if flake8_errmsg != Flake8ErrMsgOptions::default() {
lint_options.flake8_errmsg = Some(flake8_errmsg);
}
if flake8_pytest_style != Flake8PytestStyleOptions::default() {
lint_options.flake8_pytest_style = Some(flake8_pytest_style);
}
if flake8_quotes != Flake8QuotesOptions::default() {
lint_options.flake8_quotes = Some(flake8_quotes);
}
if flake8_tidy_imports != Flake8TidyImportsOptions::default() {
lint_options.flake8_tidy_imports = Some(flake8_tidy_imports);
}
if mccabe != McCabeOptions::default() {
lint_options.mccabe = Some(mccabe);
}
if pep8_naming != Pep8NamingOptions::default() {
lint_options.pep8_naming = Some(pep8_naming);
}
if pydocstyle != PydocstyleOptions::default() {
lint_options.pydocstyle = Some(pydocstyle);
}
// Extract any settings from the existing `pyproject.toml`.
if let Some(black) = &external_config.black {
if let Some(line_length) = &black.line_length {
options.line_length = Some(*line_length);
}
if let Some(target_version) = &black.target_version {
if let Some(target_version) = target_version.iter().min() {
options.target_version = Some(*target_version);
}
}
}
if let Some(isort) = &external_config.isort {
if let Some(src_paths) = &isort.src_paths {
match options.src.as_mut() {
Some(src) => {
src.extend_from_slice(src_paths);
}
None => {
options.src = Some(src_paths.clone());
}
}
}
}
if let Some(project) = &external_config.project {
if let Some(requires_python) = &project.requires_python {
if options.target_version.is_none() {
options.target_version =
PythonVersion::get_minimum_supported_version(requires_python);
}
}
}
if lint_options != LintCommonOptions::default() {
options.lint = Some(LintOptions {
common: lint_options,
..LintOptions::default()
});
}
// Create the pyproject.toml.
Pyproject::new(options)
}
/// Resolve the set of enabled `RuleSelector` values for the given
/// plugins.
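///
/// For example (illustrative), passing `[Plugin::Flake8Quotes]` yields the
/// `DEFAULT_SELECTORS` plus the selector for the `flake8-quotes` linter
/// (the `Q` prefix in Ruff's rule namespace).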
fn resolve_select(plugins: &[Plugin]) -> HashSet<RuleSelector> {
let mut select: HashSet<_> = DEFAULT_SELECTORS.iter().cloned().collect();
select.extend(plugins.iter().map(|p| Linter::from(p).into()));
select
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::str::FromStr;
use anyhow::Result;
use itertools::Itertools;
use pep440_rs::VersionSpecifiers;
use pretty_assertions::assert_eq;
use ruff_linter::line_width::LineLength;
use ruff_linter::registry::Linter;
use ruff_linter::rule_selector::RuleSelector;
use ruff_linter::rules::flake8_quotes;
use ruff_linter::rules::pydocstyle::settings::Convention;
use ruff_linter::settings::types::PythonVersion;
use ruff_workspace::options::{
Flake8QuotesOptions, LintCommonOptions, LintOptions, Options, PydocstyleOptions,
};
use ruff_workspace::pyproject::Pyproject;
use crate::converter::DEFAULT_SELECTORS;
use crate::pep621::Project;
use crate::ExternalConfig;
use super::super::plugin::Plugin;
use super::convert;
fn lint_default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> LintCommonOptions {
LintCommonOptions {
ignore: Some(vec![]),
select: Some(
DEFAULT_SELECTORS
.iter()
.cloned()
.chain(plugins)
.sorted_by_key(RuleSelector::prefix_and_code)
.collect(),
),
..LintCommonOptions::default()
}
}
#[test]
fn it_converts_empty() {
let actual = convert(
&HashMap::from([("flake8".to_string(), HashMap::default())]),
&ExternalConfig::default(),
None,
);
let expected = Pyproject::new(Options {
lint: Some(LintOptions {
common: lint_default_options([]),
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_dashes() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("max-line-length".to_string(), Some("100".to_string()))]),
)]),
&ExternalConfig::default(),
Some(vec![]),
);
let expected = Pyproject::new(Options {
line_length: Some(LineLength::try_from(100).unwrap()),
lint: Some(LintOptions {
common: lint_default_options([]),
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_underscores() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("max_line_length".to_string(), Some("100".to_string()))]),
)]),
&ExternalConfig::default(),
Some(vec![]),
);
let expected = Pyproject::new(Options {
line_length: Some(LineLength::try_from(100).unwrap()),
lint: Some(LintOptions {
common: lint_default_options([]),
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_ignores_parse_errors() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("max_line_length".to_string(), Some("abc".to_string()))]),
)]),
&ExternalConfig::default(),
Some(vec![]),
);
let expected = Pyproject::new(Options {
lint: Some(LintOptions {
common: lint_default_options([]),
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_plugin_options() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
)]),
&ExternalConfig::default(),
Some(vec![]),
);
let expected = Pyproject::new(Options {
lint: Some(LintOptions {
common: LintCommonOptions {
flake8_quotes: Some(Flake8QuotesOptions {
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
multiline_quotes: None,
docstring_quotes: None,
avoid_escape: None,
}),
..lint_default_options([])
},
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_docstring_conventions() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([(
"docstring-convention".to_string(),
Some("numpy".to_string()),
)]),
)]),
&ExternalConfig::default(),
Some(vec![Plugin::Flake8Docstrings]),
);
let expected = Pyproject::new(Options {
lint: Some(LintOptions {
common: LintCommonOptions {
pydocstyle: Some(PydocstyleOptions {
convention: Some(Convention::Numpy),
ignore_decorators: None,
property_decorators: None,
}),
..lint_default_options([Linter::Pydocstyle.into()])
},
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_infers_plugins_if_omitted() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
)]),
&ExternalConfig::default(),
None,
);
let expected = Pyproject::new(Options {
lint: Some(LintOptions {
common: LintCommonOptions {
flake8_quotes: Some(Flake8QuotesOptions {
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
multiline_quotes: None,
docstring_quotes: None,
avoid_escape: None,
}),
..lint_default_options([Linter::Flake8Quotes.into()])
},
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
}
#[test]
fn it_converts_project_requires_python() -> Result<()> {
let actual = convert(
&HashMap::from([("flake8".to_string(), HashMap::default())]),
&ExternalConfig {
project: Some(&Project {
requires_python: Some(VersionSpecifiers::from_str(">=3.8.16, <3.11")?),
}),
..ExternalConfig::default()
},
Some(vec![]),
);
let expected = Pyproject::new(Options {
target_version: Some(PythonVersion::Py38),
lint: Some(LintOptions {
common: lint_default_options([]),
..LintOptions::default()
}),
..Options::default()
});
assert_eq!(actual, expected);
Ok(())
}
}

View File

@@ -0,0 +1,10 @@
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;
#[derive(Default)]
pub(crate) struct ExternalConfig<'a> {
pub(crate) black: Option<&'a Black>,
pub(crate) isort: Option<&'a Isort>,
pub(crate) project: Option<&'a Project>,
}

View File

@@ -0,0 +1,10 @@
//! Extract isort configuration settings from a pyproject.toml.
use serde::{Deserialize, Serialize};
/// The [isort configuration](https://pycqa.github.io/isort/docs/configuration/config_files.html).
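///
/// A minimal sketch of the input this deserializes (values are illustrative):
///
/// ```toml
/// [tool.isort]
/// src_paths = ["src", "tests"]
/// ```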
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Isort {
#[serde(alias = "src-paths", alias = "src_paths")]
pub(crate) src_paths: Option<Vec<String>>,
}

View File

@@ -0,0 +1,80 @@
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.
mod black;
mod converter;
mod external_config;
mod isort;
mod parser;
mod pep621;
mod plugin;
mod pyproject;
use std::path::PathBuf;
use anyhow::Result;
use clap::Parser;
use configparser::ini::Ini;
use crate::converter::convert;
use crate::external_config::ExternalConfig;
use crate::plugin::Plugin;
use crate::pyproject::parse;
use ruff_linter::logging::{set_up_logging, LogLevel};
#[derive(Parser)]
#[command(
about = "Convert existing Flake8 configuration to Ruff.",
long_about = None
)]
struct Args {
/// Path to the Flake8 configuration file (e.g., `setup.cfg`, `tox.ini`, or
/// `.flake8`).
#[arg(required = true)]
file: PathBuf,
/// Optional path to a `pyproject.toml` file, used to ensure compatibility
/// with Black.
#[arg(long)]
pyproject: Option<PathBuf>,
/// List of plugins to enable.
#[arg(long, value_delimiter = ',')]
plugin: Option<Vec<Plugin>>,
}
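// Illustrative invocation (paths and plugin names are hypothetical):
//
//     flake8-to-ruff setup.cfg --pyproject pyproject.toml --plugin flake8-quotes,pep8-naming
//
// The generated `[tool.ruff]` configuration is printed to stdout as TOML.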
fn main() -> Result<()> {
set_up_logging(&LogLevel::Default)?;
let args = Args::parse();
// Read the INI file.
let mut ini = Ini::new_cs();
ini.set_multiline(true);
let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;
// Read the pyproject.toml file.
let pyproject = args.pyproject.map(parse).transpose()?;
let external_config = pyproject
.as_ref()
.and_then(|pyproject| pyproject.tool.as_ref())
.map(|tool| ExternalConfig {
black: tool.black.as_ref(),
isort: tool.isort.as_ref(),
..Default::default()
})
.unwrap_or_default();
let external_config = ExternalConfig {
project: pyproject
.as_ref()
.and_then(|pyproject| pyproject.project.as_ref()),
..external_config
};
// Create Ruff's pyproject.toml section.
let pyproject = convert(&config, &external_config, args.plugin);
#[allow(clippy::print_stdout)]
{
println!("{}", toml::to_string_pretty(&pyproject)?);
}
Ok(())
}

View File

@@ -0,0 +1,391 @@
use std::str::FromStr;
use anyhow::{bail, Result};
use once_cell::sync::Lazy;
use regex::Regex;
use rustc_hash::FxHashMap;
use ruff_linter::settings::types::PatternPrefixPair;
use ruff_linter::{warn_user, RuleSelector};
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
/// Parse a comma-separated list of `RuleSelector` values (e.g.,
/// "F401,E501").
pub(crate) fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
let mut codes: Vec<RuleSelector> = vec![];
for code in COMMA_SEPARATED_LIST_RE.split(value) {
let code = code.trim();
if code.is_empty() {
continue;
}
if let Ok(code) = RuleSelector::from_str(code) {
codes.push(code);
} else {
warn_user!("Unsupported prefix code: {code}");
}
}
codes
}
/// Parse a comma-separated list of strings (e.g., "__init__.py,__main__.py").
pub(crate) fn parse_strings(value: &str) -> Vec<String> {
COMMA_SEPARATED_LIST_RE
.split(value)
.map(str::trim)
.filter(|part| !part.is_empty())
.map(String::from)
.collect()
}
/// Parse a boolean.
pub(crate) fn parse_bool(value: &str) -> Result<bool> {
match value.trim() {
"true" => Ok(true),
"false" => Ok(false),
_ => bail!("Unexpected boolean value: {value}"),
}
}
#[derive(Debug)]
struct Token {
token_name: TokenType,
src: String,
}
#[derive(Debug, Copy, Clone)]
enum TokenType {
Code,
File,
Colon,
Comma,
Ws,
Eof,
}
struct State {
seen_sep: bool,
seen_colon: bool,
filenames: Vec<String>,
codes: Vec<String>,
}
impl State {
const fn new() -> Self {
Self {
seen_sep: true,
seen_colon: false,
filenames: vec![],
codes: vec![],
}
}
/// Generate the list of `PatternPrefixPair` pairs for the current
/// state.
fn parse(&self) -> Vec<PatternPrefixPair> {
let mut codes: Vec<PatternPrefixPair> = vec![];
for code in &self.codes {
if let Ok(code) = RuleSelector::from_str(code) {
for filename in &self.filenames {
codes.push(PatternPrefixPair {
pattern: filename.clone(),
prefix: code.clone(),
});
}
} else {
warn_user!("Unsupported prefix code: {code}");
}
}
codes
}
}
/// Tokenize the raw 'files-to-codes' mapping.
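///
/// For example (illustrative), `"a.py: F401"` tokenizes to
/// `[File("a.py"), Colon, Code("F401"), Eof]`.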
fn tokenize_files_to_codes_mapping(value: &str) -> Vec<Token> {
let mut tokens = vec![];
let mut i = 0;
while i < value.len() {
for (token_re, token_name) in [
(
Regex::new(r"([A-Z]+[0-9]*)(?:$|\s|,)").unwrap(),
TokenType::Code,
),
(Regex::new(r"([^\s:,]+)").unwrap(), TokenType::File),
(Regex::new(r"(\s*:\s*)").unwrap(), TokenType::Colon),
(Regex::new(r"(\s*,\s*)").unwrap(), TokenType::Comma),
(Regex::new(r"(\s+)").unwrap(), TokenType::Ws),
] {
if let Some(cap) = token_re.captures(&value[i..]) {
let mat = cap.get(1).unwrap();
if mat.start() == 0 {
tokens.push(Token {
token_name,
src: mat.as_str().trim().to_string(),
});
i += mat.end();
break;
}
}
}
}
tokens.push(Token {
token_name: TokenType::Eof,
src: String::new(),
});
tokens
}
/// Parse a 'files-to-codes' mapping, mimicking Flake8's internal logic.
/// See: <https://github.com/PyCQA/flake8/blob/7dfe99616fc2f07c0017df2ba5fa884158f3ea8a/src/flake8/utils.py#L45>
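///
/// A sketch of the expected behavior: an input like
///
/// ```text
/// docs/*: D
/// tests/*.py: F401,F841
/// ```
///
/// expands to one `PatternPrefixPair` per `(pattern, code)` combination,
/// i.e. `(docs/*, D)`, `(tests/*.py, F401)`, and `(tests/*.py, F841)`.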
pub(crate) fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
if value.trim().is_empty() {
return Ok(vec![]);
}
let mut codes: Vec<PatternPrefixPair> = vec![];
let mut state = State::new();
for token in tokenize_files_to_codes_mapping(value) {
if matches!(token.token_name, TokenType::Comma | TokenType::Ws) {
state.seen_sep = true;
} else if !state.seen_colon {
if matches!(token.token_name, TokenType::Colon) {
state.seen_colon = true;
state.seen_sep = true;
} else if state.seen_sep && matches!(token.token_name, TokenType::File) {
state.filenames.push(token.src);
state.seen_sep = false;
} else {
bail!("Unexpected token: {:?}", token.token_name);
}
} else {
if matches!(token.token_name, TokenType::Eof) {
codes.extend(state.parse());
state = State::new();
} else if state.seen_sep && matches!(token.token_name, TokenType::Code) {
state.codes.push(token.src);
state.seen_sep = false;
} else if state.seen_sep && matches!(token.token_name, TokenType::File) {
codes.extend(state.parse());
state = State::new();
state.filenames.push(token.src);
state.seen_sep = false;
} else {
bail!("Unexpected token: {:?}", token.token_name);
}
}
}
Ok(codes)
}
/// Collect a list of `PatternPrefixPair` structs as an `FxHashMap`.
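///
/// For example (illustrative), the pairs `(a.py, F401)` and `(a.py, F841)`
/// collapse to a single `a.py => [F401, F841]` entry.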
pub(crate) fn collect_per_file_ignores(
pairs: Vec<PatternPrefixPair>,
) -> FxHashMap<String, Vec<RuleSelector>> {
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
for pair in pairs {
per_file_ignores
.entry(pair.pattern)
.or_default()
.push(pair.prefix);
}
per_file_ignores
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use ruff_linter::codes;
use ruff_linter::registry::Linter;
use ruff_linter::settings::types::PatternPrefixPair;
use ruff_linter::RuleSelector;
use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};
#[test]
fn it_parses_prefix_codes() {
let actual = parse_prefix_codes("");
let expected: Vec<RuleSelector> = vec![];
assert_eq!(actual, expected);
let actual = parse_prefix_codes(" ");
let expected: Vec<RuleSelector> = vec![];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401");
let expected = vec![codes::Pyflakes::_401.into()];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401,");
let expected = vec![codes::Pyflakes::_401.into()];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401,E501");
let expected = vec![
codes::Pyflakes::_401.into(),
codes::Pycodestyle::E501.into(),
];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401, E501");
let expected = vec![
codes::Pyflakes::_401.into(),
codes::Pycodestyle::E501.into(),
];
assert_eq!(actual, expected);
}
#[test]
fn it_parses_strings() {
let actual = parse_strings("");
let expected: Vec<String> = vec![];
assert_eq!(actual, expected);
let actual = parse_strings(" ");
let expected: Vec<String> = vec![];
assert_eq!(actual, expected);
let actual = parse_strings("__init__.py");
let expected = vec!["__init__.py".to_string()];
assert_eq!(actual, expected);
let actual = parse_strings("__init__.py,");
let expected = vec!["__init__.py".to_string()];
assert_eq!(actual, expected);
let actual = parse_strings("__init__.py,__main__.py");
let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
assert_eq!(actual, expected);
let actual = parse_strings("__init__.py, __main__.py");
let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
assert_eq!(actual, expected);
}
#[test]
fn it_parse_files_to_codes_mapping() -> Result<()> {
let actual = parse_files_to_codes_mapping("")?;
let expected: Vec<PatternPrefixPair> = vec![];
assert_eq!(actual, expected);
let actual = parse_files_to_codes_mapping(" ")?;
let expected: Vec<PatternPrefixPair> = vec![];
assert_eq!(actual, expected);
// Ex) locust
let actual = parse_files_to_codes_mapping(
"per-file-ignores =
locust/test/*: F841
examples/*: F841
*.pyi: E302,E704"
.strip_prefix("per-file-ignores =")
.unwrap(),
)?;
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "locust/test/*".to_string(),
prefix: codes::Pyflakes::_841.into(),
},
PatternPrefixPair {
pattern: "examples/*".to_string(),
prefix: codes::Pyflakes::_841.into(),
},
];
assert_eq!(actual, expected);
// Ex) celery
let actual = parse_files_to_codes_mapping(
"per-file-ignores =
t/*,setup.py,examples/*,docs/*,extra/*:
D,"
.strip_prefix("per-file-ignores =")
.unwrap(),
)?;
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "t/*".to_string(),
prefix: Linter::Pydocstyle.into(),
},
PatternPrefixPair {
pattern: "setup.py".to_string(),
prefix: Linter::Pydocstyle.into(),
},
PatternPrefixPair {
pattern: "examples/*".to_string(),
prefix: Linter::Pydocstyle.into(),
},
PatternPrefixPair {
pattern: "docs/*".to_string(),
prefix: Linter::Pydocstyle.into(),
},
PatternPrefixPair {
pattern: "extra/*".to_string(),
prefix: Linter::Pydocstyle.into(),
},
];
assert_eq!(actual, expected);
// Ex) scrapy
let actual = parse_files_to_codes_mapping(
"per-file-ignores =
scrapy/__init__.py:E402
scrapy/core/downloader/handlers/http.py:F401
scrapy/http/__init__.py:F401
scrapy/linkextractors/__init__.py:E402,F401
scrapy/selector/__init__.py:F401
scrapy/spiders/__init__.py:E402,F401
scrapy/utils/url.py:F403,F405
tests/test_loader.py:E741"
.strip_prefix("per-file-ignores =")
.unwrap(),
)?;
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "scrapy/__init__.py".to_string(),
prefix: codes::Pycodestyle::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/http/__init__.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/linkextractors/__init__.py".to_string(),
prefix: codes::Pycodestyle::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/linkextractors/__init__.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/selector/__init__.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/spiders/__init__.py".to_string(),
prefix: codes::Pycodestyle::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/spiders/__init__.py".to_string(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/utils/url.py".to_string(),
prefix: codes::Pyflakes::_403.into(),
},
PatternPrefixPair {
pattern: "scrapy/utils/url.py".to_string(),
prefix: codes::Pyflakes::_405.into(),
},
PatternPrefixPair {
pattern: "tests/test_loader.py".to_string(),
prefix: codes::Pycodestyle::E741.into(),
},
];
assert_eq!(actual, expected);
Ok(())
}
}

View File

@@ -0,0 +1,10 @@
//! Extract PEP 621 configuration settings from a pyproject.toml.
use pep440_rs::VersionSpecifiers;
use serde::{Deserialize, Serialize};
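/// The PEP 621 `[project]` table. Only `requires-python` is read here, e.g.
/// (illustratively) `requires-python = ">=3.8"`.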
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Project {
#[serde(alias = "requires-python", alias = "requires_python")]
pub(crate) requires_python: Option<VersionSpecifiers>,
}

View File

@@ -0,0 +1,368 @@
use std::collections::{BTreeSet, HashMap, HashSet};
use std::fmt;
use std::str::FromStr;
use anyhow::anyhow;
use ruff_linter::registry::Linter;
use ruff_linter::rule_selector::PreviewOptions;
use ruff_linter::RuleSelector;
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Plugin {
Flake82020,
Flake8Annotations,
Flake8Bandit,
Flake8BlindExcept,
Flake8BooleanTrap,
Flake8Bugbear,
Flake8Builtins,
Flake8Commas,
Flake8Comprehensions,
Flake8Datetimez,
Flake8Debugger,
Flake8Docstrings,
Flake8Eradicate,
Flake8ErrMsg,
Flake8Executable,
Flake8ImplicitStrConcat,
Flake8ImportConventions,
Flake8NoPep420,
Flake8Pie,
Flake8Print,
Flake8PytestStyle,
Flake8Quotes,
Flake8Return,
Flake8Simplify,
Flake8TidyImports,
Flake8TypeChecking,
Flake8UnusedArguments,
Flake8UsePathlib,
McCabe,
PEP8Naming,
PandasVet,
Pyupgrade,
Tryceratops,
}
impl FromStr for Plugin {
type Err = anyhow::Error;
fn from_str(string: &str) -> Result<Self, Self::Err> {
match string {
"flake8-2020" => Ok(Plugin::Flake82020),
"flake8-annotations" => Ok(Plugin::Flake8Annotations),
"flake8-bandit" => Ok(Plugin::Flake8Bandit),
"flake8-blind-except" => Ok(Plugin::Flake8BlindExcept),
"flake8-boolean-trap" => Ok(Plugin::Flake8BooleanTrap),
"flake8-bugbear" => Ok(Plugin::Flake8Bugbear),
"flake8-builtins" => Ok(Plugin::Flake8Builtins),
"flake8-commas" => Ok(Plugin::Flake8Commas),
"flake8-comprehensions" => Ok(Plugin::Flake8Comprehensions),
"flake8-datetimez" => Ok(Plugin::Flake8Datetimez),
"flake8-debugger" => Ok(Plugin::Flake8Debugger),
"flake8-docstrings" => Ok(Plugin::Flake8Docstrings),
"flake8-eradicate" => Ok(Plugin::Flake8Eradicate),
"flake8-errmsg" => Ok(Plugin::Flake8ErrMsg),
"flake8-executable" => Ok(Plugin::Flake8Executable),
"flake8-implicit-str-concat" => Ok(Plugin::Flake8ImplicitStrConcat),
"flake8-import-conventions" => Ok(Plugin::Flake8ImportConventions),
"flake8-no-pep420" => Ok(Plugin::Flake8NoPep420),
"flake8-pie" => Ok(Plugin::Flake8Pie),
"flake8-print" => Ok(Plugin::Flake8Print),
"flake8-pytest-style" => Ok(Plugin::Flake8PytestStyle),
"flake8-quotes" => Ok(Plugin::Flake8Quotes),
"flake8-return" => Ok(Plugin::Flake8Return),
"flake8-simplify" => Ok(Plugin::Flake8Simplify),
"flake8-tidy-imports" => Ok(Plugin::Flake8TidyImports),
"flake8-type-checking" => Ok(Plugin::Flake8TypeChecking),
"flake8-unused-arguments" => Ok(Plugin::Flake8UnusedArguments),
"flake8-use-pathlib" => Ok(Plugin::Flake8UsePathlib),
"mccabe" => Ok(Plugin::McCabe),
"pep8-naming" => Ok(Plugin::PEP8Naming),
"pandas-vet" => Ok(Plugin::PandasVet),
"pyupgrade" => Ok(Plugin::Pyupgrade),
"tryceratops" => Ok(Plugin::Tryceratops),
_ => Err(anyhow!("Unknown plugin: {string}")),
}
}
}
impl fmt::Debug for Plugin {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}",
match self {
Plugin::Flake82020 => "flake8-2020",
Plugin::Flake8Annotations => "flake8-annotations",
Plugin::Flake8Bandit => "flake8-bandit",
Plugin::Flake8BlindExcept => "flake8-blind-except",
Plugin::Flake8BooleanTrap => "flake8-boolean-trap",
Plugin::Flake8Bugbear => "flake8-bugbear",
Plugin::Flake8Builtins => "flake8-builtins",
Plugin::Flake8Commas => "flake8-commas",
Plugin::Flake8Comprehensions => "flake8-comprehensions",
Plugin::Flake8Datetimez => "flake8-datetimez",
Plugin::Flake8Debugger => "flake8-debugger",
Plugin::Flake8Docstrings => "flake8-docstrings",
Plugin::Flake8Eradicate => "flake8-eradicate",
Plugin::Flake8ErrMsg => "flake8-errmsg",
Plugin::Flake8Executable => "flake8-executable",
Plugin::Flake8ImplicitStrConcat => "flake8-implicit-str-concat",
Plugin::Flake8ImportConventions => "flake8-import-conventions",
Plugin::Flake8NoPep420 => "flake8-no-pep420",
Plugin::Flake8Pie => "flake8-pie",
Plugin::Flake8Print => "flake8-print",
Plugin::Flake8PytestStyle => "flake8-pytest-style",
Plugin::Flake8Quotes => "flake8-quotes",
Plugin::Flake8Return => "flake8-return",
Plugin::Flake8Simplify => "flake8-simplify",
Plugin::Flake8TidyImports => "flake8-tidy-imports",
Plugin::Flake8TypeChecking => "flake8-type-checking",
Plugin::Flake8UnusedArguments => "flake8-unused-arguments",
Plugin::Flake8UsePathlib => "flake8-use-pathlib",
Plugin::McCabe => "mccabe",
Plugin::PEP8Naming => "pep8-naming",
Plugin::PandasVet => "pandas-vet",
Plugin::Pyupgrade => "pyupgrade",
Plugin::Tryceratops => "tryceratops",
}
)
}
}
impl From<&Plugin> for Linter {
fn from(plugin: &Plugin) -> Self {
match plugin {
Plugin::Flake82020 => Linter::Flake82020,
Plugin::Flake8Annotations => Linter::Flake8Annotations,
Plugin::Flake8Bandit => Linter::Flake8Bandit,
Plugin::Flake8BlindExcept => Linter::Flake8BlindExcept,
Plugin::Flake8BooleanTrap => Linter::Flake8BooleanTrap,
Plugin::Flake8Bugbear => Linter::Flake8Bugbear,
Plugin::Flake8Builtins => Linter::Flake8Builtins,
Plugin::Flake8Commas => Linter::Flake8Commas,
Plugin::Flake8Comprehensions => Linter::Flake8Comprehensions,
Plugin::Flake8Datetimez => Linter::Flake8Datetimez,
Plugin::Flake8Debugger => Linter::Flake8Debugger,
Plugin::Flake8Docstrings => Linter::Pydocstyle,
Plugin::Flake8Eradicate => Linter::Eradicate,
Plugin::Flake8ErrMsg => Linter::Flake8ErrMsg,
Plugin::Flake8Executable => Linter::Flake8Executable,
Plugin::Flake8ImplicitStrConcat => Linter::Flake8ImplicitStrConcat,
Plugin::Flake8ImportConventions => Linter::Flake8ImportConventions,
Plugin::Flake8NoPep420 => Linter::Flake8NoPep420,
Plugin::Flake8Pie => Linter::Flake8Pie,
Plugin::Flake8Print => Linter::Flake8Print,
Plugin::Flake8PytestStyle => Linter::Flake8PytestStyle,
Plugin::Flake8Quotes => Linter::Flake8Quotes,
Plugin::Flake8Return => Linter::Flake8Return,
Plugin::Flake8Simplify => Linter::Flake8Simplify,
Plugin::Flake8TidyImports => Linter::Flake8TidyImports,
Plugin::Flake8TypeChecking => Linter::Flake8TypeChecking,
Plugin::Flake8UnusedArguments => Linter::Flake8UnusedArguments,
Plugin::Flake8UsePathlib => Linter::Flake8UsePathlib,
Plugin::McCabe => Linter::McCabe,
Plugin::PEP8Naming => Linter::PEP8Naming,
Plugin::PandasVet => Linter::PandasVet,
Plugin::Pyupgrade => Linter::Pyupgrade,
Plugin::Tryceratops => Linter::Tryceratops,
}
}
}
/// Infer the enabled plugins based on user-provided options.
///
/// For example, if the user specified a `mypy-init-return` setting, we should
/// infer that `flake8-annotations` is active.
pub(crate) fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> Vec<Plugin> {
let mut plugins = BTreeSet::new();
for key in flake8.keys() {
match key.as_str() {
// flake8-annotations
"suppress-none-returning" | "suppress_none_returning" => {
plugins.insert(Plugin::Flake8Annotations);
}
"suppress-dummy-args" | "suppress_dummy_args" => {
plugins.insert(Plugin::Flake8Annotations);
}
"allow-untyped-defs" | "allow_untyped_defs" => {
plugins.insert(Plugin::Flake8Annotations);
}
"allow-untyped-nested" | "allow_untyped_nested" => {
plugins.insert(Plugin::Flake8Annotations);
}
"mypy-init-return" | "mypy_init_return" => {
plugins.insert(Plugin::Flake8Annotations);
}
"dispatch-decorators" | "dispatch_decorators" => {
plugins.insert(Plugin::Flake8Annotations);
}
"overload-decorators" | "overload_decorators" => {
plugins.insert(Plugin::Flake8Annotations);
}
"allow-star-arg-any" | "allow_star_arg_any" => {
plugins.insert(Plugin::Flake8Annotations);
}
// flake8-bugbear
"extend-immutable-calls" | "extend_immutable_calls" => {
plugins.insert(Plugin::Flake8Bugbear);
}
// flake8-builtins
"builtins-ignorelist" | "builtins_ignorelist" => {
plugins.insert(Plugin::Flake8Builtins);
}
// flake8-docstrings
"docstring-convention" | "docstring_convention" => {
plugins.insert(Plugin::Flake8Docstrings);
}
// flake8-eradicate
"eradicate-aggressive" | "eradicate_aggressive" => {
plugins.insert(Plugin::Flake8Eradicate);
}
"eradicate-whitelist" | "eradicate_whitelist" => {
plugins.insert(Plugin::Flake8Eradicate);
}
"eradicate-whitelist-extend" | "eradicate_whitelist_extend" => {
plugins.insert(Plugin::Flake8Eradicate);
}
// flake8-pytest-style
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
plugins.insert(Plugin::Flake8PytestStyle);
}
// flake8-quotes
"quotes" | "inline-quotes" | "inline_quotes" => {
plugins.insert(Plugin::Flake8Quotes);
}
"multiline-quotes" | "multiline_quotes" => {
plugins.insert(Plugin::Flake8Quotes);
}
"docstring-quotes" | "docstring_quotes" => {
plugins.insert(Plugin::Flake8Quotes);
}
"avoid-escape" | "avoid_escape" => {
plugins.insert(Plugin::Flake8Quotes);
}
// flake8-tidy-imports
"ban-relative-imports" | "ban_relative_imports" => {
plugins.insert(Plugin::Flake8TidyImports);
}
"banned-modules" | "banned_modules" => {
plugins.insert(Plugin::Flake8TidyImports);
}
// mccabe
"max-complexity" | "max_complexity" => {
plugins.insert(Plugin::McCabe);
}
// pep8-naming
"ignore-names" | "ignore_names" => {
plugins.insert(Plugin::PEP8Naming);
}
"classmethod-decorators" | "classmethod_decorators" => {
plugins.insert(Plugin::PEP8Naming);
}
"staticmethod-decorators" | "staticmethod_decorators" => {
plugins.insert(Plugin::PEP8Naming);
}
"max-string-length" | "max_string_length" => {
plugins.insert(Plugin::Flake8ErrMsg);
}
_ => {}
}
}
Vec::from_iter(plugins)
}
/// Infer the enabled plugins based on the referenced prefixes.
///
/// For example, if the user ignores `ANN101`, we should infer that
/// `flake8-annotations` is active.
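///
/// A selector is considered to reference a plugin if any rule it expands to
/// belongs to that plugin's corresponding `Linter` (so a bare `ANN` prefix,
/// like a concrete `ANN101`, maps back to `flake8-annotations`).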
pub(crate) fn infer_plugins_from_codes(selectors: &HashSet<RuleSelector>) -> Vec<Plugin> {
// Ignore cases in which we've knowingly changed rule prefixes.
[
Plugin::Flake82020,
Plugin::Flake8Annotations,
Plugin::Flake8Bandit,
// Plugin::Flake8BlindExcept,
Plugin::Flake8BooleanTrap,
Plugin::Flake8Bugbear,
Plugin::Flake8Builtins,
// Plugin::Flake8Commas,
Plugin::Flake8Comprehensions,
Plugin::Flake8Datetimez,
Plugin::Flake8Debugger,
Plugin::Flake8Docstrings,
// Plugin::Flake8Eradicate,
Plugin::Flake8ErrMsg,
Plugin::Flake8Executable,
Plugin::Flake8ImplicitStrConcat,
// Plugin::Flake8ImportConventions,
Plugin::Flake8NoPep420,
Plugin::Flake8Pie,
Plugin::Flake8Print,
Plugin::Flake8PytestStyle,
Plugin::Flake8Quotes,
Plugin::Flake8Return,
Plugin::Flake8Simplify,
// Plugin::Flake8TidyImports,
// Plugin::Flake8TypeChecking,
Plugin::Flake8UnusedArguments,
// Plugin::Flake8UsePathlib,
Plugin::McCabe,
Plugin::PEP8Naming,
Plugin::PandasVet,
Plugin::Tryceratops,
]
.into_iter()
.filter(|plugin| {
for selector in selectors {
if selector
.rules(&PreviewOptions::default())
.any(|rule| Linter::from(plugin).rules().any(|r| r == rule))
{
return true;
}
}
false
})
.collect()
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use super::{infer_plugins_from_options, Plugin};
#[test]
fn it_infers_plugins() {
let actual = infer_plugins_from_options(&HashMap::from([(
"inline-quotes".to_string(),
Some("single".to_string()),
)]));
let expected = vec![Plugin::Flake8Quotes];
assert_eq!(actual, expected);
let actual = infer_plugins_from_options(&HashMap::from([(
"staticmethod-decorators".to_string(),
Some("[]".to_string()),
)]));
let expected = vec![Plugin::PEP8Naming];
assert_eq!(actual, expected);
}
}

View File

@@ -0,0 +1,26 @@
use std::path::Path;
use anyhow::Result;
use serde::{Deserialize, Serialize};
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct Tools {
pub(crate) black: Option<Black>,
pub(crate) isort: Option<Isort>,
}
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct Pyproject {
pub(crate) tool: Option<Tools>,
pub(crate) project: Option<Project>,
}
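/// Read and deserialize a `pyproject.toml` (illustratively, the file passed
/// via `--pyproject` on the command line).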
pub(crate) fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
let contents = std::fs::read_to_string(path)?;
let pyproject = toml::from_str::<Pyproject>(&contents)?;
Ok(pyproject)
}

View File

@@ -31,17 +31,17 @@ name = "formatter"
harness = false
[dependencies]
once_cell = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
url = { workspace = true }
ureq = { workspace = true }
criterion = { workspace = true, default-features = false }
codspeed-criterion-compat = { workspace = true, default-features = false, optional = true}
once_cell.workspace = true
serde.workspace = true
serde_json.workspace = true
url = "2.5.0"
ureq = "2.9.1"
criterion = { version = "0.5.1", default-features = false }
codspeed-criterion-compat = { version="2.3.3", default-features = false, optional = true}
[dev-dependencies]
ruff_linter = { path = "../ruff_linter" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_linter.path = "../ruff_linter"
ruff_python_ast.path = "../ruff_python_ast"
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_python_index = { path = "../ruff_python_index" }
ruff_python_parser = { path = "../ruff_python_parser" }
@@ -53,7 +53,7 @@ workspace = true
codspeed = ["codspeed-criterion-compat"]
[target.'cfg(target_os = "windows")'.dev-dependencies]
mimalloc = { workspace = true }
mimalloc = "0.1.39"
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dev-dependencies]
tikv-jemallocator = { workspace = true }
tikv-jemallocator = "0.5.0"

View File

@@ -65,7 +65,7 @@ fn benchmark_formatter(criterion: &mut Criterion) {
let comment_ranges = comment_ranges.finish();
// Parse the AST.
let module = parse_tokens(tokens, case.code(), Mode::Module)
let module = parse_tokens(tokens, case.code(), Mode::Module, "<filename>")
.expect("Input to be a valid python program");
b.iter(|| {

View File

@@ -2,7 +2,7 @@ use ruff_benchmark::criterion::{
criterion_group, criterion_main, BenchmarkGroup, BenchmarkId, Criterion, Throughput,
};
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_linter::linter::{lint_only, ParseSource};
use ruff_linter::linter::lint_only;
use ruff_linter::rule_selector::PreviewOptions;
use ruff_linter::settings::rule_table::RuleTable;
use ruff_linter::settings::types::PreviewMode;
@@ -10,7 +10,6 @@ use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::SourceKind;
use ruff_linter::{registry::Rule, RuleSelector};
use ruff_python_ast::PySourceType;
use ruff_python_parser::{lexer, parse_program_tokens, Mode};
#[cfg(target_os = "windows")]
#[global_allocator]
@@ -54,12 +53,7 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
BenchmarkId::from_parameter(case.name()),
&case,
|b, case| {
// Tokenize the source.
let tokens = lexer::lex(case.code(), Mode::Module).collect::<Vec<_>>();
// Parse the source.
let ast = parse_program_tokens(tokens.clone(), case.code(), false).unwrap();
let kind = SourceKind::Python(case.code().to_string());
b.iter(|| {
let path = case.path();
let result = lint_only(
@@ -67,12 +61,8 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
None,
settings,
flags::Noqa::Enabled,
&SourceKind::Python(case.code().to_string()),
&kind,
PySourceType::from(path.as_path()),
ParseSource::Precomputed {
tokens: &tokens,
ast: &ast,
},
);
// Assert that file contains no parse errors

View File

@@ -60,7 +60,7 @@ fn benchmark_parser(criterion: &mut Criterion<WallTime>) {
&case,
|b, case| {
b.iter(|| {
let parsed = parse_suite(case.code()).unwrap();
let parsed = parse_suite(case.code(), case.name()).unwrap();
let mut visitor = CountVisitor { count: 0 };
visitor.visit_body(&parsed);

View File

@@ -16,7 +16,7 @@ glob = { workspace = true }
globset = { workspace = true }
regex = { workspace = true }
filetime = { workspace = true }
seahash = { workspace = true }
seahash = "4.1.0"
[dev-dependencies]
ruff_macros = { path = "../ruff_macros" }

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_cli"
version = "0.1.11"
version = "0.1.7"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -15,60 +15,67 @@ readme = "../../README.md"
name = "ruff"
[dependencies]
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
ruff_cache = { path = "../ruff_cache" }
ruff_diagnostics = { path = "../ruff_diagnostics" }
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
ruff_macros = { path = "../ruff_macros" }
ruff_formatter = { path = "../ruff_formatter" }
ruff_notebook = { path = "../ruff_notebook" }
ruff_macros = { path = "../ruff_macros" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_source_file = { path = "../ruff_source_file" }
ruff_text_size = { path = "../ruff_text_size" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_workspace = { path = "../ruff_workspace" }
ruff_text_size = { path = "../ruff_text_size" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { workspace = true }
argfile = { workspace = true }
bincode = { workspace = true }
argfile = { version = "0.1.6" }
bincode = { version = "1.3.3" }
bitflags = { workspace = true }
cachedir = { workspace = true }
cachedir = { version = "0.3.0" }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "env"] }
clap_complete_command = { workspace = true }
clearscreen = { workspace = true }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "2.0.0" }
colored = { workspace = true }
filetime = { workspace = true }
glob = { workspace = true }
ignore = { workspace = true }
is-macro = { workspace = true }
itertools = { workspace = true }
log = { workspace = true }
notify = { workspace = true }
notify = { version = "6.1.1" }
path-absolutize = { workspace = true, features = ["once_cell_cache"] }
rayon = { workspace = true }
rayon = { version = "1.8.0" }
regex = { workspace = true }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
shellexpand = { workspace = true }
similar = { workspace = true }
strum = { workspace = true, features = [] }
thiserror = { workspace = true }
tracing = { workspace = true, features = ["log"] }
walkdir = { workspace = true }
wild = { workspace = true }
walkdir = { version = "2.3.2" }
wild = { version = "2" }
[dev-dependencies]
assert_cmd = { workspace = true }
assert_cmd = { version = "2.0.8" }
# Avoid writing colored snapshots when running tests from the terminal
colored = { workspace = true, features = ["no-color"]}
insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { workspace = true }
tempfile = { workspace = true }
insta-cmd = { version = "0.4.0" }
tempfile = "3.8.1"
test-case = { workspace = true }
ureq = { version = "2.9.1", features = [] }
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true }
mimalloc = "0.1.39"
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
tikv-jemallocator = { workspace = true }
tikv-jemallocator = "0.5.0"
[lints]
workspace = true

View File

@@ -17,7 +17,7 @@ use tracing::debug;
use ruff_diagnostics::SourceMap;
use ruff_linter::fs;
use ruff_linter::logging::{DisplayParseError, LogLevel};
use ruff_linter::logging::LogLevel;
use ruff_linter::registry::Rule;
use ruff_linter::rules::flake8_quotes::settings::Quote;
use ruff_linter::source_kind::{SourceError, SourceKind};
@@ -244,7 +244,7 @@ pub(crate) fn format_path(
// Extract the sources from the file.
let unformatted = match SourceKind::from_path(path, source_type) {
Ok(Some(source_kind)) => source_kind,
// Non-Python Jupyter notebook.
// Non Python Jupyter notebook
Ok(None) => return Ok(FormatResult::Skipped),
Err(err) => {
return Err(FormatCommandError::Read(Some(path.to_path_buf()), err));
@@ -321,22 +321,12 @@ pub(crate) fn format_source(
path: Option<&Path>,
settings: &FormatterSettings,
) -> Result<FormattedSource, FormatCommandError> {
match &source_kind {
match source_kind {
SourceKind::Python(unformatted) => {
let options = settings.to_format_options(source_type, unformatted);
let formatted = format_module_source(unformatted, options).map_err(|err| {
if let FormatModuleError::ParseError(err) = err {
DisplayParseError::from_source_kind(
err,
path.map(Path::to_path_buf),
source_kind,
)
.into()
} else {
FormatCommandError::Format(path.map(Path::to_path_buf), err)
}
})?;
let formatted = format_module_source(unformatted, options)
.map_err(|err| FormatCommandError::Format(path.map(Path::to_path_buf), err))?;
let formatted = formatted.into_code();
if formatted.len() == unformatted.len() && formatted == *unformatted {
@@ -362,19 +352,8 @@ pub(crate) fn format_source(
let unformatted = &notebook.source_code()[range];
// Format the cell.
let formatted =
format_module_source(unformatted, options.clone()).map_err(|err| {
if let FormatModuleError::ParseError(err) = err {
DisplayParseError::from_source_kind(
err,
path.map(Path::to_path_buf),
source_kind,
)
.into()
} else {
FormatCommandError::Format(path.map(Path::to_path_buf), err)
}
})?;
let formatted = format_module_source(unformatted, options.clone())
.map_err(|err| FormatCommandError::Format(path.map(Path::to_path_buf), err))?;
// If the cell is unchanged, skip it.
let formatted = formatted.as_code();
@@ -429,13 +408,11 @@ pub(crate) fn format_source(
pub(crate) enum FormatResult {
/// The file was formatted.
Formatted,
/// The file was formatted, [`SourceKind`] contains the formatted code
Diff {
unformatted: SourceKind,
formatted: SourceKind,
},
/// The file was unchanged, as the formatted contents matched the existing contents.
Unchanged,
@@ -538,7 +515,7 @@ impl<'a> FormatResults<'a> {
if changed > 0 && unchanged > 0 {
writeln!(
f,
"{} file{} {}, {} file{} {}",
"{} file{} {}, {} file{} left unchanged",
changed,
if changed == 1 { "" } else { "s" },
match self.mode {
@@ -547,10 +524,6 @@ impl<'a> FormatResults<'a> {
},
unchanged,
if unchanged == 1 { "" } else { "s" },
match self.mode {
FormatMode::Write => "left unchanged",
FormatMode::Check | FormatMode::Diff => "already formatted",
},
)
} else if changed > 0 {
writeln!(
@@ -566,13 +539,9 @@ impl<'a> FormatResults<'a> {
} else if unchanged > 0 {
writeln!(
f,
"{} file{} {}",
"{} file{} left unchanged",
unchanged,
if unchanged == 1 { "" } else { "s" },
match self.mode {
FormatMode::Write => "left unchanged",
FormatMode::Check | FormatMode::Diff => "already formatted",
},
)
} else {
Ok(())
@@ -584,7 +553,6 @@ impl<'a> FormatResults<'a> {
#[derive(Error, Debug)]
pub(crate) enum FormatCommandError {
Ignore(#[from] ignore::Error),
Parse(#[from] DisplayParseError),
Panic(Option<PathBuf>, PanicError),
Read(Option<PathBuf>, SourceError),
Format(Option<PathBuf>, FormatModuleError),
@@ -602,7 +570,6 @@ impl FormatCommandError {
None
}
}
Self::Parse(err) => err.path(),
Self::Panic(path, _)
| Self::Read(path, _)
| Self::Format(path, _)
@@ -636,9 +603,6 @@ impl Display for FormatCommandError {
)
}
}
Self::Parse(err) => {
write!(f, "{err}")
}
Self::Read(path, err) => {
if let Some(path) = path {
write!(

View File

@@ -1,6 +1,5 @@
#![cfg_attr(target_family = "wasm", allow(dead_code))]
use std::borrow::Cow;
use std::fs::File;
use std::io;
use std::ops::{Add, AddAssign};
@@ -11,8 +10,9 @@ use colored::Colorize;
use log::{debug, error, warn};
use rustc_hash::FxHashMap;
use crate::cache::{Cache, FileCacheKey, LintCacheData};
use ruff_diagnostics::Diagnostic;
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult, ParseSource};
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult};
use ruff_linter::logging::DisplayParseError;
use ruff_linter::message::Message;
use ruff_linter::pyproject_toml::lint_pyproject_toml;
@@ -24,12 +24,10 @@ use ruff_linter::{fs, IOError, SyntaxError};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::imports::ImportMap;
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
use ruff_source_file::{LineIndex, SourceCode, SourceFileBuilder};
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::Settings;
use crate::cache::{Cache, FileCacheKey, LintCacheData};
#[derive(Debug, Default, PartialEq)]
pub(crate) struct Diagnostics {
pub(crate) messages: Vec<Message>,
@@ -274,7 +272,6 @@ pub(crate) fn lint_path(
data: (messages, imports),
error: parse_error,
},
transformed,
fixed,
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
if let Ok(FixerResult {
@@ -303,40 +300,17 @@ pub(crate) fn lint_path(
flags::FixMode::Generate => {}
}
}
let transformed = if let Cow::Owned(transformed) = transformed {
transformed
} else {
source_kind
};
(result, transformed, fixed)
(result, fixed)
} else {
// If we fail to fix, lint the original source code.
let result = lint_only(
path,
package,
settings,
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let transformed = source_kind;
let result = lint_only(path, package, settings, noqa, &source_kind, source_type);
let fixed = FxHashMap::default();
(result, transformed, fixed)
(result, fixed)
}
} else {
let result = lint_only(
path,
package,
settings,
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let transformed = source_kind;
let result = lint_only(path, package, settings, noqa, &source_kind, source_type);
let fixed = FxHashMap::default();
(result, transformed, fixed)
(result, fixed)
};
let imports = imports.unwrap_or_default();
@@ -344,7 +318,7 @@ pub(crate) fn lint_path(
if let Some((cache, relative_path, key)) = caching {
// We don't cache parsing errors.
if parse_error.is_none() {
// `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
// `FixMode::Generate` and `FixMode::Diff` rely on side-effects (writing to disk,
// and writing the diff to stdout, respectively). If a file has diagnostics, we
// need to avoid reading from and writing to the cache in these modes.
if match fix_mode {
@@ -359,21 +333,28 @@ pub(crate) fn lint_path(
LintCacheData::from_messages(
&messages,
imports.clone(),
transformed.as_ipy_notebook().map(Notebook::index).cloned(),
source_kind.as_ipy_notebook().map(Notebook::index).cloned(),
),
);
}
}
}
if let Some(error) = parse_error {
if let Some(err) = parse_error {
error!(
"{}",
DisplayParseError::from_source_kind(error, Some(path.to_path_buf()), &transformed)
DisplayParseError::new(
err,
SourceCode::new(
source_kind.source_code(),
&LineIndex::from_source_text(source_kind.source_code())
),
&source_kind,
)
);
}
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = source_kind {
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook.into_index())])
} else {
FxHashMap::default()
@@ -424,7 +405,6 @@ pub(crate) fn lint_stdin(
data: (messages, imports),
error: parse_error,
},
transformed,
fixed,
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
if let Ok(FixerResult {
@@ -453,12 +433,8 @@ pub(crate) fn lint_stdin(
}
flags::FixMode::Generate => {}
}
let transformed = if let Cow::Owned(transformed) = transformed {
transformed
} else {
source_kind
};
(result, transformed, fixed)
(result, fixed)
} else {
// If we fail to fix, lint the original source code.
let result = lint_only(
@@ -468,17 +444,15 @@ pub(crate) fn lint_stdin(
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let fixed = FxHashMap::default();
// Write the contents to stdout anyway.
if fix_mode.is_apply() {
source_kind.write(&mut io::stdout().lock())?;
}
let transformed = source_kind;
let fixed = FxHashMap::default();
(result, transformed, fixed)
(result, fixed)
}
} else {
let result = lint_only(
@@ -488,23 +462,21 @@ pub(crate) fn lint_stdin(
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let transformed = source_kind;
let fixed = FxHashMap::default();
(result, transformed, fixed)
(result, fixed)
};
let imports = imports.unwrap_or_default();
if let Some(error) = parse_error {
if let Some(err) = parse_error {
error!(
"{}",
DisplayParseError::from_source_kind(error, path.map(Path::to_path_buf), &transformed)
"Failed to parse {}: {err}",
path.map_or_else(|| "-".into(), fs::relativize_path).bold()
);
}
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = source_kind {
FxHashMap::from_iter([(
path.map_or_else(|| "-".into(), |path| path.to_string_lossy().to_string()),
notebook.into_index(),

View File

@@ -13,7 +13,7 @@ use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, SarifEmitter, TextEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::registry::{AsRule, Rule};
@@ -125,7 +125,15 @@ impl Printer {
if let Some(fixables) = fixables {
let fix_prefix = format!("[{}]", "*".cyan());
if self.unsafe_fixes.is_hint() {
if self.unsafe_fixes.is_enabled() {
if fixables.applicable > 0 {
writeln!(
writer,
"{fix_prefix} {} fixable with the --fix option.",
fixables.applicable
)?;
}
} else {
if fixables.applicable > 0 && fixables.unapplicable_unsafe > 0 {
let es = if fixables.unapplicable_unsafe == 1 {
""
@@ -155,14 +163,6 @@ impl Printer {
fixables.unapplicable_unsafe
)?;
}
} else {
if fixables.applicable > 0 {
writeln!(
writer,
"{fix_prefix} {} fixable with the --fix option.",
fixables.applicable
)?;
}
}
}
} else {
@@ -291,9 +291,6 @@ impl Printer {
SerializationFormat::Azure => {
AzureEmitter.emit(writer, &diagnostics.messages, &context)?;
}
SerializationFormat::Sarif => {
SarifEmitter.emit(writer, &diagnostics.messages, &context)?;
}
}
writer.flush()?;

View File

@@ -139,99 +139,6 @@ if condition:
Ok(())
}
#[test]
fn docstring_options() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
[format]
docstring-code-format = true
docstring-code-line-length = 20
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--config"])
.arg(&ruff_toml)
.arg("-")
.pass_stdin(r#"
def f(x):
'''
Something about `f`. And an example:
.. code-block:: python
foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
Another example:
```py
foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
```
And another:
>>> foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
'''
pass
"#), @r###"
success: true
exit_code: 0
----- stdout -----
def f(x):
"""
Something about `f`. And an example:
.. code-block:: python
(
foo,
bar,
quux,
) = this_is_a_long_line(
lion,
hippo,
lemur,
bear,
)
Another example:
```py
(
foo,
bar,
quux,
) = this_is_a_long_line(
lion,
hippo,
lemur,
bear,
)
```
And another:
>>> (
... foo,
... bar,
... quux,
... ) = this_is_a_long_line(
... lion,
... hippo,
... lemur,
... bear,
... )
"""
pass
----- stderr -----
"###);
Ok(())
}
#[test]
fn mixed_line_endings() -> Result<()> {
let tempdir = TempDir::new()?;
@@ -255,7 +162,7 @@ fn mixed_line_endings() -> Result<()> {
----- stdout -----
----- stderr -----
2 files already formatted
2 files left unchanged
"###);
Ok(())
}
@@ -328,86 +235,6 @@ OTHER = "OTHER"
Ok(())
}
#[test]
fn syntax_error() -> Result<()> {
let tempdir = TempDir::new()?;
fs::write(
tempdir.path().join("main.py"),
r#"
from module import =
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.args(["format", "--no-cache", "--isolated", "--check"])
.arg("main.py"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse main.py:2:20: Unexpected token '='
"###);
Ok(())
}
#[test]
fn messages() -> Result<()> {
let tempdir = TempDir::new()?;
fs::write(
tempdir.path().join("main.py"),
r#"
from test import say_hy
if __name__ == "__main__":
say_hy("dear Ruff contributor")
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.args(["format", "--no-cache", "--isolated", "--check"])
.arg("main.py"), @r###"
success: false
exit_code: 1
----- stdout -----
Would reformat: main.py
1 file would be reformatted
----- stderr -----
"###);
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.args(["format", "--no-cache", "--isolated"])
.arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
"###);
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.args(["format", "--no-cache", "--isolated"])
.arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
1 file left unchanged
----- stderr -----
"###);
Ok(())
}
#[test]
fn force_exclude() -> Result<()> {
let tempdir = TempDir::new()?;
@@ -672,8 +499,7 @@ format = "json"
----- stderr -----
ruff failed
Cause: Failed to parse [RUFF-TOML-PATH]
Cause: TOML parse error at line 2, column 10
Cause: Failed to parse `[RUFF-TOML-PATH]`: TOML parse error at line 2, column 10
|
2 | format = "json"
| ^^^^^^
@@ -957,7 +783,7 @@ fn test_diff() {
----- stderr -----
2 files would be reformatted, 1 file already formatted
2 files would be reformatted, 1 file left unchanged
"###);
});
}

View File

@@ -284,8 +284,7 @@ fn stdin_fix_jupyter() {
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"print(1)"
"import os"
]
},
{
@@ -303,8 +302,7 @@ fn stdin_fix_jupyter() {
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"print(x)"
"import sys"
]
},
{
@@ -356,8 +354,8 @@ fn stdin_fix_jupyter() {
"nbformat": 4,
"nbformat_minor": 5
}"#), @r###"
success: false
exit_code: 1
success: true
exit_code: 0
----- stdout -----
{
"cells": [
@@ -367,9 +365,7 @@ fn stdin_fix_jupyter() {
"id": "dccc687c-96e2-4604-b957-a8a89b5bec06",
"metadata": {},
"outputs": [],
"source": [
"print(1)"
]
"source": []
},
{
"cell_type": "markdown",
@@ -385,9 +381,7 @@ fn stdin_fix_jupyter() {
"id": "cdce7b92-b0fb-4c02-86f6-e233b26fa84f",
"metadata": {},
"outputs": [],
"source": [
"print(x)"
]
"source": []
},
{
"cell_type": "code",
@@ -439,8 +433,7 @@ fn stdin_fix_jupyter() {
"nbformat_minor": 5
}
----- stderr -----
Jupyter.ipynb:cell 3:1:7: F821 Undefined name `x`
Found 3 errors (2 fixed, 1 remaining).
Found 2 errors (2 fixed, 0 remaining).
"###);
}
@@ -726,22 +719,6 @@ fn stdin_format_jupyter() {
"###);
}
#[test]
fn stdin_parse_error() {
let mut cmd = RuffCheck::default().build();
assert_cmd_snapshot!(cmd
.pass_stdin("from foo import =\n"), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:17: E999 SyntaxError: Unexpected token '='
Found 1 error.
----- stderr -----
error: Failed to parse at 1:17: Unexpected token '='
"###);
}
#[test]
fn show_source() {
let mut cmd = RuffCheck::default().args(["--show-source"]).build();
@@ -766,7 +743,6 @@ fn show_source() {
fn explain_status_codes_f401() {
assert_cmd_snapshot!(ruff_cmd().args(["--explain", "F401"]));
}
#[test]
fn explain_status_codes_ruf404() {
assert_cmd_snapshot!(ruff_cmd().args(["--explain", "RUF404"]), @r###"
@@ -1071,10 +1047,7 @@ fn unreadable_pyproject_toml() -> Result<()> {
err.chain()
.map(std::string::ToString::to_string)
.collect::<Vec<_>>(),
vec![
format!("Failed to read {}/pyproject.toml", tempdir.path().display()),
"Permission denied (os error 13)".to_string()
],
vec!["Permission denied (os error 13)".to_string()],
);
Ok(())
}
@@ -1185,44 +1158,6 @@ fn check_hints_hidden_unsafe_fixes_with_no_safe_fixes() {
"###);
}
#[test]
fn check_no_hint_for_hidden_unsafe_fixes_when_disabled() {
let mut cmd = RuffCheck::default()
.args(["--select", "F601,UP034", "--no-unsafe-fixes"])
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("x = {'a': 1, 'a': 1}\nprint(('foo'))\n"),
@r###"
success: false
exit_code: 1
----- stdout -----
-:1:14: F601 Dictionary key literal `'a'` repeated
-:2:7: UP034 [*] Avoid extraneous parentheses
Found 2 errors.
[*] 1 fixable with the --fix option.
----- stderr -----
"###);
}
#[test]
fn check_no_hint_for_hidden_unsafe_fixes_with_no_safe_fixes_when_disabled() {
let mut cmd = RuffCheck::default()
.args(["--select", "F601", "--no-unsafe-fixes"])
.build();
assert_cmd_snapshot!(cmd
.pass_stdin("x = {'a': 1, 'a': 1}\n"),
@r###"
success: false
exit_code: 1
----- stdout -----
-:1:14: F601 Dictionary key literal `'a'` repeated
Found 1 error.
----- stderr -----
"###);
}
#[test]
fn check_shows_unsafe_fixes_with_opt_in() {
let mut cmd = RuffCheck::default()

View File

@@ -19,6 +19,7 @@ ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_codegen = { path = "../ruff_python_codegen" }
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_notebook = { path = "../ruff_notebook" }
ruff_python_literal = { path = "../ruff_python_literal" }
ruff_python_parser = { path = "../ruff_python_parser" }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
@@ -27,27 +28,28 @@ ruff_workspace = { path = "../ruff_workspace", features = ["schemars"]}
anyhow = { workspace = true }
clap = { workspace = true }
ignore = { workspace = true }
imara-diff = { workspace = true }
indicatif = { workspace = true }
indicatif = "0.17.7"
itertools = { workspace = true }
libcst = { workspace = true }
once_cell = { workspace = true }
pretty_assertions = { workspace = true }
rayon = { workspace = true }
pretty_assertions = { version = "1.3.0" }
rayon = "1.8.0"
regex = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
similar = { workspace = true }
strum = { workspace = true }
tempfile = { workspace = true }
strum_macros = { workspace = true }
tempfile = "3.8.1"
toml = { workspace = true, features = ["parse"] }
tracing = { workspace = true }
tracing-indicatif = { workspace = true }
tracing-subscriber = { workspace = true, features = ["env-filter"] }
imara-diff = "0.1.5"
[dev-dependencies]
indoc = { workspace = true }
indoc = "2.0.4"
[features]
# Turn off rayon for profiling

View File

@@ -35,7 +35,6 @@ use ruff_linter::settings::types::{FilePattern, FilePatternSet};
use ruff_python_formatter::{
format_module_source, FormatModuleError, MagicTrailingComma, PreviewMode, PyFormatOptions,
};
use ruff_python_parser::ParseError;
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile, Resolver};
/// Find files that ruff would check so we can format them. Adapted from `ruff_cli`.
@@ -743,11 +742,11 @@ enum CheckFileError {
reformatted: String,
},
/// The input file was already invalid (not a bug)
SyntaxErrorInInput(ParseError),
SyntaxErrorInInput(FormatModuleError),
/// The formatter introduced a syntax error
SyntaxErrorInOutput {
formatted: String,
error: ParseError,
error: FormatModuleError,
},
/// The formatter failed (bug)
FormatError(FormatError),
@@ -797,7 +796,7 @@ fn format_dev_file(
let start = Instant::now();
let printed = match format_module_source(&content, options.clone()) {
Ok(printed) => printed,
Err(FormatModuleError::ParseError(err)) => {
Err(err @ (FormatModuleError::LexError(_) | FormatModuleError::ParseError(_))) => {
return Err(CheckFileError::SyntaxErrorInInput(err));
}
Err(FormatModuleError::FormatError(err)) => {
@@ -824,7 +823,7 @@ fn format_dev_file(
if stability_check {
let reformatted = match format_module_source(formatted, options) {
Ok(reformatted) => reformatted,
Err(FormatModuleError::ParseError(err)) => {
Err(err @ (FormatModuleError::LexError(_) | FormatModuleError::ParseError(_))) => {
return Err(CheckFileError::SyntaxErrorInOutput {
formatted: formatted.to_string(),
error: err,

View File

@@ -3,7 +3,6 @@
//! Used for <https://docs.astral.sh/ruff/rules/>.
use itertools::Itertools;
use std::borrow::Cow;
use strum::IntoEnumIterator;
use ruff_diagnostics::FixAvailability;
@@ -38,16 +37,6 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
let rule_name = rule.as_ref();
// If the message ends in a bracketed expression (like: "Use {replacement}"), escape the
// brackets. Otherwise, it'll be interpreted as an HTML attribute via the `attr_list`
// plugin. (Above, we'd convert to "Use {replacement\}".)
let message = rule.message_formats()[0];
let message = if let Some(prefix) = message.strip_suffix('}') {
Cow::Owned(format!("{prefix}\\}}"))
} else {
Cow::Borrowed(message)
};
#[allow(clippy::or_fun_call)]
table_out.push_str(&format!(
"| {0}{1} {{ #{0}{1} }} | {2} | {3} | {4} |",
@@ -57,7 +46,7 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
.is_some()
.then_some(format_args!("[{rule_name}](rules/{rule_name}.md)"))
.unwrap_or(format_args!("{rule_name}")),
message,
rule.message_formats()[0],
status_token,
));
table_out.push('\n');

View File

@@ -24,7 +24,11 @@ pub(crate) fn main(args: &Args) -> Result<()> {
args.file.display()
)
})?;
let python_ast = parse(source_kind.source_code(), source_type.as_mode())?;
let python_ast = parse(
source_kind.source_code(),
source_type.as_mode(),
&args.file.to_string_lossy(),
)?;
println!("{python_ast:#?}");
Ok(())
}

View File

@@ -22,7 +22,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
println!("{}", ruff_notebook::round_trip(path)?);
} else {
let contents = fs::read_to_string(&args.file)?;
println!("{}", round_trip(&contents)?);
println!("{}", round_trip(&contents, &args.file.to_string_lossy())?);
}
Ok(())
}

View File

@@ -7,7 +7,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize};
/// A text edit to be applied to a source file. Inserts, deletes, or replaces
/// content at a given location.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Edit {
/// The start location of the edit.

View File

@@ -15,7 +15,7 @@ ruff_cache = { path = "../ruff_cache" }
ruff_macros = { path = "../ruff_macros" }
ruff_text_size = { path = "../ruff_text_size" }
drop_bomb = { workspace = true }
drop_bomb = { version = "0.1.5" }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }

View File

@@ -16,7 +16,7 @@ license = { workspace = true }
ruff_macros = { path = "../ruff_macros" }
[dev-dependencies]
static_assertions = { workspace = true }
static_assertions = "1.1.0"
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.1.11"
version = "0.1.7"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -29,38 +29,37 @@ ruff_python_parser = { path = "../ruff_python_parser" }
ruff_source_file = { path = "../ruff_source_file", features = ["serde"] }
ruff_text_size = { path = "../ruff_text_size" }
aho-corasick = { workspace = true }
annotate-snippets = { workspace = true, features = ["color"] }
aho-corasick = { version = "1.1.2" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { workspace = true }
bitflags = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "string"], optional = true }
colored = { workspace = true }
fern = { workspace = true }
fern = { version = "0.6.1" }
glob = { workspace = true }
globset = { workspace = true }
imperative = { workspace = true }
imperative = { version = "1.0.4" }
is-macro = { workspace = true }
is-wsl = { workspace = true }
itertools = { workspace = true }
libcst = { workspace = true }
log = { workspace = true }
memchr = { workspace = true }
natord = { workspace = true }
natord = { version = "1.0.9" }
once_cell = { workspace = true }
path-absolutize = { workspace = true, features = [
"once_cell_cache",
"use_unix_paths_on_wasm",
] }
pathdiff = { workspace = true }
pep440_rs = { workspace = true, features = ["serde"] }
pyproject-toml = { workspace = true }
quick-junit = { workspace = true }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.3.12", features = ["serde"] }
pyproject-toml = { version = "0.8.1" }
quick-junit = { version = "0.3.5" }
regex = { workspace = true }
result-like = { workspace = true }
result-like = { version = "0.4.6" }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }
semver = { workspace = true }
semver = { version = "1.0.20" }
serde = { workspace = true }
serde_json = { workspace = true }
similar = { workspace = true }
@@ -69,18 +68,18 @@ strum = { workspace = true }
strum_macros = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
typed-arena = { workspace = true }
typed-arena = { version = "2.0.2" }
unicode-width = { workspace = true }
unicode_names2 = { workspace = true }
url = { workspace = true }
wsl = { version = "0.1.0" }
[dev-dependencies]
insta = { workspace = true }
pretty_assertions = { workspace = true }
pretty_assertions = "1.3.0"
test-case = { workspace = true }
# Disable colored output in tests
colored = { workspace = true, features = ["no-color"] }
tempfile = { workspace = true }
tempfile = "3.8.1"
[features]
default = []

View File

@@ -182,123 +182,3 @@ class Foo(abc.ABC):
return 1
else:
return 1.5
def func(x: int):
try:
pass
except:
return 2
def func(x: int):
try:
pass
except:
return 2
else:
return 3
def func(x: int):
if not x:
raise ValueError
else:
raise TypeError
def func(x: int):
if not x:
raise ValueError
else:
return 1
from typing import overload
@overload
def overloaded(i: int) -> "int":
...
@overload
def overloaded(i: "str") -> "str":
...
def overloaded(i):
return i
def func(x: int):
if not x:
return 1
raise ValueError
def func(x: int):
if not x:
return 1
else:
return 2
raise ValueError
def func():
try:
raise ValueError
except:
return 2
def func():
try:
return 1
except:
pass
def func(x: int):
for _ in range(3):
if x > 0:
return 1
raise ValueError
def func(x: int):
if x > 5:
raise ValueError
else:
pass
def func(x: int):
if x > 5:
raise ValueError
elif x > 10:
pass
def func(x: int):
if x > 5:
raise ValueError
elif x > 10:
return 5
def func():
try:
return 5
except:
pass
raise ValueError
def func(x: int):
match x:
case [1, 2, 3]:
return 1
case y:
return "foo"

View File

@@ -8,7 +8,6 @@ def func(address):
# Error
"0.0.0.0"
'0.0.0.0'
f"0.0.0.0"
# Error

View File

@@ -5,9 +5,6 @@ with open("/abc/tmp", "w") as f:
with open("/tmp/abc", "w") as f:
f.write("def")
with open(f"/tmp/abc", "w") as f:
f.write("def")
with open("/var/tmp/123", "w") as f:
f.write("def")

View File

@@ -1,2 +0,0 @@
import telnetlib # S401
from telnetlib import Telnet # S401

View File

@@ -1,2 +0,0 @@
import ftplib # S402
from ftplib import FTP # S402

View File

@@ -1,8 +0,0 @@
import dill # S403
from dill import objects # S403
import shelve
from shelve import open
import cPickle
from cPickle import load
import pickle
from pickle import load

View File

@@ -1,3 +0,0 @@
import subprocess # S404
from subprocess import Popen # S404
from subprocess import Popen as pop # S404

View File

@@ -1,4 +0,0 @@
import xml.etree.cElementTree # S405
from xml.etree import cElementTree # S405
import xml.etree.ElementTree # S405
from xml.etree import ElementTree # S405

View File

@@ -1,3 +0,0 @@
from xml import sax # S406
import xml.sax as xmls # S406
import xml.sax # S406

View File

@@ -1,2 +0,0 @@
from xml.dom import expatbuilder # S407
import xml.dom.expatbuilder # S407

View File

@@ -1,2 +0,0 @@
from xml.dom.minidom import parseString # S408
import xml.dom.minidom # S408

View File

@@ -1,2 +0,0 @@
from xml.dom.pulldom import parseString # S409
import xml.dom.pulldom # S409

View File

@@ -1,2 +0,0 @@
import lxml # S410
from lxml import etree # S410

View File

@@ -1,2 +0,0 @@
import xmlrpc # S411
from xmlrpc import server # S411

View File

@@ -1 +0,0 @@
from twisted.web.twcgi import CGIScript # S412

View File

@@ -1,4 +0,0 @@
import Crypto.Hash # S413
from Crypto.Hash import MD2 # S413
import Crypto.PublicKey # S413
from Crypto.PublicKey import RSA # S413

View File

@@ -1,3 +0,0 @@
import pyghmi # S415
from pyghmi import foo # S415

View File

@@ -1,16 +0,0 @@
import ssl
from ssl import wrap_socket
from OpenSSL import SSL
from OpenSSL.SSL import Context
wrap_socket(ssl_version=ssl.PROTOCOL_SSLv3) # S502
ssl.wrap_socket(ssl_version=ssl.PROTOCOL_TLSv1) # S502
ssl.wrap_socket(ssl_version=ssl.PROTOCOL_SSLv2) # S502
SSL.Context(method=SSL.SSLv2_METHOD) # S502
SSL.Context(method=SSL.SSLv23_METHOD) # S502
Context(method=SSL.SSLv3_METHOD) # S502
Context(method=SSL.TLSv1_METHOD) # S502
wrap_socket(ssl_version=ssl.PROTOCOL_TLS_CLIENT) # OK
SSL.Context(method=SSL.TLS_SERVER_METHOD) # OK
func(ssl_version=ssl.PROTOCOL_TLSv1_2) # OK

View File

@@ -1,23 +0,0 @@
import ssl
from OpenSSL import SSL
from ssl import PROTOCOL_TLSv1
def func(version=ssl.PROTOCOL_SSLv2): # S503
pass
def func(protocol=SSL.SSLv2_METHOD): # S503
pass
def func(version=SSL.SSLv23_METHOD): # S503
pass
def func(protocol=PROTOCOL_TLSv1): # S503
pass
def func(version=SSL.TLSv1_2_METHOD): # OK
pass

View File

@@ -1,15 +0,0 @@
import ssl
from ssl import wrap_socket
ssl.wrap_socket() # S504
wrap_socket() # S504
ssl.wrap_socket(ssl_version=ssl.PROTOCOL_TLSv1_2) # OK
class Class:
def wrap_socket(self):
pass
obj = Class()
obj.wrap_socket() # OK
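For context on the S502–S504 fixtures above: the flagged patterns are `ssl.wrap_socket` and the SSLv2/SSLv3 protocol constants. A hedged sketch of the modern stdlib replacement (not part of the diff; needs network access to actually run):

import socket
import ssl

# create_default_context() verifies certificates and hostnames by default
# and negotiates only TLS versions, never SSLv2/SSLv3.
context = ssl.create_default_context()
with socket.create_connection(("example.com", 443)) as sock:
    with context.wrap_socket(sock, server_hostname="example.com") as tls:
        print(tls.version())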

View File

@@ -2,7 +2,7 @@ import json
import yaml
from yaml import CSafeLoader
from yaml import SafeLoader
from yaml.loader import SafeLoader as NewSafeLoader
from yaml import SafeLoader as NewSafeLoader
def test_yaml_load():
@@ -29,8 +29,3 @@ yaml.load("{}", yaml.SafeLoader)
yaml.load("{}", CSafeLoader)
yaml.load("{}", yaml.CSafeLoader)
yaml.load("{}", NewSafeLoader)
yaml.load("{}", Loader=SafeLoader)
yaml.load("{}", Loader=yaml.SafeLoader)
yaml.load("{}", Loader=CSafeLoader)
yaml.load("{}", Loader=yaml.CSafeLoader)
yaml.load("{}", Loader=NewSafeLoader)

View File

@@ -165,19 +165,3 @@ class AbstractTestModel5(models.Model):
def my_beautiful_method(self):
return 2
# Subclass with its own __str__
class SubclassTestModel1(TestModel1):
def __str__(self):
return self.new_field
# Subclass with inherited __str__
class SubclassTestModel2(TestModel4):
pass
# Subclass without __str__
class SubclassTestModel3(TestModel1):
pass

View File

@@ -127,21 +127,3 @@ class MultipleConsecutiveFields(models.Model):
pass
middle_name = models.CharField(max_length=32)
class BaseModel(models.Model):
pass
class StrBeforeFieldInheritedModel(BaseModel):
"""Model with `__str__` before fields."""
class Meta:
verbose_name = "test"
verbose_name_plural = "tests"
def __str__(self):
return "foobar"
first_name = models.CharField(max_length=32)

View File

@@ -27,7 +27,7 @@ def f_ok():
raise RuntimeError(msg)
def f_msg_defined():
def f_unfixable():
msg = "hello"
raise RuntimeError("This is an example exception")

View File

@@ -19,8 +19,5 @@ foo(**{buzz: True})
foo(**{"": True})
foo(**{f"buzz__{bar}": True})
abc(**{"for": 3})
foo(**{},)
# Duplicated key names won't be fixed, to avoid syntax errors.
abc(**{'a': b}, **{'a': c}) # PIE804
abc(a=1, **{'a': c}, **{'b': c}) # PIE804
foo(**{},)
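A minimal sketch of the PIE804 rewrite exercised above, with a hypothetical `foo`: a `**{...}` splat of static string keys becomes plain keyword arguments, while dynamic or duplicated keys (per the comment in the fixture) are left alone:

def foo(**kwargs):
    return kwargs

foo(**{"bar": True})  # PIE804: unnecessary dict-literal splat
foo(bar=True)         # the suggested equivalent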

View File

@@ -1,13 +1,8 @@
import typing
import typing_extensions
from typing import TypeVar
from typing_extensions import ParamSpec, TypeVarTuple
_T = typing.TypeVar("_T")
_Ts = typing_extensions.TypeVarTuple("_Ts")
_P = ParamSpec("_P")
_P2 = typing.ParamSpec("_P2")
_Ts2 = TypeVarTuple("_Ts2")
_P = TypeVar("_P")
# OK
_UsedTypeVar = TypeVar("_UsedTypeVar")

View File

@@ -1,13 +1,8 @@
import typing
import typing_extensions
from typing import TypeVar
from typing_extensions import ParamSpec, TypeVarTuple
_T = typing.TypeVar("_T")
_Ts = typing_extensions.TypeVarTuple("_Ts")
_P = ParamSpec("_P")
_P2 = typing.ParamSpec("_P2")
_Ts2 = TypeVarTuple("_Ts2")
_P = TypeVar("_P")
# OK
_UsedTypeVar = TypeVar("_UsedTypeVar")

View File

@@ -1,5 +1,5 @@
import typing
from typing import Protocol, TypeVar
from typing import Protocol
class _Foo(Protocol):
@@ -10,23 +10,9 @@ class _Bar(typing.Protocol):
bar: int
_T = TypeVar("_T")
class _Baz(Protocol[_T]):
x: _T
# OK
class _UsedPrivateProtocol(Protocol):
bar: int
# Also OK
class _UsedGenericPrivateProtocol(Protocol[_T]):
x: _T
def uses_some_private_protocols(
arg: _UsedPrivateProtocol, arg2: _UsedGenericPrivateProtocol[int]
) -> None: ...
def uses__UsedPrivateProtocol(arg: _UsedPrivateProtocol) -> None: ...

View File

@@ -1,5 +1,5 @@
import typing
from typing import Protocol, TypeVar
from typing import Protocol
class _Foo(object, Protocol):
@@ -10,23 +10,9 @@ class _Bar(typing.Protocol):
bar: int
_T = TypeVar("_T")
class _Baz(Protocol[_T]):
x: _T
# OK
class _UsedPrivateProtocol(Protocol):
bar: int
# Also OK
class _UsedGenericPrivateProtocol(Protocol[_T]):
x: _T
def uses_some_private_protocols(
arg: _UsedPrivateProtocol, arg2: _UsedGenericPrivateProtocol[int]
) -> None: ...
def uses__UsedPrivateProtocol(arg: _UsedPrivateProtocol) -> None: ...

View File

@@ -32,7 +32,6 @@ def f8(x: bytes = b"50 character byte stringgggggggggggggggggggggggggg\xff") ->
foo: str = "50 character stringggggggggggggggggggggggggggggggg"
bar: str = "51 character stringgggggggggggggggggggggggggggggggg"
baz: str = f"51 character stringgggggggggggggggggggggggggggggggg"
baz: bytes = b"50 character byte stringgggggggggggggggggggggggggg"

View File

@@ -29,10 +29,6 @@ baz: bytes = b"50 character byte stringgggggggggggggggggggggggggg" # OK
qux: bytes = b"51 character byte stringggggggggggggggggggggggggggg\xff" # Error: PYI053
ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
fbar: str = f"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053
class Demo:
"""Docstrings are excluded from this rule. Some padding.""" # OK

View File

@@ -37,28 +37,3 @@ def func():
# PYI055
x: Union[type[requests_mock.Mocker], type[httpretty], type[str]] = requests_mock.Mocker
def convert_union(union: UnionType) -> _T | None:
converters: tuple[
type[_T] | type[Converter[_T]] | Converter[_T] | Callable[[str], _T], ... # PYI055
] = union.__args__
...
def convert_union(union: UnionType) -> _T | None:
converters: tuple[
Union[type[_T] | type[Converter[_T]] | Converter[_T] | Callable[[str], _T]], ... # PYI055
] = union.__args__
...
def convert_union(union: UnionType) -> _T | None:
converters: tuple[
Union[type[_T] | type[Converter[_T]]] | Converter[_T] | Callable[[str], _T], ... # PYI055
] = union.__args__
...
def convert_union(union: UnionType) -> _T | None:
converters: tuple[
Union[type[_T] | type[Converter[_T]] | str] | Converter[_T] | Callable[[str], _T], ... # PYI055
] = union.__args__
...
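The PYI055 cases above union several `type[...]` entries; the rewrite the rule suggests, sketched here under Python 3.10+ union syntax, folds them into one `type[...]` over a union:

from typing import Union

x: Union[type[int], type[str]]  # PYI055: union of type[...] entries
y: type[int | str]              # the collapsed form the rule prefers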

View File

@@ -1,174 +0,0 @@
def scope():
from collections.abc import Generator
class IteratorReturningSimpleGenerator1:
def __iter__(self) -> Generator:
... # PYI058 (use `Iterator`)
def scope():
import typing
class IteratorReturningSimpleGenerator2:
def __iter__(self) -> typing.Generator:
... # PYI058 (use `Iterator`)
def scope():
import collections.abc
class IteratorReturningSimpleGenerator3:
def __iter__(self) -> collections.abc.Generator:
... # PYI058 (use `Iterator`)
def scope():
import collections.abc
from typing import Any
class IteratorReturningSimpleGenerator4:
def __iter__(self, /) -> collections.abc.Generator[str, Any, None]:
... # PYI058 (use `Iterator`)
def scope():
import collections.abc
import typing
class IteratorReturningSimpleGenerator5:
def __iter__(self, /) -> collections.abc.Generator[str, None, typing.Any]:
... # PYI058 (use `Iterator`)
def scope():
from collections.abc import Generator
class IteratorReturningSimpleGenerator6:
def __iter__(self, /) -> Generator[str, None, None]:
... # PYI058 (use `Iterator`)
def scope():
import typing_extensions
class AsyncIteratorReturningSimpleAsyncGenerator1:
def __aiter__(
self,
) -> typing_extensions.AsyncGenerator:
... # PYI058 (Use `AsyncIterator`)
def scope():
import collections.abc
class AsyncIteratorReturningSimpleAsyncGenerator2:
def __aiter__(self, /) -> collections.abc.AsyncGenerator[str, Any]:
... # PYI058 (Use `AsyncIterator`)
def scope():
import collections.abc
class AsyncIteratorReturningSimpleAsyncGenerator3:
def __aiter__(self, /) -> collections.abc.AsyncGenerator[str, None]:
... # PYI058 (Use `AsyncIterator`)
def scope():
from typing import Iterator
class CorrectIterator:
def __iter__(self) -> Iterator[str]:
... # OK
def scope():
import collections.abc
class CorrectAsyncIterator:
def __aiter__(self) -> collections.abc.AsyncIterator[int]:
... # OK
def scope():
import typing
class Fine:
def __iter__(self) -> typing.Self:
... # OK
def scope():
class StrangeButWeWontComplainHere:
def __aiter__(self) -> list[bytes]:
... # OK
def scope():
from collections.abc import Generator
def __iter__(self) -> Generator:
... # OK (not in class scope)
def scope():
from collections.abc import AsyncGenerator
def __aiter__(self) -> AsyncGenerator:
... # OK (not in class scope)
def scope():
from collections.abc import Generator
class IteratorReturningComplexGenerator:
def __iter__(self) -> Generator[str, int, bytes]:
... # OK
def scope():
from collections.abc import AsyncGenerator
class AsyncIteratorReturningComplexAsyncGenerator:
def __aiter__(self) -> AsyncGenerator[str, int]:
... # OK
def scope():
from collections.abc import AsyncGenerator
class ClassWithInvalidAsyncAiterMethod:
async def __aiter__(self) -> AsyncGenerator:
... # OK
def scope():
from collections.abc import Generator
class IteratorWithUnusualParameters1:
def __iter__(self, foo) -> Generator:
... # OK
def scope():
from collections.abc import Generator
class IteratorWithUnusualParameters2:
def __iter__(self, *, bar) -> Generator:
... # OK
def scope():
from collections.abc import Generator
class IteratorWithUnusualParameters3:
def __iter__(self, *args) -> Generator:
... # OK
def scope():
from collections.abc import Generator
class IteratorWithUnusualParameters4:
def __iter__(self, **kwargs) -> Generator:
... # OK

View File

@@ -1,128 +0,0 @@
def scope():
from collections.abc import Generator
class IteratorReturningSimpleGenerator1:
def __iter__(self) -> Generator: ... # PYI058 (use `Iterator`)
def scope():
import typing
class IteratorReturningSimpleGenerator2:
def __iter__(self) -> typing.Generator: ... # PYI058 (use `Iterator`)
def scope():
import collections.abc
class IteratorReturningSimpleGenerator3:
def __iter__(self) -> collections.abc.Generator: ... # PYI058 (use `Iterator`)
def scope():
import collections.abc
from typing import Any
class IteratorReturningSimpleGenerator4:
def __iter__(self, /) -> collections.abc.Generator[str, Any, None]: ... # PYI058 (use `Iterator`)
def scope():
import collections.abc
import typing
class IteratorReturningSimpleGenerator5:
def __iter__(self, /) -> collections.abc.Generator[str, None, typing.Any]: ... # PYI058 (use `Iterator`)
def scope():
from collections.abc import Generator
class IteratorReturningSimpleGenerator6:
def __iter__(self, /) -> Generator[str, None, None]: ... # PYI058 (use `Iterator`)
def scope():
import typing_extensions
class AsyncIteratorReturningSimpleAsyncGenerator1:
def __aiter__(self,) -> typing_extensions.AsyncGenerator: ... # PYI058 (Use `AsyncIterator`)
def scope():
import collections.abc
class AsyncIteratorReturningSimpleAsyncGenerator3:
def __aiter__(self, /) -> collections.abc.AsyncGenerator[str, None]:
... # PYI058 (Use `AsyncIterator`)
def scope():
import collections.abc
class AsyncIteratorReturningSimpleAsyncGenerator3:
def __aiter__(self, /) -> collections.abc.AsyncGenerator[str, None]: ... # PYI058 (Use `AsyncIterator`)
def scope():
from typing import Iterator
class CorrectIterator:
def __iter__(self) -> Iterator[str]: ... # OK
def scope():
import collections.abc
class CorrectAsyncIterator:
def __aiter__(self) -> collections.abc.AsyncIterator[int]: ... # OK
def scope():
import typing
class Fine:
def __iter__(self) -> typing.Self: ... # OK
def scope():
class StrangeButWeWontComplainHere:
def __aiter__(self) -> list[bytes]: ... # OK
def scope():
from collections.abc import Generator
def __iter__(self) -> Generator: ... # OK (not in class scope)
def scope():
from collections.abc import AsyncGenerator
def __aiter__(self) -> AsyncGenerator: ... # OK (not in class scope)
def scope():
from collections.abc import Generator
class IteratorReturningComplexGenerator:
def __iter__(self) -> Generator[str, int, bytes]: ... # OK
def scope():
from collections.abc import AsyncGenerator
class AsyncIteratorReturningComplexAsyncGenerator:
def __aiter__(self) -> AsyncGenerator[str, int]: ... # OK
def scope():
from collections.abc import AsyncGenerator
class ClassWithInvalidAsyncAiterMethod:
async def __aiter__(self) -> AsyncGenerator: ... # OK
def scope():
from collections.abc import Generator
class IteratorWithUnusualParameters1:
def __iter__(self, foo) -> Generator: ... # OK
def scope():
from collections.abc import Generator
class IteratorWithUnusualParameters2:
def __iter__(self, *, bar) -> Generator: ... # OK
def scope():
from collections.abc import Generator
class IteratorWithUnusualParameters3:
def __iter__(self, *args) -> Generator: ... # OK
def scope():
from collections.abc import Generator
class IteratorWithUnusualParameters4:
def __iter__(self, **kwargs) -> Generator: ... # OK
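What PYI058 asks for, per the fixture comments ("use `Iterator`"): when `__iter__` returns a generator whose send and return types carry no information, annotate the simpler protocol type instead. A small sketch:

from collections.abc import Iterator

class Counter:
    def __init__(self, n: int) -> None:
        self.n = n

    # Generator[int, None, None] says nothing beyond Iterator[int] here,
    # so the narrower protocol annotation is preferred.
    def __iter__(self) -> Iterator[int]:
        yield from range(self.n)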

View File

@@ -159,7 +159,7 @@ async with asyncio.timeout(1):
async with trio.move_on_at(1):
pass
# Do not suppress combination, if a context manager is already combined with another.
# Do not supress combination, if a context manager is already combined with another.
async with asyncio.timeout(1), A():
async with B():
pass

View File

@@ -1,5 +1,6 @@
# Errors
"yoda" == compare # SIM300
"yoda" == compare # SIM300
42 == age # SIM300
("a", "b") == compare # SIM300
"yoda" <= compare # SIM300
@@ -12,17 +13,10 @@ YODA > age # SIM300
YODA >= age # SIM300
JediOrder.YODA == age # SIM300
0 < (number - 100) # SIM300
SomeClass().settings.SOME_CONSTANT_VALUE > (60 * 60) # SIM300
B<A[0][0]or B
B or(B)<A[0][0]
# Errors in preview
['upper'] == UPPER_LIST
{} == DummyHandler.CONFIG
# Errors in stable
UPPER_LIST == ['upper']
DummyHandler.CONFIG == {}
# OK
compare == "yoda"
age == 42
@@ -37,6 +31,3 @@ age <= YODA
YODA == YODA
age == JediOrder.YODA
(number - 100) > 0
SECONDS_IN_DAY == 60 * 60 * 24 # Error in 0.1.8
SomeClass().settings.SOME_CONSTANT_VALUE > (60 * 60) # Error in 0.1.8
{"non-empty-dict": "is-ok"} == DummyHandler.CONFIG

View File

@@ -19,32 +19,8 @@ async def func():
bar = "bar"
trio.sleep(bar)
x, y = 0, 2000
trio.sleep(x) # TRIO115
trio.sleep(y) # OK
(a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
trio.sleep(c) # TRIO115
trio.sleep(d) # OK
trio.sleep(e) # TRIO115
m_x, m_y = 0
trio.sleep(m_y) # OK
trio.sleep(m_x) # OK
m_a = m_b = 0
trio.sleep(m_a) # TRIO115
trio.sleep(m_b) # TRIO115
m_c = (m_d, m_e) = (0, 0)
trio.sleep(m_c) # OK
trio.sleep(m_d) # TRIO115
trio.sleep(m_e) # TRIO115
def func():
import trio
trio.run(trio.sleep(0)) # TRIO115
@@ -57,10 +33,3 @@ def func():
async def func():
await sleep(seconds=0) # TRIO115
def func():
import trio
if (walrus := 0) == 0:
trio.sleep(walrus) # TRIO115
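TRIO115 rewrites `trio.sleep(0)` (including zeros the rule can trace through the assignments above) into an explicit checkpoint. A sketch, assuming the third-party trio package is installed:

import trio

async def main() -> None:
    await trio.sleep(0)               # TRIO115: flagged
    await trio.lowlevel.checkpoint()  # same scheduling effect, states intent

trio.run(main)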

View File

@@ -1,18 +0,0 @@
from __future__ import annotations
from typing import TypeVar
x: "int" | str # TCH006
x: ("int" | str) | "bool" # TCH006
def func():
x: "int" | str # OK
z: list[str, str | "int"] = [] # TCH006
type A = Value["int" | str] # OK
OldS = TypeVar('OldS', int | 'str', str) # TCH006

View File

@@ -1,16 +0,0 @@
from typing import TypeVar
x: "int" | str # TCH006
x: ("int" | str) | "bool" # TCH006
def func():
x: "int" | str # OK
z: list[str, str | "int"] = [] # TCH006
type A = Value["int" | str] # OK
OldS = TypeVar('OldS', int | 'str', str) # TCH006
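Why the TCH006 cases are errors, sketched: at module or class level the annotation expression is evaluated at import time (absent `from __future__ import annotations`), so `"int" | str` applies `|` to a plain string and raises TypeError, while annotations inside a function body are never evaluated, which matches the OK case above.

# Raises at import time without postponed evaluation:
#   TypeError: unsupported operand type(s) for |: 'str' and 'type'
# x: "int" | str
x: "int | str" = 1  # quoting the whole union keeps it unevaluated everywhere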

View File

@@ -1,7 +0,0 @@
"""Add `TYPE_CHECKING` to an existing `typing` import. Another member is moved."""
from __future__ import annotations
from typing import Final
Const: Final[dict] = {}

View File

@@ -1,7 +0,0 @@
"""Using `TYPE_CHECKING` from an existing `typing` import. Another member is moved."""
from __future__ import annotations
from typing import Final, TYPE_CHECKING
Const: Final[dict] = {}

View File

@@ -1,7 +0,0 @@
"""Using `TYPE_CHECKING` from an existing `typing` import. Another member is moved."""
from __future__ import annotations
from typing import Final, Mapping
Const: Final[dict] = {}

View File

@@ -1,92 +0,0 @@
def f():
from pandas import DataFrame
def baz() -> DataFrame:
...
def f():
from pandas import DataFrame
def baz() -> DataFrame[int]:
...
def f():
from pandas import DataFrame
def baz() -> DataFrame["int"]:
...
def f():
import pandas as pd
def baz() -> pd.DataFrame:
...
def f():
import pandas as pd
def baz() -> pd.DataFrame.Extra:
...
def f():
import pandas as pd
def baz() -> pd.DataFrame | int:
...
def f():
from pandas import DataFrame
def baz() -> DataFrame():
...
def f():
from typing import Literal
from pandas import DataFrame
def baz() -> DataFrame[Literal["int"]]:
...
def f():
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from pandas import DataFrame
def func(value: DataFrame):
...
def f():
from pandas import DataFrame, Series
def baz() -> DataFrame | Series:
...
def f():
from pandas import DataFrame, Series
def baz() -> (
DataFrame |
Series
):
...
class C:
x: DataFrame[
int
] = 1
def func() -> DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]:
...

View File

@@ -4,8 +4,6 @@ import datetime
from array import array
from dataclasses import dataclass
from uuid import UUID # TCH003
from collections.abc import Sequence
from pydantic import validate_call
import attrs
from attrs import frozen
@@ -24,8 +22,3 @@ class B:
@dataclass
class C:
x: UUID
@validate_call(config={'arbitrary_types_allowed': True})
def test(user: Sequence):
...

View File

@@ -1,4 +0,0 @@
from a import x
import b
from c import y
import d

View File

@@ -55,6 +55,3 @@ def model_assign() -> None:
Bad = apps.get_model() # N806
Bad = apps.get_model(model_name="Stream") # N806
Address: Type = apps.get_model("zerver", variable) # OK
ValidationError = import_string(variable) # N806

View File

@@ -63,8 +63,3 @@ for i in list(foo_tuple): # Ok
for i in list(foo_set): # Ok
foo_set.append(i + 1)
x, y, nested_tuple = (1, 2, (3, 4, 5))
for i in list(nested_tuple): # PERF101
pass
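PERF101 flags copying an already-iterable container with `list()` just to loop over it; the fix, sketched:

nested_tuple = (3, 4, 5)
for i in list(nested_tuple):  # PERF101: needless copy
    pass
for i in nested_tuple:        # iterate the tuple directly
    pass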

View File

@@ -72,15 +72,3 @@ a = 42 # (Two spaces)
# EF Means test is giving error and Failing
#! Means test is segfaulting
# 8 Means test runs forever
#: Colon prefix is okay
###This is a variable ###
# We should strip the space, but preserve the hashes.
#: E266:1:3
## Foo
a = 1 ## Foo
a = 1 #:Foo

View File

@@ -60,6 +60,3 @@ def f():
if (a and
b):
pass
#: Okay
def f():
return 1

Some files were not shown because too many files have changed in this diff.