Compare commits
69 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c014622003 | ||
|
|
b6fb972e6f | ||
|
|
18452cf477 | ||
|
|
cb99815c3e | ||
|
|
45f603000d | ||
|
|
1a65e544c5 | ||
|
|
4d2ee5bf98 | ||
|
|
8314c8bb05 | ||
|
|
cb201bc4a5 | ||
|
|
6c0068eeec | ||
|
|
c306f85691 | ||
|
|
b972455ac7 | ||
|
|
5559827a78 | ||
|
|
cb8eea64a8 | ||
|
|
8e9bf84047 | ||
|
|
2993c342d2 | ||
|
|
108260298f | ||
|
|
a53d59f6bd | ||
|
|
1026ece946 | ||
|
|
f452bf8cad | ||
|
|
07380e0657 | ||
|
|
3aa6a30395 | ||
|
|
efb76ffa64 | ||
|
|
4e461cbf03 | ||
|
|
93417b5644 | ||
|
|
b8dd499b2a | ||
|
|
cd2bf26845 | ||
|
|
6e36dcfefe | ||
|
|
febc69ab48 | ||
|
|
6c2613b44e | ||
|
|
cb8a2f5615 | ||
|
|
b7b137abc8 | ||
|
|
f69a35a021 | ||
|
|
829a808526 | ||
|
|
85fc57e7f9 | ||
|
|
20e33bf514 | ||
|
|
b7dd2b5941 | ||
|
|
e043bd46b5 | ||
|
|
d0d88d9375 | ||
|
|
a224f19903 | ||
|
|
2414298289 | ||
|
|
6bbabceead | ||
|
|
04ec11a73d | ||
|
|
b021ede481 | ||
|
|
96ae9fe685 | ||
|
|
cdac90ef68 | ||
|
|
fcc08894cf | ||
|
|
ebc7ac31cb | ||
|
|
981a0703ed | ||
|
|
946b308197 | ||
|
|
d22ce5372d | ||
|
|
acab5f3cf2 | ||
|
|
06c9f625b6 | ||
|
|
bbb0a0c360 | ||
|
|
9361e22fe9 | ||
|
|
f484df5470 | ||
|
|
af88ffc57e | ||
|
|
b918647927 | ||
|
|
ef7778d794 | ||
|
|
bd443ebe91 | ||
|
|
ee6548d7dd | ||
|
|
b4a050c21d | ||
|
|
958702ded0 | ||
|
|
268d95e911 | ||
|
|
3def18fc21 | ||
|
|
c48ba690eb | ||
|
|
fd49fb935f | ||
|
|
fe54ef08aa | ||
|
|
b7ffd73edd |
34
.github/workflows/ci.yaml
vendored
34
.github/workflows/ci.yaml
vendored
@@ -48,8 +48,8 @@ jobs:
|
||||
- "!crates/ruff_dev/**"
|
||||
- "!crates/ruff_shrinking/**"
|
||||
- scripts/*
|
||||
- .github/workflows/ci.yaml
|
||||
- python/**
|
||||
- .github/workflows/ci.yaml
|
||||
|
||||
formatter:
|
||||
- Cargo.toml
|
||||
@@ -68,7 +68,7 @@ jobs:
|
||||
- .github/workflows/ci.yaml
|
||||
|
||||
code:
|
||||
- "*/**"
|
||||
- "**/*"
|
||||
- "!**/*.md"
|
||||
- "!docs/**"
|
||||
- "!assets/**"
|
||||
@@ -86,7 +86,7 @@ jobs:
|
||||
name: "cargo clippy"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -102,7 +102,7 @@ jobs:
|
||||
cargo-test-linux:
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
name: "cargo test (linux)"
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -128,7 +128,7 @@ jobs:
|
||||
cargo-test-windows:
|
||||
runs-on: windows-latest
|
||||
needs: determine_changes
|
||||
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
name: "cargo test (windows)"
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -147,7 +147,7 @@ jobs:
|
||||
cargo-test-wasm:
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
name: "cargo test (wasm)"
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -168,7 +168,7 @@ jobs:
|
||||
cargo-fuzz:
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
name: "cargo fuzz"
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -187,7 +187,7 @@ jobs:
|
||||
name: "test scripts"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -215,7 +215,7 @@ jobs:
|
||||
}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
@@ -321,7 +321,7 @@ jobs:
|
||||
name: "cargo udeps"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install nightly Rust toolchain"
|
||||
@@ -338,7 +338,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -362,7 +362,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Install Rust toolchain"
|
||||
@@ -392,7 +392,7 @@ jobs:
|
||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@v0.8.0
|
||||
@@ -444,7 +444,7 @@ jobs:
|
||||
needs:
|
||||
- cargo-test-linux
|
||||
- determine_changes
|
||||
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
steps:
|
||||
- uses: extractions/setup-just@v1
|
||||
env:
|
||||
@@ -455,7 +455,7 @@ jobs:
|
||||
with:
|
||||
repository: "astral-sh/ruff-lsp"
|
||||
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
@@ -483,7 +483,7 @@ jobs:
|
||||
benchmarks:
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
steps:
|
||||
- name: "Checkout Branch"
|
||||
uses: actions/checkout@v4
|
||||
@@ -502,7 +502,7 @@ jobs:
|
||||
run: cargo codspeed build --features codspeed -p ruff_benchmark
|
||||
|
||||
- name: "Run benchmarks"
|
||||
uses: CodSpeedHQ/action@v1
|
||||
uses: CodSpeedHQ/action@v2
|
||||
with:
|
||||
run: cargo codspeed run
|
||||
token: ${{ secrets.CODSPEED_TOKEN }}
|
||||
|
||||
2
.github/workflows/docs.yaml
vendored
2
.github/workflows/docs.yaml
vendored
@@ -20,7 +20,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@v0.8.0
|
||||
|
||||
16
.github/workflows/flake8-to-ruff.yaml
vendored
16
.github/workflows/flake8-to-ruff.yaml
vendored
@@ -20,7 +20,7 @@ jobs:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -43,7 +43,7 @@ jobs:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -69,7 +69,7 @@ jobs:
|
||||
target: [x64, x86]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: ${{ matrix.target }}
|
||||
@@ -97,7 +97,7 @@ jobs:
|
||||
target: [x86_64, i686]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -124,7 +124,7 @@ jobs:
|
||||
target: [aarch64, armv7, s390x, ppc64le, ppc64]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Build wheels"
|
||||
@@ -161,7 +161,7 @@ jobs:
|
||||
- i686-unknown-linux-musl
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -197,7 +197,7 @@ jobs:
|
||||
arch: armv7
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Build wheels"
|
||||
@@ -237,7 +237,7 @@ jobs:
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
- name: "Publish to PyPi"
|
||||
env:
|
||||
TWINE_USERNAME: __token__
|
||||
|
||||
45
.github/workflows/release.yaml
vendored
45
.github/workflows/release.yaml
vendored
@@ -36,7 +36,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
@@ -63,7 +63,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -86,7 +86,7 @@ jobs:
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
@@ -103,7 +103,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -125,7 +125,7 @@ jobs:
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
@@ -151,7 +151,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: ${{ matrix.platform.arch }}
|
||||
@@ -177,7 +177,7 @@ jobs:
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip
|
||||
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
||||
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
@@ -199,7 +199,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -224,7 +224,7 @@ jobs:
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
@@ -258,7 +258,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
@@ -291,7 +291,7 @@ jobs:
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
@@ -313,7 +313,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
@@ -332,10 +332,10 @@ jobs:
|
||||
image: alpine:latest
|
||||
options: -v ${{ github.workspace }}:/io -w /io
|
||||
run: |
|
||||
apk add py3-pip
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
apk add python3
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/ruff check --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
@@ -343,7 +343,7 @@ jobs:
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
@@ -369,7 +369,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
@@ -388,10 +388,11 @@ jobs:
|
||||
distro: alpine_latest
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apk add py3-pip
|
||||
apk add python3
|
||||
run: |
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
ruff check --help
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/ruff check --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
@@ -399,7 +400,7 @@ jobs:
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
|
||||
57
CHANGELOG.md
57
CHANGELOG.md
@@ -1,5 +1,62 @@
|
||||
# Changelog
|
||||
|
||||
## 0.1.8
|
||||
|
||||
This release includes opt-in support for formatting Python snippets within
|
||||
docstrings via the `docstring-code-format` setting.
|
||||
[Check out the blog post](https://astral.sh/blog/v0.1.8) for more details!
|
||||
|
||||
### Preview features
|
||||
|
||||
- Add `"preserve"` quote-style to mimic Black's skip-string-normalization ([#8822](https://github.com/astral-sh/ruff/pull/8822))
|
||||
- Implement `prefer_splitting_right_hand_side_of_assignments` preview style ([#8943](https://github.com/astral-sh/ruff/pull/8943))
|
||||
- \[`pycodestyle`\] Add fix for `unexpected-spaces-around-keyword-parameter-equals` ([#9072](https://github.com/astral-sh/ruff/pull/9072))
|
||||
- \[`pycodestyle`\] Add fix for comment-related whitespace rules ([#9075](https://github.com/astral-sh/ruff/pull/9075))
|
||||
- \[`pycodestyle`\] Allow `sys.path` modifications between imports ([#9047](https://github.com/astral-sh/ruff/pull/9047))
|
||||
- \[`refurb`\] Implement `hashlib-digest-hex` (`FURB181`) ([#9077](https://github.com/astral-sh/ruff/pull/9077))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Allow `flake8-type-checking` rules to automatically quote runtime-evaluated references ([#6001](https://github.com/astral-sh/ruff/pull/6001))
|
||||
- Allow transparent cell magics in Jupyter Notebooks ([#8911](https://github.com/astral-sh/ruff/pull/8911))
|
||||
- \[`flake8-annotations`\] Avoid `ANN2xx` fixes for abstract methods with empty bodies ([#9034](https://github.com/astral-sh/ruff/pull/9034))
|
||||
- \[`flake8-self`\] Ignore underscore references in type annotations ([#9036](https://github.com/astral-sh/ruff/pull/9036))
|
||||
- \[`pep8-naming`\] Allow class names when `apps.get_model` is a non-string ([#9065](https://github.com/astral-sh/ruff/pull/9065))
|
||||
- \[`pycodestyle`\] Allow `matplotlib.use` calls to intersperse imports ([#9094](https://github.com/astral-sh/ruff/pull/9094))
|
||||
- \[`pyflakes`\] Support fixing unused assignments in tuples by renaming variables (`F841`) ([#9107](https://github.com/astral-sh/ruff/pull/9107))
|
||||
- \[`pylint`\] Add fix for `subprocess-run-without-check` (`PLW1510`) ([#6708](https://github.com/astral-sh/ruff/pull/6708))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Add `docstring-code-format` knob to enable docstring snippet formatting ([#8854](https://github.com/astral-sh/ruff/pull/8854))
|
||||
- Use double quotes for all docstrings, including single-quoted docstrings ([#9020](https://github.com/astral-sh/ruff/pull/9020))
|
||||
- Implement "dynamic" line width mode for docstring code formatting ([#9098](https://github.com/astral-sh/ruff/pull/9098))
|
||||
- Support reformatting Markdown code blocks ([#9030](https://github.com/astral-sh/ruff/pull/9030))
|
||||
- add support for formatting reStructuredText code snippets ([#9003](https://github.com/astral-sh/ruff/pull/9003))
|
||||
- Avoid trailing comma for single-argument with positional separator ([#9076](https://github.com/astral-sh/ruff/pull/9076))
|
||||
- Fix handling of trailing target comment ([#9051](https://github.com/astral-sh/ruff/pull/9051))
|
||||
|
||||
### CLI
|
||||
|
||||
- Hide unsafe fix suggestions when explicitly disabled ([#9095](https://github.com/astral-sh/ruff/pull/9095))
|
||||
- Add SARIF support to `--output-format` ([#9078](https://github.com/astral-sh/ruff/pull/9078))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Apply unnecessary index rule prior to enumerate rewrite ([#9012](https://github.com/astral-sh/ruff/pull/9012))
|
||||
- \[`flake8-err-msg`\] Allow `EM` fixes even if `msg` variable is defined ([#9059](https://github.com/astral-sh/ruff/pull/9059))
|
||||
- \[`flake8-pie`\] Prevent keyword arguments duplication ([#8450](https://github.com/astral-sh/ruff/pull/8450))
|
||||
- \[`flake8-pie`\] Respect trailing comma in `unnecessary-dict-kwargs` (`PIE804`) ([#9015](https://github.com/astral-sh/ruff/pull/9015))
|
||||
- \[`flake8-raise`\] Avoid removing parentheses on ctypes.WinError ([#9027](https://github.com/astral-sh/ruff/pull/9027))
|
||||
- \[`isort`\] Avoid invalid combination of `force-sort-within-types` and `lines-between-types` ([#9041](https://github.com/astral-sh/ruff/pull/9041))
|
||||
- \[`isort`\] Ensure that from-style imports are always ordered first in `__future__` ([#9039](https://github.com/astral-sh/ruff/pull/9039))
|
||||
- \[`pycodestyle`\] Allow tab indentation before keyword ([#9099](https://github.com/astral-sh/ruff/pull/9099))
|
||||
- \[`pylint`\] Ignore `@overrides` and `@overloads` for `too-many-positional` ([#9000](https://github.com/astral-sh/ruff/pull/9000))
|
||||
- \[`pyupgrade`\] Enable `printf-string-formatting` fix with comments on right-hand side ([#9037](https://github.com/astral-sh/ruff/pull/9037))
|
||||
- \[`refurb`\] Make `math-constant` (`FURB152`) rule more targeted ([#9054](https://github.com/astral-sh/ruff/pull/9054))
|
||||
- \[`refurb`\] Support floating-point base in `redundant-log-base` (`FURB163`) ([#9100](https://github.com/astral-sh/ruff/pull/9100))
|
||||
- \[`ruff`\] Detect `unused-asyncio-dangling-task` (`RUF006`) on unused assignments ([#9060](https://github.com/astral-sh/ruff/pull/9060))
|
||||
|
||||
## 0.1.7
|
||||
|
||||
### Preview features
|
||||
|
||||
162
Cargo.lock
generated
162
Cargo.lock
generated
@@ -16,14 +16,15 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
|
||||
|
||||
[[package]]
|
||||
name = "ahash"
|
||||
version = "0.8.3"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
|
||||
checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"getrandom",
|
||||
"once_cell",
|
||||
"version_check",
|
||||
"zerocopy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -381,7 +382,7 @@ dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -606,7 +607,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"strsim",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -617,7 +618,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5"
|
||||
dependencies = [
|
||||
"darling_core",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -790,14 +791,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "filetime"
|
||||
version = "0.2.22"
|
||||
version = "0.2.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0"
|
||||
checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall 0.3.5",
|
||||
"windows-sys 0.48.0",
|
||||
"redox_syscall 0.4.1",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -808,7 +809,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
||||
|
||||
[[package]]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.1.7"
|
||||
version = "0.1.8"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@@ -1122,15 +1123,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "is-macro"
|
||||
version = "0.3.0"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f4467ed1321b310c2625c5aa6c1b1ffc5de4d9e42668cf697a08fb033ee8265e"
|
||||
checksum = "bc74b7abae208af9314a406bd7dcc65091230b6e749c09e07a645885fecf34f9"
|
||||
dependencies = [
|
||||
"Inflector",
|
||||
"pmutil 0.6.1",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1708,7 +1709,7 @@ checksum = "52a40bc70c2c58040d2d8b167ba9a5ff59fc9dab7ad44771cfde3dcfde7a09c6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2062,7 +2063,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_cli"
|
||||
version = "0.1.7"
|
||||
version = "0.1.8"
|
||||
dependencies = [
|
||||
"annotate-snippets 0.9.2",
|
||||
"anyhow",
|
||||
@@ -2198,7 +2199,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.1.7"
|
||||
version = "0.1.8"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"annotate-snippets 0.9.2",
|
||||
@@ -2258,6 +2259,7 @@ dependencies = [
|
||||
"typed-arena",
|
||||
"unicode-width",
|
||||
"unicode_names2",
|
||||
"url",
|
||||
"wsl",
|
||||
]
|
||||
|
||||
@@ -2269,7 +2271,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"ruff_python_trivia",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2450,7 +2452,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_shrinking"
|
||||
version = "0.1.7"
|
||||
version = "0.1.8"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@@ -2676,18 +2678,18 @@ checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.190"
|
||||
version = "1.0.193"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "91d3c334ca1ee894a2c6f6ad698fe8c435b76d504b13d436f0685d648d6d96f7"
|
||||
checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde-wasm-bindgen"
|
||||
version = "0.6.1"
|
||||
version = "0.6.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "17ba92964781421b6cef36bf0d7da26d201e96d84e1b10e7ae6ed416e516906d"
|
||||
checksum = "b9b713f70513ae1f8d92665bbbbda5c295c2cf1da5542881ae5eefe20c9af132"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"serde",
|
||||
@@ -2696,13 +2698,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.190"
|
||||
version = "1.0.193"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "67c5609f394e5c2bd7fc51efda478004ea80ef42fee983d5c67a65e34f32c0e3"
|
||||
checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2764,7 +2766,7 @@ dependencies = [
|
||||
"darling",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2868,7 +2870,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"rustversion",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2884,9 +2886,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.39"
|
||||
version = "2.0.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
|
||||
checksum = "13fa70a4ee923979ffb522cacce59d34421ebdea5625e1073c4326ef9d2dd42e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2973,7 +2975,7 @@ dependencies = [
|
||||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2985,7 +2987,7 @@ dependencies = [
|
||||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
"test-case-core",
|
||||
]
|
||||
|
||||
@@ -3006,7 +3008,7 @@ checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3168,7 +3170,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3386,7 +3388,7 @@ checksum = "f49e7f3f3db8040a100710a11932239fd30697115e2ba4107080d8252939845e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3480,7 +3482,7 @@ dependencies = [
|
||||
"once_cell",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
@@ -3514,7 +3516,7 @@ checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
"wasm-bindgen-backend",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
@@ -3547,7 +3549,7 @@ checksum = "493fcbab756bb764fa37e6bee8cec2dd709eb4273d06d0c282a5e74275ded735"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3644,6 +3646,15 @@ dependencies = [
|
||||
"windows-targets 0.48.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.42.2"
|
||||
@@ -3674,6 +3685,21 @@ dependencies = [
|
||||
"windows_x86_64_msvc 0.48.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm 0.52.0",
|
||||
"windows_aarch64_msvc 0.52.0",
|
||||
"windows_i686_gnu 0.52.0",
|
||||
"windows_i686_msvc 0.52.0",
|
||||
"windows_x86_64_gnu 0.52.0",
|
||||
"windows_x86_64_gnullvm 0.52.0",
|
||||
"windows_x86_64_msvc 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.42.2"
|
||||
@@ -3686,6 +3712,12 @@ version = "0.48.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.42.2"
|
||||
@@ -3698,6 +3730,12 @@ version = "0.48.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.42.2"
|
||||
@@ -3710,6 +3748,12 @@ version = "0.48.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.42.2"
|
||||
@@ -3722,6 +3766,12 @@ version = "0.48.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.42.2"
|
||||
@@ -3734,6 +3784,12 @@ version = "0.48.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.42.2"
|
||||
@@ -3746,6 +3802,12 @@ version = "0.48.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.42.2"
|
||||
@@ -3758,6 +3820,12 @@ version = "0.48.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
|
||||
|
||||
[[package]]
|
||||
name = "winnow"
|
||||
version = "0.5.15"
|
||||
@@ -3796,3 +3864,23 @@ checksum = "fe5c30ade05e61656247b2e334a031dfd0cc466fadef865bdcdea8d537951bf1"
|
||||
dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy"
|
||||
version = "0.7.30"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "306dca4455518f1f31635ec308b6b3e4eb1b11758cefafc782827d0aa7acb5c7"
|
||||
dependencies = [
|
||||
"zerocopy-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy-derive"
|
||||
version = "0.7.30"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "be912bf68235a88fbefd1b73415cb218405958d1655b2ece9035a19920bdf6ba"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
@@ -17,12 +17,12 @@ bitflags = { version = "2.4.1" }
|
||||
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
|
||||
clap = { version = "4.4.7", features = ["derive"] }
|
||||
colored = { version = "2.0.0" }
|
||||
filetime = { version = "0.2.20" }
|
||||
filetime = { version = "0.2.23" }
|
||||
glob = { version = "0.3.1" }
|
||||
globset = { version = "0.4.14" }
|
||||
ignore = { version = "0.4.20" }
|
||||
insta = { version = "1.34.0", feature = ["filters", "glob"] }
|
||||
is-macro = { version = "0.3.0" }
|
||||
is-macro = { version = "0.3.1" }
|
||||
itertools = { version = "0.11.0" }
|
||||
libcst = { version = "1.1.0", default-features = false }
|
||||
log = { version = "0.4.17" }
|
||||
@@ -34,7 +34,7 @@ quote = { version = "1.0.23" }
|
||||
regex = { version = "1.10.2" }
|
||||
rustc-hash = { version = "1.1.0" }
|
||||
schemars = { version = "0.8.16" }
|
||||
serde = { version = "1.0.190", features = ["derive"] }
|
||||
serde = { version = "1.0.193", features = ["derive"] }
|
||||
serde_json = { version = "1.0.108" }
|
||||
shellexpand = { version = "3.0.0" }
|
||||
similar = { version = "2.3.0", features = ["inline"] }
|
||||
@@ -42,7 +42,7 @@ smallvec = { version = "1.11.2" }
|
||||
static_assertions = "1.1.0"
|
||||
strum = { version = "0.25.0", features = ["strum_macros"] }
|
||||
strum_macros = { version = "0.25.3" }
|
||||
syn = { version = "2.0.39" }
|
||||
syn = { version = "2.0.40" }
|
||||
test-case = { version = "3.2.1" }
|
||||
thiserror = { version = "1.0.50" }
|
||||
toml = { version = "0.7.8" }
|
||||
|
||||
@@ -150,7 +150,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.1.7
|
||||
rev: v0.1.8
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.1.7"
|
||||
version = "0.1.8"
|
||||
description = """
|
||||
Convert Flake8 configuration files to Ruff configuration files.
|
||||
"""
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_cli"
|
||||
version = "0.1.7"
|
||||
version = "0.1.8"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
|
||||
@@ -13,7 +13,7 @@ use ruff_linter::fs::relativize_path;
|
||||
use ruff_linter::logging::LogLevel;
|
||||
use ruff_linter::message::{
|
||||
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
|
||||
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, TextEmitter,
|
||||
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, SarifEmitter, TextEmitter,
|
||||
};
|
||||
use ruff_linter::notify_user;
|
||||
use ruff_linter::registry::{AsRule, Rule};
|
||||
@@ -125,15 +125,7 @@ impl Printer {
|
||||
if let Some(fixables) = fixables {
|
||||
let fix_prefix = format!("[{}]", "*".cyan());
|
||||
|
||||
if self.unsafe_fixes.is_enabled() {
|
||||
if fixables.applicable > 0 {
|
||||
writeln!(
|
||||
writer,
|
||||
"{fix_prefix} {} fixable with the --fix option.",
|
||||
fixables.applicable
|
||||
)?;
|
||||
}
|
||||
} else {
|
||||
if self.unsafe_fixes.is_hint() {
|
||||
if fixables.applicable > 0 && fixables.unapplicable_unsafe > 0 {
|
||||
let es = if fixables.unapplicable_unsafe == 1 {
|
||||
""
|
||||
@@ -163,6 +155,14 @@ impl Printer {
|
||||
fixables.unapplicable_unsafe
|
||||
)?;
|
||||
}
|
||||
} else {
|
||||
if fixables.applicable > 0 {
|
||||
writeln!(
|
||||
writer,
|
||||
"{fix_prefix} {} fixable with the --fix option.",
|
||||
fixables.applicable
|
||||
)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -291,6 +291,9 @@ impl Printer {
|
||||
SerializationFormat::Azure => {
|
||||
AzureEmitter.emit(writer, &diagnostics.messages, &context)?;
|
||||
}
|
||||
SerializationFormat::Sarif => {
|
||||
SarifEmitter.emit(writer, &diagnostics.messages, &context)?;
|
||||
}
|
||||
}
|
||||
|
||||
writer.flush()?;
|
||||
|
||||
@@ -139,6 +139,99 @@ if condition:
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn docstring_options() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
[format]
|
||||
docstring-code-format = true
|
||||
docstring-code-line-length = 20
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["format", "--config"])
|
||||
.arg(&ruff_toml)
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
def f(x):
|
||||
'''
|
||||
Something about `f`. And an example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
|
||||
|
||||
Another example:
|
||||
|
||||
```py
|
||||
foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
|
||||
```
|
||||
|
||||
And another:
|
||||
|
||||
>>> foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
|
||||
'''
|
||||
pass
|
||||
"#), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
def f(x):
|
||||
"""
|
||||
Something about `f`. And an example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
(
|
||||
foo,
|
||||
bar,
|
||||
quux,
|
||||
) = this_is_a_long_line(
|
||||
lion,
|
||||
hippo,
|
||||
lemur,
|
||||
bear,
|
||||
)
|
||||
|
||||
Another example:
|
||||
|
||||
```py
|
||||
(
|
||||
foo,
|
||||
bar,
|
||||
quux,
|
||||
) = this_is_a_long_line(
|
||||
lion,
|
||||
hippo,
|
||||
lemur,
|
||||
bear,
|
||||
)
|
||||
```
|
||||
|
||||
And another:
|
||||
|
||||
>>> (
|
||||
... foo,
|
||||
... bar,
|
||||
... quux,
|
||||
... ) = this_is_a_long_line(
|
||||
... lion,
|
||||
... hippo,
|
||||
... lemur,
|
||||
... bear,
|
||||
... )
|
||||
"""
|
||||
pass
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mixed_line_endings() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
|
||||
@@ -1158,6 +1158,44 @@ fn check_hints_hidden_unsafe_fixes_with_no_safe_fixes() {
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_no_hint_for_hidden_unsafe_fixes_when_disabled() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "F601,UP034", "--no-unsafe-fixes"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("x = {'a': 1, 'a': 1}\nprint(('foo'))\n"),
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:14: F601 Dictionary key literal `'a'` repeated
|
||||
-:2:7: UP034 [*] Avoid extraneous parentheses
|
||||
Found 2 errors.
|
||||
[*] 1 fixable with the --fix option.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_no_hint_for_hidden_unsafe_fixes_with_no_safe_fixes_when_disabled() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "F601", "--no-unsafe-fixes"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("x = {'a': 1, 'a': 1}\n"),
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:14: F601 Dictionary key literal `'a'` repeated
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_shows_unsafe_fixes_with_opt_in() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_linter"
|
||||
version = "0.1.7"
|
||||
version = "0.1.8"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
@@ -71,6 +71,7 @@ toml = { workspace = true }
|
||||
typed-arena = { version = "2.0.2" }
|
||||
unicode-width = { workspace = true }
|
||||
unicode_names2 = { workspace = true }
|
||||
url = { version = "2.2.2" }
|
||||
wsl = { version = "0.1.0" }
|
||||
|
||||
[dev-dependencies]
|
||||
|
||||
@@ -147,3 +147,38 @@ def func(x: int):
|
||||
while x > 0:
|
||||
break
|
||||
return 1
|
||||
|
||||
|
||||
import abc
|
||||
from abc import abstractmethod
|
||||
|
||||
|
||||
class Foo(abc.ABC):
|
||||
@abstractmethod
|
||||
def method(self):
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
def method(self):
|
||||
"""Docstring."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def method(self):
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
@abstractmethod
|
||||
def method():
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def method(cls):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def method(self):
|
||||
if self.x > 0:
|
||||
return 1
|
||||
else:
|
||||
return 1.5
|
||||
|
||||
@@ -8,6 +8,7 @@ def func(address):
|
||||
# Error
|
||||
"0.0.0.0"
|
||||
'0.0.0.0'
|
||||
f"0.0.0.0"
|
||||
|
||||
|
||||
# Error
|
||||
|
||||
@@ -5,6 +5,9 @@ with open("/abc/tmp", "w") as f:
|
||||
with open("/tmp/abc", "w") as f:
|
||||
f.write("def")
|
||||
|
||||
with open(f"/tmp/abc", "w") as f:
|
||||
f.write("def")
|
||||
|
||||
with open("/var/tmp/123", "w") as f:
|
||||
f.write("def")
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ def f_ok():
|
||||
raise RuntimeError(msg)
|
||||
|
||||
|
||||
def f_unfixable():
|
||||
def f_msg_defined():
|
||||
msg = "hello"
|
||||
raise RuntimeError("This is an example exception")
|
||||
|
||||
|
||||
@@ -10,7 +10,6 @@ Foo.objects.create(**{**bar}) # PIE804
|
||||
|
||||
foo(**{})
|
||||
|
||||
|
||||
foo(**{**data, "foo": "buzz"})
|
||||
foo(**buzz)
|
||||
foo(**{"bar-foo": True})
|
||||
@@ -20,3 +19,8 @@ foo(**{buzz: True})
|
||||
foo(**{"": True})
|
||||
foo(**{f"buzz__{bar}": True})
|
||||
abc(**{"for": 3})
|
||||
foo(**{},)
|
||||
|
||||
# Duplicated key names won't be fixed, to avoid syntax errors.
|
||||
abc(**{'a': b}, **{'a': c}) # PIE804
|
||||
abc(a=1, **{'a': c}, **{'b': c}) # PIE804
|
||||
|
||||
@@ -32,6 +32,7 @@ def f8(x: bytes = b"50 character byte stringgggggggggggggggggggggggggg\xff") ->
|
||||
|
||||
foo: str = "50 character stringggggggggggggggggggggggggggggggg"
|
||||
bar: str = "51 character stringgggggggggggggggggggggggggggggggg"
|
||||
baz: str = f"51 character stringgggggggggggggggggggggggggggggggg"
|
||||
|
||||
baz: bytes = b"50 character byte stringgggggggggggggggggggggggggg"
|
||||
|
||||
|
||||
@@ -29,6 +29,10 @@ baz: bytes = b"50 character byte stringgggggggggggggggggggggggggg" # OK
|
||||
|
||||
qux: bytes = b"51 character byte stringggggggggggggggggggggggggggg\xff" # Error: PYI053
|
||||
|
||||
ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
|
||||
|
||||
fbar: str = f"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053
|
||||
|
||||
class Demo:
|
||||
"""Docstrings are excluded from this rule. Some padding.""" # OK
|
||||
|
||||
|
||||
@@ -82,3 +82,14 @@ raise IndexError();
|
||||
|
||||
# RSE102
|
||||
raise Foo()
|
||||
|
||||
# OK
|
||||
raise ctypes.WinError()
|
||||
|
||||
|
||||
def func():
|
||||
pass
|
||||
|
||||
|
||||
# OK
|
||||
raise func()
|
||||
|
||||
@@ -19,8 +19,32 @@ async def func():
|
||||
bar = "bar"
|
||||
trio.sleep(bar)
|
||||
|
||||
x, y = 0, 2000
|
||||
trio.sleep(x) # TRIO115
|
||||
trio.sleep(y) # OK
|
||||
|
||||
(a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
trio.sleep(c) # TRIO115
|
||||
trio.sleep(d) # OK
|
||||
trio.sleep(e) # TRIO115
|
||||
|
||||
m_x, m_y = 0
|
||||
trio.sleep(m_y) # OK
|
||||
trio.sleep(m_x) # OK
|
||||
|
||||
m_a = m_b = 0
|
||||
trio.sleep(m_a) # TRIO115
|
||||
trio.sleep(m_b) # TRIO115
|
||||
|
||||
m_c = (m_d, m_e) = (0, 0)
|
||||
trio.sleep(m_c) # OK
|
||||
trio.sleep(m_d) # TRIO115
|
||||
trio.sleep(m_e) # TRIO115
|
||||
|
||||
|
||||
def func():
|
||||
import trio
|
||||
|
||||
trio.run(trio.sleep(0)) # TRIO115
|
||||
|
||||
|
||||
@@ -33,3 +57,10 @@ def func():
|
||||
|
||||
async def func():
|
||||
await sleep(seconds=0) # TRIO115
|
||||
|
||||
|
||||
def func():
|
||||
import trio
|
||||
|
||||
if (walrus := 0) == 0:
|
||||
trio.sleep(walrus) # TRIO115
|
||||
|
||||
67
crates/ruff_linter/resources/test/fixtures/flake8_type_checking/quote.py
vendored
Normal file
67
crates/ruff_linter/resources/test/fixtures/flake8_type_checking/quote.py
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
def f():
|
||||
from pandas import DataFrame
|
||||
|
||||
def baz() -> DataFrame:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
from pandas import DataFrame
|
||||
|
||||
def baz() -> DataFrame[int]:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
from pandas import DataFrame
|
||||
|
||||
def baz() -> DataFrame["int"]:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
import pandas as pd
|
||||
|
||||
def baz() -> pd.DataFrame:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
import pandas as pd
|
||||
|
||||
def baz() -> pd.DataFrame.Extra:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
import pandas as pd
|
||||
|
||||
def baz() -> pd.DataFrame | int:
|
||||
...
|
||||
|
||||
|
||||
|
||||
def f():
|
||||
from pandas import DataFrame
|
||||
|
||||
def baz() -> DataFrame():
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
from typing import Literal
|
||||
|
||||
from pandas import DataFrame
|
||||
|
||||
def baz() -> DataFrame[Literal["int"]]:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pandas import DataFrame
|
||||
|
||||
def func(value: DataFrame):
|
||||
...
|
||||
2
crates/ruff_linter/resources/test/fixtures/isort/force_sort_within_sections_future.py
vendored
Normal file
2
crates/ruff_linter/resources/test/fixtures/isort/force_sort_within_sections_future.py
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import __future__
|
||||
from __future__ import annotations
|
||||
4
crates/ruff_linter/resources/test/fixtures/isort/force_sort_within_sections_lines_between.py
vendored
Normal file
4
crates/ruff_linter/resources/test/fixtures/isort/force_sort_within_sections_lines_between.py
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
from a import x
|
||||
import b
|
||||
from c import y
|
||||
import d
|
||||
2
crates/ruff_linter/resources/test/fixtures/isort/future_from.py
vendored
Normal file
2
crates/ruff_linter/resources/test/fixtures/isort/future_from.py
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import __future__
|
||||
from __future__ import annotations
|
||||
@@ -55,3 +55,6 @@ def model_assign() -> None:
|
||||
|
||||
Bad = apps.get_model() # N806
|
||||
Bad = apps.get_model(model_name="Stream") # N806
|
||||
|
||||
Address: Type = apps.get_model("zerver", variable) # OK
|
||||
ValidationError = import_string(variable) # N806
|
||||
|
||||
@@ -63,3 +63,8 @@ for i in list(foo_tuple): # Ok
|
||||
|
||||
for i in list(foo_set): # Ok
|
||||
foo_set.append(i + 1)
|
||||
|
||||
x, y, nested_tuple = (1, 2, (3, 4, 5))
|
||||
|
||||
for i in list(nested_tuple): # PERF101
|
||||
pass
|
||||
|
||||
@@ -72,3 +72,15 @@ a = 42 # (Two spaces)
|
||||
# EF Means test is giving error and Failing
|
||||
#! Means test is segfaulting
|
||||
# 8 Means test runs forever
|
||||
|
||||
#: Colon prefix is okay
|
||||
|
||||
###This is a variable ###
|
||||
|
||||
# We should strip the space, but preserve the hashes.
|
||||
#: E266:1:3
|
||||
## Foo
|
||||
|
||||
a = 1 ## Foo
|
||||
|
||||
a = 1 #:Foo
|
||||
|
||||
@@ -60,3 +60,6 @@ def f():
|
||||
if (a and
|
||||
b):
|
||||
pass
|
||||
#: Okay
|
||||
def f():
|
||||
return 1
|
||||
|
||||
@@ -19,21 +19,32 @@ if x > 0:
|
||||
else:
|
||||
import e
|
||||
|
||||
__some__magic = 1
|
||||
import sys
|
||||
sys.path.insert(0, "some/path")
|
||||
|
||||
import f
|
||||
|
||||
import matplotlib
|
||||
|
||||
matplotlib.use("Agg")
|
||||
|
||||
import g
|
||||
|
||||
__some__magic = 1
|
||||
|
||||
import h
|
||||
|
||||
|
||||
def foo() -> None:
|
||||
import e
|
||||
import i
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import g
|
||||
import j
|
||||
|
||||
import h; import i
|
||||
import k; import l
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import j; \
|
||||
import k
|
||||
import m; \
|
||||
import n
|
||||
|
||||
@@ -3,6 +3,11 @@ import subprocess
|
||||
# Errors.
|
||||
subprocess.run("ls")
|
||||
subprocess.run("ls", shell=True)
|
||||
subprocess.run(
|
||||
["ls"],
|
||||
shell=False,
|
||||
)
|
||||
subprocess.run(["ls"], **kwargs)
|
||||
|
||||
# Non-errors.
|
||||
subprocess.run("ls", check=True)
|
||||
|
||||
@@ -110,3 +110,10 @@ print('Hello %(arg)s' % bar['bop'])
|
||||
"%s" % (
|
||||
x, # comment
|
||||
)
|
||||
|
||||
|
||||
path = "%s-%s-%s.pem" % (
|
||||
safe_domain_name(cn), # common name, which should be filename safe because it is IDNA-encoded, but in case of a malformed cert make sure it's ok to use as a filename
|
||||
cert.not_valid_after.date().isoformat().replace("-", ""), # expiration date
|
||||
hexlify(cert.fingerprint(hashes.SHA256())).decode("ascii")[0:8], # fingerprint prefix
|
||||
)
|
||||
|
||||
@@ -5,3 +5,11 @@ A = 3.14 * r ** 2 # FURB152
|
||||
C = 6.28 * r # FURB152
|
||||
|
||||
e = 2.71 # FURB152
|
||||
|
||||
r = 3.15 # OK
|
||||
|
||||
r = 3.141 # FURB152
|
||||
|
||||
r = 3.1415 # FURB152
|
||||
|
||||
e = 2.7 # OK
|
||||
|
||||
@@ -16,6 +16,8 @@ special_log(1, 2)
|
||||
special_log(1, 10)
|
||||
special_log(1, math.e)
|
||||
special_log(1, special_e)
|
||||
math.log(1, 2.0)
|
||||
math.log(1, 10.0)
|
||||
|
||||
# Ok.
|
||||
math.log2(1)
|
||||
@@ -45,3 +47,6 @@ def log(*args):
|
||||
log(1, 2)
|
||||
log(1, 10)
|
||||
log(1, math.e)
|
||||
|
||||
math.log(1, 2.0001)
|
||||
math.log(1, 10.0001)
|
||||
|
||||
57
crates/ruff_linter/resources/test/fixtures/refurb/FURB181.py
vendored
Normal file
57
crates/ruff_linter/resources/test/fixtures/refurb/FURB181.py
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
import hashlib
|
||||
from hashlib import (
|
||||
blake2b,
|
||||
blake2s,
|
||||
md5,
|
||||
sha1,
|
||||
sha3_224,
|
||||
sha3_256,
|
||||
sha3_384,
|
||||
sha3_512,
|
||||
sha224,
|
||||
)
|
||||
from hashlib import sha256
|
||||
from hashlib import sha256 as hash_algo
|
||||
from hashlib import sha384, sha512, shake_128, shake_256
|
||||
|
||||
# these will match
|
||||
|
||||
blake2b().digest().hex()
|
||||
blake2s().digest().hex()
|
||||
md5().digest().hex()
|
||||
sha1().digest().hex()
|
||||
sha224().digest().hex()
|
||||
sha256().digest().hex()
|
||||
sha384().digest().hex()
|
||||
sha3_224().digest().hex()
|
||||
sha3_256().digest().hex()
|
||||
sha3_384().digest().hex()
|
||||
sha3_512().digest().hex()
|
||||
sha512().digest().hex()
|
||||
shake_128().digest(10).hex()
|
||||
shake_256().digest(10).hex()
|
||||
|
||||
hashlib.sha256().digest().hex()
|
||||
|
||||
sha256(b"text").digest().hex()
|
||||
|
||||
hash_algo().digest().hex()
|
||||
|
||||
# not yet supported
|
||||
h = sha256()
|
||||
h.digest().hex()
|
||||
|
||||
|
||||
# these will not
|
||||
|
||||
sha256().digest()
|
||||
sha256().digest().hex("_")
|
||||
sha256().digest().hex(bytes_per_sep=4)
|
||||
sha256().hexdigest()
|
||||
|
||||
class Hash:
|
||||
def digest(self) -> bytes:
|
||||
return b""
|
||||
|
||||
|
||||
Hash().digest().hex()
|
||||
@@ -63,11 +63,29 @@ def f():
|
||||
tasks = [asyncio.create_task(task) for task in tasks]
|
||||
|
||||
|
||||
# OK (false negative)
|
||||
# Error
|
||||
def f():
|
||||
task = asyncio.create_task(coordinator.ws_connect())
|
||||
|
||||
|
||||
# Error
|
||||
def f():
|
||||
loop = asyncio.get_running_loop()
|
||||
task: asyncio.Task = loop.create_task(coordinator.ws_connect())
|
||||
|
||||
|
||||
# OK (potential false negative)
|
||||
def f():
|
||||
task = asyncio.create_task(coordinator.ws_connect())
|
||||
background_tasks.add(task)
|
||||
|
||||
|
||||
# OK
|
||||
async def f():
|
||||
task = asyncio.create_task(coordinator.ws_connect())
|
||||
await task
|
||||
|
||||
|
||||
# OK (potential false negative)
|
||||
def f():
|
||||
do_nothing_with_the_task(asyncio.create_task(coordinator.ws_connect()))
|
||||
@@ -88,3 +106,19 @@ def f():
|
||||
def f():
|
||||
loop = asyncio.get_running_loop()
|
||||
loop.do_thing(coordinator.ws_connect())
|
||||
|
||||
|
||||
# OK
|
||||
async def f():
|
||||
task = unused = asyncio.create_task(coordinator.ws_connect())
|
||||
await task
|
||||
|
||||
|
||||
# OK (false negative)
|
||||
async def f():
|
||||
task = unused = asyncio.create_task(coordinator.ws_connect())
|
||||
|
||||
|
||||
# OK
|
||||
async def f():
|
||||
task[i] = asyncio.create_task(coordinator.ws_connect())
|
||||
|
||||
@@ -3,11 +3,12 @@ use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::codes::Rule;
|
||||
use crate::rules::{flake8_import_conventions, flake8_pyi, pyflakes, pylint};
|
||||
use crate::rules::{flake8_import_conventions, flake8_pyi, pyflakes, pylint, ruff};
|
||||
|
||||
/// Run lint rules over the [`Binding`]s.
|
||||
pub(crate) fn bindings(checker: &mut Checker) {
|
||||
if !checker.any_enabled(&[
|
||||
Rule::AsyncioDanglingTask,
|
||||
Rule::InvalidAllFormat,
|
||||
Rule::InvalidAllObject,
|
||||
Rule::NonAsciiName,
|
||||
@@ -71,5 +72,12 @@ pub(crate) fn bindings(checker: &mut Checker) {
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if checker.enabled(Rule::AsyncioDanglingTask) {
|
||||
if let Some(diagnostic) =
|
||||
ruff::rules::asyncio_dangling_binding(binding, &checker.semantic)
|
||||
{
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -59,6 +59,7 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
|
||||
flake8_type_checking::helpers::is_valid_runtime_import(
|
||||
binding,
|
||||
&checker.semantic,
|
||||
&checker.settings.flake8_type_checking,
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
|
||||
@@ -356,6 +356,8 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
Rule::FString,
|
||||
// flynt
|
||||
Rule::StaticJoinToFString,
|
||||
// refurb
|
||||
Rule::HashlibDigestHex,
|
||||
]) {
|
||||
if let Expr::Attribute(ast::ExprAttribute { value, attr, .. }) = func.as_ref() {
|
||||
let attr = attr.as_str();
|
||||
@@ -543,7 +545,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
flake8_bugbear::rules::no_explicit_stacklevel(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::UnnecessaryDictKwargs) {
|
||||
flake8_pie::rules::unnecessary_dict_kwargs(checker, expr, keywords);
|
||||
flake8_pie::rules::unnecessary_dict_kwargs(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::UnnecessaryRangeStart) {
|
||||
flake8_pie::rules::unnecessary_range_start(checker, call);
|
||||
@@ -581,6 +583,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::HashlibInsecureHashFunction) {
|
||||
flake8_bandit::rules::hashlib_insecure_hash_functions(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::HashlibDigestHex) {
|
||||
refurb::rules::hashlib_digest_hex(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::RequestWithoutTimeout) {
|
||||
flake8_bandit::rules::request_without_timeout(checker, call);
|
||||
}
|
||||
@@ -1270,32 +1275,12 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
refurb::rules::math_constant(checker, number_literal);
|
||||
}
|
||||
}
|
||||
Expr::BytesLiteral(_) => {
|
||||
if checker.source_type.is_stub() && checker.enabled(Rule::StringOrBytesTooLong) {
|
||||
flake8_pyi::rules::string_or_bytes_too_long(checker, expr);
|
||||
}
|
||||
}
|
||||
Expr::StringLiteral(string) => {
|
||||
if checker.enabled(Rule::HardcodedBindAllInterfaces) {
|
||||
if let Some(diagnostic) =
|
||||
flake8_bandit::rules::hardcoded_bind_all_interfaces(string)
|
||||
{
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if checker.enabled(Rule::HardcodedTempFile) {
|
||||
flake8_bandit::rules::hardcoded_tmp_directory(checker, string);
|
||||
}
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => {
|
||||
if checker.enabled(Rule::UnicodeKindPrefix) {
|
||||
for string_part in string.value.parts() {
|
||||
for string_part in value {
|
||||
pyupgrade::rules::unicode_kind_prefix(checker, string_part);
|
||||
}
|
||||
}
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::StringOrBytesTooLong) {
|
||||
flake8_pyi::rules::string_or_bytes_too_long(checker, expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::IfExp(
|
||||
if_exp @ ast::ExprIfExp {
|
||||
|
||||
@@ -10,6 +10,7 @@ pub(super) use module::module;
|
||||
pub(super) use parameter::parameter;
|
||||
pub(super) use parameters::parameters;
|
||||
pub(super) use statement::statement;
|
||||
pub(super) use string_like::string_like;
|
||||
pub(super) use suite::suite;
|
||||
pub(super) use unresolved_references::unresolved_references;
|
||||
|
||||
@@ -25,5 +26,6 @@ mod module;
|
||||
mod parameter;
|
||||
mod parameters;
|
||||
mod statement;
|
||||
mod string_like;
|
||||
mod suite;
|
||||
mod unresolved_references;
|
||||
|
||||
@@ -251,7 +251,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
pylint::rules::too_many_arguments(checker, function_def);
|
||||
}
|
||||
if checker.enabled(Rule::TooManyPositional) {
|
||||
pylint::rules::too_many_positional(checker, parameters, stmt);
|
||||
pylint::rules::too_many_positional(checker, function_def);
|
||||
}
|
||||
if checker.enabled(Rule::TooManyReturnStatements) {
|
||||
if let Some(diagnostic) = pylint::rules::too_many_return_statements(
|
||||
@@ -1571,7 +1571,11 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
pylint::rules::named_expr_without_context(checker, value);
|
||||
}
|
||||
if checker.enabled(Rule::AsyncioDanglingTask) {
|
||||
ruff::rules::asyncio_dangling_task(checker, value);
|
||||
if let Some(diagnostic) =
|
||||
ruff::rules::asyncio_dangling_task(value, checker.semantic())
|
||||
{
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if checker.enabled(Rule::RepeatedAppend) {
|
||||
refurb::rules::repeated_append(checker, stmt);
|
||||
|
||||
20
crates/ruff_linter/src/checkers/ast/analyze/string_like.rs
Normal file
20
crates/ruff_linter/src/checkers/ast/analyze/string_like.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use ruff_python_ast::StringLike;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::codes::Rule;
|
||||
use crate::rules::{flake8_bandit, flake8_pyi};
|
||||
|
||||
/// Run lint rules over a [`StringLike`] syntax nodes.
|
||||
pub(crate) fn string_like(string_like: StringLike, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::HardcodedBindAllInterfaces) {
|
||||
flake8_bandit::rules::hardcoded_bind_all_interfaces(checker, string_like);
|
||||
}
|
||||
if checker.enabled(Rule::HardcodedTempFile) {
|
||||
flake8_bandit::rules::hardcoded_tmp_directory(checker, string_like);
|
||||
}
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::StringOrBytesTooLong) {
|
||||
flake8_pyi::rules::string_or_bytes_too_long(checker, string_like);
|
||||
}
|
||||
}
|
||||
}
|
||||
66
crates/ruff_linter/src/checkers/ast/annotation.rs
Normal file
66
crates/ruff_linter/src/checkers/ast/annotation.rs
Normal file
@@ -0,0 +1,66 @@
|
||||
use ruff_python_semantic::{ScopeKind, SemanticModel};
|
||||
|
||||
use crate::rules::flake8_type_checking;
|
||||
use crate::settings::LinterSettings;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub(super) enum AnnotationContext {
|
||||
/// Python will evaluate the annotation at runtime, but it's not _required_ and, as such, could
|
||||
/// be quoted to convert it into a typing-only annotation.
|
||||
///
|
||||
/// For example:
|
||||
/// ```python
|
||||
/// from pandas import DataFrame
|
||||
///
|
||||
/// def foo() -> DataFrame:
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// Above, Python will evaluate `DataFrame` at runtime in order to add it to `__annotations__`.
|
||||
RuntimeEvaluated,
|
||||
/// Python will evaluate the annotation at runtime, and it's required to be available at
|
||||
/// runtime, as a library (like Pydantic) needs access to it.
|
||||
RuntimeRequired,
|
||||
/// The annotation is only evaluated at type-checking time.
|
||||
TypingOnly,
|
||||
}
|
||||
|
||||
impl AnnotationContext {
|
||||
pub(super) fn from_model(semantic: &SemanticModel, settings: &LinterSettings) -> Self {
|
||||
// If the annotation is in a class scope (e.g., an annotated assignment for a
|
||||
// class field), and that class is marked as annotation as runtime-required.
|
||||
if semantic
|
||||
.current_scope()
|
||||
.kind
|
||||
.as_class()
|
||||
.is_some_and(|class_def| {
|
||||
flake8_type_checking::helpers::runtime_required_class(
|
||||
class_def,
|
||||
&settings.flake8_type_checking.runtime_required_base_classes,
|
||||
&settings.flake8_type_checking.runtime_required_decorators,
|
||||
semantic,
|
||||
)
|
||||
})
|
||||
{
|
||||
return Self::RuntimeRequired;
|
||||
}
|
||||
|
||||
// If `__future__` annotations are enabled, then annotations are never evaluated
|
||||
// at runtime, so we can treat them as typing-only.
|
||||
if semantic.future_annotations() {
|
||||
return Self::TypingOnly;
|
||||
}
|
||||
|
||||
// Otherwise, if we're in a class or module scope, then the annotation needs to
|
||||
// be available at runtime.
|
||||
// See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements
|
||||
if matches!(
|
||||
semantic.current_scope().kind,
|
||||
ScopeKind::Class(_) | ScopeKind::Module
|
||||
) {
|
||||
return Self::RuntimeEvaluated;
|
||||
}
|
||||
|
||||
Self::TypingOnly
|
||||
}
|
||||
}
|
||||
@@ -44,12 +44,12 @@ use ruff_python_ast::helpers::{
|
||||
};
|
||||
use ruff_python_ast::identifier::Identifier;
|
||||
use ruff_python_ast::str::trailing_quote;
|
||||
use ruff_python_ast::visitor::{walk_except_handler, walk_pattern, Visitor};
|
||||
use ruff_python_ast::visitor::{walk_except_handler, walk_f_string_element, walk_pattern, Visitor};
|
||||
use ruff_python_ast::{helpers, str, visitor, PySourceType};
|
||||
use ruff_python_codegen::{Generator, Quote, Stylist};
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::typing::{parse_type_annotation, AnnotationKind};
|
||||
use ruff_python_semantic::analyze::{typing, visibility};
|
||||
use ruff_python_semantic::analyze::{imports, typing, visibility};
|
||||
use ruff_python_semantic::{
|
||||
BindingFlags, BindingId, BindingKind, Exceptions, Export, FromImport, Globals, Import, Module,
|
||||
ModuleKind, NodeId, ScopeId, ScopeKind, SemanticModel, SemanticModelFlags, Snapshot,
|
||||
@@ -58,6 +58,7 @@ use ruff_python_semantic::{
|
||||
use ruff_python_stdlib::builtins::{IPYTHON_BUILTINS, MAGIC_GLOBALS, PYTHON_BUILTINS};
|
||||
use ruff_source_file::Locator;
|
||||
|
||||
use crate::checkers::ast::annotation::AnnotationContext;
|
||||
use crate::checkers::ast::deferred::Deferred;
|
||||
use crate::docstrings::extraction::ExtractionTarget;
|
||||
use crate::importer::Importer;
|
||||
@@ -68,6 +69,7 @@ use crate::settings::{flags, LinterSettings};
|
||||
use crate::{docstrings, noqa};
|
||||
|
||||
mod analyze;
|
||||
mod annotation;
|
||||
mod deferred;
|
||||
|
||||
pub(crate) struct Checker<'a> {
|
||||
@@ -303,9 +305,12 @@ where
|
||||
}
|
||||
_ => {
|
||||
self.semantic.flags |= SemanticModelFlags::FUTURES_BOUNDARY;
|
||||
if !self.semantic.seen_import_boundary()
|
||||
&& !helpers::is_assignment_to_a_dunder(stmt)
|
||||
&& !helpers::in_nested_block(self.semantic.current_statements())
|
||||
if !(self.semantic.seen_import_boundary()
|
||||
|| helpers::is_assignment_to_a_dunder(stmt)
|
||||
|| helpers::in_nested_block(self.semantic.current_statements())
|
||||
|| imports::is_matplotlib_activation(stmt, self.semantic())
|
||||
|| self.settings.preview.is_enabled()
|
||||
&& imports::is_sys_path_modification(stmt, self.semantic()))
|
||||
{
|
||||
self.semantic.flags |= SemanticModelFlags::IMPORT_BOUNDARY;
|
||||
}
|
||||
@@ -512,8 +517,10 @@ where
|
||||
.chain(¶meters.kwonlyargs)
|
||||
{
|
||||
if let Some(expr) = ¶meter_with_default.parameter.annotation {
|
||||
if runtime_annotation || singledispatch {
|
||||
self.visit_runtime_annotation(expr);
|
||||
if singledispatch {
|
||||
self.visit_runtime_required_annotation(expr);
|
||||
} else if runtime_annotation {
|
||||
self.visit_runtime_evaluated_annotation(expr);
|
||||
} else {
|
||||
self.visit_annotation(expr);
|
||||
};
|
||||
@@ -526,7 +533,7 @@ where
|
||||
if let Some(arg) = ¶meters.vararg {
|
||||
if let Some(expr) = &arg.annotation {
|
||||
if runtime_annotation {
|
||||
self.visit_runtime_annotation(expr);
|
||||
self.visit_runtime_evaluated_annotation(expr);
|
||||
} else {
|
||||
self.visit_annotation(expr);
|
||||
};
|
||||
@@ -535,7 +542,7 @@ where
|
||||
if let Some(arg) = ¶meters.kwarg {
|
||||
if let Some(expr) = &arg.annotation {
|
||||
if runtime_annotation {
|
||||
self.visit_runtime_annotation(expr);
|
||||
self.visit_runtime_evaluated_annotation(expr);
|
||||
} else {
|
||||
self.visit_annotation(expr);
|
||||
};
|
||||
@@ -543,7 +550,7 @@ where
|
||||
}
|
||||
for expr in returns {
|
||||
if runtime_annotation {
|
||||
self.visit_runtime_annotation(expr);
|
||||
self.visit_runtime_evaluated_annotation(expr);
|
||||
} else {
|
||||
self.visit_annotation(expr);
|
||||
};
|
||||
@@ -674,40 +681,16 @@ where
|
||||
value,
|
||||
..
|
||||
}) => {
|
||||
// If we're in a class or module scope, then the annotation needs to be
|
||||
// available at runtime.
|
||||
// See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements
|
||||
let runtime_annotation = if self.semantic.future_annotations() {
|
||||
self.semantic
|
||||
.current_scope()
|
||||
.kind
|
||||
.as_class()
|
||||
.is_some_and(|class_def| {
|
||||
flake8_type_checking::helpers::runtime_evaluated_class(
|
||||
class_def,
|
||||
&self
|
||||
.settings
|
||||
.flake8_type_checking
|
||||
.runtime_evaluated_base_classes,
|
||||
&self
|
||||
.settings
|
||||
.flake8_type_checking
|
||||
.runtime_evaluated_decorators,
|
||||
&self.semantic,
|
||||
)
|
||||
})
|
||||
} else {
|
||||
matches!(
|
||||
self.semantic.current_scope().kind,
|
||||
ScopeKind::Class(_) | ScopeKind::Module
|
||||
)
|
||||
};
|
||||
|
||||
if runtime_annotation {
|
||||
self.visit_runtime_annotation(annotation);
|
||||
} else {
|
||||
self.visit_annotation(annotation);
|
||||
match AnnotationContext::from_model(&self.semantic, self.settings) {
|
||||
AnnotationContext::RuntimeRequired => {
|
||||
self.visit_runtime_required_annotation(annotation);
|
||||
}
|
||||
AnnotationContext::RuntimeEvaluated => {
|
||||
self.visit_runtime_evaluated_annotation(annotation);
|
||||
}
|
||||
AnnotationContext::TypingOnly => self.visit_annotation(annotation),
|
||||
}
|
||||
|
||||
if let Some(expr) = value {
|
||||
if self.semantic.match_typing_expr(annotation, "TypeAlias") {
|
||||
self.visit_type_definition(expr);
|
||||
@@ -815,8 +798,7 @@ where
|
||||
|
||||
fn visit_expr(&mut self, expr: &'b Expr) {
|
||||
// Step 0: Pre-processing
|
||||
if !self.semantic.in_f_string()
|
||||
&& !self.semantic.in_typing_literal()
|
||||
if !self.semantic.in_typing_literal()
|
||||
&& !self.semantic.in_deferred_type_definition()
|
||||
&& self.semantic.in_type_definition()
|
||||
&& self.semantic.future_annotations()
|
||||
@@ -1238,10 +1220,7 @@ where
|
||||
}
|
||||
}
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => {
|
||||
if self.semantic.in_type_definition()
|
||||
&& !self.semantic.in_typing_literal()
|
||||
&& !self.semantic.in_f_string()
|
||||
{
|
||||
if self.semantic.in_type_definition() && !self.semantic.in_typing_literal() {
|
||||
self.deferred.string_type_definitions.push((
|
||||
expr.range(),
|
||||
value.to_str(),
|
||||
@@ -1271,6 +1250,13 @@ where
|
||||
|
||||
// Step 4: Analysis
|
||||
analyze::expression(expr, self);
|
||||
match expr {
|
||||
Expr::StringLiteral(string_literal) => {
|
||||
analyze::string_like(string_literal.into(), self);
|
||||
}
|
||||
Expr::BytesLiteral(bytes_literal) => analyze::string_like(bytes_literal.into(), self),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.semantic.flags = flags_snapshot;
|
||||
self.semantic.pop_node();
|
||||
@@ -1326,17 +1312,6 @@ where
|
||||
self.semantic.flags = flags_snapshot;
|
||||
}
|
||||
|
||||
fn visit_format_spec(&mut self, format_spec: &'b Expr) {
|
||||
match format_spec {
|
||||
Expr::FString(ast::ExprFString { value, .. }) => {
|
||||
for expr in value.elements() {
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
}
|
||||
_ => unreachable!("Unexpected expression for format_spec"),
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_parameters(&mut self, parameters: &'b Parameters) {
|
||||
// Step 1: Binding.
|
||||
// Bind, but intentionally avoid walking default expressions, as we handle them
|
||||
@@ -1446,6 +1421,16 @@ where
|
||||
.push((bound, self.semantic.snapshot()));
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_f_string_element(&mut self, f_string_element: &'b ast::FStringElement) {
|
||||
// Step 2: Traversal
|
||||
walk_f_string_element(self, f_string_element);
|
||||
|
||||
// Step 4: Analysis
|
||||
if let Some(literal) = f_string_element.as_literal() {
|
||||
analyze::string_like(literal.into(), self);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Checker<'a> {
|
||||
@@ -1522,10 +1507,18 @@ impl<'a> Checker<'a> {
|
||||
self.semantic.flags = snapshot;
|
||||
}
|
||||
|
||||
/// Visit an [`Expr`], and treat it as a runtime-required type annotation.
|
||||
fn visit_runtime_annotation(&mut self, expr: &'a Expr) {
|
||||
/// Visit an [`Expr`], and treat it as a runtime-evaluated type annotation.
|
||||
fn visit_runtime_evaluated_annotation(&mut self, expr: &'a Expr) {
|
||||
let snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::RUNTIME_ANNOTATION;
|
||||
self.semantic.flags |= SemanticModelFlags::RUNTIME_EVALUATED_ANNOTATION;
|
||||
self.visit_type_definition(expr);
|
||||
self.semantic.flags = snapshot;
|
||||
}
|
||||
|
||||
/// Visit an [`Expr`], and treat it as a runtime-required type annotation.
|
||||
fn visit_runtime_required_annotation(&mut self, expr: &'a Expr) {
|
||||
let snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::RUNTIME_REQUIRED_ANNOTATION;
|
||||
self.visit_type_definition(expr);
|
||||
self.semantic.flags = snapshot;
|
||||
}
|
||||
|
||||
@@ -965,6 +965,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Refurb, "169") => (RuleGroup::Preview, rules::refurb::rules::TypeNoneComparison),
|
||||
(Refurb, "171") => (RuleGroup::Preview, rules::refurb::rules::SingleItemMembershipTest),
|
||||
(Refurb, "177") => (RuleGroup::Preview, rules::refurb::rules::ImplicitCwd),
|
||||
(Refurb, "181") => (RuleGroup::Preview, rules::refurb::rules::HashlibDigestHex),
|
||||
|
||||
// flake8-logging
|
||||
(Flake8Logging, "001") => (RuleGroup::Preview, rules::flake8_logging::rules::DirectLoggerInstantiation),
|
||||
|
||||
@@ -17,6 +17,7 @@ use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix};
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{SourceFile, SourceLocation};
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
pub use sarif::SarifEmitter;
|
||||
pub use text::TextEmitter;
|
||||
|
||||
mod azure;
|
||||
@@ -28,6 +29,7 @@ mod json;
|
||||
mod json_lines;
|
||||
mod junit;
|
||||
mod pylint;
|
||||
mod sarif;
|
||||
mod text;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
|
||||
212
crates/ruff_linter/src/message/sarif.rs
Normal file
212
crates/ruff_linter/src/message/sarif.rs
Normal file
@@ -0,0 +1,212 @@
|
||||
use std::io::Write;
|
||||
|
||||
use anyhow::Result;
|
||||
use serde::{Serialize, Serializer};
|
||||
use serde_json::json;
|
||||
|
||||
use ruff_source_file::OneIndexed;
|
||||
|
||||
use crate::codes::Rule;
|
||||
use crate::fs::normalize_path;
|
||||
use crate::message::{Emitter, EmitterContext, Message};
|
||||
use crate::registry::{AsRule, Linter, RuleNamespace};
|
||||
use crate::VERSION;
|
||||
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
pub struct SarifEmitter;
|
||||
|
||||
impl Emitter for SarifEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
messages: &[Message],
|
||||
_context: &EmitterContext,
|
||||
) -> Result<()> {
|
||||
let results = messages
|
||||
.iter()
|
||||
.map(SarifResult::from_message)
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
|
||||
let output = json!({
|
||||
"$schema": "https://json.schemastore.org/sarif-2.1.0.json",
|
||||
"version": "2.1.0",
|
||||
"runs": [{
|
||||
"tool": {
|
||||
"driver": {
|
||||
"name": "ruff",
|
||||
"informationUri": "https://github.com/astral-sh/ruff",
|
||||
"rules": Rule::iter().map(SarifRule::from).collect::<Vec<_>>(),
|
||||
"version": VERSION.to_string(),
|
||||
}
|
||||
},
|
||||
"results": results,
|
||||
}],
|
||||
});
|
||||
serde_json::to_writer_pretty(writer, &output)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// A lint rule rendered into the shape required by the SARIF
/// `tool.driver.rules` array.
#[derive(Debug, Clone)]
struct SarifRule<'a> {
    /// The rule's human-readable name (e.g., `unused-import`).
    name: &'a str,
    /// The rule's noqa code (e.g., `F401`), used as the SARIF rule id.
    code: String,
    /// The name of the linter the rule belongs to (e.g., `Pyflakes`).
    linter: &'a str,
    /// The first of the rule's message formats, used as the short description.
    summary: &'a str,
    /// The rule's full explanation, if one is defined.
    explanation: Option<&'a str>,
    /// A URL pointing at the rule's documentation, if available.
    url: Option<String>,
}
|
||||
|
||||
impl From<Rule> for SarifRule<'_> {
|
||||
fn from(rule: Rule) -> Self {
|
||||
let code = rule.noqa_code().to_string();
|
||||
let (linter, _) = Linter::parse_code(&code).unwrap();
|
||||
Self {
|
||||
name: rule.into(),
|
||||
code,
|
||||
linter: linter.name(),
|
||||
summary: rule.message_formats()[0],
|
||||
explanation: rule.explanation(),
|
||||
url: rule.url(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for SarifRule<'_> {
    /// Serialize the rule as a SARIF `reportingDescriptor` object.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Build the JSON shape with `json!` and delegate serialization to it.
        json!({
            "id": self.code,
            "shortDescription": {
                "text": self.summary,
            },
            "fullDescription": {
                "text": self.explanation,
            },
            "help": {
                "text": self.summary,
            },
            "helpUri": self.url,
            // Non-standard extras live under `properties`, as SARIF allows.
            "properties": {
                "id": self.code,
                "kind": self.linter,
                "name": self.name,
                "problem.severity": "error".to_string(),
            },
        })
        .serialize(serializer)
    }
}
|
||||
|
||||
/// A single diagnostic rendered into the shape required by the SARIF
/// `results` array.
#[derive(Debug)]
struct SarifResult {
    /// The rule that produced the diagnostic.
    rule: Rule,
    /// The SARIF severity level (always `"error"` here).
    level: String,
    /// The diagnostic's message text.
    message: String,
    /// The location of the offending file, as a URI (or plain path on wasm).
    uri: String,
    // Source span of the diagnostic, in one-indexed rows/columns.
    start_line: OneIndexed,
    start_column: OneIndexed,
    end_line: OneIndexed,
    end_column: OneIndexed,
}
|
||||
|
||||
impl SarifResult {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
fn from_message(message: &Message) -> Result<Self> {
|
||||
let start_location = message.compute_start_location();
|
||||
let end_location = message.compute_end_location();
|
||||
let path = normalize_path(message.filename());
|
||||
Ok(Self {
|
||||
rule: message.kind.rule(),
|
||||
level: "error".to_string(),
|
||||
message: message.kind.name.clone(),
|
||||
uri: url::Url::from_file_path(&path)
|
||||
.map_err(|()| anyhow::anyhow!("Failed to convert path to URL: {}", path.display()))?
|
||||
.to_string(),
|
||||
start_line: start_location.row,
|
||||
start_column: start_location.column,
|
||||
end_line: end_location.row,
|
||||
end_column: end_location.column,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[allow(clippy::unnecessary_wraps)]
|
||||
fn from_message(message: &Message) -> Result<Self> {
|
||||
let start_location = message.compute_start_location();
|
||||
let end_location = message.compute_end_location();
|
||||
let path = normalize_path(message.filename());
|
||||
Ok(Self {
|
||||
rule: message.kind.rule(),
|
||||
level: "error".to_string(),
|
||||
message: message.kind.name.clone(),
|
||||
uri: path.display().to_string(),
|
||||
start_line: start_location.row,
|
||||
start_column: start_location.column,
|
||||
end_line: end_location.row,
|
||||
end_column: end_location.column,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for SarifResult {
    /// Serialize the diagnostic as a SARIF `result` object.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        json!({
            "level": self.level,
            "message": {
                "text": self.message,
            },
            // A single physical location: the file URI plus the source region.
            "locations": [{
                "physicalLocation": {
                    "artifactLocation": {
                        "uri": self.uri,
                    },
                    "region": {
                        "startLine": self.start_line,
                        "startColumn": self.start_column,
                        "endLine": self.end_line,
                        "endColumn": self.end_column,
                    }
                }
            }],
            // Links the result back to the entry in `tool.driver.rules`.
            "ruleId": self.rule.noqa_code().to_string(),
        })
        .serialize(serializer)
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {

    use crate::message::tests::{capture_emitter_output, create_messages};
    use crate::message::SarifEmitter;

    /// Render the shared fixture messages through the SARIF emitter.
    fn get_output() -> String {
        let mut emitter = SarifEmitter {};
        capture_emitter_output(&mut emitter, &create_messages())
    }

    #[test]
    fn valid_json() {
        // The emitter's output must parse as JSON.
        let content = get_output();
        serde_json::from_str::<serde_json::Value>(&content).unwrap();
    }

    #[test]
    fn test_results() {
        let content = get_output();
        let sarif = serde_json::from_str::<serde_json::Value>(content.as_str()).unwrap();
        let rules = sarif["runs"][0]["tool"]["driver"]["rules"]
            .as_array()
            .unwrap();
        let results = sarif["runs"][0]["results"].as_array().unwrap();
        // Three fixture messages produce three results; the rules table covers
        // the full registry, so it is strictly larger.
        assert_eq!(results.len(), 3);
        assert!(rules.len() > 3);
    }
}
|
||||
@@ -537,6 +537,19 @@ fn check_dynamically_typed<F>(
|
||||
}
|
||||
}
|
||||
|
||||
fn is_empty_body(body: &[Stmt]) -> bool {
|
||||
body.iter().all(|stmt| match stmt {
|
||||
Stmt::Pass(_) => true,
|
||||
Stmt::Expr(ast::StmtExpr { value, range: _ }) => {
|
||||
matches!(
|
||||
value.as_ref(),
|
||||
Expr::StringLiteral(_) | Expr::EllipsisLiteral(_)
|
||||
)
|
||||
}
|
||||
_ => false,
|
||||
})
|
||||
}
|
||||
|
||||
/// Generate flake8-annotation checks for a given `Definition`.
|
||||
pub(crate) fn definition(
|
||||
checker: &Checker,
|
||||
@@ -725,16 +738,22 @@ pub(crate) fn definition(
|
||||
) {
|
||||
if is_method && visibility::is_classmethod(decorator_list, checker.semantic()) {
|
||||
if checker.enabled(Rule::MissingReturnTypeClassMethod) {
|
||||
let return_type = auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| (checker.generator().expr(&return_type), edits));
|
||||
let return_type = if visibility::is_abstract(decorator_list, checker.semantic())
|
||||
&& is_empty_body(body)
|
||||
{
|
||||
None
|
||||
} else {
|
||||
auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| (checker.generator().expr(&return_type), edits))
|
||||
};
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
MissingReturnTypeClassMethod {
|
||||
name: name.to_string(),
|
||||
@@ -752,16 +771,22 @@ pub(crate) fn definition(
|
||||
}
|
||||
} else if is_method && visibility::is_staticmethod(decorator_list, checker.semantic()) {
|
||||
if checker.enabled(Rule::MissingReturnTypeStaticMethod) {
|
||||
let return_type = auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| (checker.generator().expr(&return_type), edits));
|
||||
let return_type = if visibility::is_abstract(decorator_list, checker.semantic())
|
||||
&& is_empty_body(body)
|
||||
{
|
||||
None
|
||||
} else {
|
||||
auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| (checker.generator().expr(&return_type), edits))
|
||||
};
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
MissingReturnTypeStaticMethod {
|
||||
name: name.to_string(),
|
||||
@@ -818,18 +843,25 @@ pub(crate) fn definition(
|
||||
match visibility {
|
||||
visibility::Visibility::Public => {
|
||||
if checker.enabled(Rule::MissingReturnTypeUndocumentedPublicFunction) {
|
||||
let return_type = auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| {
|
||||
(checker.generator().expr(&return_type), edits)
|
||||
});
|
||||
let return_type =
|
||||
if visibility::is_abstract(decorator_list, checker.semantic())
|
||||
&& is_empty_body(body)
|
||||
{
|
||||
None
|
||||
} else {
|
||||
auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| {
|
||||
(checker.generator().expr(&return_type), edits)
|
||||
})
|
||||
};
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
MissingReturnTypeUndocumentedPublicFunction {
|
||||
name: name.to_string(),
|
||||
@@ -853,18 +885,25 @@ pub(crate) fn definition(
|
||||
}
|
||||
visibility::Visibility::Private => {
|
||||
if checker.enabled(Rule::MissingReturnTypePrivateFunction) {
|
||||
let return_type = auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| {
|
||||
(checker.generator().expr(&return_type), edits)
|
||||
});
|
||||
let return_type =
|
||||
if visibility::is_abstract(decorator_list, checker.semantic())
|
||||
&& is_empty_body(body)
|
||||
{
|
||||
None
|
||||
} else {
|
||||
auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| {
|
||||
(checker.generator().expr(&return_type), edits)
|
||||
})
|
||||
};
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
MissingReturnTypePrivateFunction {
|
||||
name: name.to_string(),
|
||||
|
||||
@@ -427,4 +427,72 @@ auto_return_type.py:146:5: ANN201 [*] Missing return type annotation for public
|
||||
148 148 | break
|
||||
149 149 | return 1
|
||||
|
||||
auto_return_type.py:158:9: ANN201 Missing return type annotation for public function `method`
|
||||
|
|
||||
156 | class Foo(abc.ABC):
|
||||
157 | @abstractmethod
|
||||
158 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
159 | pass
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:162:9: ANN201 Missing return type annotation for public function `method`
|
||||
|
|
||||
161 | @abc.abstractmethod
|
||||
162 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
163 | """Docstring."""
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:166:9: ANN201 Missing return type annotation for public function `method`
|
||||
|
|
||||
165 | @abc.abstractmethod
|
||||
166 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
167 | ...
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:171:9: ANN205 Missing return type annotation for staticmethod `method`
|
||||
|
|
||||
169 | @staticmethod
|
||||
170 | @abstractmethod
|
||||
171 | def method():
|
||||
| ^^^^^^ ANN205
|
||||
172 | pass
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:176:9: ANN206 Missing return type annotation for classmethod `method`
|
||||
|
|
||||
174 | @classmethod
|
||||
175 | @abstractmethod
|
||||
176 | def method(cls):
|
||||
| ^^^^^^ ANN206
|
||||
177 | pass
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:180:9: ANN201 [*] Missing return type annotation for public function `method`
|
||||
|
|
||||
179 | @abstractmethod
|
||||
180 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
181 | if self.x > 0:
|
||||
182 | return 1
|
||||
|
|
||||
= help: Add return type annotation: `float`
|
||||
|
||||
ℹ Unsafe fix
|
||||
177 177 | pass
|
||||
178 178 |
|
||||
179 179 | @abstractmethod
|
||||
180 |- def method(self):
|
||||
180 |+ def method(self) -> float:
|
||||
181 181 | if self.x > 0:
|
||||
182 182 | return 1
|
||||
183 183 | else:
|
||||
|
||||
|
||||
|
||||
@@ -482,4 +482,72 @@ auto_return_type.py:146:5: ANN201 [*] Missing return type annotation for public
|
||||
148 149 | break
|
||||
149 150 | return 1
|
||||
|
||||
auto_return_type.py:158:9: ANN201 Missing return type annotation for public function `method`
|
||||
|
|
||||
156 | class Foo(abc.ABC):
|
||||
157 | @abstractmethod
|
||||
158 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
159 | pass
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:162:9: ANN201 Missing return type annotation for public function `method`
|
||||
|
|
||||
161 | @abc.abstractmethod
|
||||
162 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
163 | """Docstring."""
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:166:9: ANN201 Missing return type annotation for public function `method`
|
||||
|
|
||||
165 | @abc.abstractmethod
|
||||
166 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
167 | ...
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:171:9: ANN205 Missing return type annotation for staticmethod `method`
|
||||
|
|
||||
169 | @staticmethod
|
||||
170 | @abstractmethod
|
||||
171 | def method():
|
||||
| ^^^^^^ ANN205
|
||||
172 | pass
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:176:9: ANN206 Missing return type annotation for classmethod `method`
|
||||
|
|
||||
174 | @classmethod
|
||||
175 | @abstractmethod
|
||||
176 | def method(cls):
|
||||
| ^^^^^^ ANN206
|
||||
177 | pass
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:180:9: ANN201 [*] Missing return type annotation for public function `method`
|
||||
|
|
||||
179 | @abstractmethod
|
||||
180 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
181 | if self.x > 0:
|
||||
182 | return 1
|
||||
|
|
||||
= help: Add return type annotation: `float`
|
||||
|
||||
ℹ Unsafe fix
|
||||
177 177 | pass
|
||||
178 178 |
|
||||
179 179 | @abstractmethod
|
||||
180 |- def method(self):
|
||||
180 |+ def method(self) -> float:
|
||||
181 181 | if self.x > 0:
|
||||
182 182 | return 1
|
||||
183 183 | else:
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::ExprStringLiteral;
|
||||
use ruff_python_ast::{self as ast, StringLike};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for hardcoded bindings to all network interfaces (`0.0.0.0`).
|
||||
@@ -34,10 +37,16 @@ impl Violation for HardcodedBindAllInterfaces {
|
||||
}
|
||||
|
||||
/// S104
|
||||
pub(crate) fn hardcoded_bind_all_interfaces(string: &ExprStringLiteral) -> Option<Diagnostic> {
|
||||
if string.value.to_str() == "0.0.0.0" {
|
||||
Some(Diagnostic::new(HardcodedBindAllInterfaces, string.range))
|
||||
} else {
|
||||
None
|
||||
pub(crate) fn hardcoded_bind_all_interfaces(checker: &mut Checker, string: StringLike) {
|
||||
let is_bind_all_interface = match string {
|
||||
StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value == "0.0.0.0",
|
||||
StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. }) => value == "0.0.0.0",
|
||||
StringLike::BytesLiteral(_) => return,
|
||||
};
|
||||
|
||||
if is_bind_all_interface {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(HardcodedBindAllInterfaces, string.range()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -57,7 +57,7 @@ impl Violation for HardcodedSQLExpression {
|
||||
/// becomes `foobar {x}baz`.
|
||||
fn concatenated_f_string(expr: &ast::ExprFString, locator: &Locator) -> String {
|
||||
expr.value
|
||||
.parts()
|
||||
.iter()
|
||||
.filter_map(|part| {
|
||||
raw_contents(locator.slice(part)).map(|s| s.escape_default().to_string())
|
||||
})
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use ruff_python_ast::{self as ast, Expr};
|
||||
use ruff_python_ast::{self as ast, Expr, StringLike};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -51,13 +52,19 @@ impl Violation for HardcodedTempFile {
|
||||
}
|
||||
|
||||
/// S108
|
||||
pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: &ast::ExprStringLiteral) {
|
||||
pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: StringLike) {
|
||||
let value = match string {
|
||||
StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.to_str(),
|
||||
StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. }) => value,
|
||||
StringLike::BytesLiteral(_) => return,
|
||||
};
|
||||
|
||||
if !checker
|
||||
.settings
|
||||
.flake8_bandit
|
||||
.hardcoded_tmp_directory
|
||||
.iter()
|
||||
.any(|prefix| string.value.to_str().starts_with(prefix))
|
||||
.any(|prefix| value.starts_with(prefix))
|
||||
{
|
||||
return;
|
||||
}
|
||||
@@ -76,8 +83,8 @@ pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: &ast::ExprS
|
||||
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
HardcodedTempFile {
|
||||
string: string.value.to_string(),
|
||||
string: value.to_string(),
|
||||
},
|
||||
string.range,
|
||||
string.range(),
|
||||
));
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ S104.py:9:1: S104 Possible binding to all interfaces
|
||||
9 | "0.0.0.0"
|
||||
| ^^^^^^^^^ S104
|
||||
10 | '0.0.0.0'
|
||||
11 | f"0.0.0.0"
|
||||
|
|
||||
|
||||
S104.py:10:1: S104 Possible binding to all interfaces
|
||||
@@ -15,21 +16,30 @@ S104.py:10:1: S104 Possible binding to all interfaces
|
||||
9 | "0.0.0.0"
|
||||
10 | '0.0.0.0'
|
||||
| ^^^^^^^^^ S104
|
||||
11 | f"0.0.0.0"
|
||||
|
|
||||
|
||||
S104.py:14:6: S104 Possible binding to all interfaces
|
||||
S104.py:11:3: S104 Possible binding to all interfaces
|
||||
|
|
||||
13 | # Error
|
||||
14 | func("0.0.0.0")
|
||||
9 | "0.0.0.0"
|
||||
10 | '0.0.0.0'
|
||||
11 | f"0.0.0.0"
|
||||
| ^^^^^^^ S104
|
||||
|
|
||||
|
||||
S104.py:15:6: S104 Possible binding to all interfaces
|
||||
|
|
||||
14 | # Error
|
||||
15 | func("0.0.0.0")
|
||||
| ^^^^^^^^^ S104
|
||||
|
|
||||
|
||||
S104.py:18:9: S104 Possible binding to all interfaces
|
||||
S104.py:19:9: S104 Possible binding to all interfaces
|
||||
|
|
||||
17 | def my_func():
|
||||
18 | x = "0.0.0.0"
|
||||
18 | def my_func():
|
||||
19 | x = "0.0.0.0"
|
||||
| ^^^^^^^^^ S104
|
||||
19 | print(x)
|
||||
20 | print(x)
|
||||
|
|
||||
|
||||
|
||||
|
||||
@@ -10,22 +10,31 @@ S108.py:5:11: S108 Probable insecure usage of temporary file or directory: "/tmp
|
||||
6 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:8:11: S108 Probable insecure usage of temporary file or directory: "/var/tmp/123"
|
||||
S108.py:8:13: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
6 | f.write("def")
|
||||
7 |
|
||||
8 | with open("/var/tmp/123", "w") as f:
|
||||
| ^^^^^^^^^^^^^^ S108
|
||||
8 | with open(f"/tmp/abc", "w") as f:
|
||||
| ^^^^^^^^ S108
|
||||
9 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:11:11: S108 Probable insecure usage of temporary file or directory: "/dev/shm/unit/test"
|
||||
S108.py:11:11: S108 Probable insecure usage of temporary file or directory: "/var/tmp/123"
|
||||
|
|
||||
9 | f.write("def")
|
||||
10 |
|
||||
11 | with open("/dev/shm/unit/test", "w") as f:
|
||||
| ^^^^^^^^^^^^^^^^^^^^ S108
|
||||
11 | with open("/var/tmp/123", "w") as f:
|
||||
| ^^^^^^^^^^^^^^ S108
|
||||
12 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:14:11: S108 Probable insecure usage of temporary file or directory: "/dev/shm/unit/test"
|
||||
|
|
||||
12 | f.write("def")
|
||||
13 |
|
||||
14 | with open("/dev/shm/unit/test", "w") as f:
|
||||
| ^^^^^^^^^^^^^^^^^^^^ S108
|
||||
15 | f.write("def")
|
||||
|
|
||||
|
||||
|
||||
|
||||
@@ -10,30 +10,39 @@ S108.py:5:11: S108 Probable insecure usage of temporary file or directory: "/tmp
|
||||
6 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:8:11: S108 Probable insecure usage of temporary file or directory: "/var/tmp/123"
|
||||
S108.py:8:13: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
6 | f.write("def")
|
||||
7 |
|
||||
8 | with open("/var/tmp/123", "w") as f:
|
||||
| ^^^^^^^^^^^^^^ S108
|
||||
8 | with open(f"/tmp/abc", "w") as f:
|
||||
| ^^^^^^^^ S108
|
||||
9 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:11:11: S108 Probable insecure usage of temporary file or directory: "/dev/shm/unit/test"
|
||||
S108.py:11:11: S108 Probable insecure usage of temporary file or directory: "/var/tmp/123"
|
||||
|
|
||||
9 | f.write("def")
|
||||
10 |
|
||||
11 | with open("/dev/shm/unit/test", "w") as f:
|
||||
| ^^^^^^^^^^^^^^^^^^^^ S108
|
||||
11 | with open("/var/tmp/123", "w") as f:
|
||||
| ^^^^^^^^^^^^^^ S108
|
||||
12 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:15:11: S108 Probable insecure usage of temporary file or directory: "/foo/bar"
|
||||
S108.py:14:11: S108 Probable insecure usage of temporary file or directory: "/dev/shm/unit/test"
|
||||
|
|
||||
14 | # not ok by config
|
||||
15 | with open("/foo/bar", "w") as f:
|
||||
12 | f.write("def")
|
||||
13 |
|
||||
14 | with open("/dev/shm/unit/test", "w") as f:
|
||||
| ^^^^^^^^^^^^^^^^^^^^ S108
|
||||
15 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:18:11: S108 Probable insecure usage of temporary file or directory: "/foo/bar"
|
||||
|
|
||||
17 | # not ok by config
|
||||
18 | with open("/foo/bar", "w") as f:
|
||||
| ^^^^^^^^^^ S108
|
||||
16 | f.write("def")
|
||||
19 | f.write("def")
|
||||
|
|
||||
|
||||
|
||||
|
||||
@@ -1083,7 +1083,7 @@ pub(crate) fn fix_unnecessary_map(
|
||||
// If the expression is embedded in an f-string, surround it with spaces to avoid
|
||||
// syntax errors.
|
||||
if matches!(object_type, ObjectType::Set | ObjectType::Dict) {
|
||||
if parent.is_some_and(Expr::is_formatted_value_expr) {
|
||||
if parent.is_some_and(Expr::is_f_string_expr) {
|
||||
content = format!(" {content} ");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -191,15 +191,13 @@ pub(crate) fn string_in_exception(checker: &mut Checker, stmt: &Stmt, exc: &Expr
|
||||
if let Some(indentation) =
|
||||
whitespace::indentation(checker.locator(), stmt)
|
||||
{
|
||||
if checker.semantic().is_available("msg") {
|
||||
diagnostic.set_fix(generate_fix(
|
||||
stmt,
|
||||
first,
|
||||
indentation,
|
||||
checker.stylist(),
|
||||
checker.locator(),
|
||||
));
|
||||
}
|
||||
diagnostic.set_fix(generate_fix(
|
||||
stmt,
|
||||
first,
|
||||
indentation,
|
||||
checker.stylist(),
|
||||
checker.locator(),
|
||||
));
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
@@ -211,15 +209,13 @@ pub(crate) fn string_in_exception(checker: &mut Checker, stmt: &Stmt, exc: &Expr
|
||||
let mut diagnostic = Diagnostic::new(FStringInException, first.range());
|
||||
if let Some(indentation) = whitespace::indentation(checker.locator(), stmt)
|
||||
{
|
||||
if checker.semantic().is_available("msg") {
|
||||
diagnostic.set_fix(generate_fix(
|
||||
stmt,
|
||||
first,
|
||||
indentation,
|
||||
checker.stylist(),
|
||||
checker.locator(),
|
||||
));
|
||||
}
|
||||
diagnostic.set_fix(generate_fix(
|
||||
stmt,
|
||||
first,
|
||||
indentation,
|
||||
checker.stylist(),
|
||||
checker.locator(),
|
||||
));
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
@@ -236,15 +232,13 @@ pub(crate) fn string_in_exception(checker: &mut Checker, stmt: &Stmt, exc: &Expr
|
||||
if let Some(indentation) =
|
||||
whitespace::indentation(checker.locator(), stmt)
|
||||
{
|
||||
if checker.semantic().is_available("msg") {
|
||||
diagnostic.set_fix(generate_fix(
|
||||
stmt,
|
||||
first,
|
||||
indentation,
|
||||
checker.stylist(),
|
||||
checker.locator(),
|
||||
));
|
||||
}
|
||||
diagnostic.set_fix(generate_fix(
|
||||
stmt,
|
||||
first,
|
||||
indentation,
|
||||
checker.stylist(),
|
||||
checker.locator(),
|
||||
));
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
|
||||
@@ -59,15 +59,26 @@ EM.py:22:24: EM103 [*] Exception must not use a `.format()` string directly, ass
|
||||
24 25 |
|
||||
25 26 | def f_ok():
|
||||
|
||||
EM.py:32:24: EM101 Exception must not use a string literal, assign to variable first
|
||||
EM.py:32:24: EM101 [*] Exception must not use a string literal, assign to variable first
|
||||
|
|
||||
30 | def f_unfixable():
|
||||
30 | def f_msg_defined():
|
||||
31 | msg = "hello"
|
||||
32 | raise RuntimeError("This is an example exception")
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ EM101
|
||||
|
|
||||
= help: Assign to variable; remove string literal
|
||||
|
||||
ℹ Unsafe fix
|
||||
29 29 |
|
||||
30 30 | def f_msg_defined():
|
||||
31 31 | msg = "hello"
|
||||
32 |- raise RuntimeError("This is an example exception")
|
||||
32 |+ msg = "This is an example exception"
|
||||
33 |+ raise RuntimeError(msg)
|
||||
33 34 |
|
||||
34 35 |
|
||||
35 36 | def f_msg_in_nested_scope():
|
||||
|
||||
EM.py:39:24: EM101 [*] Exception must not use a string literal, assign to variable first
|
||||
|
|
||||
37 | msg = "hello"
|
||||
@@ -88,7 +99,7 @@ EM.py:39:24: EM101 [*] Exception must not use a string literal, assign to variab
|
||||
41 42 |
|
||||
42 43 | def f_msg_in_parent_scope():
|
||||
|
||||
EM.py:46:28: EM101 Exception must not use a string literal, assign to variable first
|
||||
EM.py:46:28: EM101 [*] Exception must not use a string literal, assign to variable first
|
||||
|
|
||||
45 | def nested():
|
||||
46 | raise RuntimeError("This is an example exception")
|
||||
@@ -96,6 +107,17 @@ EM.py:46:28: EM101 Exception must not use a string literal, assign to variable f
|
||||
|
|
||||
= help: Assign to variable; remove string literal
|
||||
|
||||
ℹ Unsafe fix
|
||||
43 43 | msg = "hello"
|
||||
44 44 |
|
||||
45 45 | def nested():
|
||||
46 |- raise RuntimeError("This is an example exception")
|
||||
46 |+ msg = "This is an example exception"
|
||||
47 |+ raise RuntimeError(msg)
|
||||
47 48 |
|
||||
48 49 |
|
||||
49 50 | def f_fix_indentation_check(foo):
|
||||
|
||||
EM.py:51:28: EM101 [*] Exception must not use a string literal, assign to variable first
|
||||
|
|
||||
49 | def f_fix_indentation_check(foo):
|
||||
|
||||
@@ -97,15 +97,26 @@ EM.py:22:24: EM103 [*] Exception must not use a `.format()` string directly, ass
|
||||
24 25 |
|
||||
25 26 | def f_ok():
|
||||
|
||||
EM.py:32:24: EM101 Exception must not use a string literal, assign to variable first
|
||||
EM.py:32:24: EM101 [*] Exception must not use a string literal, assign to variable first
|
||||
|
|
||||
30 | def f_unfixable():
|
||||
30 | def f_msg_defined():
|
||||
31 | msg = "hello"
|
||||
32 | raise RuntimeError("This is an example exception")
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ EM101
|
||||
|
|
||||
= help: Assign to variable; remove string literal
|
||||
|
||||
ℹ Unsafe fix
|
||||
29 29 |
|
||||
30 30 | def f_msg_defined():
|
||||
31 31 | msg = "hello"
|
||||
32 |- raise RuntimeError("This is an example exception")
|
||||
32 |+ msg = "This is an example exception"
|
||||
33 |+ raise RuntimeError(msg)
|
||||
33 34 |
|
||||
34 35 |
|
||||
35 36 | def f_msg_in_nested_scope():
|
||||
|
||||
EM.py:39:24: EM101 [*] Exception must not use a string literal, assign to variable first
|
||||
|
|
||||
37 | msg = "hello"
|
||||
@@ -126,7 +137,7 @@ EM.py:39:24: EM101 [*] Exception must not use a string literal, assign to variab
|
||||
41 42 |
|
||||
42 43 | def f_msg_in_parent_scope():
|
||||
|
||||
EM.py:46:28: EM101 Exception must not use a string literal, assign to variable first
|
||||
EM.py:46:28: EM101 [*] Exception must not use a string literal, assign to variable first
|
||||
|
|
||||
45 | def nested():
|
||||
46 | raise RuntimeError("This is an example exception")
|
||||
@@ -134,6 +145,17 @@ EM.py:46:28: EM101 Exception must not use a string literal, assign to variable f
|
||||
|
|
||||
= help: Assign to variable; remove string literal
|
||||
|
||||
ℹ Unsafe fix
|
||||
43 43 | msg = "hello"
|
||||
44 44 |
|
||||
45 45 | def nested():
|
||||
46 |- raise RuntimeError("This is an example exception")
|
||||
46 |+ msg = "This is an example exception"
|
||||
47 |+ raise RuntimeError(msg)
|
||||
47 48 |
|
||||
48 49 |
|
||||
49 50 | def f_fix_indentation_check(foo):
|
||||
|
||||
EM.py:51:28: EM101 [*] Exception must not use a string literal, assign to variable first
|
||||
|
|
||||
49 | def f_fix_indentation_check(foo):
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
use itertools::Itertools;
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_python_ast::{self as ast, Expr, Keyword};
|
||||
use std::hash::BuildHasherDefault;
|
||||
|
||||
use itertools::Itertools;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast, Expr};
|
||||
use ruff_python_stdlib::identifiers::is_identifier;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use ruff_python_stdlib::identifiers::is_identifier;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::fix::edits::{remove_argument, Parentheses};
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for unnecessary `dict` kwargs.
|
||||
@@ -40,36 +43,39 @@ use crate::checkers::ast::Checker;
|
||||
#[violation]
|
||||
pub struct UnnecessaryDictKwargs;
|
||||
|
||||
impl AlwaysFixableViolation for UnnecessaryDictKwargs {
|
||||
impl Violation for UnnecessaryDictKwargs {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Unnecessary `dict` kwargs")
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> String {
|
||||
format!("Remove unnecessary kwargs")
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some(format!("Remove unnecessary kwargs"))
|
||||
}
|
||||
}
|
||||
|
||||
/// PIE804
|
||||
pub(crate) fn unnecessary_dict_kwargs(checker: &mut Checker, expr: &Expr, kwargs: &[Keyword]) {
|
||||
for kw in kwargs {
|
||||
// keyword is a spread operator (indicated by None)
|
||||
if kw.arg.is_some() {
|
||||
pub(crate) fn unnecessary_dict_kwargs(checker: &mut Checker, call: &ast::ExprCall) {
|
||||
let mut duplicate_keywords = None;
|
||||
for keyword in &call.arguments.keywords {
|
||||
// keyword is a spread operator (indicated by None).
|
||||
if keyword.arg.is_some() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let Expr::Dict(ast::ExprDict { keys, values, .. }) = &kw.value else {
|
||||
let Expr::Dict(ast::ExprDict { keys, values, .. }) = &keyword.value else {
|
||||
continue;
|
||||
};
|
||||
|
||||
// Ex) `foo(**{**bar})`
|
||||
if matches!(keys.as_slice(), [None]) {
|
||||
let mut diagnostic = Diagnostic::new(UnnecessaryDictKwargs, expr.range());
|
||||
let mut diagnostic = Diagnostic::new(UnnecessaryDictKwargs, keyword.range());
|
||||
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
|
||||
format!("**{}", checker.locator().slice(values[0].range())),
|
||||
kw.range(),
|
||||
keyword.range(),
|
||||
)));
|
||||
|
||||
checker.diagnostics.push(diagnostic);
|
||||
@@ -86,27 +92,77 @@ pub(crate) fn unnecessary_dict_kwargs(checker: &mut Checker, expr: &Expr, kwargs
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut diagnostic = Diagnostic::new(UnnecessaryDictKwargs, expr.range());
|
||||
let mut diagnostic = Diagnostic::new(UnnecessaryDictKwargs, keyword.range());
|
||||
|
||||
if values.is_empty() {
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::deletion(kw.start(), kw.end())));
|
||||
diagnostic.try_set_fix(|| {
|
||||
remove_argument(
|
||||
keyword,
|
||||
&call.arguments,
|
||||
Parentheses::Preserve,
|
||||
checker.locator().contents(),
|
||||
)
|
||||
.map(Fix::safe_edit)
|
||||
});
|
||||
} else {
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
|
||||
kwargs
|
||||
// Compute the set of duplicate keywords (lazily).
|
||||
if duplicate_keywords.is_none() {
|
||||
duplicate_keywords = Some(duplicates(call));
|
||||
}
|
||||
|
||||
// Avoid fixing if doing so could introduce a duplicate keyword argument.
|
||||
if let Some(duplicate_keywords) = duplicate_keywords.as_ref() {
|
||||
if kwargs
|
||||
.iter()
|
||||
.zip(values.iter())
|
||||
.map(|(kwarg, value)| {
|
||||
format!("{}={}", kwarg, checker.locator().slice(value.range()))
|
||||
})
|
||||
.join(", "),
|
||||
kw.range(),
|
||||
)));
|
||||
.all(|kwarg| !duplicate_keywords.contains(kwarg))
|
||||
{
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
|
||||
kwargs
|
||||
.iter()
|
||||
.zip(values.iter())
|
||||
.map(|(kwarg, value)| {
|
||||
format!("{}={}", kwarg, checker.locator().slice(value.range()))
|
||||
})
|
||||
.join(", "),
|
||||
keyword.range(),
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
|
||||
/// Determine the set of keywords that appear in multiple positions (either directly, as in
|
||||
/// `func(x=1)`, or indirectly, as in `func(**{"x": 1})`).
|
||||
fn duplicates(call: &ast::ExprCall) -> FxHashSet<&str> {
|
||||
let mut seen = FxHashSet::with_capacity_and_hasher(
|
||||
call.arguments.keywords.len(),
|
||||
BuildHasherDefault::default(),
|
||||
);
|
||||
let mut duplicates = FxHashSet::with_capacity_and_hasher(
|
||||
call.arguments.keywords.len(),
|
||||
BuildHasherDefault::default(),
|
||||
);
|
||||
for keyword in &call.arguments.keywords {
|
||||
if let Some(name) = &keyword.arg {
|
||||
if !seen.insert(name.as_str()) {
|
||||
duplicates.insert(name.as_str());
|
||||
}
|
||||
} else if let Expr::Dict(ast::ExprDict { keys, .. }) = &keyword.value {
|
||||
for key in keys {
|
||||
if let Some(name) = key.as_ref().and_then(as_kwarg) {
|
||||
if !seen.insert(name) {
|
||||
duplicates.insert(name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
duplicates
|
||||
}
|
||||
|
||||
/// Return `Some` if a key is a valid keyword argument name, or `None` otherwise.
|
||||
fn as_kwarg(key: &Expr) -> Option<&str> {
|
||||
if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) = key {
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_pie/mod.rs
|
||||
---
|
||||
PIE804.py:1:1: PIE804 [*] Unnecessary `dict` kwargs
|
||||
PIE804.py:1:5: PIE804 [*] Unnecessary `dict` kwargs
|
||||
|
|
||||
1 | foo(**{"bar": True}) # PIE804
|
||||
| ^^^^^^^^^^^^^^^^^^^^ PIE804
|
||||
| ^^^^^^^^^^^^^^^ PIE804
|
||||
2 |
|
||||
3 | foo(**{"r2d2": True}) # PIE804
|
||||
|
|
||||
@@ -17,12 +17,12 @@ PIE804.py:1:1: PIE804 [*] Unnecessary `dict` kwargs
|
||||
3 3 | foo(**{"r2d2": True}) # PIE804
|
||||
4 4 |
|
||||
|
||||
PIE804.py:3:1: PIE804 [*] Unnecessary `dict` kwargs
|
||||
PIE804.py:3:5: PIE804 [*] Unnecessary `dict` kwargs
|
||||
|
|
||||
1 | foo(**{"bar": True}) # PIE804
|
||||
2 |
|
||||
3 | foo(**{"r2d2": True}) # PIE804
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ PIE804
|
||||
| ^^^^^^^^^^^^^^^^ PIE804
|
||||
4 |
|
||||
5 | Foo.objects.create(**{"bar": True}) # PIE804
|
||||
|
|
||||
@@ -37,12 +37,12 @@ PIE804.py:3:1: PIE804 [*] Unnecessary `dict` kwargs
|
||||
5 5 | Foo.objects.create(**{"bar": True}) # PIE804
|
||||
6 6 |
|
||||
|
||||
PIE804.py:5:1: PIE804 [*] Unnecessary `dict` kwargs
|
||||
PIE804.py:5:20: PIE804 [*] Unnecessary `dict` kwargs
|
||||
|
|
||||
3 | foo(**{"r2d2": True}) # PIE804
|
||||
4 |
|
||||
5 | Foo.objects.create(**{"bar": True}) # PIE804
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PIE804
|
||||
| ^^^^^^^^^^^^^^^ PIE804
|
||||
6 |
|
||||
7 | Foo.objects.create(**{"_id": some_id}) # PIE804
|
||||
|
|
||||
@@ -58,12 +58,12 @@ PIE804.py:5:1: PIE804 [*] Unnecessary `dict` kwargs
|
||||
7 7 | Foo.objects.create(**{"_id": some_id}) # PIE804
|
||||
8 8 |
|
||||
|
||||
PIE804.py:7:1: PIE804 [*] Unnecessary `dict` kwargs
|
||||
PIE804.py:7:20: PIE804 [*] Unnecessary `dict` kwargs
|
||||
|
|
||||
5 | Foo.objects.create(**{"bar": True}) # PIE804
|
||||
6 |
|
||||
7 | Foo.objects.create(**{"_id": some_id}) # PIE804
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PIE804
|
||||
| ^^^^^^^^^^^^^^^^^^ PIE804
|
||||
8 |
|
||||
9 | Foo.objects.create(**{**bar}) # PIE804
|
||||
|
|
||||
@@ -79,12 +79,12 @@ PIE804.py:7:1: PIE804 [*] Unnecessary `dict` kwargs
|
||||
9 9 | Foo.objects.create(**{**bar}) # PIE804
|
||||
10 10 |
|
||||
|
||||
PIE804.py:9:1: PIE804 [*] Unnecessary `dict` kwargs
|
||||
PIE804.py:9:20: PIE804 [*] Unnecessary `dict` kwargs
|
||||
|
|
||||
7 | Foo.objects.create(**{"_id": some_id}) # PIE804
|
||||
8 |
|
||||
9 | Foo.objects.create(**{**bar}) # PIE804
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PIE804
|
||||
| ^^^^^^^^^ PIE804
|
||||
10 |
|
||||
11 | foo(**{})
|
||||
|
|
||||
@@ -100,12 +100,14 @@ PIE804.py:9:1: PIE804 [*] Unnecessary `dict` kwargs
|
||||
11 11 | foo(**{})
|
||||
12 12 |
|
||||
|
||||
PIE804.py:11:1: PIE804 [*] Unnecessary `dict` kwargs
|
||||
PIE804.py:11:5: PIE804 [*] Unnecessary `dict` kwargs
|
||||
|
|
||||
9 | Foo.objects.create(**{**bar}) # PIE804
|
||||
10 |
|
||||
11 | foo(**{})
|
||||
| ^^^^^^^^^ PIE804
|
||||
| ^^^^ PIE804
|
||||
12 |
|
||||
13 | foo(**{**data, "foo": "buzz"})
|
||||
|
|
||||
= help: Remove unnecessary kwargs
|
||||
|
||||
@@ -116,7 +118,71 @@ PIE804.py:11:1: PIE804 [*] Unnecessary `dict` kwargs
|
||||
11 |-foo(**{})
|
||||
11 |+foo()
|
||||
12 12 |
|
||||
13 13 |
|
||||
14 14 | foo(**{**data, "foo": "buzz"})
|
||||
13 13 | foo(**{**data, "foo": "buzz"})
|
||||
14 14 | foo(**buzz)
|
||||
|
||||
PIE804.py:22:5: PIE804 [*] Unnecessary `dict` kwargs
|
||||
|
|
||||
20 | foo(**{f"buzz__{bar}": True})
|
||||
21 | abc(**{"for": 3})
|
||||
22 | foo(**{},)
|
||||
| ^^^^ PIE804
|
||||
23 |
|
||||
24 | # Duplicated key names won't be fixed, to avoid syntax errors.
|
||||
|
|
||||
= help: Remove unnecessary kwargs
|
||||
|
||||
ℹ Safe fix
|
||||
19 19 | foo(**{"": True})
|
||||
20 20 | foo(**{f"buzz__{bar}": True})
|
||||
21 21 | abc(**{"for": 3})
|
||||
22 |-foo(**{},)
|
||||
22 |+foo()
|
||||
23 23 |
|
||||
24 24 | # Duplicated key names won't be fixed, to avoid syntax errors.
|
||||
25 25 | abc(**{'a': b}, **{'a': c}) # PIE804
|
||||
|
||||
PIE804.py:25:5: PIE804 Unnecessary `dict` kwargs
|
||||
|
|
||||
24 | # Duplicated key names won't be fixed, to avoid syntax errors.
|
||||
25 | abc(**{'a': b}, **{'a': c}) # PIE804
|
||||
| ^^^^^^^^^^ PIE804
|
||||
26 | abc(a=1, **{'a': c}, **{'b': c}) # PIE804
|
||||
|
|
||||
= help: Remove unnecessary kwargs
|
||||
|
||||
PIE804.py:25:17: PIE804 Unnecessary `dict` kwargs
|
||||
|
|
||||
24 | # Duplicated key names won't be fixed, to avoid syntax errors.
|
||||
25 | abc(**{'a': b}, **{'a': c}) # PIE804
|
||||
| ^^^^^^^^^^ PIE804
|
||||
26 | abc(a=1, **{'a': c}, **{'b': c}) # PIE804
|
||||
|
|
||||
= help: Remove unnecessary kwargs
|
||||
|
||||
PIE804.py:26:10: PIE804 Unnecessary `dict` kwargs
|
||||
|
|
||||
24 | # Duplicated key names won't be fixed, to avoid syntax errors.
|
||||
25 | abc(**{'a': b}, **{'a': c}) # PIE804
|
||||
26 | abc(a=1, **{'a': c}, **{'b': c}) # PIE804
|
||||
| ^^^^^^^^^^ PIE804
|
||||
|
|
||||
= help: Remove unnecessary kwargs
|
||||
|
||||
PIE804.py:26:22: PIE804 [*] Unnecessary `dict` kwargs
|
||||
|
|
||||
24 | # Duplicated key names won't be fixed, to avoid syntax errors.
|
||||
25 | abc(**{'a': b}, **{'a': c}) # PIE804
|
||||
26 | abc(a=1, **{'a': c}, **{'b': c}) # PIE804
|
||||
| ^^^^^^^^^^ PIE804
|
||||
|
|
||||
= help: Remove unnecessary kwargs
|
||||
|
||||
ℹ Safe fix
|
||||
23 23 |
|
||||
24 24 | # Duplicated key names won't be fixed, to avoid syntax errors.
|
||||
25 25 | abc(**{'a': b}, **{'a': c}) # PIE804
|
||||
26 |-abc(a=1, **{'a': c}, **{'b': c}) # PIE804
|
||||
26 |+abc(a=1, **{'a': c}, b=c) # PIE804
|
||||
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use ruff_python_ast::{self as ast, Expr};
|
||||
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::helpers::is_docstring_stmt;
|
||||
use ruff_python_ast::{self as ast, StringLike};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
@@ -44,25 +43,27 @@ impl AlwaysFixableViolation for StringOrBytesTooLong {
|
||||
}
|
||||
|
||||
/// PYI053
|
||||
pub(crate) fn string_or_bytes_too_long(checker: &mut Checker, expr: &Expr) {
|
||||
pub(crate) fn string_or_bytes_too_long(checker: &mut Checker, string: StringLike) {
|
||||
// Ignore docstrings.
|
||||
if is_docstring_stmt(checker.semantic().current_statement()) {
|
||||
return;
|
||||
}
|
||||
|
||||
let length = match expr {
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.chars().count(),
|
||||
Expr::BytesLiteral(ast::ExprBytesLiteral { value, .. }) => value.len(),
|
||||
_ => return,
|
||||
let length = match string {
|
||||
StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.chars().count(),
|
||||
StringLike::BytesLiteral(ast::ExprBytesLiteral { value, .. }) => value.len(),
|
||||
StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. }) => {
|
||||
value.chars().count()
|
||||
}
|
||||
};
|
||||
if length <= 50 {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut diagnostic = Diagnostic::new(StringOrBytesTooLong, expr.range());
|
||||
let mut diagnostic = Diagnostic::new(StringOrBytesTooLong, string.range());
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
|
||||
"...".to_string(),
|
||||
expr.range(),
|
||||
string.range(),
|
||||
)));
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
|
||||
@@ -90,7 +90,7 @@ PYI053.pyi:30:14: PYI053 [*] String and bytes literals longer than 50 characters
|
||||
30 | qux: bytes = b"51 character byte stringggggggggggggggggggggggggggg\xff" # Error: PYI053
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053
|
||||
31 |
|
||||
32 | class Demo:
|
||||
32 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
|
||||
|
|
||||
= help: Replace with `...`
|
||||
|
||||
@@ -101,7 +101,28 @@ PYI053.pyi:30:14: PYI053 [*] String and bytes literals longer than 50 characters
|
||||
30 |-qux: bytes = b"51 character byte stringggggggggggggggggggggggggggg\xff" # Error: PYI053
|
||||
30 |+qux: bytes = ... # Error: PYI053
|
||||
31 31 |
|
||||
32 32 | class Demo:
|
||||
33 33 | """Docstrings are excluded from this rule. Some padding.""" # OK
|
||||
32 32 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
|
||||
33 33 |
|
||||
|
||||
PYI053.pyi:34:15: PYI053 [*] String and bytes literals longer than 50 characters are not permitted
|
||||
|
|
||||
32 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
|
||||
33 |
|
||||
34 | fbar: str = f"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053
|
||||
35 |
|
||||
36 | class Demo:
|
||||
|
|
||||
= help: Replace with `...`
|
||||
|
||||
ℹ Safe fix
|
||||
31 31 |
|
||||
32 32 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
|
||||
33 33 |
|
||||
34 |-fbar: str = f"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053
|
||||
34 |+fbar: str = f"..." # Error: PYI053
|
||||
35 35 |
|
||||
36 36 | class Demo:
|
||||
37 37 | """Docstrings are excluded from this rule. Some padding.""" # OK
|
||||
|
||||
|
||||
|
||||
@@ -33,7 +33,7 @@ use super::unittest_assert::UnittestAssert;
|
||||
/// Checks for assertions that combine multiple independent conditions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Composite assertion statements are harder debug upon failure, as the
|
||||
/// Composite assertion statements are harder to debug upon failure, as the
|
||||
/// failure message will not indicate which condition failed.
|
||||
///
|
||||
/// ## Example
|
||||
|
||||
@@ -56,17 +56,27 @@ pub(super) fn is_empty_or_null_string(expr: &Expr) -> bool {
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.is_empty(),
|
||||
Expr::NoneLiteral(_) => true,
|
||||
Expr::FString(ast::ExprFString { value, .. }) => {
|
||||
value.parts().all(|f_string_part| match f_string_part {
|
||||
value.iter().all(|f_string_part| match f_string_part {
|
||||
ast::FStringPart::Literal(literal) => literal.is_empty(),
|
||||
ast::FStringPart::FString(f_string) => {
|
||||
f_string.values.iter().all(is_empty_or_null_string)
|
||||
}
|
||||
ast::FStringPart::FString(f_string) => f_string
|
||||
.elements
|
||||
.iter()
|
||||
.all(is_empty_or_null_fstring_element),
|
||||
})
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_empty_or_null_fstring_element(element: &ast::FStringElement) -> bool {
|
||||
match element {
|
||||
ast::FStringElement::Literal(ast::FStringLiteralElement { value, .. }) => value.is_empty(),
|
||||
ast::FStringElement::Expression(ast::FStringExpressionElement { expression, .. }) => {
|
||||
is_empty_or_null_string(expression)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn split_names(names: &str) -> Vec<&str> {
|
||||
// Match the following pytest code:
|
||||
// [x.strip() for x in argnames.split(",") if x.strip()]
|
||||
|
||||
@@ -78,9 +78,7 @@ pub(crate) fn unnecessary_paren_on_raise_exception(checker: &mut Checker, expr:
|
||||
|
||||
// `ctypes.WinError()` is a function, not a class. It's part of the standard library, so
|
||||
// we might as well get it right.
|
||||
if exception_type
|
||||
.as_ref()
|
||||
.is_some_and(ExceptionType::is_builtin)
|
||||
if exception_type.is_none()
|
||||
&& checker
|
||||
.semantic()
|
||||
.resolve_call_path(func)
|
||||
|
||||
@@ -266,6 +266,8 @@ RSE102.py:84:10: RSE102 [*] Unnecessary parentheses on raised exception
|
||||
83 | # RSE102
|
||||
84 | raise Foo()
|
||||
| ^^ RSE102
|
||||
85 |
|
||||
86 | # OK
|
||||
|
|
||||
= help: Remove unnecessary parentheses
|
||||
|
||||
@@ -275,5 +277,8 @@ RSE102.py:84:10: RSE102 [*] Unnecessary parentheses on raised exception
|
||||
83 83 | # RSE102
|
||||
84 |-raise Foo()
|
||||
84 |+raise Foo
|
||||
85 85 |
|
||||
86 86 | # OK
|
||||
87 87 | raise ctypes.WinError()
|
||||
|
||||
|
||||
|
||||
@@ -66,6 +66,10 @@ pub(crate) fn private_member_access(checker: &mut Checker, expr: &Expr) {
|
||||
return;
|
||||
};
|
||||
|
||||
if checker.semantic().in_annotation() {
|
||||
return;
|
||||
}
|
||||
|
||||
if (attr.starts_with("__") && !attr.ends_with("__"))
|
||||
|| (attr.starts_with('_') && !attr.starts_with("__"))
|
||||
{
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::Stmt;
|
||||
use ruff_python_ast::{self as ast, Expr, ExprCall, Int};
|
||||
use ruff_python_semantic::analyze::typing::find_assigned_value;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
@@ -71,30 +71,15 @@ pub(crate) fn zero_sleep_call(checker: &mut Checker, call: &ExprCall) {
|
||||
}
|
||||
}
|
||||
Expr::Name(ast::ExprName { id, .. }) => {
|
||||
let scope = checker.semantic().current_scope();
|
||||
if let Some(binding_id) = scope.get(id) {
|
||||
let binding = checker.semantic().binding(binding_id);
|
||||
if binding.kind.is_assignment() || binding.kind.is_named_expr_assignment() {
|
||||
if let Some(parent_id) = binding.source {
|
||||
let parent = checker.semantic().statement(parent_id);
|
||||
if let Stmt::Assign(ast::StmtAssign { value, .. })
|
||||
| Stmt::AnnAssign(ast::StmtAnnAssign {
|
||||
value: Some(value), ..
|
||||
})
|
||||
| Stmt::AugAssign(ast::StmtAugAssign { value, .. }) = parent
|
||||
{
|
||||
let Expr::NumberLiteral(ast::ExprNumberLiteral { value: num, .. }) =
|
||||
value.as_ref()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let Some(int) = num.as_int() else { return };
|
||||
if *int != Int::ZERO {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let Some(value) = find_assigned_value(id, checker.semantic()) else {
|
||||
return;
|
||||
};
|
||||
let Expr::NumberLiteral(ast::ExprNumberLiteral { value: num, .. }) = value else {
|
||||
return;
|
||||
};
|
||||
let Some(int) = num.as_int() else { return };
|
||||
if *int != Int::ZERO {
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => return,
|
||||
|
||||
@@ -85,51 +85,227 @@ TRIO115.py:17:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.s
|
||||
19 19 | bar = "bar"
|
||||
20 20 | trio.sleep(bar)
|
||||
|
||||
TRIO115.py:31:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
TRIO115.py:23:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
30 | def func():
|
||||
31 | sleep(0) # TRIO115
|
||||
22 | x, y = 0, 2000
|
||||
23 | trio.sleep(x) # TRIO115
|
||||
| ^^^^^^^^^^^^^ TRIO115
|
||||
24 | trio.sleep(y) # OK
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
20 20 | trio.sleep(bar)
|
||||
21 21 |
|
||||
22 22 | x, y = 0, 2000
|
||||
23 |- trio.sleep(x) # TRIO115
|
||||
23 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
24 24 | trio.sleep(y) # OK
|
||||
25 25 |
|
||||
26 26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
|
||||
TRIO115.py:27:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
27 | trio.sleep(c) # TRIO115
|
||||
| ^^^^^^^^^^^^^ TRIO115
|
||||
28 | trio.sleep(d) # OK
|
||||
29 | trio.sleep(e) # TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
24 24 | trio.sleep(y) # OK
|
||||
25 25 |
|
||||
26 26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
27 |- trio.sleep(c) # TRIO115
|
||||
27 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
28 28 | trio.sleep(d) # OK
|
||||
29 29 | trio.sleep(e) # TRIO115
|
||||
30 30 |
|
||||
|
||||
TRIO115.py:29:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
27 | trio.sleep(c) # TRIO115
|
||||
28 | trio.sleep(d) # OK
|
||||
29 | trio.sleep(e) # TRIO115
|
||||
| ^^^^^^^^^^^^^ TRIO115
|
||||
30 |
|
||||
31 | m_x, m_y = 0
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
26 26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
27 27 | trio.sleep(c) # TRIO115
|
||||
28 28 | trio.sleep(d) # OK
|
||||
29 |- trio.sleep(e) # TRIO115
|
||||
29 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
30 30 |
|
||||
31 31 | m_x, m_y = 0
|
||||
32 32 | trio.sleep(m_y) # OK
|
||||
|
||||
TRIO115.py:36:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
35 | m_a = m_b = 0
|
||||
36 | trio.sleep(m_a) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
37 | trio.sleep(m_b) # TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
33 33 | trio.sleep(m_x) # OK
|
||||
34 34 |
|
||||
35 35 | m_a = m_b = 0
|
||||
36 |- trio.sleep(m_a) # TRIO115
|
||||
36 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
37 37 | trio.sleep(m_b) # TRIO115
|
||||
38 38 |
|
||||
39 39 | m_c = (m_d, m_e) = (0, 0)
|
||||
|
||||
TRIO115.py:37:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
35 | m_a = m_b = 0
|
||||
36 | trio.sleep(m_a) # TRIO115
|
||||
37 | trio.sleep(m_b) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
38 |
|
||||
39 | m_c = (m_d, m_e) = (0, 0)
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
34 34 |
|
||||
35 35 | m_a = m_b = 0
|
||||
36 36 | trio.sleep(m_a) # TRIO115
|
||||
37 |- trio.sleep(m_b) # TRIO115
|
||||
37 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
38 38 |
|
||||
39 39 | m_c = (m_d, m_e) = (0, 0)
|
||||
40 40 | trio.sleep(m_c) # OK
|
||||
|
||||
TRIO115.py:41:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
39 | m_c = (m_d, m_e) = (0, 0)
|
||||
40 | trio.sleep(m_c) # OK
|
||||
41 | trio.sleep(m_d) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
42 | trio.sleep(m_e) # TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
38 38 |
|
||||
39 39 | m_c = (m_d, m_e) = (0, 0)
|
||||
40 40 | trio.sleep(m_c) # OK
|
||||
41 |- trio.sleep(m_d) # TRIO115
|
||||
41 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
42 42 | trio.sleep(m_e) # TRIO115
|
||||
43 43 |
|
||||
44 44 |
|
||||
|
||||
TRIO115.py:42:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
40 | trio.sleep(m_c) # OK
|
||||
41 | trio.sleep(m_d) # TRIO115
|
||||
42 | trio.sleep(m_e) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
39 39 | m_c = (m_d, m_e) = (0, 0)
|
||||
40 40 | trio.sleep(m_c) # OK
|
||||
41 41 | trio.sleep(m_d) # TRIO115
|
||||
42 |- trio.sleep(m_e) # TRIO115
|
||||
42 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
43 43 |
|
||||
44 44 |
|
||||
45 45 | def func():
|
||||
|
||||
TRIO115.py:48:14: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
46 | import trio
|
||||
47 |
|
||||
48 | trio.run(trio.sleep(0)) # TRIO115
|
||||
| ^^^^^^^^^^^^^ TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
45 45 | def func():
|
||||
46 46 | import trio
|
||||
47 47 |
|
||||
48 |- trio.run(trio.sleep(0)) # TRIO115
|
||||
48 |+ trio.run(trio.lowlevel.checkpoint()) # TRIO115
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 51 | from trio import Event, sleep
|
||||
|
||||
TRIO115.py:55:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
54 | def func():
|
||||
55 | sleep(0) # TRIO115
|
||||
| ^^^^^^^^ TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
24 24 | trio.run(trio.sleep(0)) # TRIO115
|
||||
25 25 |
|
||||
26 26 |
|
||||
27 |-from trio import Event, sleep
|
||||
27 |+from trio import Event, sleep, lowlevel
|
||||
28 28 |
|
||||
29 29 |
|
||||
30 30 | def func():
|
||||
31 |- sleep(0) # TRIO115
|
||||
31 |+ lowlevel.checkpoint() # TRIO115
|
||||
32 32 |
|
||||
33 33 |
|
||||
34 34 | async def func():
|
||||
48 48 | trio.run(trio.sleep(0)) # TRIO115
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 |-from trio import Event, sleep
|
||||
51 |+from trio import Event, sleep, lowlevel
|
||||
52 52 |
|
||||
53 53 |
|
||||
54 54 | def func():
|
||||
55 |- sleep(0) # TRIO115
|
||||
55 |+ lowlevel.checkpoint() # TRIO115
|
||||
56 56 |
|
||||
57 57 |
|
||||
58 58 | async def func():
|
||||
|
||||
TRIO115.py:35:11: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
TRIO115.py:59:11: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
34 | async def func():
|
||||
35 | await sleep(seconds=0) # TRIO115
|
||||
58 | async def func():
|
||||
59 | await sleep(seconds=0) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^^ TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
24 24 | trio.run(trio.sleep(0)) # TRIO115
|
||||
25 25 |
|
||||
26 26 |
|
||||
27 |-from trio import Event, sleep
|
||||
27 |+from trio import Event, sleep, lowlevel
|
||||
28 28 |
|
||||
29 29 |
|
||||
30 30 | def func():
|
||||
48 48 | trio.run(trio.sleep(0)) # TRIO115
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 |-from trio import Event, sleep
|
||||
51 |+from trio import Event, sleep, lowlevel
|
||||
52 52 |
|
||||
53 53 |
|
||||
54 54 | def func():
|
||||
--------------------------------------------------------------------------------
|
||||
32 32 |
|
||||
33 33 |
|
||||
34 34 | async def func():
|
||||
35 |- await sleep(seconds=0) # TRIO115
|
||||
35 |+ await lowlevel.checkpoint() # TRIO115
|
||||
56 56 |
|
||||
57 57 |
|
||||
58 58 | async def func():
|
||||
59 |- await sleep(seconds=0) # TRIO115
|
||||
59 |+ await lowlevel.checkpoint() # TRIO115
|
||||
60 60 |
|
||||
61 61 |
|
||||
62 62 | def func():
|
||||
|
||||
TRIO115.py:66:9: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
65 | if (walrus := 0) == 0:
|
||||
66 | trio.sleep(walrus) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^^^^ TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
63 63 | import trio
|
||||
64 64 |
|
||||
65 65 | if (walrus := 0) == 0:
|
||||
66 |- trio.sleep(walrus) # TRIO115
|
||||
66 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
|
||||
|
||||
|
||||
@@ -1,10 +1,35 @@
|
||||
use anyhow::Result;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use ruff_diagnostics::Edit;
|
||||
use ruff_python_ast::call_path::from_qualified_name;
|
||||
use ruff_python_ast::helpers::{map_callable, map_subscript};
|
||||
use ruff_python_ast::{self as ast, Expr};
|
||||
use ruff_python_semantic::{Binding, BindingId, BindingKind, SemanticModel};
|
||||
use rustc_hash::FxHashSet;
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_semantic::{
|
||||
Binding, BindingId, BindingKind, NodeId, ResolvedReference, SemanticModel,
|
||||
};
|
||||
use ruff_source_file::Locator;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
pub(crate) fn is_valid_runtime_import(binding: &Binding, semantic: &SemanticModel) -> bool {
|
||||
use crate::rules::flake8_type_checking::settings::Settings;
|
||||
|
||||
/// Returns `true` if the [`ResolvedReference`] is in a typing-only context _or_ a runtime-evaluated
|
||||
/// context (with quoting enabled).
|
||||
pub(crate) fn is_typing_reference(reference: &ResolvedReference, settings: &Settings) -> bool {
|
||||
reference.in_type_checking_block()
|
||||
|| reference.in_typing_only_annotation()
|
||||
|| reference.in_complex_string_type_definition()
|
||||
|| reference.in_simple_string_type_definition()
|
||||
|| (settings.quote_annotations && reference.in_runtime_evaluated_annotation())
|
||||
}
|
||||
|
||||
/// Returns `true` if the [`Binding`] represents a runtime-required import.
|
||||
pub(crate) fn is_valid_runtime_import(
|
||||
binding: &Binding,
|
||||
semantic: &SemanticModel,
|
||||
settings: &Settings,
|
||||
) -> bool {
|
||||
if matches!(
|
||||
binding.kind,
|
||||
BindingKind::Import(..) | BindingKind::FromImport(..) | BindingKind::SubmoduleImport(..)
|
||||
@@ -12,28 +37,29 @@ pub(crate) fn is_valid_runtime_import(binding: &Binding, semantic: &SemanticMode
|
||||
binding.context.is_runtime()
|
||||
&& binding
|
||||
.references()
|
||||
.any(|reference_id| semantic.reference(reference_id).context().is_runtime())
|
||||
.map(|reference_id| semantic.reference(reference_id))
|
||||
.any(|reference| !is_typing_reference(reference, settings))
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn runtime_evaluated_class(
|
||||
pub(crate) fn runtime_required_class(
|
||||
class_def: &ast::StmtClassDef,
|
||||
base_classes: &[String],
|
||||
decorators: &[String],
|
||||
semantic: &SemanticModel,
|
||||
) -> bool {
|
||||
if runtime_evaluated_base_class(class_def, base_classes, semantic) {
|
||||
if runtime_required_base_class(class_def, base_classes, semantic) {
|
||||
return true;
|
||||
}
|
||||
if runtime_evaluated_decorators(class_def, decorators, semantic) {
|
||||
if runtime_required_decorators(class_def, decorators, semantic) {
|
||||
return true;
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn runtime_evaluated_base_class(
|
||||
fn runtime_required_base_class(
|
||||
class_def: &ast::StmtClassDef,
|
||||
base_classes: &[String],
|
||||
semantic: &SemanticModel,
|
||||
@@ -45,7 +71,7 @@ fn runtime_evaluated_base_class(
|
||||
seen: &mut FxHashSet<BindingId>,
|
||||
) -> bool {
|
||||
class_def.bases().iter().any(|expr| {
|
||||
// If the base class is itself runtime-evaluated, then this is too.
|
||||
// If the base class is itself runtime-required, then this is too.
|
||||
// Ex) `class Foo(BaseModel): ...`
|
||||
if semantic
|
||||
.resolve_call_path(map_subscript(expr))
|
||||
@@ -58,7 +84,7 @@ fn runtime_evaluated_base_class(
|
||||
return true;
|
||||
}
|
||||
|
||||
// If the base class extends a runtime-evaluated class, then this does too.
|
||||
// If the base class extends a runtime-required class, then this does too.
|
||||
// Ex) `class Bar(BaseModel): ...; class Foo(Bar): ...`
|
||||
if let Some(id) = semantic.lookup_attribute(map_subscript(expr)) {
|
||||
if seen.insert(id) {
|
||||
@@ -86,7 +112,7 @@ fn runtime_evaluated_base_class(
|
||||
inner(class_def, base_classes, semantic, &mut FxHashSet::default())
|
||||
}
|
||||
|
||||
fn runtime_evaluated_decorators(
|
||||
fn runtime_required_decorators(
|
||||
class_def: &ast::StmtClassDef,
|
||||
decorators: &[String],
|
||||
semantic: &SemanticModel,
|
||||
@@ -174,3 +200,75 @@ pub(crate) fn is_singledispatch_implementation(
|
||||
is_singledispatch_interface(function_def, semantic)
|
||||
})
|
||||
}
|
||||
|
||||
/// Wrap a type annotation in quotes.
|
||||
///
|
||||
/// This requires more than just wrapping the reference itself in quotes. For example:
|
||||
/// - When quoting `Series` in `Series[pd.Timestamp]`, we want `"Series[pd.Timestamp]"`.
|
||||
/// - When quoting `kubernetes` in `kubernetes.SecurityContext`, we want `"kubernetes.SecurityContext"`.
|
||||
/// - When quoting `Series` in `Series["pd.Timestamp"]`, we want `"Series[pd.Timestamp]"`. (This is currently unsupported.)
|
||||
/// - When quoting `Series` in `Series[Literal["pd.Timestamp"]]`, we want `"Series[Literal['pd.Timestamp']]"`. (This is currently unsupported.)
|
||||
///
|
||||
/// In general, when expanding a component of a call chain, we want to quote the entire call chain.
|
||||
pub(crate) fn quote_annotation(
|
||||
node_id: NodeId,
|
||||
semantic: &SemanticModel,
|
||||
locator: &Locator,
|
||||
stylist: &Stylist,
|
||||
) -> Result<Edit> {
|
||||
let expr = semantic.expression(node_id).expect("Expression not found");
|
||||
if let Some(parent_id) = semantic.parent_expression_id(node_id) {
|
||||
match semantic.expression(parent_id) {
|
||||
Some(Expr::Subscript(parent)) => {
|
||||
if expr == parent.value.as_ref() {
|
||||
// If we're quoting the value of a subscript, we need to quote the entire
|
||||
// expression. For example, when quoting `DataFrame` in `DataFrame[int]`, we
|
||||
// should generate `"DataFrame[int]"`.
|
||||
return quote_annotation(parent_id, semantic, locator, stylist);
|
||||
}
|
||||
}
|
||||
Some(Expr::Attribute(parent)) => {
|
||||
if expr == parent.value.as_ref() {
|
||||
// If we're quoting the value of an attribute, we need to quote the entire
|
||||
// expression. For example, when quoting `DataFrame` in `pd.DataFrame`, we
|
||||
// should generate `"pd.DataFrame"`.
|
||||
return quote_annotation(parent_id, semantic, locator, stylist);
|
||||
}
|
||||
}
|
||||
Some(Expr::Call(parent)) => {
|
||||
if expr == parent.func.as_ref() {
|
||||
// If we're quoting the function of a call, we need to quote the entire
|
||||
// expression. For example, when quoting `DataFrame` in `DataFrame()`, we
|
||||
// should generate `"DataFrame()"`.
|
||||
return quote_annotation(parent_id, semantic, locator, stylist);
|
||||
}
|
||||
}
|
||||
Some(Expr::BinOp(parent)) => {
|
||||
if parent.op.is_bit_or() {
|
||||
// If we're quoting the left or right side of a binary operation, we need to
|
||||
// quote the entire expression. For example, when quoting `DataFrame` in
|
||||
// `DataFrame | Series`, we should generate `"DataFrame | Series"`.
|
||||
return quote_annotation(parent_id, semantic, locator, stylist);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
let annotation = locator.slice(expr);
|
||||
|
||||
// If the annotation already contains a quote, avoid attempting to re-quote it. For example:
|
||||
// ```python
|
||||
// from typing import Literal
|
||||
//
|
||||
// Set[Literal["Foo"]]
|
||||
// ```
|
||||
if annotation.contains('\'') || annotation.contains('"') {
|
||||
return Err(anyhow::anyhow!("Annotation already contains a quote"));
|
||||
}
|
||||
|
||||
// If we're quoting a name, we need to quote the entire expression.
|
||||
let quote = stylist.quote();
|
||||
let annotation = format!("{quote}{annotation}{quote}");
|
||||
Ok(Edit::range_replacement(annotation, expr.range()))
|
||||
}
|
||||
|
||||
22
crates/ruff_linter/src/rules/flake8_type_checking/imports.rs
Normal file
22
crates/ruff_linter/src/rules/flake8_type_checking/imports.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
use ruff_python_semantic::{AnyImport, Binding, ResolvedReferenceId};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
/// An import with its surrounding context.
|
||||
pub(crate) struct ImportBinding<'a> {
|
||||
/// The qualified name of the import (e.g., `typing.List` for `from typing import List`).
|
||||
pub(crate) import: AnyImport<'a>,
|
||||
/// The binding for the imported symbol.
|
||||
pub(crate) binding: &'a Binding<'a>,
|
||||
/// The first reference to the imported symbol.
|
||||
pub(crate) reference_id: ResolvedReferenceId,
|
||||
/// The trimmed range of the import (e.g., `List` in `from typing import List`).
|
||||
pub(crate) range: TextRange,
|
||||
/// The range of the import's parent statement.
|
||||
pub(crate) parent_range: Option<TextRange>,
|
||||
}
|
||||
|
||||
impl Ranged for ImportBinding<'_> {
|
||||
fn range(&self) -> TextRange {
|
||||
self.range
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
//! Rules from [flake8-type-checking](https://pypi.org/project/flake8-type-checking/).
|
||||
pub(crate) mod helpers;
|
||||
mod imports;
|
||||
pub(crate) mod rules;
|
||||
pub mod settings;
|
||||
|
||||
@@ -33,10 +34,12 @@ mod tests {
|
||||
#[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("TCH004_7.py"))]
|
||||
#[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("TCH004_8.py"))]
|
||||
#[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("TCH004_9.py"))]
|
||||
#[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("quote.py"))]
|
||||
#[test_case(Rule::TypingOnlyFirstPartyImport, Path::new("TCH001.py"))]
|
||||
#[test_case(Rule::TypingOnlyStandardLibraryImport, Path::new("TCH003.py"))]
|
||||
#[test_case(Rule::TypingOnlyStandardLibraryImport, Path::new("snapshot.py"))]
|
||||
#[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("TCH002.py"))]
|
||||
#[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("quote.py"))]
|
||||
#[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("singledispatch.py"))]
|
||||
#[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("strict.py"))]
|
||||
#[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("typing_modules_1.py"))]
|
||||
@@ -51,6 +54,24 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("quote.py"))]
|
||||
#[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("quote.py"))]
|
||||
fn quote(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("quote_{}_{}", rule_code.as_ref(), path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
Path::new("flake8_type_checking").join(path).as_path(),
|
||||
&settings::LinterSettings {
|
||||
flake8_type_checking: super::settings::Settings {
|
||||
quote_annotations: true,
|
||||
..Default::default()
|
||||
},
|
||||
..settings::LinterSettings::for_rule(rule_code)
|
||||
},
|
||||
)?;
|
||||
assert_messages!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("strict.py"))]
|
||||
fn strict(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let diagnostics = test_path(
|
||||
@@ -109,7 +130,7 @@ mod tests {
|
||||
Path::new("flake8_type_checking").join(path).as_path(),
|
||||
&settings::LinterSettings {
|
||||
flake8_type_checking: super::settings::Settings {
|
||||
runtime_evaluated_base_classes: vec![
|
||||
runtime_required_base_classes: vec![
|
||||
"pydantic.BaseModel".to_string(),
|
||||
"sqlalchemy.orm.DeclarativeBase".to_string(),
|
||||
],
|
||||
@@ -140,7 +161,7 @@ mod tests {
|
||||
Path::new("flake8_type_checking").join(path).as_path(),
|
||||
&settings::LinterSettings {
|
||||
flake8_type_checking: super::settings::Settings {
|
||||
runtime_evaluated_decorators: vec![
|
||||
runtime_required_decorators: vec![
|
||||
"attrs.define".to_string(),
|
||||
"attrs.frozen".to_string(),
|
||||
],
|
||||
@@ -165,7 +186,7 @@ mod tests {
|
||||
Path::new("flake8_type_checking").join(path).as_path(),
|
||||
&settings::LinterSettings {
|
||||
flake8_type_checking: super::settings::Settings {
|
||||
runtime_evaluated_base_classes: vec!["module.direct.MyBaseClass".to_string()],
|
||||
runtime_required_base_classes: vec!["module.direct.MyBaseClass".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
..settings::LinterSettings::for_rule(rule_code)
|
||||
|
||||
@@ -5,13 +5,15 @@ use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_semantic::{AnyImport, Imported, NodeId, ResolvedReferenceId, Scope};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
use ruff_python_semantic::{Imported, NodeId, Scope};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::codes::Rule;
|
||||
use crate::fix;
|
||||
use crate::importer::ImportedMembers;
|
||||
use crate::rules::flake8_type_checking::helpers::quote_annotation;
|
||||
use crate::rules::flake8_type_checking::imports::ImportBinding;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for runtime imports defined in a type-checking block.
|
||||
@@ -20,6 +22,10 @@ use crate::importer::ImportedMembers;
|
||||
/// The type-checking block is not executed at runtime, so the import will not
|
||||
/// be available at runtime.
|
||||
///
|
||||
/// If [`flake8-type-checking.quote-annotations`] is set to `true`,
|
||||
/// annotations will be wrapped in quotes if doing so would enable the
|
||||
/// corresponding import to remain in the type-checking block.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from typing import TYPE_CHECKING
|
||||
@@ -41,11 +47,15 @@ use crate::importer::ImportedMembers;
|
||||
/// foo.bar()
|
||||
/// ```
|
||||
///
|
||||
/// ## Options
|
||||
/// - `flake8-type-checking.quote-annotations`
|
||||
///
|
||||
/// ## References
|
||||
/// - [PEP 535](https://peps.python.org/pep-0563/#runtime-annotation-resolution-and-type-checking)
|
||||
#[violation]
|
||||
pub struct RuntimeImportInTypeCheckingBlock {
|
||||
qualified_name: String,
|
||||
strategy: Strategy,
|
||||
}
|
||||
|
||||
impl Violation for RuntimeImportInTypeCheckingBlock {
|
||||
@@ -53,17 +63,39 @@ impl Violation for RuntimeImportInTypeCheckingBlock {
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
let RuntimeImportInTypeCheckingBlock { qualified_name } = self;
|
||||
format!(
|
||||
"Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting."
|
||||
)
|
||||
let Self {
|
||||
qualified_name,
|
||||
strategy,
|
||||
} = self;
|
||||
match strategy {
|
||||
Strategy::MoveImport => format!(
|
||||
"Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting."
|
||||
),
|
||||
Strategy::QuoteUsages => format!(
|
||||
"Quote references to `{qualified_name}`. Import is in a type-checking block."
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some("Move out of type-checking block".to_string())
|
||||
let Self { strategy, .. } = self;
|
||||
match strategy {
|
||||
Strategy::MoveImport => Some("Move out of type-checking block".to_string()),
|
||||
Strategy::QuoteUsages => Some("Quote references".to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
|
||||
enum Action {
|
||||
/// The import should be moved out of the type-checking block.
|
||||
Move,
|
||||
/// All usages of the import should be wrapped in quotes.
|
||||
Quote,
|
||||
/// The import should be ignored.
|
||||
Ignore,
|
||||
}
|
||||
|
||||
/// TCH004
|
||||
pub(crate) fn runtime_import_in_type_checking_block(
|
||||
checker: &Checker,
|
||||
@@ -71,8 +103,7 @@ pub(crate) fn runtime_import_in_type_checking_block(
|
||||
diagnostics: &mut Vec<Diagnostic>,
|
||||
) {
|
||||
// Collect all runtime imports by statement.
|
||||
let mut errors_by_statement: FxHashMap<NodeId, Vec<ImportBinding>> = FxHashMap::default();
|
||||
let mut ignores_by_statement: FxHashMap<NodeId, Vec<ImportBinding>> = FxHashMap::default();
|
||||
let mut actions: FxHashMap<(NodeId, Action), Vec<ImportBinding>> = FxHashMap::default();
|
||||
|
||||
for binding_id in scope.binding_ids() {
|
||||
let binding = checker.semantic().binding(binding_id);
|
||||
@@ -101,6 +132,7 @@ pub(crate) fn runtime_import_in_type_checking_block(
|
||||
let import = ImportBinding {
|
||||
import,
|
||||
reference_id,
|
||||
binding,
|
||||
range: binding.range(),
|
||||
parent_range: binding.parent_range(checker.semantic()),
|
||||
};
|
||||
@@ -113,86 +145,153 @@ pub(crate) fn runtime_import_in_type_checking_block(
|
||||
)
|
||||
})
|
||||
{
|
||||
ignores_by_statement
|
||||
.entry(node_id)
|
||||
actions
|
||||
.entry((node_id, Action::Ignore))
|
||||
.or_default()
|
||||
.push(import);
|
||||
} else {
|
||||
errors_by_statement.entry(node_id).or_default().push(import);
|
||||
// Determine whether the member should be fixed by moving the import out of the
|
||||
// type-checking block, or by quoting its references.
|
||||
if checker.settings.flake8_type_checking.quote_annotations
|
||||
&& binding.references().all(|reference_id| {
|
||||
let reference = checker.semantic().reference(reference_id);
|
||||
reference.context().is_typing()
|
||||
|| reference.in_runtime_evaluated_annotation()
|
||||
})
|
||||
{
|
||||
actions
|
||||
.entry((node_id, Action::Quote))
|
||||
.or_default()
|
||||
.push(import);
|
||||
} else {
|
||||
actions
|
||||
.entry((node_id, Action::Move))
|
||||
.or_default()
|
||||
.push(import);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Generate a diagnostic for every import, but share a fix across all imports within the same
|
||||
// statement (excluding those that are ignored).
|
||||
for (node_id, imports) in errors_by_statement {
|
||||
let fix = fix_imports(checker, node_id, &imports).ok();
|
||||
for ((node_id, action), imports) in actions {
|
||||
match action {
|
||||
// Generate a diagnostic for every import, but share a fix across all imports within the same
|
||||
// statement (excluding those that are ignored).
|
||||
Action::Move => {
|
||||
let fix = move_imports(checker, node_id, &imports).ok();
|
||||
|
||||
for ImportBinding {
|
||||
import,
|
||||
range,
|
||||
parent_range,
|
||||
..
|
||||
} in imports
|
||||
{
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
RuntimeImportInTypeCheckingBlock {
|
||||
qualified_name: import.qualified_name(),
|
||||
},
|
||||
range,
|
||||
);
|
||||
if let Some(range) = parent_range {
|
||||
diagnostic.set_parent(range.start());
|
||||
for ImportBinding {
|
||||
import,
|
||||
range,
|
||||
parent_range,
|
||||
..
|
||||
} in imports
|
||||
{
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
RuntimeImportInTypeCheckingBlock {
|
||||
qualified_name: import.qualified_name(),
|
||||
strategy: Strategy::MoveImport,
|
||||
},
|
||||
range,
|
||||
);
|
||||
if let Some(range) = parent_range {
|
||||
diagnostic.set_parent(range.start());
|
||||
}
|
||||
if let Some(fix) = fix.as_ref() {
|
||||
diagnostic.set_fix(fix.clone());
|
||||
}
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if let Some(fix) = fix.as_ref() {
|
||||
diagnostic.set_fix(fix.clone());
|
||||
}
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
|
||||
// Separately, generate a diagnostic for every _ignored_ import, to ensure that the
|
||||
// suppression comments aren't marked as unused.
|
||||
for ImportBinding {
|
||||
import,
|
||||
range,
|
||||
parent_range,
|
||||
..
|
||||
} in ignores_by_statement.into_values().flatten()
|
||||
{
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
RuntimeImportInTypeCheckingBlock {
|
||||
qualified_name: import.qualified_name(),
|
||||
},
|
||||
range,
|
||||
);
|
||||
if let Some(range) = parent_range {
|
||||
diagnostic.set_parent(range.start());
|
||||
// Generate a diagnostic for every import, but share a fix across all imports within the same
|
||||
// statement (excluding those that are ignored).
|
||||
Action::Quote => {
|
||||
let fix = quote_imports(checker, node_id, &imports).ok();
|
||||
|
||||
for ImportBinding {
|
||||
import,
|
||||
range,
|
||||
parent_range,
|
||||
..
|
||||
} in imports
|
||||
{
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
RuntimeImportInTypeCheckingBlock {
|
||||
qualified_name: import.qualified_name(),
|
||||
strategy: Strategy::QuoteUsages,
|
||||
},
|
||||
range,
|
||||
);
|
||||
if let Some(range) = parent_range {
|
||||
diagnostic.set_parent(range.start());
|
||||
}
|
||||
if let Some(fix) = fix.as_ref() {
|
||||
diagnostic.set_fix(fix.clone());
|
||||
}
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
|
||||
// Separately, generate a diagnostic for every _ignored_ import, to ensure that the
|
||||
// suppression comments aren't marked as unused.
|
||||
Action::Ignore => {
|
||||
for ImportBinding {
|
||||
import,
|
||||
range,
|
||||
parent_range,
|
||||
..
|
||||
} in imports
|
||||
{
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
RuntimeImportInTypeCheckingBlock {
|
||||
qualified_name: import.qualified_name(),
|
||||
strategy: Strategy::MoveImport,
|
||||
},
|
||||
range,
|
||||
);
|
||||
if let Some(range) = parent_range {
|
||||
diagnostic.set_parent(range.start());
|
||||
}
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
|
||||
/// A runtime-required import with its surrounding context.
|
||||
struct ImportBinding<'a> {
|
||||
/// The qualified name of the import (e.g., `typing.List` for `from typing import List`).
|
||||
import: AnyImport<'a>,
|
||||
/// The first reference to the imported symbol.
|
||||
reference_id: ResolvedReferenceId,
|
||||
/// The trimmed range of the import (e.g., `List` in `from typing import List`).
|
||||
range: TextRange,
|
||||
/// The range of the import's parent statement.
|
||||
parent_range: Option<TextRange>,
|
||||
}
|
||||
|
||||
impl Ranged for ImportBinding<'_> {
|
||||
fn range(&self) -> TextRange {
|
||||
self.range
|
||||
}
|
||||
/// Generate a [`Fix`] to quote runtime usages for imports in a type-checking block.
|
||||
fn quote_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) -> Result<Fix> {
|
||||
let mut quote_reference_edits = imports
|
||||
.iter()
|
||||
.flat_map(|ImportBinding { binding, .. }| {
|
||||
binding.references.iter().filter_map(|reference_id| {
|
||||
let reference = checker.semantic().reference(*reference_id);
|
||||
if reference.context().is_runtime() {
|
||||
Some(quote_annotation(
|
||||
reference.expression_id()?,
|
||||
checker.semantic(),
|
||||
checker.locator(),
|
||||
checker.stylist(),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
let quote_reference_edit = quote_reference_edits
|
||||
.pop()
|
||||
.expect("Expected at least one reference");
|
||||
Ok(
|
||||
Fix::unsafe_edits(quote_reference_edit, quote_reference_edits).isolate(Checker::isolation(
|
||||
checker.semantic().parent_statement_id(node_id),
|
||||
)),
|
||||
)
|
||||
}
|
||||
|
||||
/// Generate a [`Fix`] to remove runtime imports from a type-checking block.
|
||||
fn fix_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) -> Result<Fix> {
|
||||
fn move_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) -> Result<Fix> {
|
||||
let statement = checker.semantic().statement(node_id);
|
||||
let parent = checker.semantic().parent_statement(node_id);
|
||||
|
||||
@@ -236,3 +335,18 @@ fn fix_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) ->
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
enum Strategy {
|
||||
/// The import should be moved out of the type-checking block.
|
||||
///
|
||||
/// This is required when at least one reference to the symbol is in a runtime-required context.
|
||||
/// For example, given `from foo import Bar`, `x = Bar()` would be runtime-required.
|
||||
MoveImport,
|
||||
/// All usages of the import should be wrapped in quotes.
|
||||
///
|
||||
/// This is acceptable when all references to the symbol are in a runtime-evaluated, but not
|
||||
/// runtime-required context. For example, given `from foo import Bar`, `x: Bar` would be
|
||||
/// runtime-evaluated, but not runtime-required.
|
||||
QuoteUsages,
|
||||
}
|
||||
|
||||
@@ -5,13 +5,15 @@ use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_semantic::{AnyImport, Binding, Imported, NodeId, ResolvedReferenceId, Scope};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
use ruff_python_semantic::{Binding, Imported, NodeId, Scope};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::codes::Rule;
|
||||
use crate::fix;
|
||||
use crate::importer::ImportedMembers;
|
||||
use crate::rules::flake8_type_checking::helpers::{is_typing_reference, quote_annotation};
|
||||
use crate::rules::flake8_type_checking::imports::ImportBinding;
|
||||
use crate::rules::isort::{categorize, ImportSection, ImportType};
|
||||
|
||||
/// ## What it does
|
||||
@@ -24,6 +26,10 @@ use crate::rules::isort::{categorize, ImportSection, ImportType};
|
||||
/// instead be imported conditionally under an `if TYPE_CHECKING:` block to
|
||||
/// minimize runtime overhead.
|
||||
///
|
||||
/// If [`flake8-type-checking.quote-annotations`] is set to `true`,
|
||||
/// annotations will be wrapped in quotes if doing so would enable the
|
||||
/// corresponding import to be moved into an `if TYPE_CHECKING:` block.
|
||||
///
|
||||
/// If a class _requires_ that type annotations be available at runtime (as is
|
||||
/// the case for Pydantic, SQLAlchemy, and other libraries), consider using
|
||||
/// the [`flake8-type-checking.runtime-evaluated-base-classes`] and
|
||||
@@ -56,6 +62,7 @@ use crate::rules::isort::{categorize, ImportSection, ImportType};
|
||||
/// ```
|
||||
///
|
||||
/// ## Options
|
||||
/// - `flake8-type-checking.quote-annotations`
|
||||
/// - `flake8-type-checking.runtime-evaluated-base-classes`
|
||||
/// - `flake8-type-checking.runtime-evaluated-decorators`
|
||||
///
|
||||
@@ -92,6 +99,10 @@ impl Violation for TypingOnlyFirstPartyImport {
|
||||
/// instead be imported conditionally under an `if TYPE_CHECKING:` block to
|
||||
/// minimize runtime overhead.
|
||||
///
|
||||
/// If [`flake8-type-checking.quote-annotations`] is set to `true`,
|
||||
/// annotations will be wrapped in quotes if doing so would enable the
|
||||
/// corresponding import to be moved into an `if TYPE_CHECKING:` block.
|
||||
///
|
||||
/// If a class _requires_ that type annotations be available at runtime (as is
|
||||
/// the case for Pydantic, SQLAlchemy, and other libraries), consider using
|
||||
/// the [`flake8-type-checking.runtime-evaluated-base-classes`] and
|
||||
@@ -124,6 +135,7 @@ impl Violation for TypingOnlyFirstPartyImport {
|
||||
/// ```
|
||||
///
|
||||
/// ## Options
|
||||
/// - `flake8-type-checking.quote-annotations`
|
||||
/// - `flake8-type-checking.runtime-evaluated-base-classes`
|
||||
/// - `flake8-type-checking.runtime-evaluated-decorators`
|
||||
///
|
||||
@@ -160,6 +172,10 @@ impl Violation for TypingOnlyThirdPartyImport {
|
||||
/// instead be imported conditionally under an `if TYPE_CHECKING:` block to
|
||||
/// minimize runtime overhead.
|
||||
///
|
||||
/// If [`flake8-type-checking.quote-annotations`] is set to `true`,
|
||||
/// annotations will be wrapped in quotes if doing so would enable the
|
||||
/// corresponding import to be moved into an `if TYPE_CHECKING:` block.
|
||||
///
|
||||
/// If a class _requires_ that type annotations be available at runtime (as is
|
||||
/// the case for Pydantic, SQLAlchemy, and other libraries), consider using
|
||||
/// the [`flake8-type-checking.runtime-evaluated-base-classes`] and
|
||||
@@ -192,6 +208,7 @@ impl Violation for TypingOnlyThirdPartyImport {
|
||||
/// ```
|
||||
///
|
||||
/// ## Options
|
||||
/// - `flake8-type-checking.quote-annotations`
|
||||
/// - `flake8-type-checking.runtime-evaluated-base-classes`
|
||||
/// - `flake8-type-checking.runtime-evaluated-decorators`
|
||||
///
|
||||
@@ -253,13 +270,12 @@ pub(crate) fn typing_only_runtime_import(
|
||||
};
|
||||
|
||||
if binding.context.is_runtime()
|
||||
&& binding.references().all(|reference_id| {
|
||||
checker
|
||||
.semantic()
|
||||
.reference(reference_id)
|
||||
.context()
|
||||
.is_typing()
|
||||
})
|
||||
&& binding
|
||||
.references()
|
||||
.map(|reference_id| checker.semantic().reference(reference_id))
|
||||
.all(|reference| {
|
||||
is_typing_reference(reference, &checker.settings.flake8_type_checking)
|
||||
})
|
||||
{
|
||||
let qualified_name = import.qualified_name();
|
||||
|
||||
@@ -310,6 +326,7 @@ pub(crate) fn typing_only_runtime_import(
|
||||
let import = ImportBinding {
|
||||
import,
|
||||
reference_id,
|
||||
binding,
|
||||
range: binding.range(),
|
||||
parent_range: binding.parent_range(checker.semantic()),
|
||||
};
|
||||
@@ -376,24 +393,6 @@ pub(crate) fn typing_only_runtime_import(
|
||||
}
|
||||
}
|
||||
|
||||
/// A runtime-required import with its surrounding context.
|
||||
struct ImportBinding<'a> {
|
||||
/// The qualified name of the import (e.g., `typing.List` for `from typing import List`).
|
||||
import: AnyImport<'a>,
|
||||
/// The first reference to the imported symbol.
|
||||
reference_id: ResolvedReferenceId,
|
||||
/// The trimmed range of the import (e.g., `List` in `from typing import List`).
|
||||
range: TextRange,
|
||||
/// The range of the import's parent statement.
|
||||
parent_range: Option<TextRange>,
|
||||
}
|
||||
|
||||
impl Ranged for ImportBinding<'_> {
|
||||
fn range(&self) -> TextRange {
|
||||
self.range
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the [`Rule`] for the given import type.
|
||||
fn rule_for(import_type: ImportType) -> Rule {
|
||||
match import_type {
|
||||
@@ -482,9 +481,34 @@ fn fix_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) ->
|
||||
checker.source_type,
|
||||
)?;
|
||||
|
||||
Ok(
|
||||
Fix::unsafe_edits(remove_import_edit, add_import_edit.into_edits()).isolate(
|
||||
Checker::isolation(checker.semantic().parent_statement_id(node_id)),
|
||||
),
|
||||
// Step 3) Quote any runtime usages of the referenced symbol.
|
||||
let quote_reference_edits = imports
|
||||
.iter()
|
||||
.flat_map(|ImportBinding { binding, .. }| {
|
||||
binding.references.iter().filter_map(|reference_id| {
|
||||
let reference = checker.semantic().reference(*reference_id);
|
||||
if reference.context().is_runtime() {
|
||||
Some(quote_annotation(
|
||||
reference.expression_id()?,
|
||||
checker.semantic(),
|
||||
checker.locator(),
|
||||
checker.stylist(),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
|
||||
Ok(Fix::unsafe_edits(
|
||||
remove_import_edit,
|
||||
add_import_edit
|
||||
.into_edits()
|
||||
.into_iter()
|
||||
.chain(quote_reference_edits),
|
||||
)
|
||||
.isolate(Checker::isolation(
|
||||
checker.semantic().parent_statement_id(node_id),
|
||||
)))
|
||||
}
|
||||
|
||||
@@ -6,17 +6,19 @@ use ruff_macros::CacheKey;
|
||||
pub struct Settings {
|
||||
pub strict: bool,
|
||||
pub exempt_modules: Vec<String>,
|
||||
pub runtime_evaluated_base_classes: Vec<String>,
|
||||
pub runtime_evaluated_decorators: Vec<String>,
|
||||
pub runtime_required_base_classes: Vec<String>,
|
||||
pub runtime_required_decorators: Vec<String>,
|
||||
pub quote_annotations: bool,
|
||||
}
|
||||
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
strict: false,
|
||||
exempt_modules: vec!["typing".to_string()],
|
||||
runtime_evaluated_base_classes: vec![],
|
||||
runtime_evaluated_decorators: vec![],
|
||||
exempt_modules: vec!["typing".to_string(), "typing_extensions".to_string()],
|
||||
runtime_required_base_classes: vec![],
|
||||
runtime_required_decorators: vec![],
|
||||
quote_annotations: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs
|
||||
---
|
||||
quote.py:64:28: TCH004 [*] Quote references to `pandas.DataFrame`. Import is in a type-checking block.
|
||||
|
|
||||
63 | if TYPE_CHECKING:
|
||||
64 | from pandas import DataFrame
|
||||
| ^^^^^^^^^ TCH004
|
||||
65 |
|
||||
66 | def func(value: DataFrame):
|
||||
|
|
||||
= help: Quote references
|
||||
|
||||
ℹ Unsafe fix
|
||||
63 63 | if TYPE_CHECKING:
|
||||
64 64 | from pandas import DataFrame
|
||||
65 65 |
|
||||
66 |- def func(value: DataFrame):
|
||||
66 |+ def func(value: "DataFrame"):
|
||||
67 67 | ...
|
||||
|
||||
|
||||
@@ -0,0 +1,199 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs
|
||||
---
|
||||
quote.py:2:24: TCH002 [*] Move third-party import `pandas.DataFrame` into a type-checking block
|
||||
|
|
||||
1 | def f():
|
||||
2 | from pandas import DataFrame
|
||||
| ^^^^^^^^^ TCH002
|
||||
3 |
|
||||
4 | def baz() -> DataFrame:
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |-def f():
|
||||
1 |+from typing import TYPE_CHECKING
|
||||
2 |+
|
||||
3 |+if TYPE_CHECKING:
|
||||
2 4 | from pandas import DataFrame
|
||||
5 |+def f():
|
||||
3 6 |
|
||||
4 |- def baz() -> DataFrame:
|
||||
7 |+ def baz() -> "DataFrame":
|
||||
5 8 | ...
|
||||
6 9 |
|
||||
7 10 |
|
||||
|
||||
quote.py:9:24: TCH002 [*] Move third-party import `pandas.DataFrame` into a type-checking block
|
||||
|
|
||||
8 | def f():
|
||||
9 | from pandas import DataFrame
|
||||
| ^^^^^^^^^ TCH002
|
||||
10 |
|
||||
11 | def baz() -> DataFrame[int]:
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from typing import TYPE_CHECKING
|
||||
2 |+
|
||||
3 |+if TYPE_CHECKING:
|
||||
4 |+ from pandas import DataFrame
|
||||
1 5 | def f():
|
||||
2 6 | from pandas import DataFrame
|
||||
3 7 |
|
||||
--------------------------------------------------------------------------------
|
||||
6 10 |
|
||||
7 11 |
|
||||
8 12 | def f():
|
||||
9 |- from pandas import DataFrame
|
||||
10 13 |
|
||||
11 |- def baz() -> DataFrame[int]:
|
||||
14 |+ def baz() -> "DataFrame[int]":
|
||||
12 15 | ...
|
||||
13 16 |
|
||||
14 17 |
|
||||
|
||||
quote.py:16:24: TCH002 Move third-party import `pandas.DataFrame` into a type-checking block
|
||||
|
|
||||
15 | def f():
|
||||
16 | from pandas import DataFrame
|
||||
| ^^^^^^^^^ TCH002
|
||||
17 |
|
||||
18 | def baz() -> DataFrame["int"]:
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
quote.py:23:22: TCH002 [*] Move third-party import `pandas` into a type-checking block
|
||||
|
|
||||
22 | def f():
|
||||
23 | import pandas as pd
|
||||
| ^^ TCH002
|
||||
24 |
|
||||
25 | def baz() -> pd.DataFrame:
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from typing import TYPE_CHECKING
|
||||
2 |+
|
||||
3 |+if TYPE_CHECKING:
|
||||
4 |+ import pandas as pd
|
||||
1 5 | def f():
|
||||
2 6 | from pandas import DataFrame
|
||||
3 7 |
|
||||
--------------------------------------------------------------------------------
|
||||
20 24 |
|
||||
21 25 |
|
||||
22 26 | def f():
|
||||
23 |- import pandas as pd
|
||||
24 27 |
|
||||
25 |- def baz() -> pd.DataFrame:
|
||||
28 |+ def baz() -> "pd.DataFrame":
|
||||
26 29 | ...
|
||||
27 30 |
|
||||
28 31 |
|
||||
|
||||
quote.py:30:22: TCH002 [*] Move third-party import `pandas` into a type-checking block
|
||||
|
|
||||
29 | def f():
|
||||
30 | import pandas as pd
|
||||
| ^^ TCH002
|
||||
31 |
|
||||
32 | def baz() -> pd.DataFrame.Extra:
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from typing import TYPE_CHECKING
|
||||
2 |+
|
||||
3 |+if TYPE_CHECKING:
|
||||
4 |+ import pandas as pd
|
||||
1 5 | def f():
|
||||
2 6 | from pandas import DataFrame
|
||||
3 7 |
|
||||
--------------------------------------------------------------------------------
|
||||
27 31 |
|
||||
28 32 |
|
||||
29 33 | def f():
|
||||
30 |- import pandas as pd
|
||||
31 34 |
|
||||
32 |- def baz() -> pd.DataFrame.Extra:
|
||||
35 |+ def baz() -> "pd.DataFrame.Extra":
|
||||
33 36 | ...
|
||||
34 37 |
|
||||
35 38 |
|
||||
|
||||
quote.py:37:22: TCH002 [*] Move third-party import `pandas` into a type-checking block
|
||||
|
|
||||
36 | def f():
|
||||
37 | import pandas as pd
|
||||
| ^^ TCH002
|
||||
38 |
|
||||
39 | def baz() -> pd.DataFrame | int:
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from typing import TYPE_CHECKING
|
||||
2 |+
|
||||
3 |+if TYPE_CHECKING:
|
||||
4 |+ import pandas as pd
|
||||
1 5 | def f():
|
||||
2 6 | from pandas import DataFrame
|
||||
3 7 |
|
||||
--------------------------------------------------------------------------------
|
||||
34 38 |
|
||||
35 39 |
|
||||
36 40 | def f():
|
||||
37 |- import pandas as pd
|
||||
38 41 |
|
||||
39 |- def baz() -> pd.DataFrame | int:
|
||||
42 |+ def baz() -> "pd.DataFrame | int":
|
||||
40 43 | ...
|
||||
41 44 |
|
||||
42 45 |
|
||||
|
||||
quote.py:45:24: TCH002 [*] Move third-party import `pandas.DataFrame` into a type-checking block
|
||||
|
|
||||
44 | def f():
|
||||
45 | from pandas import DataFrame
|
||||
| ^^^^^^^^^ TCH002
|
||||
46 |
|
||||
47 | def baz() -> DataFrame():
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from typing import TYPE_CHECKING
|
||||
2 |+
|
||||
3 |+if TYPE_CHECKING:
|
||||
4 |+ from pandas import DataFrame
|
||||
1 5 | def f():
|
||||
2 6 | from pandas import DataFrame
|
||||
3 7 |
|
||||
--------------------------------------------------------------------------------
|
||||
42 46 |
|
||||
43 47 |
|
||||
44 48 | def f():
|
||||
45 |- from pandas import DataFrame
|
||||
46 49 |
|
||||
47 |- def baz() -> DataFrame():
|
||||
50 |+ def baz() -> "DataFrame()":
|
||||
48 51 | ...
|
||||
49 52 |
|
||||
50 53 |
|
||||
|
||||
quote.py:54:24: TCH002 Move third-party import `pandas.DataFrame` into a type-checking block
|
||||
|
|
||||
52 | from typing import Literal
|
||||
53 |
|
||||
54 | from pandas import DataFrame
|
||||
| ^^^^^^^^^ TCH002
|
||||
55 |
|
||||
56 | def baz() -> DataFrame[Literal["int"]]:
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs
|
||||
---
|
||||
quote.py:64:28: TCH004 [*] Move import `pandas.DataFrame` out of type-checking block. Import is used for more than type hinting.
|
||||
|
|
||||
63 | if TYPE_CHECKING:
|
||||
64 | from pandas import DataFrame
|
||||
| ^^^^^^^^^ TCH004
|
||||
65 |
|
||||
66 | def func(value: DataFrame):
|
||||
|
|
||||
= help: Move out of type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from pandas import DataFrame
|
||||
1 2 | def f():
|
||||
2 3 | from pandas import DataFrame
|
||||
3 4 |
|
||||
--------------------------------------------------------------------------------
|
||||
61 62 | from typing import TYPE_CHECKING
|
||||
62 63 |
|
||||
63 64 | if TYPE_CHECKING:
|
||||
64 |- from pandas import DataFrame
|
||||
65 |+ pass
|
||||
65 66 |
|
||||
66 67 | def func(value: DataFrame):
|
||||
67 68 | ...
|
||||
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs
|
||||
---
|
||||
|
||||
@@ -1,25 +1,23 @@
|
||||
use ruff_python_ast::{self as ast, Arguments, ConversionFlag, Expr};
|
||||
use ruff_text_size::TextRange;
|
||||
|
||||
/// Wrap an expression in a `FormattedValue` with no special formatting.
|
||||
fn to_formatted_value_expr(inner: &Expr) -> Expr {
|
||||
let node = ast::ExprFormattedValue {
|
||||
value: Box::new(inner.clone()),
|
||||
/// Wrap an expression in a [`ast::FStringElement::Expression`] with no special formatting.
|
||||
fn to_f_string_expression_element(inner: &Expr) -> ast::FStringElement {
|
||||
ast::FStringElement::Expression(ast::FStringExpressionElement {
|
||||
expression: Box::new(inner.clone()),
|
||||
debug_text: None,
|
||||
conversion: ConversionFlag::None,
|
||||
format_spec: None,
|
||||
range: TextRange::default(),
|
||||
};
|
||||
node.into()
|
||||
})
|
||||
}
|
||||
|
||||
/// Convert a string to a constant string expression.
|
||||
pub(super) fn to_constant_string(s: &str) -> Expr {
|
||||
let node = ast::StringLiteral {
|
||||
value: s.to_string(),
|
||||
..ast::StringLiteral::default()
|
||||
};
|
||||
node.into()
|
||||
/// Convert a string to a [`ast::FStringElement::Literal`].
|
||||
pub(super) fn to_f_string_literal_element(s: &str) -> ast::FStringElement {
|
||||
ast::FStringElement::Literal(ast::FStringLiteralElement {
|
||||
value: s.to_owned(),
|
||||
range: TextRange::default(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Figure out if `expr` represents a "simple" call
|
||||
@@ -51,15 +49,19 @@ fn is_simple_callee(func: &Expr) -> bool {
|
||||
}
|
||||
|
||||
/// Convert an expression to a f-string element (if it looks like a good idea).
|
||||
pub(super) fn to_f_string_element(expr: &Expr) -> Option<Expr> {
|
||||
pub(super) fn to_f_string_element(expr: &Expr) -> Option<ast::FStringElement> {
|
||||
match expr {
|
||||
// These are directly handled by `unparse_f_string_element`:
|
||||
Expr::StringLiteral(_) | Expr::FString(_) | Expr::FormattedValue(_) => Some(expr.clone()),
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, range }) => {
|
||||
Some(ast::FStringElement::Literal(ast::FStringLiteralElement {
|
||||
value: value.to_string(),
|
||||
range: *range,
|
||||
}))
|
||||
}
|
||||
// These should be pretty safe to wrap in a formatted value.
|
||||
Expr::NumberLiteral(_) | Expr::BooleanLiteral(_) | Expr::Name(_) | Expr::Attribute(_) => {
|
||||
Some(to_formatted_value_expr(expr))
|
||||
Some(to_f_string_expression_element(expr))
|
||||
}
|
||||
Expr::Call(_) if is_simple_call(expr) => Some(to_formatted_value_expr(expr)),
|
||||
Expr::Call(_) if is_simple_call(expr) => Some(to_f_string_expression_element(expr)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,7 +78,7 @@ fn build_fstring(joiner: &str, joinees: &[Expr]) -> Option<Expr> {
|
||||
return Some(node.into());
|
||||
}
|
||||
|
||||
let mut fstring_elems = Vec::with_capacity(joinees.len() * 2);
|
||||
let mut f_string_elements = Vec::with_capacity(joinees.len() * 2);
|
||||
let mut first = true;
|
||||
|
||||
for expr in joinees {
|
||||
@@ -88,13 +88,13 @@ fn build_fstring(joiner: &str, joinees: &[Expr]) -> Option<Expr> {
|
||||
return None;
|
||||
}
|
||||
if !std::mem::take(&mut first) {
|
||||
fstring_elems.push(helpers::to_constant_string(joiner));
|
||||
f_string_elements.push(helpers::to_f_string_literal_element(joiner));
|
||||
}
|
||||
fstring_elems.push(helpers::to_f_string_element(expr)?);
|
||||
f_string_elements.push(helpers::to_f_string_element(expr)?);
|
||||
}
|
||||
|
||||
let node = ast::FString {
|
||||
values: fstring_elems,
|
||||
elements: f_string_elements,
|
||||
range: TextRange::default(),
|
||||
};
|
||||
Some(node.into())
|
||||
@@ -127,7 +127,7 @@ pub(crate) fn static_join_to_fstring(checker: &mut Checker, expr: &Expr, joiner:
|
||||
};
|
||||
|
||||
// Try to build the fstring (internally checks whether e.g. the elements are
|
||||
// convertible to f-string parts).
|
||||
// convertible to f-string elements).
|
||||
let Some(new_expr) = build_fstring(joiner, joinees) else {
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -180,7 +180,7 @@ fn format_import_block(
|
||||
continue;
|
||||
};
|
||||
|
||||
let imports = order_imports(import_block, settings);
|
||||
let imports = order_imports(import_block, import_section, settings);
|
||||
|
||||
// Add a blank line between every section.
|
||||
if is_first_block {
|
||||
@@ -200,6 +200,7 @@ fn format_import_block(
|
||||
// Add a blank lines between direct and from imports.
|
||||
if settings.from_first
|
||||
&& lines_between_types > 0
|
||||
&& !settings.force_sort_within_sections
|
||||
&& line_insertion == Some(LineInsertion::Necessary)
|
||||
{
|
||||
for _ in 0..lines_between_types {
|
||||
@@ -225,6 +226,7 @@ fn format_import_block(
|
||||
// Add a blank lines between direct and from imports.
|
||||
if !settings.from_first
|
||||
&& lines_between_types > 0
|
||||
&& !settings.force_sort_within_sections
|
||||
&& line_insertion == Some(LineInsertion::Necessary)
|
||||
{
|
||||
for _ in 0..lines_between_types {
|
||||
@@ -291,6 +293,7 @@ mod tests {
|
||||
#[test_case(Path::new("force_sort_within_sections.py"))]
|
||||
#[test_case(Path::new("force_to_top.py"))]
|
||||
#[test_case(Path::new("force_wrap_aliases.py"))]
|
||||
#[test_case(Path::new("future_from.py"))]
|
||||
#[test_case(Path::new("if_elif_else.py"))]
|
||||
#[test_case(Path::new("import_from_after_import.py"))]
|
||||
#[test_case(Path::new("inline_comments.py"))]
|
||||
@@ -701,6 +704,7 @@ mod tests {
|
||||
|
||||
#[test_case(Path::new("force_sort_within_sections.py"))]
|
||||
#[test_case(Path::new("force_sort_within_sections_with_as_names.py"))]
|
||||
#[test_case(Path::new("force_sort_within_sections_future.py"))]
|
||||
fn force_sort_within_sections(path: &Path) -> Result<()> {
|
||||
let snapshot = format!("force_sort_within_sections_{}", path.to_string_lossy());
|
||||
let mut diagnostics = test_path(
|
||||
@@ -720,6 +724,26 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Path::new("force_sort_within_sections_lines_between.py"))]
|
||||
fn force_sort_within_sections_lines_between(path: &Path) -> Result<()> {
|
||||
let snapshot = format!("force_sort_within_sections_{}", path.to_string_lossy());
|
||||
let mut diagnostics = test_path(
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_sort_within_sections: true,
|
||||
lines_between_types: 2,
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
..LinterSettings::for_rule(Rule::UnsortedImports)
|
||||
},
|
||||
)?;
|
||||
diagnostics.sort_by_key(Ranged::start);
|
||||
assert_messages!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Path::new("comment.py"))]
|
||||
#[test_case(Path::new("comments_and_newlines.py"))]
|
||||
#[test_case(Path::new("docstring.py"))]
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use crate::rules::isort::sorting::ImportStyle;
|
||||
use crate::rules::isort::{ImportSection, ImportType};
|
||||
use itertools::Itertools;
|
||||
|
||||
use super::settings::Settings;
|
||||
@@ -8,6 +9,7 @@ use super::types::{AliasData, CommentSet, ImportBlock, ImportFromStatement};
|
||||
|
||||
pub(crate) fn order_imports<'a>(
|
||||
block: ImportBlock<'a>,
|
||||
section: &ImportSection,
|
||||
settings: &Settings,
|
||||
) -> Vec<EitherImport<'a>> {
|
||||
let straight_imports = block.import.into_iter();
|
||||
@@ -52,7 +54,35 @@ pub(crate) fn order_imports<'a>(
|
||||
},
|
||||
);
|
||||
|
||||
let ordered_imports = if settings.force_sort_within_sections {
|
||||
let ordered_imports = if matches!(section, ImportSection::Known(ImportType::Future)) {
|
||||
from_imports
|
||||
.sorted_by_cached_key(|(import_from, _, _, aliases)| {
|
||||
ModuleKey::from_module(
|
||||
import_from.module,
|
||||
None,
|
||||
import_from.level,
|
||||
aliases.first().map(|(alias, _)| (alias.name, alias.asname)),
|
||||
ImportStyle::From,
|
||||
settings,
|
||||
)
|
||||
})
|
||||
.map(ImportFrom)
|
||||
.chain(
|
||||
straight_imports
|
||||
.sorted_by_cached_key(|(alias, _)| {
|
||||
ModuleKey::from_module(
|
||||
Some(alias.name),
|
||||
alias.asname,
|
||||
None,
|
||||
None,
|
||||
ImportStyle::Straight,
|
||||
settings,
|
||||
)
|
||||
})
|
||||
.map(Import),
|
||||
)
|
||||
.collect()
|
||||
} else if settings.force_sort_within_sections {
|
||||
straight_imports
|
||||
.map(Import)
|
||||
.chain(from_imports.map(ImportFrom))
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/isort/mod.rs
|
||||
---
|
||||
force_sort_within_sections_future.py:1:1: I001 [*] Import block is un-sorted or un-formatted
|
||||
|
|
||||
1 | / import __future__
|
||||
2 | | from __future__ import annotations
|
||||
|
|
||||
= help: Organize imports
|
||||
|
||||
ℹ Safe fix
|
||||
1 |+from __future__ import annotations
|
||||
1 2 | import __future__
|
||||
2 |-from __future__ import annotations
|
||||
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/isort/mod.rs
|
||||
---
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/isort/mod.rs
|
||||
---
|
||||
future_from.py:1:1: I001 [*] Import block is un-sorted or un-formatted
|
||||
|
|
||||
1 | / import __future__
|
||||
2 | | from __future__ import annotations
|
||||
|
|
||||
= help: Organize imports
|
||||
|
||||
ℹ Safe fix
|
||||
1 |+from __future__ import annotations
|
||||
1 2 | import __future__
|
||||
2 |-from __future__ import annotations
|
||||
|
||||
|
||||
@@ -112,7 +112,11 @@ pub(super) fn is_django_model_import(name: &str, stmt: &Stmt, semantic: &Semanti
|
||||
arguments.find_argument("model_name", arguments.args.len().saturating_sub(1))
|
||||
{
|
||||
if let Some(string_literal) = argument.as_string_literal_expr() {
|
||||
return string_literal.value.to_str() == name;
|
||||
if string_literal.value.to_str() == name {
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -127,7 +131,9 @@ pub(super) fn is_django_model_import(name: &str, stmt: &Stmt, semantic: &Semanti
|
||||
if let Some(argument) = arguments.find_argument("dotted_path", 0) {
|
||||
if let Some(string_literal) = argument.as_string_literal_expr() {
|
||||
if let Some((.., model)) = string_literal.value.to_str().rsplit_once('.') {
|
||||
return model == name;
|
||||
if model == name {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -52,6 +52,15 @@ N806.py:57:5: N806 Variable `Bad` in function should be lowercase
|
||||
56 | Bad = apps.get_model() # N806
|
||||
57 | Bad = apps.get_model(model_name="Stream") # N806
|
||||
| ^^^ N806
|
||||
58 |
|
||||
59 | Address: Type = apps.get_model("zerver", variable) # OK
|
||||
|
|
||||
|
||||
N806.py:60:5: N806 Variable `ValidationError` in function should be lowercase
|
||||
|
|
||||
59 | Address: Type = apps.get_model("zerver", variable) # OK
|
||||
60 | ValidationError = import_string(variable) # N806
|
||||
| ^^^^^^^^^^^^^^^ N806
|
||||
|
|
||||
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::Stmt;
|
||||
use ruff_python_ast::{self as ast, Arguments, Expr};
|
||||
use ruff_python_ast::{self as ast, Arguments, Expr, Stmt};
|
||||
use ruff_python_semantic::analyze::typing::find_assigned_value;
|
||||
use ruff_text_size::TextRange;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
@@ -110,30 +110,13 @@ pub(crate) fn unnecessary_list_cast(checker: &mut Checker, iter: &Expr, body: &[
|
||||
if body.iter().any(|stmt| match_append(stmt, id)) {
|
||||
return;
|
||||
}
|
||||
let scope = checker.semantic().current_scope();
|
||||
if let Some(binding_id) = scope.get(id) {
|
||||
let binding = checker.semantic().binding(binding_id);
|
||||
if binding.kind.is_assignment() || binding.kind.is_named_expr_assignment() {
|
||||
if let Some(parent_id) = binding.source {
|
||||
let parent = checker.semantic().statement(parent_id);
|
||||
if let Stmt::Assign(ast::StmtAssign { value, .. })
|
||||
| Stmt::AnnAssign(ast::StmtAnnAssign {
|
||||
value: Some(value), ..
|
||||
})
|
||||
| Stmt::AugAssign(ast::StmtAugAssign { value, .. }) = parent
|
||||
{
|
||||
if matches!(
|
||||
value.as_ref(),
|
||||
Expr::Tuple(_) | Expr::List(_) | Expr::Set(_)
|
||||
) {
|
||||
let mut diagnostic =
|
||||
Diagnostic::new(UnnecessaryListCast, *list_range);
|
||||
diagnostic.set_fix(remove_cast(*list_range, *iterable_range));
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let Some(value) = find_assigned_value(id, checker.semantic()) else {
|
||||
return;
|
||||
};
|
||||
if matches!(value, Expr::Tuple(_) | Expr::List(_) | Expr::Set(_)) {
|
||||
let mut diagnostic = Diagnostic::new(UnnecessaryListCast, *list_range);
|
||||
diagnostic.set_fix(remove_cast(*list_range, *iterable_range));
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
|
||||
@@ -201,4 +201,22 @@ PERF101.py:57:10: PERF101 [*] Do not cast an iterable to `list` before iterating
|
||||
59 59 | other_list.append(i + 1)
|
||||
60 60 |
|
||||
|
||||
PERF101.py:69:10: PERF101 [*] Do not cast an iterable to `list` before iterating over it
|
||||
|
|
||||
67 | x, y, nested_tuple = (1, 2, (3, 4, 5))
|
||||
68 |
|
||||
69 | for i in list(nested_tuple): # PERF101
|
||||
| ^^^^^^^^^^^^^^^^^^ PERF101
|
||||
70 | pass
|
||||
|
|
||||
= help: Remove `list()` cast
|
||||
|
||||
ℹ Safe fix
|
||||
66 66 |
|
||||
67 67 | x, y, nested_tuple = (1, 2, (3, 4, 5))
|
||||
68 68 |
|
||||
69 |-for i in list(nested_tuple): # PERF101
|
||||
69 |+for i in nested_tuple: # PERF101
|
||||
70 70 | pass
|
||||
|
||||
|
||||
|
||||
@@ -65,6 +65,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test_case(Rule::IsLiteral, Path::new("constant_literals.py"))]
|
||||
#[test_case(Rule::ModuleImportNotAtTopOfFile, Path::new("E402.py"))]
|
||||
#[test_case(Rule::TypeComparison, Path::new("E721.py"))]
|
||||
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!(
|
||||
|
||||
@@ -41,6 +41,10 @@ impl Violation for MultipleImportsOnOneLine {
|
||||
/// According to [PEP 8], "imports are always put at the top of the file, just after any
|
||||
/// module comments and docstrings, and before module globals and constants."
|
||||
///
|
||||
/// In [preview], this rule makes an exception for `sys.path` modifications,
|
||||
/// allowing for `sys.path.insert`, `sys.path.append`, and similar
|
||||
/// modifications between import statements.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// "One string"
|
||||
|
||||
@@ -381,20 +381,16 @@ impl Whitespace {
|
||||
}
|
||||
}
|
||||
|
||||
if has_tabs {
|
||||
if len == content.text_len() {
|
||||
// All whitespace up to the start of the line -> Indent
|
||||
(Self::None, TextSize::default())
|
||||
} else if has_tabs {
|
||||
(Self::Tab, len)
|
||||
} else {
|
||||
match count {
|
||||
0 => (Self::None, TextSize::default()),
|
||||
1 => (Self::Single, len),
|
||||
_ => {
|
||||
if len == content.text_len() {
|
||||
// All whitespace up to the start of the line -> Indent
|
||||
(Self::None, TextSize::default())
|
||||
} else {
|
||||
(Self::Many, len)
|
||||
}
|
||||
}
|
||||
_ => (Self::Many, len),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix, Violation};
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_parser::TokenKind;
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
@@ -34,11 +34,15 @@ use crate::rules::pycodestyle::rules::logical_lines::{LogicalLine, LogicalLineTo
|
||||
#[violation]
|
||||
pub struct UnexpectedSpacesAroundKeywordParameterEquals;
|
||||
|
||||
impl Violation for UnexpectedSpacesAroundKeywordParameterEquals {
|
||||
impl AlwaysFixableViolation for UnexpectedSpacesAroundKeywordParameterEquals {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Unexpected spaces around keyword / parameter equals")
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> String {
|
||||
format!("Remove whitespace")
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
@@ -165,22 +169,31 @@ pub(crate) fn whitespace_around_named_parameter_equals(
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// If there's space between the preceding token and the equals sign, report it.
|
||||
if token.start() != prev_end {
|
||||
context.push(
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
UnexpectedSpacesAroundKeywordParameterEquals,
|
||||
TextRange::new(prev_end, token.start()),
|
||||
);
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::deletion(prev_end, token.start())));
|
||||
context.push_diagnostic(diagnostic);
|
||||
}
|
||||
|
||||
// If there's space between the equals sign and the following token, report it.
|
||||
while let Some(next) = iter.peek() {
|
||||
if next.kind() == TokenKind::NonLogicalNewline {
|
||||
iter.next();
|
||||
} else {
|
||||
if next.start() != token.end() {
|
||||
context.push(
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
UnexpectedSpacesAroundKeywordParameterEquals,
|
||||
TextRange::new(token.end(), next.start()),
|
||||
);
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::deletion(
|
||||
token.end(),
|
||||
next.start(),
|
||||
)));
|
||||
context.push_diagnostic(diagnostic);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix, Violation};
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_parser::TokenKind;
|
||||
use ruff_python_trivia::PythonWhitespace;
|
||||
@@ -66,11 +66,15 @@ impl AlwaysFixableViolation for TooFewSpacesBeforeInlineComment {
|
||||
#[violation]
|
||||
pub struct NoSpaceAfterInlineComment;
|
||||
|
||||
impl Violation for NoSpaceAfterInlineComment {
|
||||
impl AlwaysFixableViolation for NoSpaceAfterInlineComment {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Inline comment should start with `# `")
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> String {
|
||||
format!("Format space")
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
@@ -98,11 +102,15 @@ impl Violation for NoSpaceAfterInlineComment {
|
||||
#[violation]
|
||||
pub struct NoSpaceAfterBlockComment;
|
||||
|
||||
impl Violation for NoSpaceAfterBlockComment {
|
||||
impl AlwaysFixableViolation for NoSpaceAfterBlockComment {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Block comment should start with `# `")
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> String {
|
||||
format!("Format space")
|
||||
}
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
@@ -130,11 +138,15 @@ impl Violation for NoSpaceAfterBlockComment {
|
||||
#[violation]
|
||||
pub struct MultipleLeadingHashesForBlockComment;
|
||||
|
||||
impl Violation for MultipleLeadingHashesForBlockComment {
|
||||
impl AlwaysFixableViolation for MultipleLeadingHashesForBlockComment {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Too many leading `#` before block comment")
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> String {
|
||||
format!("Remove leading `#`")
|
||||
}
|
||||
}
|
||||
|
||||
/// E261, E262, E265, E266
|
||||
@@ -184,14 +196,30 @@ pub(crate) fn whitespace_before_comment(
|
||||
|
||||
if is_inline_comment {
|
||||
if bad_prefix.is_some() || comment.chars().next().is_some_and(char::is_whitespace) {
|
||||
context.push(NoSpaceAfterInlineComment, range);
|
||||
let mut diagnostic = Diagnostic::new(NoSpaceAfterInlineComment, range);
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
|
||||
format_leading_space(token_text),
|
||||
range,
|
||||
)));
|
||||
context.push_diagnostic(diagnostic);
|
||||
}
|
||||
} else if let Some(bad_prefix) = bad_prefix {
|
||||
if bad_prefix != '!' || !line.is_start_of_file() {
|
||||
if bad_prefix != '#' {
|
||||
context.push(NoSpaceAfterBlockComment, range);
|
||||
let mut diagnostic = Diagnostic::new(NoSpaceAfterBlockComment, range);
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
|
||||
format_leading_space(token_text),
|
||||
range,
|
||||
)));
|
||||
context.push_diagnostic(diagnostic);
|
||||
} else if !comment.is_empty() {
|
||||
context.push(MultipleLeadingHashesForBlockComment, range);
|
||||
let mut diagnostic =
|
||||
Diagnostic::new(MultipleLeadingHashesForBlockComment, range);
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
|
||||
format_leading_hashes(token_text),
|
||||
range,
|
||||
)));
|
||||
context.push_diagnostic(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -200,3 +228,17 @@ pub(crate) fn whitespace_before_comment(
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Format a comment to have a single space after the `#`.
|
||||
fn format_leading_space(comment: &str) -> String {
|
||||
if let Some(rest) = comment.strip_prefix("#:") {
|
||||
format!("#: {}", rest.trim_start())
|
||||
} else {
|
||||
format!("# {}", comment.trim_start_matches('#').trim_start())
|
||||
}
|
||||
}
|
||||
|
||||
/// Format a comment to strip multiple leading `#` characters.
|
||||
fn format_leading_hashes(comment: &str) -> String {
|
||||
format!("# {}", comment.trim_start_matches('#').trim_start())
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
E25.py:2:12: E251 Unexpected spaces around keyword / parameter equals
|
||||
E25.py:2:12: E251 [*] Unexpected spaces around keyword / parameter equals
|
||||
|
|
||||
1 | #: E251 E251
|
||||
2 | def foo(bar = False):
|
||||
@@ -9,8 +9,17 @@ E25.py:2:12: E251 Unexpected spaces around keyword / parameter equals
|
||||
3 | '''Test function with an error in declaration'''
|
||||
4 | pass
|
||||
|
|
||||
= help: Remove whitespace
|
||||
|
||||
E25.py:2:14: E251 Unexpected spaces around keyword / parameter equals
|
||||
ℹ Safe fix
|
||||
1 1 | #: E251 E251
|
||||
2 |-def foo(bar = False):
|
||||
2 |+def foo(bar= False):
|
||||
3 3 | '''Test function with an error in declaration'''
|
||||
4 4 | pass
|
||||
5 5 | #: E251
|
||||
|
||||
E25.py:2:14: E251 [*] Unexpected spaces around keyword / parameter equals
|
||||
|
|
||||
1 | #: E251 E251
|
||||
2 | def foo(bar = False):
|
||||
@@ -18,8 +27,17 @@ E25.py:2:14: E251 Unexpected spaces around keyword / parameter equals
|
||||
3 | '''Test function with an error in declaration'''
|
||||
4 | pass
|
||||
|
|
||||
= help: Remove whitespace
|
||||
|
||||
E25.py:6:9: E251 Unexpected spaces around keyword / parameter equals
|
||||
ℹ Safe fix
|
||||
1 1 | #: E251 E251
|
||||
2 |-def foo(bar = False):
|
||||
2 |+def foo(bar =False):
|
||||
3 3 | '''Test function with an error in declaration'''
|
||||
4 4 | pass
|
||||
5 5 | #: E251
|
||||
|
||||
E25.py:6:9: E251 [*] Unexpected spaces around keyword / parameter equals
|
||||
|
|
||||
4 | pass
|
||||
5 | #: E251
|
||||
@@ -28,8 +46,19 @@ E25.py:6:9: E251 Unexpected spaces around keyword / parameter equals
|
||||
7 | #: E251
|
||||
8 | foo(bar =True)
|
||||
|
|
||||
= help: Remove whitespace
|
||||
|
||||
E25.py:8:8: E251 Unexpected spaces around keyword / parameter equals
|
||||
ℹ Safe fix
|
||||
3 3 | '''Test function with an error in declaration'''
|
||||
4 4 | pass
|
||||
5 5 | #: E251
|
||||
6 |-foo(bar= True)
|
||||
6 |+foo(bar=True)
|
||||
7 7 | #: E251
|
||||
8 8 | foo(bar =True)
|
||||
9 9 | #: E251 E251
|
||||
|
||||
E25.py:8:8: E251 [*] Unexpected spaces around keyword / parameter equals
|
||||
|
|
||||
6 | foo(bar= True)
|
||||
7 | #: E251
|
||||
@@ -38,8 +67,19 @@ E25.py:8:8: E251 Unexpected spaces around keyword / parameter equals
|
||||
9 | #: E251 E251
|
||||
10 | foo(bar = True)
|
||||
|
|
||||
= help: Remove whitespace
|
||||
|
||||
E25.py:10:8: E251 Unexpected spaces around keyword / parameter equals
|
||||
ℹ Safe fix
|
||||
5 5 | #: E251
|
||||
6 6 | foo(bar= True)
|
||||
7 7 | #: E251
|
||||
8 |-foo(bar =True)
|
||||
8 |+foo(bar=True)
|
||||
9 9 | #: E251 E251
|
||||
10 10 | foo(bar = True)
|
||||
11 11 | #: E251
|
||||
|
||||
E25.py:10:8: E251 [*] Unexpected spaces around keyword / parameter equals
|
||||
|
|
||||
8 | foo(bar =True)
|
||||
9 | #: E251 E251
|
||||
@@ -48,8 +88,19 @@ E25.py:10:8: E251 Unexpected spaces around keyword / parameter equals
|
||||
11 | #: E251
|
||||
12 | y = bar(root= "sdasd")
|
||||
|
|
||||
= help: Remove whitespace
|
||||
|
||||
E25.py:10:10: E251 Unexpected spaces around keyword / parameter equals
|
||||
ℹ Safe fix
|
||||
7 7 | #: E251
|
||||
8 8 | foo(bar =True)
|
||||
9 9 | #: E251 E251
|
||||
10 |-foo(bar = True)
|
||||
10 |+foo(bar= True)
|
||||
11 11 | #: E251
|
||||
12 12 | y = bar(root= "sdasd")
|
||||
13 13 | #: E251:2:29
|
||||
|
||||
E25.py:10:10: E251 [*] Unexpected spaces around keyword / parameter equals
|
||||
|
|
||||
8 | foo(bar =True)
|
||||
9 | #: E251 E251
|
||||
@@ -58,8 +109,19 @@ E25.py:10:10: E251 Unexpected spaces around keyword / parameter equals
|
||||
11 | #: E251
|
||||
12 | y = bar(root= "sdasd")
|
||||
|
|
||||
= help: Remove whitespace
|
||||
|
||||
E25.py:12:14: E251 Unexpected spaces around keyword / parameter equals
|
||||
ℹ Safe fix
|
||||
7 7 | #: E251
|
||||
8 8 | foo(bar =True)
|
||||
9 9 | #: E251 E251
|
||||
10 |-foo(bar = True)
|
||||
10 |+foo(bar =True)
|
||||
11 11 | #: E251
|
||||
12 12 | y = bar(root= "sdasd")
|
||||
13 13 | #: E251:2:29
|
||||
|
||||
E25.py:12:14: E251 [*] Unexpected spaces around keyword / parameter equals
|
||||
|
|
||||
10 | foo(bar = True)
|
||||
11 | #: E251
|
||||
@@ -68,8 +130,19 @@ E25.py:12:14: E251 Unexpected spaces around keyword / parameter equals
|
||||
13 | #: E251:2:29
|
||||
14 | parser.add_argument('--long-option',
|
||||
|
|
||||
= help: Remove whitespace
|
||||
|
||||
E25.py:15:29: E251 Unexpected spaces around keyword / parameter equals
|
||||
ℹ Safe fix
|
||||
9 9 | #: E251 E251
|
||||
10 10 | foo(bar = True)
|
||||
11 11 | #: E251
|
||||
12 |-y = bar(root= "sdasd")
|
||||
12 |+y = bar(root="sdasd")
|
||||
13 13 | #: E251:2:29
|
||||
14 14 | parser.add_argument('--long-option',
|
||||
15 15 | default=
|
||||
|
||||
E25.py:15:29: E251 [*] Unexpected spaces around keyword / parameter equals
|
||||
|
|
||||
13 | #: E251:2:29
|
||||
14 | parser.add_argument('--long-option',
|
||||
@@ -80,8 +153,20 @@ E25.py:15:29: E251 Unexpected spaces around keyword / parameter equals
|
||||
17 | #: E251:1:45
|
||||
18 | parser.add_argument('--long-option', default
|
||||
|
|
||||
= help: Remove whitespace
|
||||
|
||||
E25.py:18:45: E251 Unexpected spaces around keyword / parameter equals
|
||||
ℹ Safe fix
|
||||
12 12 | y = bar(root= "sdasd")
|
||||
13 13 | #: E251:2:29
|
||||
14 14 | parser.add_argument('--long-option',
|
||||
15 |- default=
|
||||
16 |- "/rather/long/filesystem/path/here/blah/blah/blah")
|
||||
15 |+ default="/rather/long/filesystem/path/here/blah/blah/blah")
|
||||
17 16 | #: E251:1:45
|
||||
18 17 | parser.add_argument('--long-option', default
|
||||
19 18 | ="/rather/long/filesystem/path/here/blah/blah/blah")
|
||||
|
||||
E25.py:18:45: E251 [*] Unexpected spaces around keyword / parameter equals
|
||||
|
|
||||
16 | "/rather/long/filesystem/path/here/blah/blah/blah")
|
||||
17 | #: E251:1:45
|
||||
@@ -92,8 +177,20 @@ E25.py:18:45: E251 Unexpected spaces around keyword / parameter equals
|
||||
20 | #: E251:3:8 E251:3:10
|
||||
21 | foo(True,
|
||||
|
|
||||
= help: Remove whitespace
|
||||
|
||||
E25.py:23:8: E251 Unexpected spaces around keyword / parameter equals
|
||||
ℹ Safe fix
|
||||
15 15 | default=
|
||||
16 16 | "/rather/long/filesystem/path/here/blah/blah/blah")
|
||||
17 17 | #: E251:1:45
|
||||
18 |-parser.add_argument('--long-option', default
|
||||
19 |- ="/rather/long/filesystem/path/here/blah/blah/blah")
|
||||
18 |+parser.add_argument('--long-option', default="/rather/long/filesystem/path/here/blah/blah/blah")
|
||||
20 19 | #: E251:3:8 E251:3:10
|
||||
21 20 | foo(True,
|
||||
22 21 | baz=(1, 2),
|
||||
|
||||
E25.py:23:8: E251 [*] Unexpected spaces around keyword / parameter equals
|
||||
|
|
||||
21 | foo(True,
|
||||
22 | baz=(1, 2),
|
||||
@@ -102,8 +199,19 @@ E25.py:23:8: E251 Unexpected spaces around keyword / parameter equals
|
||||
24 | )
|
||||
25 | #: Okay
|
||||
|
|
||||
= help: Remove whitespace
|
||||
|
||||
E25.py:23:10: E251 Unexpected spaces around keyword / parameter equals
|
||||
ℹ Safe fix
|
||||
20 20 | #: E251:3:8 E251:3:10
|
||||
21 21 | foo(True,
|
||||
22 22 | baz=(1, 2),
|
||||
23 |- biz = 'foo'
|
||||
23 |+ biz= 'foo'
|
||||
24 24 | )
|
||||
25 25 | #: Okay
|
||||
26 26 | foo(bar=(1 == 1))
|
||||
|
||||
E25.py:23:10: E251 [*] Unexpected spaces around keyword / parameter equals
|
||||
|
|
||||
21 | foo(True,
|
||||
22 | baz=(1, 2),
|
||||
@@ -112,5 +220,16 @@ E25.py:23:10: E251 Unexpected spaces around keyword / parameter equals
|
||||
24 | )
|
||||
25 | #: Okay
|
||||
|
|
||||
= help: Remove whitespace
|
||||
|
||||
ℹ Safe fix
|
||||
20 20 | #: E251:3:8 E251:3:10
|
||||
21 21 | foo(True,
|
||||
22 22 | baz=(1, 2),
|
||||
23 |- biz = 'foo'
|
||||
23 |+ biz ='foo'
|
||||
24 24 | )
|
||||
25 25 | #: Okay
|
||||
26 26 | foo(bar=(1 == 1))
|
||||
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user