Compare commits

378 commits
.cargo/config.toml (new file, 2 lines)

@@ -0,0 +1,2 @@
+[alias]
+dev = "run --package ruff_dev --bin ruff_dev"
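With this alias in place, the `ruff_dev` helper crate can be invoked as `cargo dev` from the repository root. A minimal usage sketch, limited to the subcommands mentioned later in CONTRIBUTING.md:

```shell
# Equivalent to: cargo run --package ruff_dev --bin ruff_dev -- <subcommand>
cargo dev print-ast path/to/file.py      # dump the parsed AST for a Python file
cargo dev generate-rules-table           # regenerate the rules table in the README
cargo dev generate-check-code-prefix     # regenerate the check-code prefix map
```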
.github/workflows/ci.yaml (28 changed lines)

@@ -14,7 +14,7 @@ jobs:
    - uses: actions/checkout@v3
    - uses: actions-rs/toolchain@v1
      with:
        toolchain: stable
        profile: minimal
    - uses: actions/cache@v3
      env:
        cache-name: cache-cargo
@@ -27,7 +27,7 @@ jobs:
            ${{ runner.os }}-build-${{ env.cache-name }}-
            ${{ runner.os }}-build-
            ${{ runner.os }}-
    - run: cargo build --release
    - run: cargo build --all --release

  cargo_fmt:
    name: "cargo fmt"
@@ -36,7 +36,10 @@ jobs:
    - uses: actions/checkout@v3
    - uses: actions-rs/toolchain@v1
      with:
        toolchain: stable
        profile: minimal
        toolchain: nightly-2022-11-01
        override: true
        components: rustfmt
    - uses: actions/cache@v3
      env:
        cache-name: cache-cargo
@@ -49,7 +52,7 @@ jobs:
            ${{ runner.os }}-build-${{ env.cache-name }}-
            ${{ runner.os }}-build-
            ${{ runner.os }}-
    - run: cargo fmt --check
    - run: cargo fmt --all --check

  cargo_clippy:
    name: "cargo clippy"
@@ -58,7 +61,10 @@ jobs:
    - uses: actions/checkout@v3
    - uses: actions-rs/toolchain@v1
      with:
        toolchain: stable
        profile: minimal
        toolchain: nightly-2022-11-01
        override: true
        components: clippy
    - uses: actions/cache@v3
      env:
        cache-name: cache-cargo
@@ -71,7 +77,7 @@ jobs:
            ${{ runner.os }}-build-${{ env.cache-name }}-
            ${{ runner.os }}-build-
            ${{ runner.os }}-
    - run: cargo clippy -- -D warnings
    - run: cargo clippy --all -- -D warnings

  cargo_test:
    name: "cargo test"
@@ -80,7 +86,9 @@ jobs:
    - uses: actions/checkout@v3
    - uses: actions-rs/toolchain@v1
      with:
        toolchain: stable
        profile: minimal
        toolchain: nightly-2022-11-01
        override: true
    - uses: actions/cache@v3
      env:
        cache-name: cache-cargo
@@ -93,7 +101,7 @@ jobs:
            ${{ runner.os }}-build-${{ env.cache-name }}-
            ${{ runner.os }}-build-
            ${{ runner.os }}-
    - run: cargo test
    - run: cargo test --all

  maturin_build:
    name: "maturin build"
@@ -102,7 +110,9 @@ jobs:
    - uses: actions/checkout@v3
    - uses: actions-rs/toolchain@v1
      with:
        toolchain: stable
        profile: minimal
        toolchain: nightly-2022-11-01
        override: true
    - uses: actions/setup-python@v4
      with:
        python-version: "3.10"
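Taken together, the updated jobs build, format, lint, and test the entire workspace rather than the root crate only, with the fmt/clippy/test jobs pinned to a nightly toolchain. Running the equivalent checks locally looks roughly like this (a sketch based on the `run:` steps above and the CONTRIBUTING.md commands further down):

```shell
cargo build --all --release                   # cargo_build job
cargo +nightly fmt --all --check              # cargo_fmt job
cargo +nightly clippy --all -- -D warnings    # cargo_clippy job
cargo +nightly test --all                     # cargo_test job
```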
.github/workflows/flake8-to-ruff.yaml (new file, 298 lines)

@@ -0,0 +1,298 @@
|
||||
name: "[flake8-to-ruff] Release"
|
||||
|
||||
on: workflow_dispatch
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
PACKAGE_NAME: flake8-to-ruff
|
||||
CRATE_NAME: flake8_to_ruff
|
||||
PYTHON_VERSION: "3.7" # to build abi3 wheels
|
||||
|
||||
jobs:
|
||||
macos-x86_64:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: Install Rust toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
profile: minimal
|
||||
default: true
|
||||
- name: Build wheels - x86_64
|
||||
uses: messense/maturin-action@v1
|
||||
with:
|
||||
target: x86_64
|
||||
args: --release --out dist --sdist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
maturin-version: "v0.13.0"
|
||||
- name: Install built wheel - x86_64
|
||||
run: |
|
||||
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
macos-universal:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: Install Rust toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
profile: minimal
|
||||
default: true
|
||||
- name: Build wheels - universal2
|
||||
uses: messense/maturin-action@v1
|
||||
with:
|
||||
args: --release --universal2 --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
maturin-version: "v0.13.0"
|
||||
- name: Install built wheel - universal2
|
||||
run: |
|
||||
pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
windows:
|
||||
runs-on: windows-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target: [x64, x86]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: ${{ matrix.target }}
|
||||
- name: Install Rust toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
profile: minimal
|
||||
default: true
|
||||
- name: Build wheels
|
||||
uses: messense/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
maturin-version: "v0.13.0"
|
||||
- name: Install built wheel
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
linux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target: [x86_64, i686]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: Build wheels
|
||||
uses: messense/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: auto
|
||||
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
maturin-version: "v0.13.0"
|
||||
- name: Install built wheel
|
||||
if: matrix.target == 'x86_64'
|
||||
run: |
|
||||
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
linux-cross:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target: [aarch64, armv7, s390x, ppc64le, ppc64]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: Build wheels
|
||||
uses: messense/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: auto
|
||||
args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
maturin-version: "v0.13.0"
|
||||
- uses: uraimo/run-on-arch-action@v2.0.5
|
||||
if: matrix.target != 'ppc64'
|
||||
name: Install built wheel
|
||||
with:
|
||||
arch: ${{ matrix.target }}
|
||||
distro: ubuntu20.04
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apt-get update
|
||||
apt-get install -y --no-install-recommends python3 python3-pip
|
||||
pip3 install -U pip
|
||||
run: |
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
musllinux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-musl
|
||||
- i686-unknown-linux-musl
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: Build wheels
|
||||
uses: messense/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
maturin-version: "v0.13.0"
|
||||
- name: Install built wheel
|
||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||
uses: addnab/docker-run-action@v3
|
||||
with:
|
||||
image: alpine:latest
|
||||
options: -v ${{ github.workspace }}:/io -w /io
|
||||
run: |
|
||||
apk add py3-pip
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
musllinux-cross:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-musl
|
||||
arch: aarch64
|
||||
- target: armv7-unknown-linux-musleabihf
|
||||
arch: armv7
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: Build wheels
|
||||
uses: messense/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
maturin-version: "v0.13.0"
|
||||
- uses: uraimo/run-on-arch-action@master
|
||||
name: Install built wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
distro: alpine_latest
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apk add py3-pip
|
||||
run: |
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
pypy:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
target: [x86_64, aarch64]
|
||||
python-version:
|
||||
- "3.7"
|
||||
- "3.8"
|
||||
- "3.9"
|
||||
exclude:
|
||||
- os: macos-latest
|
||||
target: aarch64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: pypy${{ matrix.python-version }}
|
||||
- name: Build wheels
|
||||
uses: messense/maturin-action@v1
|
||||
with:
|
||||
maturin-version: "v0.13.0"
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: auto
|
||||
args: --release --out dist -i pypy${{ matrix.python-version }} -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||
- name: Install built wheel
|
||||
if: matrix.target == 'x86_64'
|
||||
run: |
|
||||
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
release:
|
||||
name: Release
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- macos-universal
|
||||
- macos-x86_64
|
||||
- windows
|
||||
- linux
|
||||
- linux-cross
|
||||
- musllinux
|
||||
- musllinux-cross
|
||||
- pypy
|
||||
steps:
|
||||
- uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: wheels
|
||||
- uses: actions/setup-python@v4
|
||||
- name: Publish to PyPi
|
||||
env:
|
||||
TWINE_USERNAME: __token__
|
||||
TWINE_PASSWORD: ${{ secrets.FLAKE8_TO_RUFF_TOKEN }}
|
||||
run: |
|
||||
pip install --upgrade twine
|
||||
twine upload --skip-existing *
|
||||
@@ -1,8 +1,6 @@
name: Release
name: "[ruff] Release"

on:
  pull_request:
    branches: [main]
  create:
    tags:
      - v*
@@ -297,7 +295,7 @@ jobs:
      - name: Publish to PyPi
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
          TWINE_PASSWORD: ${{ secrets.RUFF_TOKEN }}
        run: |
          pip install --upgrade twine
          twine upload --skip-existing *
@@ -1,8 +1,8 @@
repos:
  - repo: https://github.com/charliermarsh/ruff-pre-commit
    rev: v0.0.40
    rev: v0.0.114
    hooks:
      - id: lint
      - id: ruff

  - repo: https://github.com/abravalheri/validate-pyproject
    rev: v0.10.1
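With the bumped revision and the renamed hook id (`lint` becomes `ruff`), the hook can be exercised directly; a usage sketch using standard pre-commit commands:

```shell
pre-commit autoupdate            # pick up ruff-pre-commit v0.0.114
pre-commit run ruff --all-files  # run the renamed hook against the whole repository
```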
@@ -1,17 +1,17 @@
# Contributing to ruff
# Contributing to Ruff

Welcome! We're happy to have you here. Thank you in advance for your contribution to ruff.
Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.

## The basics

ruff welcomes contributions in the form of Pull Requests. For small changes (e.g., bug fixes), feel
Ruff welcomes contributions in the form of Pull Requests. For small changes (e.g., bug fixes), feel
free to submit a PR. For larger changes (e.g., new lint rules, new functionality, new configuration
options), consider submitting an [Issue](https://github.com/charliermarsh/ruff/issues) outlining
your proposed change.

### Prerequisites

ruff is written in Rust (1.63.0). You'll need to install the
Ruff is written in Rust. You'll need to install the
[Rust toolchain](https://www.rust-lang.org/tools/install) for development.

You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
@@ -22,7 +22,7 @@ cargo install cargo-insta

### Development

After cloning the repository, run ruff locally with:
After cloning the repository, run Ruff locally with:

```shell
cargo run resources/test/fixtures --no-cache
@@ -32,9 +32,9 @@ Prior to opening a pull request, ensure that your code has been auto-formatted, and that it passes
both the lint and test validation checks:

```shell
cargo fmt # Auto-formatting...
cargo clippy # Linting...
cargo test # Testing...
cargo +nightly fmt --all # Auto-formatting...
cargo +nightly clippy --all # Linting...
cargo +nightly test --all # Testing...
```

These checks will run on GitHub Actions when you open your Pull Request, but running them locally
@@ -45,12 +45,13 @@ prior to merging.

### Example: Adding a new lint rule

There are three phases to adding a new lint rule:
There are four phases to adding a new lint rule:

1. Define the rule in `src/checks.rs`.
2. Define the _logic_ for triggering the rule in `src/check_ast.rs` (for AST-based checks)
   or `src/check_lines.rs` (for text-based checks).
2. Define the _logic_ for triggering the rule in `src/check_ast.rs` (for AST-based checks),
   `src/check_tokens.rs` (for token-based checks), or `src/check_lines.rs` (for text-based checks).
3. Add a test fixture.
4. Update the generated files (documentation and generated code).

To define the rule, open up `src/checks.rs`. You'll need to define both a `CheckCode` and
`CheckKind`. As an example, you can grep for `E402` and `ModuleImportNotAtTopOfFile`, and follow the
@@ -58,37 +59,33 @@ pattern implemented therein.

To trigger the rule, you'll likely want to augment the logic in `src/check_ast.rs`, which defines
the Python AST visitor, responsible for iterating over the abstract syntax tree and collecting
lint-rule violations as it goes. Grep for the `Check::new` invocations to understand how other,
similar rules are implemented.
lint-rule violations as it goes. If you need to inspect the AST, you can run `cargo dev print-ast`
with a Python file. Grep for the `Check::new` invocations to understand how other, similar rules
are implemented.

To add a test fixture, create a file under `resources/test/fixtures`, named to match the `CheckCode`
you defined earlier (e.g., `E402.py`). This file should contain a variety of violations and
non-violations designed to evaluate and demonstrate the behavior of your lint rule. Run ruff locally
with (e.g.) `cargo run resources/test/fixtures/E402.py`. Once you're satisfied with the output,
codify the behavior as a snapshot test by adding a new function to the `mod tests` section of
`src/linter.rs`, like so:
non-violations designed to evaluate and demonstrate the behavior of your lint rule. Run Ruff locally
with (e.g.) `cargo run resources/test/fixtures/E402.py --no-cache`. Once you're satisfied with the
output, codify the behavior as a snapshot test by adding a new `test_case` macro to the `mod tests`
section of `src/linter.rs`, like so:

```rust
#[test]
fn e402() -> Result<()> {
    let mut checks = check_path(
        Path::new("./resources/test/fixtures/E402.py"),
        &settings::Settings::for_rule(CheckCode::E402),
        &fixer::Mode::Generate,
    )?;
    checks.sort_by_key(|check| check.location);
    insta::assert_yaml_snapshot!(checks);
    Ok(())
}
#[test_case(CheckCode::A001, Path::new("A001.py"); "A001")]
...
```

Then, run `cargo test`. Your test will fail, but you'll be prompted to follow-up with
`cargo insta review`. Accept the generated snapshot, then commit the snapshot file alongside the
rest of your changes.

Finally, to update the documentation, run `cargo dev generate-rules-table` from the repo root. To
update the generated prefix map, run `cargo dev generate-check-code-prefix`. Both of these commands
should be run whenever a new check is added to the codebase.
|
||||
|
||||
ruff's user-facing settings live in two places: first, the command-line options defined with
|
||||
Ruff's user-facing settings live in two places: first, the command-line options defined with
|
||||
[clap](https://docs.rs/clap/latest/clap/) via the `Cli` struct in `src/main.rs`; and second, the
|
||||
`Config` struct defined `src/pyproject.rs`, which is responsible for extracting user-defined
|
||||
settings from a `pyproject.toml` file.
|
||||
@@ -103,9 +100,9 @@ acceptable unused variables (e.g., `_`).
|
||||
|
||||
## Release process
|
||||
|
||||
As of now, ruff has an ad hoc release process: releases are cut with high frequency via GitHub
|
||||
As of now, Ruff has an ad hoc release process: releases are cut with high frequency via GitHub
|
||||
Actions, which automatically generates the appropriate wheels across architectures and publishes
|
||||
them to [PyPI](https://pypi.org/project/ruff/).
|
||||
|
||||
ruff follows the [semver](https://semver.org/) versioning standard. However, as pre-1.0 software,
|
||||
Ruff follows the [semver](https://semver.org/) versioning standard. However, as pre-1.0 software,
|
||||
even patch releases may contain [non-backwards-compatible changes](https://semver.org/#spec-item-4).
|
||||
|
||||
Cargo.lock (generated, 755 changed lines; diff suppressed because it is too large)
Cargo.toml (56 changed lines)

@@ -1,18 +1,23 @@
[workspace]
members = [
    "flake8_to_ruff",
    "ruff_dev",
]

[package]
name = "ruff"
version = "0.0.57"
version = "0.0.114"
edition = "2021"

[lib]
name = "ruff"

[dependencies]
anyhow = { version = "1.0.60" }
anyhow = { version = "1.0.66" }
bincode = { version = "1.3.3" }
cacache = { version = "10.0.1" }
chrono = { version = "0.4.21" }
bitflags = { version = "1.3.2" }
chrono = { version = "0.4.21", default-features = false, features = ["clock"] }
clap = { version = "4.0.1", features = ["derive"] }
clearscreen = { version = "1.0.10" }
colored = { version = "2.0.0" }
common-path = { version = "1.0.0" }
dirs = { version = "4.0.0" }
@@ -20,24 +25,43 @@ fern = { version = "0.6.1" }
filetime = { version = "0.2.17" }
glob = { version = "0.3.0" }
itertools = { version = "0.10.5" }
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "32a044c127668df44582f85699358e67803b0d73" }
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "a13ec97dd4eb925bde4d426c6e422582793b260c" }
log = { version = "0.4.17" }
nohash-hasher = { version = "0.2.0" }
notify = { version = "4.0.17" }
once_cell = { version = "1.13.1" }
path-absolutize = { version = "3.0.13", features = ["once_cell_cache"] }
num-bigint = { version = "0.4.3" }
once_cell = { version = "1.16.0" }
path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix_paths_on_wasm"] }
rayon = { version = "1.5.3" }
regex = { version = "1.6.0" }
rustpython-ast = { features = ["unparse"], git = "https://github.com/charliermarsh/RustPython.git", rev = "4f457893efc381ad5c432576b24bcc7e4a08c641" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/charliermarsh/RustPython.git", rev = "4f457893efc381ad5c432576b24bcc7e4a08c641" }
rustpython-common = { git = "https://github.com/charliermarsh/RustPython.git", rev = "4f457893efc381ad5c432576b24bcc7e4a08c641" }
serde = { version = "1.0.143", features = ["derive"] }
serde_json = { version = "1.0.83" }
ropey = { version = "1.5.0", features = ["cr_lines", "simd"], default-features = false }
rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "27bf82a2251d7e6ac6cd75e6ad51be12a53d84bb" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "27bf82a2251d7e6ac6cd75e6ad51be12a53d84bb" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "27bf82a2251d7e6ac6cd75e6ad51be12a53d84bb" }
serde = { version = "1.0.147", features = ["derive"] }
serde_json = { version = "1.0.87" }
strum = { version = "0.24.1", features = ["strum_macros"] }
strum_macros = { version = "0.24.3" }
textwrap = { version = "0.16.0" }
titlecase = { version = "2.2.1" }
toml = { version = "0.5.9" }
update-informer = { version = "0.5.0", default_features = false, features = ["pypi"], optional = true }
update-informer = { version = "0.5.0", default-features = false, features = ["pypi"], optional = true }
walkdir = { version = "2.3.2" }

[target.'cfg(not(target_family = "wasm"))'.dependencies]
cacache = { version = "10.0.1" } # uses async-std
clearscreen = { version = "1.0.10" } # uses which

# https://docs.rs/getrandom/0.2.7/getrandom/#webassembly-support
# For (future) wasm-pack support
[target.'cfg(all(target_family = "wasm", target_os = "unknown"))'.dependencies]
getrandom = { version = "0.2.7", features = ["js"] }

[dev-dependencies]
assert_cmd = { version = "2.0.4" }
criterion = { version = "0.4.0" }
insta = { version = "1.19.1", features = ["yaml"] }
test-case = { version = "2.2.2" }

[features]
default = ["update-informer"]
@@ -54,3 +78,7 @@ opt-level = 3

[profile.dev.package.similar]
opt-level = 3

[[bench]]
name = "source_code_locator"
harness = false
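The new `[[bench]]` target pairs with the `criterion` dev-dependency above (hence `harness = false`). It can be run on its own through Cargo:

```shell
# Run only the source-code-locator benchmark added in this diff.
cargo bench --bench source_code_locator
```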
README.md (793 changed lines)

@@ -1,4 +1,4 @@
# ruff
# Ruff

[](https://pypi.python.org/pypi/ruff)
[](https://pypi.python.org/pypi/ruff)
@@ -15,22 +15,64 @@ An extremely fast Python linter, written in Rust.
  <i>Linting the CPython codebase from scratch.</i>
</p>

- ⚡️ 10-100x faster than existing linters
- 🐍 Installable via `pip`
- 🤝 Python 3.10 compatibility
- 🛠️ `pyproject.toml` support
- 📦 [ESLint](https://eslint.org/docs/latest/user-guide/command-line-interface#caching)-inspired cache support
- 🔧 [ESLint](https://eslint.org/docs/latest/user-guide/command-line-interface#--fix)-inspired `--fix` support
- 👀 [TypeScript](https://www.typescriptlang.org/docs/handbook/configuring-watch.html)-inspired `--watch` support
- ⚖️ [Near-complete parity](#Parity-with-Flake8) with the built-in Flake8 rule set
- ⚡️ 10-100x faster than existing linters
- 🐍 Installable via `pip`
- 🤝 Python 3.10 compatibility
- 🛠️ `pyproject.toml` support
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 `--fix` support, for automatic error correction (e.g., automatically remove unused imports)
- 👀 `--watch` support, for continuous file monitoring
- ⚖️ [Near-parity](#how-does-ruff-compare-to-flake8) with the built-in Flake8 rule set
- 🔌 Native re-implementations of popular Flake8 plugins, like [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/) ([`pydocstyle`](https://pypi.org/project/pydocstyle/))

Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface. Ruff can be used to replace Flake8 (plus a variety
of plugins), [`isort`](https://pypi.org/project/isort/), [`pydocstyle`](https://pypi.org/project/pydocstyle/),
[`yesqa`](https://github.com/asottile/yesqa), and even a subset of [`pyupgrade`](https://pypi.org/project/pyupgrade/)
and [`autoflake`](https://pypi.org/project/autoflake/), all while executing tens or hundreds of times
faster than any individual tool.

(Coming from Flake8? Try [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) to
automatically convert your existing configuration.)

Ruff is actively developed and used in major open-source projects
like [Zulip](https://github.com/zulip/zulip), [pydantic](https://github.com/pydantic/pydantic),
and [Saleor](https://github.com/saleor/saleor).

Read the [launch blog post](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).

## Installation and usage
## Table of Contents

1. [Installation and Usage](#installation-and-usage)
2. [Configuration](#configuration)
3. [Supported Rules](#supported-rules)
   1. [Pyflakes](#pyflakes)
   2. [pycodestyle](#pycodestyle)
   3. [pydocstyle](#pydocstyle)
   4. [pyupgrade](#pyupgrade)
   5. [pep8-naming](#pep8-naming)
   6. [flake8-comprehensions](#flake8-comprehensions)
   7. [flake8-bugbear](#flake8-bugbear)
   8. [flake8-builtins](#flake8-builtins)
   9. [flake8-print](#flake8-print)
   10. [flake8-quotes](#flake8-quotes)
   11. [flake8-annotations](#flake8-annotations)
   12. [flake8-2020](#flake8-2020)
   13. [Ruff-specific rules](#ruff-specific-rules)
   14. [Meta rules](#meta-rules)
5. [Editor Integrations](#editor-integrations)
6. [FAQ](#faq)
7. [Development](#development)
8. [Releases](#releases)
9. [Benchmarks](#benchmarks)
10. [License](#license)
11. [Contributing](#contributing)

## Installation and Usage

### Installation

Available as [ruff](https://pypi.org/project/ruff/) on PyPI:
Available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:

```shell
pip install ruff
@@ -38,7 +80,7 @@ pip install ruff

### Usage

To run ruff, try any of the following:
To run Ruff, try any of the following:

```shell
ruff path/to/code/to/check.py
@@ -46,38 +88,95 @@ ruff path/to/code/
ruff path/to/code/*.py
```

You can run ruff in `--watch` mode to automatically re-run on-change:
You can run Ruff in `--watch` mode to automatically re-run on-change:

```shell
ruff path/to/code/ --watch
```

ruff also works with [pre-commit](https://pre-commit.com):
Ruff also works with [pre-commit](https://pre-commit.com):

```yaml
repos:
  - repo: https://github.com/charliermarsh/ruff-pre-commit
    rev: v0.0.48
    rev: v0.0.114
    hooks:
      - id: lint
      - id: ruff
```

<!-- TODO(charlie): Remove this message a few versions after v0.0.86. -->
_Note: prior to `v0.0.86`, `ruff-pre-commit` used `lint` (rather than `ruff`) as the hook ID._

## Configuration

ruff is configurable both via `pyproject.toml` and the command line.

For example, you could configure ruff to only enforce a subset of rules with:
Ruff is configurable both via `pyproject.toml` and the command line. If left unspecified, the
default configuration is equivalent to:

```toml
[tool.ruff]
line-length = 88
select = [
    "F401",
    "F403",

# Enable Flake8's "E" and "F" codes by default.
select = ["E", "F"]
ignore = []

# Exclude a variety of commonly ignored directories.
exclude = [
    ".bzr",
    ".direnv",
    ".eggs",
    ".git",
    ".hg",
    ".mypy_cache",
    ".nox",
    ".pants.d",
    ".ruff_cache",
    ".svn",
    ".tox",
    ".venv",
    "__pypackages__",
    "_build",
    "buck-out",
    "build",
    "dist",
    "node_modules",
    "venv",
]
per-file-ignores = {}

# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

# Assume Python 3.10.
target-version = "py310"
```

Alternatively, on the command-line:
As an example, the following would configure Ruff to (1) avoid checking for line-length
violations (`E501`) and (2) ignore unused import rules in `__init__.py` files:

```toml
[tool.ruff]
select = ["E", "F"]

# Never enforce `E501`.
ignore = ["E501"]

# Ignore `F401` violations in any `__init__.py` file, and in `path/to/file.py`.
per-file-ignores = {"__init__.py" = ["F401"], "path/to/file.py" = ["F401"]}
```

Plugin configurations should be expressed as subsections, e.g.:

```toml
[tool.ruff]
# Add "Q" to the list of enabled codes.
select = ["E", "F", "Q"]

[tool.ruff.flake8-quotes]
docstring-quotes = "double"
```

Alternatively, common configuration settings can be provided via the command-line:

```shell
ruff path/to/code/ --select F401 --select F403
@@ -94,15 +193,19 @@ Arguments:
  <FILES>...

Options:
      --config <CONFIG>
          Path to the `pyproject.toml` file to use for configuration
  -v, --verbose
          Enable verbose logging
  -q, --quiet
          Only log errors
  -s, --silent
          Disable all logging (but still exit with status code "1" upon detecting errors)
  -e, --exit-zero
          Exit with status code "0", even upon detecting errors
  -w, --watch
          Run in watch mode by re-running whenever files change
  -f, --fix
      --fix
          Attempt to automatically fix lint errors
  -n, --no-cache
          Disable cache reads
@@ -130,6 +233,10 @@ Options:
          Enable automatic additions of noqa directives to failing lines
      --dummy-variable-rgx <DUMMY_VARIABLE_RGX>
          Regular expression matching the name of dummy variables
      --target-version <TARGET_VERSION>
          The minimum Python version that should be supported
      --stdin-filename <STDIN_FILENAME>
          The name of the file when passing it through stdin
  -h, --help
          Print help information
  -V, --version
@@ -152,7 +259,7 @@ Exclusions are based on globs, and can be either:
To omit a lint check entirely, add it to the "ignore" list via `--ignore` or `--extend-ignore`,
either on the command-line or in your `pyproject.toml` file.

To ignore an error in-line, ruff uses a `noqa` system similar to [Flake8](https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html).
To ignore an error in-line, Ruff uses a `noqa` system similar to [Flake8](https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html).
To ignore an individual error, add `# noqa: {code}` to the end of the line, like so:

```python
@@ -176,129 +283,354 @@ Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor i
""" # noqa: E501
```

ruff supports several (experimental) workflows to aid in `noqa` management.
Ruff supports several workflows to aid in `noqa` management.

First, ruff provides a special error code, `M001`, to enforce that your `noqa` directives are
First, Ruff provides a special error code, `M001`, to enforce that your `noqa` directives are
"valid", in that the errors they _say_ they ignore are actually being triggered on that line (and
thus suppressed). **You can run `ruff /path/to/file.py --extend-select M001` to flag unused `noqa`
directives.**
thus suppressed). You can run `ruff /path/to/file.py --extend-select M001` to flag unused `noqa`
directives.

Second, ruff can _automatically remove_ unused `noqa` directives via its autofix functionality.
**You can run `ruff /path/to/file.py --extend-select M001 --fix` to automatically remove unused
`noqa` directives.**
Second, Ruff can _automatically remove_ unused `noqa` directives via its autofix functionality.
You can run `ruff /path/to/file.py --extend-select M001 --fix` to automatically remove unused
`noqa` directives.

Third, ruff can _automatically add_ `noqa` directives to all failing lines. This is useful when
migrating a new codebase to ruff. **You can run `ruff /path/to/file.py --add-noqa` to automatically
add `noqa` directives to all failing lines, with the appropriate error codes.**
Third, Ruff can _automatically add_ `noqa` directives to all failing lines. This is useful when
migrating a new codebase to Ruff. You can run `ruff /path/to/file.py --add-noqa` to automatically
add `noqa` directives to all failing lines, with the appropriate error codes.

### Compatibility with Black
## Supported Rules

ruff is compatible with [Black](https://github.com/psf/black) out-of-the-box, as long as
the `line-length` setting is consistent between the two.
Regardless of the rule's origin, Ruff re-implements every rule in Rust as a first-party feature.

As a project, ruff is designed to be used alongside Black and, as such, will defer implementing
stylistic lint rules that are obviated by autoformatting.

### Parity with Flake8

ruff's goal is to achieve feature parity with Flake8 when used (1) without plugins, (2) alongside
Black, and (3) on Python 3 code.

**Under those conditions, ruff implements 44 out of 60 rules.** (ruff is missing: 14 rules related
to string `.format` calls, 1 rule related to docstring parsing, and 1 rule related to redefined
variables.)

ruff also implements some of the most popular Flake8 plugins natively, including:

- [`flake8-builtins`](https://pypi.org/project/flake8-builtins/)
- [`flake8-super`](https://pypi.org/project/flake8-super/)
- [`flake8-print`](https://pypi.org/project/flake8-print/)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (partial)

Beyond rule-set parity, ruff suffers from the following limitations vis-à-vis Flake8:

1. ruff does not yet support a few Python 3.9 and 3.10 language features, including structural
   pattern matching and parenthesized context managers.
2. Flake8 has a plugin architecture and supports writing custom lint rules.

## Rules

The ✅ emoji indicates a rule is enabled by default.
By default, Ruff enables all `E` and `F` error codes, which correspond to those built-in to Flake8.

The 🛠 emoji indicates that a rule is automatically fixable by the `--fix` command-line option.
|
||||
| Code | Name | Message | | |
|
||||
| ---- | ---- | ------- | --- | --- |
|
||||
| E402 | ModuleImportNotAtTopOfFile | Module level import not at top of file | ✅ | |
|
||||
| E501 | LineTooLong | Line too long (89 > 88 characters) | ✅ | |
|
||||
| E711 | NoneComparison | Comparison to `None` should be `cond is None` | ✅ | |
|
||||
| E712 | TrueFalseComparison | Comparison to `True` should be `cond is True` | ✅ | |
|
||||
| E713 | NotInTest | Test for membership should be `not in` | ✅ | |
|
||||
| E714 | NotIsTest | Test for object identity should be `is not` | ✅ | |
|
||||
| E721 | TypeComparison | Do not compare types, use `isinstance()` | ✅ | |
|
||||
| E722 | DoNotUseBareExcept | Do not use bare `except` | ✅ | |
|
||||
| E731 | DoNotAssignLambda | Do not assign a lambda expression, use a def | ✅ | |
|
||||
| E741 | AmbiguousVariableName | Ambiguous variable name: `...` | ✅ | |
|
||||
| E742 | AmbiguousClassName | Ambiguous class name: `...` | ✅ | |
|
||||
| E743 | AmbiguousFunctionName | Ambiguous function name: `...` | ✅ | |
|
||||
| E902 | IOError | IOError: `...` | ✅ | |
|
||||
| E999 | SyntaxError | SyntaxError: `...` | ✅ | |
|
||||
| F401 | UnusedImport | `...` imported but unused | ✅ | 🛠 |
|
||||
| F402 | ImportShadowedByLoopVar | Import `...` from line 1 shadowed by loop variable | ✅ | |
|
||||
| F403 | ImportStarUsed | `from ... import *` used; unable to detect undefined names | ✅ | |
|
||||
| F404 | LateFutureImport | `from __future__` imports must occur at the beginning of the file | ✅ | |
|
||||
| F405 | ImportStarUsage | `...` may be undefined, or defined from star imports: `...` | ✅ | |
|
||||
| F406 | ImportStarNotPermitted | `from ... import *` only allowed at module level | ✅ | |
|
||||
| F407 | FutureFeatureNotDefined | Future feature `...` is not defined | ✅ | |
|
||||
| F541 | FStringMissingPlaceholders | f-string without any placeholders | ✅ | |
|
||||
| F601 | MultiValueRepeatedKeyLiteral | Dictionary key literal repeated | ✅ | |
|
||||
| F602 | MultiValueRepeatedKeyVariable | Dictionary key `...` repeated | ✅ | |
|
||||
| F621 | ExpressionsInStarAssignment | Too many expressions in star-unpacking assignment | ✅ | |
|
||||
| F622 | TwoStarredExpressions | Two starred expressions in assignment | ✅ | |
|
||||
| F631 | AssertTuple | Assert test is a non-empty tuple, which is always `True` | ✅ | |
|
||||
| F632 | IsLiteral | Use `==` and `!=` to compare constant literals | ✅ | |
|
||||
| F633 | InvalidPrintSyntax | Use of `>>` is invalid with `print` function | ✅ | |
|
||||
| F634 | IfTuple | If test is a tuple, which is always `True` | ✅ | |
|
||||
| F701 | BreakOutsideLoop | `break` outside loop | ✅ | |
|
||||
| F702 | ContinueOutsideLoop | `continue` not properly in loop | ✅ | |
|
||||
| F704 | YieldOutsideFunction | `yield` or `yield from` statement outside of a function/method | ✅ | |
|
||||
| F706 | ReturnOutsideFunction | `return` statement outside of a function/method | ✅ | |
|
||||
| F707 | DefaultExceptNotLast | An `except:` block as not the last exception handler | ✅ | |
|
||||
| F722 | ForwardAnnotationSyntaxError | Syntax error in forward annotation: `...` | ✅ | |
|
||||
| F821 | UndefinedName | Undefined name `...` | ✅ | |
|
||||
| F822 | UndefinedExport | Undefined name `...` in `__all__` | ✅ | |
|
||||
| F823 | UndefinedLocal | Local variable `...` referenced before assignment | ✅ | |
|
||||
| F831 | DuplicateArgumentName | Duplicate argument name in function definition | ✅ | |
|
||||
| F841 | UnusedVariable | Local variable `...` is assigned to but never used | ✅ | |
|
||||
| F901 | RaiseNotImplemented | `raise NotImplemented` should be `raise NotImplementedError` | ✅ | |
|
||||
| A001 | BuiltinVariableShadowing | Variable `...` is shadowing a python builtin | | |
|
||||
| A002 | BuiltinArgumentShadowing | Argument `...` is shadowing a python builtin | | |
|
||||
| A003 | BuiltinAttributeShadowing | Class attribute `...` is shadowing a python builtin | | |
|
||||
| SPR001 | SuperCallWithParameters | Use `super()` instead of `super(__class__, self)` | | 🛠 |
|
||||
| T201 | PrintFound | `print` found | | 🛠 |
|
||||
| T203 | PPrintFound | `pprint` found | | 🛠 |
|
||||
| U001 | UselessMetaclassType | `__metaclass__ = type` is implied | | 🛠 |
|
||||
| R001 | UselessObjectInheritance | Class `...` inherits from object | | 🛠 |
|
||||
| R002 | NoAssertEquals | `assertEquals` is deprecated, use `assertEqual` instead | | 🛠 |
|
||||
| M001 | UnusedNOQA | Unused `noqa` directive | | 🛠 |
|
||||
<!-- Sections automatically generated by `cargo dev generate-rules-table`. -->
|
||||
<!-- Begin auto-generated sections. -->
|
||||
|
||||
## Integrations
|
||||
### Pyflakes
|
||||
|
||||
For more, see [Pyflakes](https://pypi.org/project/pyflakes/2.5.0/) on PyPI.
|
||||
|
||||
| Code | Name | Message | Fix |
|
||||
| ---- | ---- | ------- | --- |
|
||||
| F401 | UnusedImport | `...` imported but unused | 🛠 |
|
||||
| F402 | ImportShadowedByLoopVar | Import `...` from line 1 shadowed by loop variable | |
|
||||
| F403 | ImportStarUsed | `from ... import *` used; unable to detect undefined names | |
|
||||
| F404 | LateFutureImport | `from __future__` imports must occur at the beginning of the file | |
|
||||
| F405 | ImportStarUsage | `...` may be undefined, or defined from star imports: `...` | |
|
||||
| F406 | ImportStarNotPermitted | `from ... import *` only allowed at module level | |
|
||||
| F407 | FutureFeatureNotDefined | Future feature `...` is not defined | |
|
||||
| F541 | FStringMissingPlaceholders | f-string without any placeholders | |
|
||||
| F601 | MultiValueRepeatedKeyLiteral | Dictionary key literal repeated | |
|
||||
| F602 | MultiValueRepeatedKeyVariable | Dictionary key `...` repeated | |
|
||||
| F621 | ExpressionsInStarAssignment | Too many expressions in star-unpacking assignment | |
|
||||
| F622 | TwoStarredExpressions | Two starred expressions in assignment | |
|
||||
| F631 | AssertTuple | Assert test is a non-empty tuple, which is always `True` | |
|
||||
| F632 | IsLiteral | Use `==` and `!=` to compare constant literals | 🛠 |
|
||||
| F633 | InvalidPrintSyntax | Use of `>>` is invalid with `print` function | |
|
||||
| F634 | IfTuple | If test is a tuple, which is always `True` | |
|
||||
| F701 | BreakOutsideLoop | `break` outside loop | |
|
||||
| F702 | ContinueOutsideLoop | `continue` not properly in loop | |
|
||||
| F704 | YieldOutsideFunction | `yield` or `yield from` statement outside of a function | |
|
||||
| F706 | ReturnOutsideFunction | `return` statement outside of a function/method | |
|
||||
| F707 | DefaultExceptNotLast | An `except:` block as not the last exception handler | |
|
||||
| F722 | ForwardAnnotationSyntaxError | Syntax error in forward annotation: `...` | |
|
||||
| F821 | UndefinedName | Undefined name `...` | |
|
||||
| F822 | UndefinedExport | Undefined name `...` in `__all__` | |
|
||||
| F823 | UndefinedLocal | Local variable `...` referenced before assignment | |
|
||||
| F831 | DuplicateArgumentName | Duplicate argument name in function definition | |
|
||||
| F841 | UnusedVariable | Local variable `...` is assigned to but never used | |
|
||||
| F901 | RaiseNotImplemented | `raise NotImplemented` should be `raise NotImplementedError` | 🛠 |
|
||||
|
||||
### pycodestyle
|
||||
|
||||
For more, see [pycodestyle](https://pypi.org/project/pycodestyle/2.9.1/) on PyPI.
|
||||
|
||||
| Code | Name | Message | Fix |
|
||||
| ---- | ---- | ------- | --- |
|
||||
| E402 | ModuleImportNotAtTopOfFile | Module level import not at top of file | |
|
||||
| E501 | LineTooLong | Line too long (89 > 88 characters) | |
|
||||
| E711 | NoneComparison | Comparison to `None` should be `cond is None` | |
|
||||
| E712 | TrueFalseComparison | Comparison to `True` should be `cond is True` | |
|
||||
| E713 | NotInTest | Test for membership should be `not in` | |
|
||||
| E714 | NotIsTest | Test for object identity should be `is not` | |
|
||||
| E721 | TypeComparison | Do not compare types, use `isinstance()` | |
|
||||
| E722 | DoNotUseBareExcept | Do not use bare `except` | |
|
||||
| E731 | DoNotAssignLambda | Do not assign a lambda expression, use a def | |
|
||||
| E741 | AmbiguousVariableName | Ambiguous variable name: `...` | |
|
||||
| E742 | AmbiguousClassName | Ambiguous class name: `...` | |
|
||||
| E743 | AmbiguousFunctionName | Ambiguous function name: `...` | |
|
||||
| E902 | IOError | IOError: `...` | |
|
||||
| E999 | SyntaxError | SyntaxError: `...` | |
|
||||
| W292 | NoNewLineAtEndOfFile | No newline at end of file | |
|
||||
| W605 | InvalidEscapeSequence | Invalid escape sequence: '\c' | |
|
||||
|
||||
### isort
|
||||
|
||||
For more, see [isort](https://pypi.org/project/isort/5.10.1/) on PyPI.
|
||||
|
||||
| Code | Name | Message | Fix |
|
||||
| ---- | ---- | ------- | --- |
|
||||
| I001 | UnsortedImports | Import block is un-sorted or un-formatted | 🛠 |
|
||||
|
||||
### pydocstyle

For more, see [pydocstyle](https://pypi.org/project/pydocstyle/6.1.1/) on PyPI.

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| D100 | PublicModule | Missing docstring in public module | |
| D101 | PublicClass | Missing docstring in public class | |
| D102 | PublicMethod | Missing docstring in public method | |
| D103 | PublicFunction | Missing docstring in public function | |
| D104 | PublicPackage | Missing docstring in public package | |
| D105 | MagicMethod | Missing docstring in magic method | |
| D106 | PublicNestedClass | Missing docstring in public nested class | |
| D107 | PublicInit | Missing docstring in `__init__` | |
| D200 | FitsOnOneLine | One-line docstring should fit on one line | |
| D201 | NoBlankLineBeforeFunction | No blank lines allowed before function docstring (found 1) | 🛠 |
| D202 | NoBlankLineAfterFunction | No blank lines allowed after function docstring (found 1) | 🛠 |
| D203 | OneBlankLineBeforeClass | 1 blank line required before class docstring | 🛠 |
| D204 | OneBlankLineAfterClass | 1 blank line required after class docstring | 🛠 |
| D205 | BlankLineAfterSummary | 1 blank line required between summary line and description | 🛠 |
| D206 | IndentWithSpaces | Docstring should be indented with spaces, not tabs | |
| D207 | NoUnderIndentation | Docstring is under-indented | 🛠 |
| D208 | NoOverIndentation | Docstring is over-indented | 🛠 |
| D209 | NewLineAfterLastParagraph | Multi-line docstring closing quotes should be on a separate line | 🛠 |
| D210 | NoSurroundingWhitespace | No whitespaces allowed surrounding docstring text | 🛠 |
| D211 | NoBlankLineBeforeClass | No blank lines allowed before class docstring | 🛠 |
| D212 | MultiLineSummaryFirstLine | Multi-line docstring summary should start at the first line | |
| D213 | MultiLineSummarySecondLine | Multi-line docstring summary should start at the second line | |
| D214 | SectionNotOverIndented | Section is over-indented ("Returns") | 🛠 |
| D215 | SectionUnderlineNotOverIndented | Section underline is over-indented ("Returns") | 🛠 |
| D300 | UsesTripleQuotes | Use """triple double quotes""" | |
| D400 | EndsInPeriod | First line should end with a period | |
| D402 | NoSignature | First line should not be the function's signature | |
| D403 | FirstLineCapitalized | First word of the first line should be properly capitalized | |
| D404 | NoThisPrefix | First word of the docstring should not be 'This' | |
| D405 | CapitalizeSectionName | Section name should be properly capitalized ("returns") | 🛠 |
| D406 | NewLineAfterSectionName | Section name should end with a newline ("Returns") | 🛠 |
| D407 | DashedUnderlineAfterSection | Missing dashed underline after section ("Returns") | 🛠 |
| D408 | SectionUnderlineAfterName | Section underline should be in the line following the section's name ("Returns") | 🛠 |
| D409 | SectionUnderlineMatchesSectionLength | Section underline should match the length of its name ("Returns") | 🛠 |
| D410 | BlankLineAfterSection | Missing blank line after section ("Returns") | 🛠 |
| D411 | BlankLineBeforeSection | Missing blank line before section ("Returns") | 🛠 |
| D412 | NoBlankLinesBetweenHeaderAndContent | No blank lines allowed between a section header and its content ("Returns") | 🛠 |
| D413 | BlankLineAfterLastSection | Missing blank line after last section ("Returns") | 🛠 |
| D414 | NonEmptySection | Section has no content ("Returns") | |
| D415 | EndsInPunctuation | First line should end with a period, question mark, or exclamation point | |
| D416 | SectionNameEndsInColon | Section name should end with a colon ("Returns") | 🛠 |
| D417 | DocumentAllArguments | Missing argument descriptions in the docstring: `x`, `y` | |
| D418 | SkipDocstring | Function decorated with `@overload` shouldn't contain a docstring | |
| D419 | NonEmpty | Docstring is empty | |

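For illustration, here is a small hypothetical function (not taken from the Ruff docs) whose docstring trips several of the checks above:

```python
def add(a, b):
    """add two numbers
    and return their sum"""
    # D403: first word of the first line is not capitalized
    # D400/D415: first line does not end with a period
    # D205: missing blank line between the summary line and the description
    # D209: closing quotes of a multi-line docstring are not on a separate line
    return a + b
```
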
### pyupgrade

For more, see [pyupgrade](https://pypi.org/project/pyupgrade/3.2.0/) on PyPI.

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| U001 | UselessMetaclassType | `__metaclass__ = type` is implied | 🛠 |
| U002 | UnnecessaryAbspath | `abspath(__file__)` is unnecessary in Python 3.9 and later | 🛠 |
| U003 | TypeOfPrimitive | Use `str` instead of `type(...)` | 🛠 |
| U004 | UselessObjectInheritance | Class `...` inherits from object | 🛠 |
| U005 | DeprecatedUnittestAlias | `assertEquals` is deprecated, use `assertEqual` instead | 🛠 |
| U006 | UsePEP585Annotation | Use `list` instead of `List` for type annotations | 🛠 |
| U007 | UsePEP604Annotation | Use `X \| Y` for type annotations | 🛠 |
| U008 | SuperCallWithParameters | Use `super()` instead of `super(__class__, self)` | 🛠 |
| U009 | PEP3120UnnecessaryCodingComment | utf-8 encoding declaration is unnecessary | 🛠 |
| U010 | UnnecessaryFutureImport | Unnecessary `__future__` import `...` for target Python version | 🛠 |
| U011 | UnnecessaryLRUCacheParams | Unnecessary parameters to functools.lru_cache | 🛠 |
| U012 | UnnecessaryEncodeUTF8 | Unnecessary call to `encode` as UTF-8 | 🛠 |

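As a rough illustration (the class and names below are invented, and a Python 3.9+ target is assumed), these are the kinds of patterns the checks above modernize:

```python
from typing import List


class Config(object):  # U004: inheriting from `object` is redundant on Python 3
    def __init__(self, names: List[str]) -> None:  # U006: prefer the builtin `list[str]`
        super(Config, self).__init__()  # U008: a bare `super()` call is enough
        self.names = names
```
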
### pep8-naming

For more, see [pep8-naming](https://pypi.org/project/pep8-naming/0.13.2/) on PyPI.

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| N801 | InvalidClassName | Class name `...` should use CapWords convention | |
| N802 | InvalidFunctionName | Function name `...` should be lowercase | |
| N803 | InvalidArgumentName | Argument name `...` should be lowercase | |
| N804 | InvalidFirstArgumentNameForClassMethod | First argument of a class method should be named `cls` | |
| N805 | InvalidFirstArgumentNameForMethod | First argument of a method should be named `self` | |
| N806 | NonLowercaseVariableInFunction | Variable `...` in function should be lowercase | |
| N807 | DunderFunctionName | Function name should not start and end with `__` | |
| N811 | ConstantImportedAsNonConstant | Constant `...` imported as non-constant `...` | |
| N812 | LowercaseImportedAsNonLowercase | Lowercase `...` imported as non-lowercase `...` | |
| N813 | CamelcaseImportedAsLowercase | Camelcase `...` imported as lowercase `...` | |
| N814 | CamelcaseImportedAsConstant | Camelcase `...` imported as constant `...` | |
| N815 | MixedCaseVariableInClassScope | Variable `mixedCase` in class scope should not be mixedCase | |
| N816 | MixedCaseVariableInGlobalScope | Variable `mixedCase` in global scope should not be mixedCase | |
| N817 | CamelcaseImportedAsAcronym | Camelcase `...` imported as acronym `...` | |
| N818 | ErrorSuffixOnExceptionName | Exception name `...` should be named with an Error suffix | |

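A short, purely illustrative snippet (names invented here) showing the kinds of identifiers these checks flag:

```python
class http_client:  # N801: class names should use the CapWords convention
    def FetchURL(self, URL):  # N802: function name should be lowercase
        # N803: argument name `URL` should be lowercase
        TimeoutSeconds = 30  # N806: variable in function should be lowercase
        return URL, TimeoutSeconds
```
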
### flake8-comprehensions

For more, see [flake8-comprehensions](https://pypi.org/project/flake8-comprehensions/3.10.1/) on PyPI.

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| C400 | UnnecessaryGeneratorList | Unnecessary generator (rewrite as a `list` comprehension) | 🛠 |
| C401 | UnnecessaryGeneratorSet | Unnecessary generator (rewrite as a `set` comprehension) | 🛠 |
| C402 | UnnecessaryGeneratorDict | Unnecessary generator (rewrite as a `dict` comprehension) | 🛠 |
| C403 | UnnecessaryListComprehensionSet | Unnecessary `list` comprehension (rewrite as a `set` comprehension) | 🛠 |
| C404 | UnnecessaryListComprehensionDict | Unnecessary `list` comprehension (rewrite as a `dict` comprehension) | 🛠 |
| C405 | UnnecessaryLiteralSet | Unnecessary `(list\|tuple)` literal (rewrite as a `set` literal) | 🛠 |
| C406 | UnnecessaryLiteralDict | Unnecessary `(list\|tuple)` literal (rewrite as a `dict` literal) | 🛠 |
| C408 | UnnecessaryCollectionCall | Unnecessary `(dict\|list\|tuple)` call (rewrite as a literal) | 🛠 |
| C409 | UnnecessaryLiteralWithinTupleCall | Unnecessary `(list\|tuple)` literal passed to `tuple()` (remove the outer call to `tuple()`) | 🛠 |
| C410 | UnnecessaryLiteralWithinListCall | Unnecessary `(list\|tuple)` literal passed to `list()` (rewrite as a `list` literal) | 🛠 |
| C411 | UnnecessaryListCall | Unnecessary `list` call (remove the outer call to `list()`) | 🛠 |
| C413 | UnnecessaryCallAroundSorted | Unnecessary `(list\|reversed)` call around `sorted()` | |
| C414 | UnnecessaryDoubleCastOrProcess | Unnecessary `(list\|reversed\|set\|sorted\|tuple)` call within `(list\|set\|sorted\|tuple)()` | |
| C415 | UnnecessarySubscriptReversal | Unnecessary subscript reversal of iterable within `(reversed\|set\|sorted)()` | |
| C416 | UnnecessaryComprehension | Unnecessary `(list\|set)` comprehension (rewrite using `(list\|set)()`) | 🛠 |
| C417 | UnnecessaryMap | Unnecessary `map` usage (rewrite using a `(list\|set\|dict)` comprehension) | |

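For example (a minimal, invented sketch), each line below shows a pattern these checks flag and the rewrite they suggest:

```python
data = [1, 2, 2, 3]

squares = list(x * x for x in data)  # C400: rewrite as [x * x for x in data]
unique = set([x for x in data])      # C403: rewrite as {x for x in data}
empty = dict()                       # C408: rewrite as {}
```
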
### flake8-bugbear

For more, see [flake8-bugbear](https://pypi.org/project/flake8-bugbear/22.10.27/) on PyPI.

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| B002 | UnaryPrefixIncrement | Python does not support the unary prefix increment. | |
| B003 | AssignmentToOsEnviron | Assigning to `os.environ` doesn't clear the environment. | |
| B004 | UnreliableCallableCheck | Using `hasattr(x, '__call__')` to test if x is callable is unreliable. Use `callable(x)` for consistent results. | |
| B005 | StripWithMultiCharacters | Using `.strip()` with multi-character strings is misleading the reader. | |
| B006 | MutableArgumentDefault | Do not use mutable data structures for argument defaults. | |
| B007 | UnusedLoopControlVariable | Loop control variable `i` not used within the loop body. | 🛠 |
| B008 | FunctionCallArgumentDefault | Do not perform function calls in argument defaults. | |
| B009 | GetAttrWithConstant | Do not call `getattr` with a constant attribute value, it is not any safer than normal property access. | 🛠 |
| B010 | SetAttrWithConstant | Do not call `setattr` with a constant attribute value, it is not any safer than normal property access. | |
| B011 | DoNotAssertFalse | Do not `assert False` (`python -O` removes these calls), raise `AssertionError()` | 🛠 |
| B013 | RedundantTupleInExceptionHandler | A length-one tuple literal is redundant. Write `except ValueError:` instead of `except (ValueError,):`. | |
| B014 | DuplicateHandlerException | Exception handler with duplicate exception: `ValueError` | 🛠 |
| B015 | UselessComparison | Pointless comparison. This comparison does nothing but waste CPU instructions. Either prepend `assert` or remove it. | |
| B016 | CannotRaiseLiteral | Cannot raise a literal. Did you intend to return it or raise an Exception? | |
| B017 | NoAssertRaisesException | `assertRaises(Exception):` should be considered evil. | |
| B018 | UselessExpression | Found useless expression. Either assign it to a variable or remove it. | |
| B019 | CachedInstanceMethod | Use of `functools.lru_cache` or `functools.cache` on methods can lead to memory leaks. | |
| B025 | DuplicateTryBlockException | try-except block with duplicate exception `Exception` | |
| B026 | StarArgUnpackingAfterKeywordArg | Star-arg unpacking after a keyword argument is strongly discouraged. | |

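As one illustrative sketch (the function below is invented), B006 flags the classic mutable-default pitfall:

```python
def append_item(item, bucket=[]):  # B006: the default list is shared across calls
    bucket.append(item)
    return bucket


append_item(1)  # [1]
append_item(2)  # [1, 2] -- the same list object, which is rarely what was intended
```
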
### flake8-builtins

For more, see [flake8-builtins](https://pypi.org/project/flake8-builtins/2.0.1/) on PyPI.

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| A001 | BuiltinVariableShadowing | Variable `...` is shadowing a python builtin | |
| A002 | BuiltinArgumentShadowing | Argument `...` is shadowing a python builtin | |
| A003 | BuiltinAttributeShadowing | Class attribute `...` is shadowing a python builtin | |

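For instance (illustrative names only), names that shadow builtins look like this:

```python
data = "a,b,c"

list = data.split(",")  # A001: the variable `list` shadows the builtin


def count(input):  # A002: the argument `input` shadows the builtin
    return len(input)
```
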
### flake8-print

For more, see [flake8-print](https://pypi.org/project/flake8-print/5.0.0/) on PyPI.

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| T201 | PrintFound | `print` found | 🛠 |
| T203 | PPrintFound | `pprint` found | 🛠 |

### flake8-quotes

For more, see [flake8-quotes](https://pypi.org/project/flake8-quotes/3.3.1/) on PyPI.

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| Q000 | BadQuotesInlineString | Single quotes found but double quotes preferred | |
| Q001 | BadQuotesMultilineString | Single quote multiline found but double quotes preferred | |
| Q002 | BadQuotesDocstring | Single quote docstring found but double quotes preferred | |
| Q003 | AvoidQuoteEscape | Change outer quotes to avoid escaping inner quotes | |

### flake8-annotations

For more, see [flake8-annotations](https://pypi.org/project/flake8-annotations/2.9.1/) on PyPI.

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| ANN001 | MissingTypeFunctionArgument | Missing type annotation for function argument `...` | |
| ANN002 | MissingTypeArgs | Missing type annotation for `*...` | |
| ANN003 | MissingTypeKwargs | Missing type annotation for `**...` | |
| ANN101 | MissingTypeSelf | Missing type annotation for `...` in method | |
| ANN102 | MissingTypeCls | Missing type annotation for `...` in classmethod | |
| ANN201 | MissingReturnTypePublicFunction | Missing return type annotation for public function `...` | |
| ANN202 | MissingReturnTypePrivateFunction | Missing return type annotation for private function `...` | |
| ANN204 | MissingReturnTypeMagicMethod | Missing return type annotation for magic method `...` | |
| ANN205 | MissingReturnTypeStaticMethod | Missing return type annotation for staticmethod `...` | |
| ANN206 | MissingReturnTypeClassMethod | Missing return type annotation for classmethod `...` | |
| ANN401 | DynamicallyTypedExpression | Dynamically typed expressions (typing.Any) are disallowed in `...` | |

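A minimal, invented example of what the annotation checks report, and the annotated form they expect:

```python
def scale(value, factor=2):  # ANN001: `value` and `factor` lack type annotations
    return value * factor    # ANN201: missing return type annotation on a public function


def scale_annotated(value: float, factor: float = 2.0) -> float:
    return value * factor
```
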
### flake8-2020

For more, see [flake8-2020](https://pypi.org/project/flake8-2020/1.7.0/) on PyPI.

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| YTT101 | SysVersionSlice3Referenced | `sys.version[:3]` referenced (python3.10), use `sys.version_info` | |
| YTT102 | SysVersion2Referenced | `sys.version[2]` referenced (python3.10), use `sys.version_info` | |
| YTT103 | SysVersionCmpStr3 | `sys.version` compared to string (python3.10), use `sys.version_info` | |
| YTT201 | SysVersionInfo0Eq3Referenced | `sys.version_info[0] == 3` referenced (python4), use `>=` | |
| YTT202 | SixPY3Referenced | `six.PY3` referenced (python4), use `not six.PY2` | |
| YTT203 | SysVersionInfo1CmpInt | `sys.version_info[1]` compared to integer (python4), compare `sys.version_info` to tuple | |
| YTT204 | SysVersionInfoMinorCmpInt | `sys.version_info.minor` compared to integer (python4), compare `sys.version_info` to tuple | |
| YTT301 | SysVersion0Referenced | `sys.version[0]` referenced (python10), use `sys.version_info` | |
| YTT302 | SysVersionCmpStr10 | `sys.version` compared to string (python10), use `sys.version_info` | |
| YTT303 | SysVersionSlice1Referenced | `sys.version[:1]` referenced (python10), use `sys.version_info` | |

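For illustration (a minimal sketch, not from the Ruff docs), these are the kinds of version checks that break on Python 3.10 or a future major version:

```python
import sys

if sys.version[:3] == "3.9":  # YTT101: "3.10"[:3] is "3.1", so this misbehaves on 3.10
    ...
if sys.version_info[0] == 3:  # YTT201: silently excludes a hypothetical Python 4
    ...

# Safer: compare `sys.version_info` against a tuple.
if sys.version_info >= (3, 9):
    ...
```
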
### Ruff-specific rules

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| RUF001 | AmbiguousUnicodeCharacterString | String contains ambiguous unicode character '𝐁' (did you mean 'B'?) | 🛠 |
| RUF002 | AmbiguousUnicodeCharacterDocstring | Docstring contains ambiguous unicode character '𝐁' (did you mean 'B'?) | 🛠 |
| RUF003 | AmbiguousUnicodeCharacterComment | Comment contains ambiguous unicode character '𝐁' (did you mean 'B'?) | |

### Meta rules

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| M001 | UnusedNOQA | Unused `noqa` directive | 🛠 |

<!-- End auto-generated sections. -->

## Editor Integrations

### VS Code (Official)

Download the [Ruff VS Code extension](https://marketplace.visualstudio.com/items?itemName=charliermarsh.ruff).

### PyCharm

Ruff can be installed as an [External Tool](https://www.jetbrains.com/help/pycharm/configuring-third-party-tools.html)
in PyCharm. Open the Preferences pane, then navigate to "Tools", then "External Tools". From there,
add a new tool with the following configuration:



Ruff should then appear as a runnable action:



### Vim & Neovim (Unofficial)

Ruff is available as part of the [coc-pyright](https://github.com/fannheyward/coc-pyright) extension
for coc.nvim.

Ruff can also be integrated via [efm](https://github.com/neovim/nvim-lspconfig/blob/master/doc/server_configurations.md#efm)
in just a [few lines](https://github.com/JafarAbdi/myconfigs/blob/6f0b6b2450e92ec8fc50422928cd22005b919110/efm-langserver/config.yaml#L14-L20).

### Language Server Protocol (Unofficial)

[`ruffd`](https://github.com/Seamooo/ruffd) is a Rust-based language server for Ruff that implements
the Language Server Protocol (LSP).

### GitHub Actions

GitHub Actions has everything you need to run Ruff out-of-the-box:

```yaml
name: CI
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install ruff
      - name: Run Ruff
        run: ruff .
```

## FAQ

### Is Ruff compatible with Black?

Yes. Ruff is compatible with [Black](https://github.com/psf/black) out-of-the-box, as long as
the `line-length` setting is consistent between the two.

As a project, Ruff is designed to be used alongside Black and, as such, will defer implementing
stylistic lint rules that are obviated by autoformatting.

### How does Ruff compare to Flake8?

(Coming from Flake8? Try [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) to
automatically convert your existing configuration.)

Ruff can be used as a (near) drop-in replacement for Flake8 when used (1) without or with a small
number of plugins, (2) alongside Black, and (3) on Python 3 code.

Under those conditions, Ruff is missing 14 rules related to string `.format` calls, 1 rule related
to docstring parsing, and 1 rule related to redefined variables.

Ruff re-implements some of the most popular Flake8 plugins and related code quality tools natively,
including:

- [`pydocstyle`](https://pypi.org/project/pydocstyle/)
- [`pep8-naming`](https://pypi.org/project/pep8-naming/)
- [`yesqa`](https://github.com/asottile/yesqa)
- [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
- [`flake8-builtins`](https://pypi.org/project/flake8-builtins/)
- [`flake8-super`](https://pypi.org/project/flake8-super/)
- [`flake8-print`](https://pypi.org/project/flake8-print/)
- [`flake8-quotes`](https://pypi.org/project/flake8-quotes/)
- [`flake8-annotations`](https://pypi.org/project/flake8-annotations/)
- [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (21/32)
- [`flake8-2020`](https://pypi.org/project/flake8-2020/)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (15/34)
- [`autoflake`](https://pypi.org/project/autoflake/) (1/7)

Beyond rule-set parity, Ruff suffers from the following limitations vis-à-vis Flake8:

1. Ruff does not yet support a few Python 3.9 and 3.10 language features, including structural
   pattern matching and parenthesized context managers.
2. Flake8 has a plugin architecture and supports writing custom lint rules. (To date, popular Flake8
   plugins have been re-implemented within Ruff directly.)

### Which tools does Ruff replace?

Today, Ruff can be used to replace Flake8 when used with any of the following plugins:

- [`pep8-naming`](https://pypi.org/project/pep8-naming/)
- [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
- [`flake8-builtins`](https://pypi.org/project/flake8-builtins/)
- [`flake8-super`](https://pypi.org/project/flake8-super/)
- [`flake8-print`](https://pypi.org/project/flake8-print/)
- [`flake8-quotes`](https://pypi.org/project/flake8-quotes/)
- [`flake8-annotations`](https://pypi.org/project/flake8-annotations/)
- [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (21/32)
- [`flake8-2020`](https://pypi.org/project/flake8-2020/)

Ruff can also replace [`isort`](https://pypi.org/project/isort/), [`yesqa`](https://github.com/asottile/yesqa),
and a subset of the rules implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (15/34).

If you're looking to use Ruff, but rely on an unsupported Flake8 plugin, feel free to file an Issue.

### Do I need to install Rust to use Ruff?

Nope! Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:

```shell
pip install ruff
```

Ruff ships with wheels for all major platforms, which enables `pip` to install Ruff without relying
on Rust at all.

### Can I write my own plugins for Ruff?

Ruff does not yet support third-party plugins, though a plugin system is within-scope for the
project. See [#283](https://github.com/charliermarsh/ruff/issues/283) for more.

### How does Ruff's import sorting compare to [`isort`](https://pypi.org/project/isort/)?

Ruff's import sorting is intended to be equivalent to `isort` when used with `profile = "black"` and a
few other settings (`combine_as_imports = true`, `order_by_type = false`, and
`case_sensitive = true`).

Like `isort`, Ruff's import sorting is compatible with Black.

Ruff is less configurable than `isort`, but supports the `known-first-party`, `known-third-party`,
`extra-standard-library`, and `src` settings, like so:

```toml
[tool.ruff]
select = [
    # Pyflakes
    "F",
    # Pycodestyle
    "E",
    "W",
    # isort
    "I"
]
src = ["src", "tests"]

[tool.ruff.isort]
known-first-party = ["my_module1", "my_module2"]
```

### Does Ruff support NumPy- or Google-style docstrings?

Yes! To enable a specific docstring convention, start by enabling all `pydocstyle` error codes, and
then selectively disabling based on your [preferred convention](https://www.pydocstyle.org/en/latest/error_codes.html#default-conventions).

For example, if you're coming from `flake8-docstrings`, the following configuration is equivalent to
`--docstring-convention=numpy`:

```toml
[tool.ruff]
extend-select = ["D"]
extend-ignore = [
    "D107",
    "D203",
    "D212",
    "D213",
    "D402",
    "D413",
    "D415",
    "D416",
    "D417",
]
```

Similarly, the following is equivalent to `--docstring-convention=google`:

```toml
[tool.ruff]
extend-select = ["D"]
extend-ignore = [
    "D203",
    "D204",
    "D213",
    "D215",
    "D400",
    "D404",
    "D406",
    "D407",
    "D408",
    "D409",
    "D413",
]
```

And the following is equivalent to `--docstring-convention=pep8`:

```toml
[tool.ruff]
extend-select = ["D"]
extend-ignore = [
    "D203",
    "D212",
    "D213",
    "D214",
    "D215",
    "D404",
    "D405",
    "D406",
    "D407",
    "D408",
    "D409",
    "D410",
    "D411",
    "D413",
    "D415",
    "D416",
    "D417",
]
```

## Development

Ruff is written in Rust (1.65.0). You'll need to install the [Rust toolchain](https://www.rust-lang.org/tools/install)
for development.

Assuming you have `cargo` installed, you can run:

```shell
cargo run resources/test/fixtures
```

For development, we use [nightly Rust](https://rust-lang.github.io/rustup/concepts/channels.html#working-with-nightly-rust):

```shell
cargo +nightly fmt
cargo +nightly clippy
cargo +nightly test
```

## Releases

Ruff is distributed on [PyPI](https://pypi.org/project/ruff/), and published via [`maturin`](https://github.com/PyO3/maturin).

See: `.github/workflows/release.yaml`.

## Benchmarks

First, clone [CPython](https://github.com/python/cpython). It's a large and diverse Python codebase,
which makes it a good target for benchmarking.

```shell
hyperfine --ignore-failure --warmup 5 \
```

In order, these evaluate:

- Ruff
- Pylint
- Pyflakes
- autoflake
- pycodestyle
- Flake8

benches/source_code_locator.rs (new file, 18 lines):

```rust
use std::path::Path;

use criterion::{black_box, criterion_group, criterion_main, Criterion};
use ropey::Rope;
use ruff::fs;

fn criterion_benchmark(c: &mut Criterion) {
    let contents = fs::read_file(Path::new("resources/test/fixtures/D.py")).unwrap();
    c.bench_function("rope", |b| {
        b.iter(|| {
            let rope = Rope::from_str(black_box(&contents));
            rope.line_to_char(black_box(4));
        })
    });
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
```

Deleted (27 lines): a helper binary that generated the Markdown-compatible table of supported lint rules:

```rust
/// Generate a Markdown-compatible table of supported lint rules.
use ruff::checks::{CheckCode, ALL_CHECK_CODES, DEFAULT_CHECK_CODES};

fn main() {
    let mut check_codes: Vec<CheckCode> = ALL_CHECK_CODES.to_vec();
    check_codes.sort();

    println!("| Code | Name | Message | | |");
    println!("| ---- | ---- | ------- | --- | --- |");
    for check_code in check_codes {
        let check_kind = check_code.kind();
        let default_token = if DEFAULT_CHECK_CODES.contains(&check_code) {
            "✅"
        } else {
            ""
        };
        let fix_token = if check_kind.fixable() { "🛠" } else { "" };
        println!(
            "| {} | {} | {} | {} | {} |",
            check_kind.code().as_str(),
            check_kind.name(),
            check_kind.body(),
            default_token,
            fix_token
        );
    }
}
```

Deleted (25 lines): a helper binary that printed the token stream for a given Python file:

```rust
/// Print the token stream for a given Python file.
use std::path::PathBuf;

use anyhow::Result;
use clap::Parser;
use rustpython_parser::lexer;

use ruff::fs;

#[derive(Debug, Parser)]
struct Cli {
    #[arg(required = true)]
    file: PathBuf,
}

fn main() -> Result<()> {
    let cli = Cli::parse();

    let contents = fs::read_file(&cli.file)?;
    for (_, tok, _) in lexer::make_tokenizer(&contents).flatten() {
        println!("{:#?}", tok);
    }

    Ok(())
}
```

flake8_to_ruff/Cargo.lock (generated, 2964 lines; diff suppressed because it is too large)

flake8_to_ruff/Cargo.toml (new file, 22 lines):

|
||||
[package]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.114-dev.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "flake8_to_ruff"
|
||||
|
||||
[dependencies]
|
||||
anyhow = { version = "1.0.66" }
|
||||
clap = { version = "4.0.1", features = ["derive"] }
|
||||
configparser = { version = "3.0.2" }
|
||||
once_cell = { version = "1.16.0" }
|
||||
regex = { version = "1.6.0" }
|
||||
ruff = { path = "..", default-features = false }
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
serde_json = { version = "1.0.87" }
|
||||
toml = { version = "0.5.9" }
|
||||
|
||||
[dev-dependencies]
|
||||
|
||||
[features]
|
||||
flake8_to_ruff/README.md (new file, 98 lines):

|
||||
# flake8-to-ruff
|
||||
|
||||
Convert existing Flake8 configuration files (`setup.cfg`, `tox.ini`, or `.flake8`) for use with
|
||||
[Ruff](https://github.com/charliermarsh/ruff).
|
||||
|
||||
Generates a Ruff-compatible `pyproject.toml` section.
|
||||
|
||||
## Installation and Usage
|
||||
|
||||
### Installation
|
||||
|
||||
Available as [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) on PyPI:
|
||||
|
||||
```shell
|
||||
pip install flake8-to-ruff
|
||||
```
|
||||
|
||||
### Usage
|
||||
|
||||
To run `flake8-to-ruff`:
|
||||
|
||||
```shell
|
||||
flake8-to-ruff path/to/setup.cfg
|
||||
flake8-to-ruff path/to/tox.ini
|
||||
flake8-to-ruff path/to/.flake8
|
||||
```
|
||||
|
||||
`flake8-to-ruff` will print the relevant `pyproject.toml` sections to standard output, like so:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
exclude = [
|
||||
'.svn',
|
||||
'CVS',
|
||||
'.bzr',
|
||||
'.hg',
|
||||
'.git',
|
||||
'__pycache__',
|
||||
'.tox',
|
||||
'.idea',
|
||||
'.mypy_cache',
|
||||
'.venv',
|
||||
'node_modules',
|
||||
'_state_machine.py',
|
||||
'test_fstring.py',
|
||||
'bad_coding2.py',
|
||||
'badsyntax_*.py',
|
||||
]
|
||||
select = [
|
||||
'A',
|
||||
'E',
|
||||
'F',
|
||||
'Q',
|
||||
]
|
||||
ignore = []
|
||||
|
||||
[tool.ruff.flake8-quotes]
|
||||
inline-quotes = 'single'
|
||||
|
||||
[tool.ruff.pep8-naming]
|
||||
ignore-names = [
|
||||
'foo',
|
||||
'bar',
|
||||
]
|
||||
```
|
||||
|
||||
### Plugins
|
||||
|
||||
`flake8-to-ruff` will attempt to infer any activated plugins based on the settings provided in your
|
||||
configuration file.
|
||||
|
||||
For example, if your `.flake8` file includes a `docstring-convention` property, `flake8-to-ruff`
|
||||
will enable the appropriate [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
|
||||
checks.
|
||||
|
||||
Alternatively, you can manually specify plugins on the command-line:
|
||||
|
||||
```shell
|
||||
flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
|
||||
```
|
||||
|
||||
## Limitations
|
||||
|
||||
1. Ruff only supports a subset of the Flake8 configuration options. `flake8-to-ruff` will warn on and
|
||||
ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
|
||||
configuration options that don't exist in Flake8.)
|
||||
2. Ruff will omit any error codes that are unimplemented or unsupported by Ruff, including error
|
||||
codes from unsupported plugins. (See the [Ruff README](https://github.com/charliermarsh/ruff#user-content-how-does-ruff-compare-to-flake8)
|
||||
for the complete list of supported plugins.)
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome and hugely appreciated. To get started, check out the
|
||||
[contributing guidelines](https://github.com/charliermarsh/ruff/blob/main/CONTRIBUTING.md).
|
||||
flake8_to_ruff/examples/poetry.ini (new file, 36 lines):

|
||||
[flake8]
|
||||
min_python_version = 3.7.0
|
||||
max-line-length = 88
|
||||
ban-relative-imports = true
|
||||
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
|
||||
format-greedy = 1
|
||||
inline-quotes = double
|
||||
enable-extensions = TC, TC1
|
||||
type-checking-strict = true
|
||||
eradicate-whitelist-extend = ^-.*;
|
||||
extend-ignore =
|
||||
# E203: Whitespace before ':' (pycqa/pycodestyle#373)
|
||||
E203,
|
||||
# SIM106: Handle error-cases first
|
||||
SIM106,
|
||||
# ANN101: Missing type annotation for self in method
|
||||
ANN101,
|
||||
# ANN102: Missing type annotation for cls in classmethod
|
||||
ANN102,
|
||||
# PIE781: assign-and-return
|
||||
PIE781,
|
||||
# PIE798 no-unnecessary-class: Consider using a module for namespacing instead
|
||||
PIE798,
|
||||
per-file-ignores =
|
||||
# TC002: Move third-party import '...' into a type-checking block
|
||||
__init__.py:TC002,
|
||||
# ANN201: Missing return type annotation for public function
|
||||
tests/test_*:ANN201
|
||||
tests/**/test_*:ANN201
|
||||
extend-exclude =
|
||||
# Frozen and not subject to change in this repo:
|
||||
get-poetry.py,
|
||||
install-poetry.py,
|
||||
# External to the project's coding standards:
|
||||
tests/fixtures/*,
|
||||
tests/**/fixtures/*,
|
||||
flake8_to_ruff/examples/python-discord.ini (new file, 19 lines):

|
||||
[flake8]
|
||||
max-line-length=120
|
||||
docstring-convention=all
|
||||
import-order-style=pycharm
|
||||
application_import_names=bot,tests
|
||||
exclude=.cache,.venv,.git,constants.py
|
||||
extend-ignore=
|
||||
B311,W503,E226,S311,T000,E731
|
||||
# Missing Docstrings
|
||||
D100,D104,D105,D107,
|
||||
# Docstring Whitespace
|
||||
D203,D212,D214,D215,
|
||||
# Docstring Quotes
|
||||
D301,D302,
|
||||
# Docstring Content
|
||||
D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
|
||||
# Type Annotations
|
||||
ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
|
||||
per-file-ignores=tests/*:D,ANN
|
||||
flake8_to_ruff/examples/requests.ini (new file, 6 lines):

|
||||
[flake8]
|
||||
ignore = E203, E501, W503
|
||||
per-file-ignores =
|
||||
requests/__init__.py:E402, F401
|
||||
requests/compat.py:E402, F401
|
||||
tests/compat.py:F401
|
||||
flake8_to_ruff/pyproject.toml (new file, 33 lines):

|
||||
[project]
|
||||
name = "flake8-to-ruff"
|
||||
keywords = ["automation", "flake8", "pycodestyle", "pyflakes", "pylint", "clippy"]
|
||||
classifiers = [
|
||||
"Development Status :: 3 - Alpha",
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3 :: Only",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
"Topic :: Software Development :: Quality Assurance",
|
||||
]
|
||||
author = "Charlie Marsh"
|
||||
author_email = "charlie.r.marsh@gmail.com"
|
||||
description = "Convert existing Flake8 configuration to Ruff."
|
||||
requires-python = ">=3.7"
|
||||
|
||||
[project.urls]
|
||||
repository = "https://github.com/charliermarsh/ruff#subdirectory=crates/flake8_to_ruff"
|
||||
|
||||
[build-system]
|
||||
requires = ["maturin>=0.13,<0.14"]
|
||||
build-backend = "maturin"
|
||||
|
||||
[tool.maturin]
|
||||
bindings = "bin"
|
||||
strip = true
|
||||
flake8_to_ruff/src/converter.rs (new file, 482 lines):

|
||||
use std::collections::{BTreeSet, HashMap};
|
||||
|
||||
use anyhow::Result;
|
||||
use ruff::checks_gen::CheckCodePrefix;
|
||||
use ruff::flake8_quotes::settings::Quote;
|
||||
use ruff::settings::options::Options;
|
||||
use ruff::settings::pyproject::Pyproject;
|
||||
use ruff::{flake8_annotations, flake8_quotes, pep8_naming};
|
||||
|
||||
use crate::plugin::Plugin;
|
||||
use crate::{parser, plugin};
|
||||
|
||||
pub fn convert(
|
||||
flake8: &HashMap<String, Option<String>>,
|
||||
plugins: Option<Vec<Plugin>>,
|
||||
) -> Result<Pyproject> {
|
||||
// Extract all referenced check code prefixes, to power plugin inference.
|
||||
let mut referenced_codes: BTreeSet<CheckCodePrefix> = Default::default();
|
||||
for (key, value) in flake8 {
|
||||
if let Some(value) = value {
|
||||
match key.as_str() {
|
||||
"select" | "ignore" | "extend-select" | "extend_select" | "extend-ignore"
|
||||
| "extend_ignore" => {
|
||||
referenced_codes.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||
}
|
||||
"per-file-ignores" | "per_file_ignores" => {
|
||||
if let Ok(per_file_ignores) =
|
||||
parser::parse_files_to_codes_mapping(value.as_ref())
|
||||
{
|
||||
for (_, codes) in parser::collect_per_file_ignores(per_file_ignores) {
|
||||
referenced_codes.extend(codes);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if the user has specified a `select`. If not, we'll add our own
|
||||
// default `select`, and populate it based on user plugins.
|
||||
let mut select = flake8
|
||||
.get("select")
|
||||
.and_then(|value| {
|
||||
value
|
||||
.as_ref()
|
||||
.map(|value| BTreeSet::from_iter(parser::parse_prefix_codes(value)))
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
plugin::resolve_select(
|
||||
flake8,
|
||||
&plugins.unwrap_or_else(|| {
|
||||
plugin::infer_plugins_from_options(flake8)
|
||||
.into_iter()
|
||||
.chain(plugin::infer_plugins_from_codes(&referenced_codes))
|
||||
.collect()
|
||||
}),
|
||||
)
|
||||
});
|
||||
let mut ignore = flake8
|
||||
.get("ignore")
|
||||
.and_then(|value| {
|
||||
value
|
||||
.as_ref()
|
||||
.map(|value| BTreeSet::from_iter(parser::parse_prefix_codes(value)))
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
// Parse each supported option.
|
||||
let mut options: Options = Default::default();
|
||||
let mut flake8_annotations: flake8_annotations::settings::Options = Default::default();
|
||||
let mut flake8_quotes: flake8_quotes::settings::Options = Default::default();
|
||||
let mut pep8_naming: pep8_naming::settings::Options = Default::default();
|
||||
for (key, value) in flake8 {
|
||||
if let Some(value) = value {
|
||||
match key.as_str() {
|
||||
// flake8
|
||||
"max-line-length" | "max_line_length" => match value.clone().parse::<usize>() {
|
||||
Ok(line_length) => options.line_length = Some(line_length),
|
||||
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
|
||||
},
|
||||
"select" => {
|
||||
// No-op (handled above).
|
||||
select.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||
}
|
||||
"ignore" => {
|
||||
// No-op (handled above).
|
||||
}
|
||||
"extend-select" | "extend_select" => {
|
||||
// Unlike Flake8, use a single explicit `select`.
|
||||
select.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||
}
|
||||
"extend-ignore" | "extend_ignore" => {
|
||||
// Unlike Flake8, use a single explicit `ignore`.
|
||||
ignore.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||
}
|
||||
"exclude" => {
|
||||
options.exclude = Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
"extend-exclude" | "extend_exclude" => {
|
||||
options.extend_exclude = Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
"per-file-ignores" | "per_file_ignores" => {
|
||||
match parser::parse_files_to_codes_mapping(value.as_ref()) {
|
||||
Ok(per_file_ignores) => {
|
||||
options.per_file_ignores =
|
||||
Some(parser::collect_per_file_ignores(per_file_ignores))
|
||||
}
|
||||
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
|
||||
}
|
||||
}
|
||||
// flake8-annotations
|
||||
"suppress-none-returning" | "suppress_none_returning" => {
|
||||
match parser::parse_bool(value.as_ref()) {
|
||||
Ok(bool) => flake8_annotations.suppress_none_returning = Some(bool),
|
||||
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
|
||||
}
|
||||
}
|
||||
"suppress-dummy-args" | "suppress_dummy_args" => {
|
||||
match parser::parse_bool(value.as_ref()) {
|
||||
Ok(bool) => flake8_annotations.suppress_dummy_args = Some(bool),
|
||||
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
|
||||
}
|
||||
}
|
||||
"mypy-init-return" | "mypy_init_return" => {
|
||||
match parser::parse_bool(value.as_ref()) {
|
||||
Ok(bool) => flake8_annotations.mypy_init_return = Some(bool),
|
||||
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
|
||||
}
|
||||
}
|
||||
"allow-star-arg-any" | "allow_star_arg_any" => {
|
||||
match parser::parse_bool(value.as_ref()) {
|
||||
Ok(bool) => flake8_annotations.allow_star_arg_any = Some(bool),
|
||||
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
|
||||
}
|
||||
}
|
||||
// flake8-quotes
|
||||
"quotes" | "inline-quotes" | "inline_quotes" => match value.trim() {
|
||||
"'" | "single" => flake8_quotes.inline_quotes = Some(Quote::Single),
|
||||
"\"" | "double" => flake8_quotes.inline_quotes = Some(Quote::Single),
|
||||
_ => eprintln!("Unexpected '{key}' value: {value}"),
|
||||
},
|
||||
"multiline-quotes" | "multiline_quotes" => match value.trim() {
|
||||
"'" | "single" => flake8_quotes.multiline_quotes = Some(Quote::Single),
|
||||
"\"" | "double" => flake8_quotes.multiline_quotes = Some(Quote::Single),
|
||||
_ => eprintln!("Unexpected '{key}' value: {value}"),
|
||||
},
|
||||
"docstring-quotes" | "docstring_quotes" => match value.trim() {
|
||||
"'" | "single" => flake8_quotes.docstring_quotes = Some(Quote::Single),
|
||||
"\"" | "double" => flake8_quotes.docstring_quotes = Some(Quote::Single),
|
||||
_ => eprintln!("Unexpected '{key}' value: {value}"),
|
||||
},
|
||||
"avoid-escape" | "avoid_escape" => match parser::parse_bool(value.as_ref()) {
|
||||
Ok(bool) => flake8_quotes.avoid_escape = Some(bool),
|
||||
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
|
||||
},
|
||||
// pep8-naming
|
||||
"ignore-names" | "ignore_names" => {
|
||||
pep8_naming.ignore_names = Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
"classmethod-decorators" | "classmethod_decorators" => {
|
||||
pep8_naming.classmethod_decorators =
|
||||
Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
"staticmethod-decorators" | "staticmethod_decorators" => {
|
||||
pep8_naming.staticmethod_decorators =
|
||||
Some(parser::parse_strings(value.as_ref()));
|
||||
}
|
||||
// flake8-docstrings
|
||||
"docstring-convention" => {
|
||||
// No-op (handled above).
|
||||
}
|
||||
// Unknown
|
||||
_ => eprintln!("Skipping unsupported property: {key}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Deduplicate and sort.
|
||||
options.select = Some(Vec::from_iter(select));
|
||||
options.ignore = Some(Vec::from_iter(ignore));
|
||||
if flake8_quotes != Default::default() {
|
||||
options.flake8_quotes = Some(flake8_quotes);
|
||||
}
|
||||
if pep8_naming != Default::default() {
|
||||
options.pep8_naming = Some(pep8_naming);
|
||||
}
|
||||
|
||||
// Create the pyproject.toml.
|
||||
Ok(Pyproject::new(options))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::collections::HashMap;
|
||||
|
||||
use anyhow::Result;
|
||||
use ruff::checks_gen::CheckCodePrefix;
|
||||
use ruff::flake8_quotes;
|
||||
use ruff::settings::options::Options;
|
||||
use ruff::settings::pyproject::Pyproject;
|
||||
|
||||
use crate::converter::convert;
|
||||
use crate::plugin::Plugin;
|
||||
|
||||
#[test]
|
||||
fn it_converts_empty() -> Result<()> {
|
||||
let actual = convert(&HashMap::from([]), None)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
line_length: None,
|
||||
src: None,
|
||||
fix: None,
|
||||
exclude: None,
|
||||
extend_exclude: None,
|
||||
select: Some(vec![
|
||||
CheckCodePrefix::E,
|
||||
CheckCodePrefix::F,
|
||||
CheckCodePrefix::W,
|
||||
]),
|
||||
extend_select: None,
|
||||
ignore: Some(vec![]),
|
||||
extend_ignore: None,
|
||||
per_file_ignores: None,
|
||||
dummy_variable_rgx: None,
|
||||
target_version: None,
|
||||
flake8_annotations: None,
|
||||
flake8_quotes: None,
|
||||
isort: None,
|
||||
pep8_naming: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_converts_dashes() -> Result<()> {
|
||||
let actual = convert(
|
||||
&HashMap::from([("max-line-length".to_string(), Some("100".to_string()))]),
|
||||
Some(vec![]),
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
line_length: Some(100),
|
||||
src: None,
|
||||
fix: None,
|
||||
exclude: None,
|
||||
extend_exclude: None,
|
||||
select: Some(vec![
|
||||
CheckCodePrefix::E,
|
||||
CheckCodePrefix::F,
|
||||
CheckCodePrefix::W,
|
||||
]),
|
||||
extend_select: None,
|
||||
ignore: Some(vec![]),
|
||||
extend_ignore: None,
|
||||
per_file_ignores: None,
|
||||
dummy_variable_rgx: None,
|
||||
target_version: None,
|
||||
flake8_annotations: None,
|
||||
flake8_quotes: None,
|
||||
isort: None,
|
||||
pep8_naming: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_converts_underscores() -> Result<()> {
|
||||
let actual = convert(
|
||||
&HashMap::from([("max_line_length".to_string(), Some("100".to_string()))]),
|
||||
Some(vec![]),
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
line_length: Some(100),
|
||||
src: None,
|
||||
fix: None,
|
||||
exclude: None,
|
||||
extend_exclude: None,
|
||||
select: Some(vec![
|
||||
CheckCodePrefix::E,
|
||||
CheckCodePrefix::F,
|
||||
CheckCodePrefix::W,
|
||||
]),
|
||||
extend_select: None,
|
||||
ignore: Some(vec![]),
|
||||
extend_ignore: None,
|
||||
per_file_ignores: None,
|
||||
dummy_variable_rgx: None,
|
||||
target_version: None,
|
||||
flake8_annotations: None,
|
||||
flake8_quotes: None,
|
||||
isort: None,
|
||||
pep8_naming: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_ignores_parse_errors() -> Result<()> {
|
||||
let actual = convert(
|
||||
&HashMap::from([("max_line_length".to_string(), Some("abc".to_string()))]),
|
||||
Some(vec![]),
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
line_length: None,
|
||||
src: None,
|
||||
fix: None,
|
||||
exclude: None,
|
||||
extend_exclude: None,
|
||||
select: Some(vec![
|
||||
CheckCodePrefix::E,
|
||||
CheckCodePrefix::F,
|
||||
CheckCodePrefix::W,
|
||||
]),
|
||||
extend_select: None,
|
||||
ignore: Some(vec![]),
|
||||
extend_ignore: None,
|
||||
per_file_ignores: None,
|
||||
dummy_variable_rgx: None,
|
||||
target_version: None,
|
||||
flake8_annotations: None,
|
||||
flake8_quotes: None,
|
||||
isort: None,
|
||||
pep8_naming: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_converts_plugin_options() -> Result<()> {
|
||||
let actual = convert(
|
||||
&HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
|
||||
Some(vec![]),
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
line_length: None,
|
||||
src: None,
|
||||
fix: None,
|
||||
exclude: None,
|
||||
extend_exclude: None,
|
||||
select: Some(vec![
|
||||
CheckCodePrefix::E,
|
||||
CheckCodePrefix::F,
|
||||
CheckCodePrefix::W,
|
||||
]),
|
||||
extend_select: None,
|
||||
ignore: Some(vec![]),
|
||||
extend_ignore: None,
|
||||
per_file_ignores: None,
|
||||
dummy_variable_rgx: None,
|
||||
target_version: None,
|
||||
flake8_annotations: None,
|
||||
flake8_quotes: Some(flake8_quotes::settings::Options {
|
||||
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
|
||||
multiline_quotes: None,
|
||||
docstring_quotes: None,
|
||||
avoid_escape: None,
|
||||
}),
|
||||
isort: None,
|
||||
pep8_naming: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_converts_docstring_conventions() -> Result<()> {
|
||||
let actual = convert(
|
||||
&HashMap::from([(
|
||||
"docstring-convention".to_string(),
|
||||
Some("numpy".to_string()),
|
||||
)]),
|
||||
Some(vec![Plugin::Flake8Docstrings]),
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
line_length: None,
|
||||
src: None,
|
||||
fix: None,
|
||||
exclude: None,
|
||||
extend_exclude: None,
|
||||
select: Some(vec![
|
||||
CheckCodePrefix::D100,
|
||||
CheckCodePrefix::D101,
|
||||
CheckCodePrefix::D102,
|
||||
CheckCodePrefix::D103,
|
||||
CheckCodePrefix::D104,
|
||||
CheckCodePrefix::D105,
|
||||
CheckCodePrefix::D106,
|
||||
CheckCodePrefix::D200,
|
||||
CheckCodePrefix::D201,
|
||||
CheckCodePrefix::D202,
|
||||
CheckCodePrefix::D204,
|
||||
CheckCodePrefix::D205,
|
||||
CheckCodePrefix::D206,
|
||||
CheckCodePrefix::D207,
|
||||
CheckCodePrefix::D208,
|
||||
CheckCodePrefix::D209,
|
||||
CheckCodePrefix::D210,
|
||||
CheckCodePrefix::D211,
|
||||
CheckCodePrefix::D214,
|
||||
CheckCodePrefix::D215,
|
||||
CheckCodePrefix::D300,
|
||||
CheckCodePrefix::D400,
|
||||
CheckCodePrefix::D403,
|
||||
CheckCodePrefix::D404,
|
||||
CheckCodePrefix::D405,
|
||||
CheckCodePrefix::D406,
|
||||
CheckCodePrefix::D407,
|
||||
CheckCodePrefix::D408,
|
||||
CheckCodePrefix::D409,
|
||||
CheckCodePrefix::D410,
|
||||
CheckCodePrefix::D411,
|
||||
CheckCodePrefix::D412,
|
||||
CheckCodePrefix::D414,
|
||||
CheckCodePrefix::D418,
|
||||
CheckCodePrefix::D419,
|
||||
CheckCodePrefix::E,
|
||||
CheckCodePrefix::F,
|
||||
CheckCodePrefix::W,
|
||||
]),
|
||||
extend_select: None,
|
||||
ignore: Some(vec![]),
|
||||
extend_ignore: None,
|
||||
per_file_ignores: None,
|
||||
dummy_variable_rgx: None,
|
||||
target_version: None,
|
||||
flake8_annotations: None,
|
||||
flake8_quotes: None,
|
||||
isort: None,
|
||||
pep8_naming: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_infers_plugins_if_omitted() -> Result<()> {
|
||||
let actual = convert(
|
||||
&HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
|
||||
None,
|
||||
)?;
|
||||
let expected = Pyproject::new(Options {
|
||||
line_length: None,
|
||||
src: None,
|
||||
fix: None,
|
||||
exclude: None,
|
||||
extend_exclude: None,
|
||||
select: Some(vec![
|
||||
CheckCodePrefix::E,
|
||||
CheckCodePrefix::F,
|
||||
CheckCodePrefix::Q,
|
||||
CheckCodePrefix::W,
|
||||
]),
|
||||
extend_select: None,
|
||||
ignore: Some(vec![]),
|
||||
extend_ignore: None,
|
||||
per_file_ignores: None,
|
||||
dummy_variable_rgx: None,
|
||||
target_version: None,
|
||||
flake8_annotations: None,
|
||||
flake8_quotes: Some(flake8_quotes::settings::Options {
|
||||
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
|
||||
multiline_quotes: None,
|
||||
docstring_quotes: None,
|
||||
avoid_escape: None,
|
||||
}),
|
||||
isort: None,
|
||||
pep8_naming: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
flake8_to_ruff/src/lib.rs (new file, 5 lines):

|
||||
#![allow(clippy::collapsible_if, clippy::collapsible_else_if)]
|
||||
|
||||
pub mod converter;
|
||||
mod parser;
|
||||
pub mod plugin;
|
||||
flake8_to_ruff/src/main.rs (new file, 44 lines):

|
||||
//! Utility to generate Ruff's pyproject.toml section from a Flake8 INI file.
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::Result;
|
||||
use clap::Parser;
|
||||
use configparser::ini::Ini;
|
||||
use flake8_to_ruff::converter;
|
||||
use flake8_to_ruff::plugin::Plugin;
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(
|
||||
about = "Convert existing Flake8 configuration to Ruff.",
|
||||
long_about = None
|
||||
)]
|
||||
struct Cli {
|
||||
/// Path to the Flake8 configuration file (e.g., 'setup.cfg', 'tox.ini', or
|
||||
/// '.flake8').
|
||||
#[arg(required = true)]
|
||||
file: PathBuf,
|
||||
/// List of plugins to enable.
|
||||
#[arg(long, value_delimiter = ',')]
|
||||
plugin: Option<Vec<Plugin>>,
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
let cli = Cli::parse();
|
||||
|
||||
// Read the INI file.
|
||||
let mut ini = Ini::new_cs();
|
||||
ini.set_multiline(true);
|
||||
let config = ini.load(cli.file).map_err(|msg| anyhow::anyhow!(msg))?;
|
||||
|
||||
// Extract the Flake8 section.
|
||||
let flake8 = config
|
||||
.get("flake8")
|
||||
.expect("Unable to find flake8 section in INI file.");
|
||||
|
||||
// Create the pyproject.toml.
|
||||
let pyproject = converter::convert(flake8, cli.plugin)?;
|
||||
println!("{}", toml::to_string_pretty(&pyproject)?);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
flake8_to_ruff/src/parser.rs (new file, 382 lines):

|
||||
use std::collections::BTreeMap;
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::Result;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use ruff::checks_gen::CheckCodePrefix;
|
||||
use ruff::settings::types::PatternPrefixPair;
|
||||
|
||||
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
|
||||
|
||||
/// Parse a comma-separated list of `CheckCodePrefix` values (e.g.,
|
||||
/// "F401,E501").
|
||||
pub fn parse_prefix_codes(value: &str) -> Vec<CheckCodePrefix> {
|
||||
let mut codes: Vec<CheckCodePrefix> = vec![];
|
||||
for code in COMMA_SEPARATED_LIST_RE.split(value) {
|
||||
let code = code.trim();
|
||||
if code.is_empty() {
|
||||
continue;
|
||||
}
|
||||
if let Ok(code) = CheckCodePrefix::from_str(code) {
|
||||
codes.push(code);
|
||||
} else {
|
||||
eprintln!("Unsupported prefix code: {code}");
|
||||
}
|
||||
}
|
||||
codes
|
||||
}
|
||||
|
||||
/// Parse a comma-separated list of strings (e.g., "__init__.py,__main__.py").
|
||||
pub fn parse_strings(value: &str) -> Vec<String> {
|
||||
COMMA_SEPARATED_LIST_RE
|
||||
.split(value)
|
||||
.map(|part| part.trim())
|
||||
.filter(|part| !part.is_empty())
|
||||
.map(String::from)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Parse a boolean.
|
||||
pub fn parse_bool(value: &str) -> Result<bool> {
|
||||
match value.trim() {
|
||||
"true" => Ok(true),
|
||||
"false" => Ok(false),
|
||||
_ => Err(anyhow::anyhow!("Unexpected boolean value: {value}")),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Token {
|
||||
token_name: TokenType,
|
||||
src: String,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum TokenType {
|
||||
Code,
|
||||
File,
|
||||
Colon,
|
||||
Comma,
|
||||
Ws,
|
||||
Eof,
|
||||
}
|
||||
|
||||
struct State {
|
||||
seen_sep: bool,
|
||||
seen_colon: bool,
|
||||
filenames: Vec<String>,
|
||||
codes: Vec<String>,
|
||||
}
|
||||
|
||||
impl State {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
seen_sep: true,
|
||||
seen_colon: false,
|
||||
filenames: vec![],
|
||||
codes: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate the list of `StrCheckCodePair` pairs for the current state.
|
||||
fn parse(&self) -> Vec<PatternPrefixPair> {
|
||||
let mut codes: Vec<PatternPrefixPair> = vec![];
|
||||
for code in &self.codes {
|
||||
match CheckCodePrefix::from_str(code) {
|
||||
Ok(code) => {
|
||||
for filename in &self.filenames {
|
||||
codes.push(PatternPrefixPair {
|
||||
pattern: filename.clone(),
|
||||
prefix: code.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
Err(_) => eprintln!("Skipping unrecognized prefix: {}", code),
|
||||
}
|
||||
}
|
||||
codes
|
||||
}
|
||||
}
|
||||
|
||||
/// Tokenize the raw 'files-to-codes' mapping.
|
||||
fn tokenize_files_to_codes_mapping(value: &str) -> Vec<Token> {
|
||||
let mut tokens = vec![];
|
||||
let mut i = 0;
|
||||
while i < value.len() {
|
||||
for (token_re, token_name) in [
|
||||
(
|
||||
Regex::new(r"([A-Z]+[0-9]*)(?:$|\s|,)").unwrap(),
|
||||
TokenType::Code,
|
||||
),
|
||||
(Regex::new(r"([^\s:,]+)").unwrap(), TokenType::File),
|
||||
(Regex::new(r"(\s*:\s*)").unwrap(), TokenType::Colon),
|
||||
(Regex::new(r"(\s*,\s*)").unwrap(), TokenType::Comma),
|
||||
(Regex::new(r"(\s+)").unwrap(), TokenType::Ws),
|
||||
] {
|
||||
if let Some(cap) = token_re.captures(&value[i..]) {
|
||||
let mat = cap.get(1).unwrap();
|
||||
if mat.start() == 0 {
|
||||
tokens.push(Token {
|
||||
token_name,
|
||||
src: mat.as_str().to_string().trim().to_string(),
|
||||
});
|
||||
i += mat.end();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
tokens.push(Token {
|
||||
token_name: TokenType::Eof,
|
||||
src: "".to_string(),
|
||||
});
|
||||
tokens
|
||||
}
|
||||
|
||||
/// Parse a 'files-to-codes' mapping, mimicking Flake8's internal logic.
|
||||
///
|
||||
/// See: https://github.com/PyCQA/flake8/blob/7dfe99616fc2f07c0017df2ba5fa884158f3ea8a/src/flake8/utils.py#L45
|
||||
pub fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
|
||||
if value.trim().is_empty() {
|
||||
return Ok(vec![]);
|
||||
}
|
||||
let mut codes: Vec<PatternPrefixPair> = vec![];
|
||||
let mut state = State::new();
|
||||
for token in tokenize_files_to_codes_mapping(value) {
|
||||
if matches!(token.token_name, TokenType::Comma | TokenType::Ws) {
|
||||
state.seen_sep = true;
|
||||
} else if !state.seen_colon {
|
||||
if matches!(token.token_name, TokenType::Colon) {
|
||||
state.seen_colon = true;
|
||||
state.seen_sep = true;
|
||||
} else if state.seen_sep && matches!(token.token_name, TokenType::File) {
|
||||
state.filenames.push(token.src);
|
||||
state.seen_sep = false;
|
||||
} else {
|
||||
return Err(anyhow::anyhow!("Unexpected token: {:?}", token.token_name));
|
||||
}
|
||||
} else {
|
||||
if matches!(token.token_name, TokenType::Eof) {
|
||||
codes.extend(state.parse());
|
||||
state = State::new();
|
||||
} else if state.seen_sep && matches!(token.token_name, TokenType::Code) {
|
||||
state.codes.push(token.src);
|
||||
state.seen_sep = false;
|
||||
} else if state.seen_sep && matches!(token.token_name, TokenType::File) {
|
||||
codes.extend(state.parse());
|
||||
state = State::new();
|
||||
state.filenames.push(token.src);
|
||||
state.seen_sep = false;
|
||||
} else {
|
||||
return Err(anyhow::anyhow!("Unexpected token: {:?}", token.token_name));
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(codes)
|
||||
}
|
||||
|
||||
/// Collect a list of `PatternPrefixPair` structs as a `BTreeMap`.
|
||||
pub fn collect_per_file_ignores(
|
||||
pairs: Vec<PatternPrefixPair>,
|
||||
) -> BTreeMap<String, Vec<CheckCodePrefix>> {
|
||||
let mut per_file_ignores: BTreeMap<String, Vec<CheckCodePrefix>> = BTreeMap::new();
|
||||
for pair in pairs {
|
||||
per_file_ignores
|
||||
.entry(pair.pattern)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(pair.prefix);
|
||||
}
|
||||
per_file_ignores
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
use ruff::checks_gen::CheckCodePrefix;
|
||||
use ruff::settings::types::PatternPrefixPair;
|
||||
|
||||
use crate::parser::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};
|
||||
|
||||
#[test]
|
||||
fn it_parses_prefix_codes() {
|
||||
let actual = parse_prefix_codes("");
|
||||
let expected: Vec<CheckCodePrefix> = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_prefix_codes(" ");
|
||||
let expected: Vec<CheckCodePrefix> = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_prefix_codes("F401");
|
||||
let expected = vec![CheckCodePrefix::F401];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_prefix_codes("F401,");
|
||||
let expected = vec![CheckCodePrefix::F401];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_prefix_codes("F401,E501");
|
||||
let expected = vec![CheckCodePrefix::F401, CheckCodePrefix::E501];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_prefix_codes("F401, E501");
|
||||
let expected = vec![CheckCodePrefix::F401, CheckCodePrefix::E501];
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_parses_strings() {
|
||||
let actual = parse_strings("");
|
||||
let expected: Vec<String> = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_strings(" ");
|
||||
let expected: Vec<String> = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_strings("__init__.py");
|
||||
let expected = vec!["__init__.py".to_string()];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_strings("__init__.py,");
|
||||
let expected = vec!["__init__.py".to_string()];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_strings("__init__.py,__main__.py");
|
||||
let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_strings("__init__.py, __main__.py");
|
||||
let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_parse_files_to_codes_mapping() -> Result<()> {
|
||||
let actual = parse_files_to_codes_mapping("")?;
|
||||
let expected: Vec<PatternPrefixPair> = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = parse_files_to_codes_mapping(" ")?;
|
||||
let expected: Vec<PatternPrefixPair> = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
// Ex) locust
|
||||
let actual = parse_files_to_codes_mapping(
|
||||
"per-file-ignores =
|
||||
locust/test/*: F841
|
||||
examples/*: F841
|
||||
*.pyi: E302,E704"
|
||||
.strip_prefix("per-file-ignores =")
|
||||
.unwrap(),
|
||||
)?;
|
||||
let expected: Vec<PatternPrefixPair> = vec![
|
||||
PatternPrefixPair {
|
||||
pattern: "locust/test/*".to_string(),
|
||||
prefix: CheckCodePrefix::F841,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "examples/*".to_string(),
|
||||
prefix: CheckCodePrefix::F841,
|
||||
},
|
||||
];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
// Ex) celery
|
||||
let actual = parse_files_to_codes_mapping(
|
||||
"per-file-ignores =
|
||||
t/*,setup.py,examples/*,docs/*,extra/*:
|
||||
D,"
|
||||
.strip_prefix("per-file-ignores =")
|
||||
.unwrap(),
|
||||
)?;
|
||||
let expected: Vec<PatternPrefixPair> = vec![
|
||||
PatternPrefixPair {
|
||||
pattern: "t/*".to_string(),
|
||||
prefix: CheckCodePrefix::D,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "setup.py".to_string(),
|
||||
prefix: CheckCodePrefix::D,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "examples/*".to_string(),
|
||||
prefix: CheckCodePrefix::D,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "docs/*".to_string(),
|
||||
prefix: CheckCodePrefix::D,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "extra/*".to_string(),
|
||||
prefix: CheckCodePrefix::D,
|
||||
},
|
||||
];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
// Ex) scrapy
|
||||
let actual = parse_files_to_codes_mapping(
|
||||
"per-file-ignores =
|
||||
scrapy/__init__.py:E402
|
||||
scrapy/core/downloader/handlers/http.py:F401
|
||||
scrapy/http/__init__.py:F401
|
||||
scrapy/linkextractors/__init__.py:E402,F401
|
||||
scrapy/selector/__init__.py:F401
|
||||
scrapy/spiders/__init__.py:E402,F401
|
||||
scrapy/utils/url.py:F403,F405
|
||||
tests/test_loader.py:E741"
|
||||
.strip_prefix("per-file-ignores =")
|
||||
.unwrap(),
|
||||
)?;
|
||||
let expected: Vec<PatternPrefixPair> = vec![
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/__init__.py".to_string(),
|
||||
prefix: CheckCodePrefix::E402,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
|
||||
prefix: CheckCodePrefix::F401,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/http/__init__.py".to_string(),
|
||||
prefix: CheckCodePrefix::F401,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/linkextractors/__init__.py".to_string(),
|
||||
prefix: CheckCodePrefix::E402,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/linkextractors/__init__.py".to_string(),
|
||||
prefix: CheckCodePrefix::F401,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/selector/__init__.py".to_string(),
|
||||
prefix: CheckCodePrefix::F401,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/spiders/__init__.py".to_string(),
|
||||
prefix: CheckCodePrefix::E402,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/spiders/__init__.py".to_string(),
|
||||
prefix: CheckCodePrefix::F401,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/utils/url.py".to_string(),
|
||||
prefix: CheckCodePrefix::F403,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "scrapy/utils/url.py".to_string(),
|
||||
prefix: CheckCodePrefix::F405,
|
||||
},
|
||||
PatternPrefixPair {
|
||||
pattern: "tests/test_loader.py".to_string(),
|
||||
prefix: CheckCodePrefix::E741,
|
||||
},
|
||||
];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
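As a rough, non-authoritative sketch of how the two parser helpers above compose (illustrative only, not part of the diff; the `crate::parser` paths follow the test imports shown above, while the function name and the sample `per-file-ignores` value are assumptions):

use anyhow::Result;

fn per_file_ignores_example() -> Result<()> {
    // A Flake8-style `per-file-ignores` value, as it might appear in setup.cfg.
    let raw = "
        tests/*: F401,E501
        docs/*: D";
    // Tokenize and parse into (pattern, prefix) pairs...
    let pairs = crate::parser::parse_files_to_codes_mapping(raw)?;
    // ...then group the pairs by pattern, as the converter does when emitting
    // the per-file-ignores table.
    let per_file_ignores = crate::parser::collect_per_file_ignores(pairs);
    for (pattern, prefixes) in &per_file_ignores {
        println!("{pattern}: {} prefixes", prefixes.len());
    }
    Ok(())
}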
396
flake8_to_ruff/src/plugin.rs
Normal file
@@ -0,0 +1,396 @@
|
||||
use std::collections::{BTreeSet, HashMap};
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use ruff::checks_gen::CheckCodePrefix;
|
||||
|
||||
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)]
|
||||
pub enum Plugin {
|
||||
Flake8Bugbear,
|
||||
Flake8Builtins,
|
||||
Flake8Comprehensions,
|
||||
Flake8Docstrings,
|
||||
Flake8Print,
|
||||
Flake8Quotes,
|
||||
Flake8Annotations,
|
||||
PEP8Naming,
|
||||
Pyupgrade,
|
||||
}
|
||||
|
||||
impl FromStr for Plugin {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(string: &str) -> Result<Self, Self::Err> {
|
||||
match string {
|
||||
"flake8-bugbear" => Ok(Plugin::Flake8Bugbear),
|
||||
"flake8-builtins" => Ok(Plugin::Flake8Builtins),
|
||||
"flake8-comprehensions" => Ok(Plugin::Flake8Comprehensions),
|
||||
"flake8-docstrings" => Ok(Plugin::Flake8Docstrings),
|
||||
"flake8-print" => Ok(Plugin::Flake8Print),
|
||||
"flake8-quotes" => Ok(Plugin::Flake8Quotes),
|
||||
"flake8-annotations" => Ok(Plugin::Flake8Annotations),
|
||||
"pep8-naming" => Ok(Plugin::PEP8Naming),
|
||||
"pyupgrade" => Ok(Plugin::Pyupgrade),
|
||||
_ => Err(anyhow!("Unknown plugin: {}", string)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Plugin {
|
||||
pub fn default(&self) -> CheckCodePrefix {
|
||||
match self {
|
||||
Plugin::Flake8Bugbear => CheckCodePrefix::B,
|
||||
Plugin::Flake8Builtins => CheckCodePrefix::A,
|
||||
Plugin::Flake8Comprehensions => CheckCodePrefix::C,
|
||||
Plugin::Flake8Docstrings => CheckCodePrefix::D,
|
||||
Plugin::Flake8Print => CheckCodePrefix::T,
|
||||
Plugin::Flake8Quotes => CheckCodePrefix::Q,
|
||||
Plugin::Flake8Annotations => CheckCodePrefix::ANN,
|
||||
Plugin::PEP8Naming => CheckCodePrefix::N,
|
||||
Plugin::Pyupgrade => CheckCodePrefix::U,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn select(&self, flake8: &HashMap<String, Option<String>>) -> Vec<CheckCodePrefix> {
|
||||
match self {
|
||||
Plugin::Flake8Bugbear => vec![CheckCodePrefix::B],
|
||||
Plugin::Flake8Builtins => vec![CheckCodePrefix::A],
|
||||
Plugin::Flake8Comprehensions => vec![CheckCodePrefix::C],
|
||||
Plugin::Flake8Docstrings => {
|
||||
// Use the user-provided docstring.
|
||||
for key in ["docstring-convention", "docstring_convention"] {
|
||||
if let Some(Some(value)) = flake8.get(key) {
|
||||
match DocstringConvention::from_str(value) {
|
||||
Ok(convention) => return convention.select(),
|
||||
Err(e) => {
|
||||
eprintln!("Unexpected '{key}' value: {value} ({e}");
|
||||
return vec![];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Default to PEP8.
|
||||
DocstringConvention::PEP8.select()
|
||||
}
|
||||
Plugin::Flake8Print => vec![CheckCodePrefix::T],
|
||||
Plugin::Flake8Quotes => vec![CheckCodePrefix::Q],
|
||||
Plugin::Flake8Annotations => vec![CheckCodePrefix::ANN],
|
||||
Plugin::PEP8Naming => vec![CheckCodePrefix::N],
|
||||
Plugin::Pyupgrade => vec![CheckCodePrefix::U],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub enum DocstringConvention {
|
||||
All,
|
||||
PEP8,
|
||||
NumPy,
|
||||
Google,
|
||||
}
|
||||
|
||||
impl FromStr for DocstringConvention {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(string: &str) -> Result<Self, Self::Err> {
|
||||
match string {
|
||||
"all" => Ok(DocstringConvention::All),
|
||||
"pep8" => Ok(DocstringConvention::PEP8),
|
||||
"numpy" => Ok(DocstringConvention::NumPy),
|
||||
"google" => Ok(DocstringConvention::Google),
|
||||
_ => Err(anyhow!("Unknown docstring convention: {}", string)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DocstringConvention {
|
||||
fn select(&self) -> Vec<CheckCodePrefix> {
|
||||
match self {
|
||||
DocstringConvention::All => vec![CheckCodePrefix::D],
|
||||
DocstringConvention::PEP8 => vec![
|
||||
// All errors except D203, D212, D213, D214, D215, D404, D405, D406, D407, D408,
|
||||
// D409, D410, D411, D413, D415, D416 and D417.
|
||||
CheckCodePrefix::D100,
|
||||
CheckCodePrefix::D101,
|
||||
CheckCodePrefix::D102,
|
||||
CheckCodePrefix::D103,
|
||||
CheckCodePrefix::D104,
|
||||
CheckCodePrefix::D105,
|
||||
CheckCodePrefix::D106,
|
||||
CheckCodePrefix::D107,
|
||||
CheckCodePrefix::D200,
|
||||
CheckCodePrefix::D201,
|
||||
CheckCodePrefix::D202,
|
||||
// CheckCodePrefix::D203,
|
||||
CheckCodePrefix::D204,
|
||||
CheckCodePrefix::D205,
|
||||
CheckCodePrefix::D206,
|
||||
CheckCodePrefix::D207,
|
||||
CheckCodePrefix::D208,
|
||||
CheckCodePrefix::D209,
|
||||
CheckCodePrefix::D210,
|
||||
CheckCodePrefix::D211,
|
||||
// CheckCodePrefix::D212,
|
||||
// CheckCodePrefix::D213,
|
||||
// CheckCodePrefix::D214,
|
||||
// CheckCodePrefix::D215,
|
||||
CheckCodePrefix::D300,
|
||||
CheckCodePrefix::D400,
|
||||
CheckCodePrefix::D402,
|
||||
CheckCodePrefix::D403,
|
||||
// CheckCodePrefix::D404,
|
||||
// CheckCodePrefix::D405,
|
||||
// CheckCodePrefix::D406,
|
||||
// CheckCodePrefix::D407,
|
||||
// CheckCodePrefix::D408,
|
||||
// CheckCodePrefix::D409,
|
||||
// CheckCodePrefix::D410,
|
||||
// CheckCodePrefix::D411,
|
||||
CheckCodePrefix::D412,
|
||||
// CheckCodePrefix::D413,
|
||||
CheckCodePrefix::D414,
|
||||
// CheckCodePrefix::D415,
|
||||
// CheckCodePrefix::D416,
|
||||
// CheckCodePrefix::D417,
|
||||
CheckCodePrefix::D418,
|
||||
CheckCodePrefix::D419,
|
||||
],
|
||||
DocstringConvention::NumPy => vec![
|
||||
// All errors except D107, D203, D212, D213, D402, D413, D415, D416, and D417.
|
||||
CheckCodePrefix::D100,
|
||||
CheckCodePrefix::D101,
|
||||
CheckCodePrefix::D102,
|
||||
CheckCodePrefix::D103,
|
||||
CheckCodePrefix::D104,
|
||||
CheckCodePrefix::D105,
|
||||
CheckCodePrefix::D106,
|
||||
// CheckCodePrefix::D107,
|
||||
CheckCodePrefix::D200,
|
||||
CheckCodePrefix::D201,
|
||||
CheckCodePrefix::D202,
|
||||
// CheckCodePrefix::D203,
|
||||
CheckCodePrefix::D204,
|
||||
CheckCodePrefix::D205,
|
||||
CheckCodePrefix::D206,
|
||||
CheckCodePrefix::D207,
|
||||
CheckCodePrefix::D208,
|
||||
CheckCodePrefix::D209,
|
||||
CheckCodePrefix::D210,
|
||||
CheckCodePrefix::D211,
|
||||
// CheckCodePrefix::D212,
|
||||
// CheckCodePrefix::D213,
|
||||
CheckCodePrefix::D214,
|
||||
CheckCodePrefix::D215,
|
||||
CheckCodePrefix::D300,
|
||||
CheckCodePrefix::D400,
|
||||
// CheckCodePrefix::D402,
|
||||
CheckCodePrefix::D403,
|
||||
CheckCodePrefix::D404,
|
||||
CheckCodePrefix::D405,
|
||||
CheckCodePrefix::D406,
|
||||
CheckCodePrefix::D407,
|
||||
CheckCodePrefix::D408,
|
||||
CheckCodePrefix::D409,
|
||||
CheckCodePrefix::D410,
|
||||
CheckCodePrefix::D411,
|
||||
CheckCodePrefix::D412,
|
||||
// CheckCodePrefix::D413,
|
||||
CheckCodePrefix::D414,
|
||||
// CheckCodePrefix::D415,
|
||||
// CheckCodePrefix::D416,
|
||||
// CheckCodePrefix::D417,
|
||||
CheckCodePrefix::D418,
|
||||
CheckCodePrefix::D419,
|
||||
],
|
||||
DocstringConvention::Google => vec![
|
||||
// All errors except D203, D204, D213, D215, D400, D401, D404, D406, D407, D408,
|
||||
// D409 and D413.
|
||||
CheckCodePrefix::D100,
|
||||
CheckCodePrefix::D101,
|
||||
CheckCodePrefix::D102,
|
||||
CheckCodePrefix::D103,
|
||||
CheckCodePrefix::D104,
|
||||
CheckCodePrefix::D105,
|
||||
CheckCodePrefix::D106,
|
||||
CheckCodePrefix::D107,
|
||||
CheckCodePrefix::D200,
|
||||
CheckCodePrefix::D201,
|
||||
CheckCodePrefix::D202,
|
||||
// CheckCodePrefix::D203,
|
||||
// CheckCodePrefix::D204,
|
||||
CheckCodePrefix::D205,
|
||||
CheckCodePrefix::D206,
|
||||
CheckCodePrefix::D207,
|
||||
CheckCodePrefix::D208,
|
||||
CheckCodePrefix::D209,
|
||||
CheckCodePrefix::D210,
|
||||
CheckCodePrefix::D211,
|
||||
CheckCodePrefix::D212,
|
||||
// CheckCodePrefix::D213,
|
||||
CheckCodePrefix::D214,
|
||||
// CheckCodePrefix::D215,
|
||||
CheckCodePrefix::D300,
|
||||
// CheckCodePrefix::D400,
|
||||
CheckCodePrefix::D402,
|
||||
CheckCodePrefix::D403,
|
||||
// CheckCodePrefix::D404,
|
||||
CheckCodePrefix::D405,
|
||||
// CheckCodePrefix::D406,
|
||||
// CheckCodePrefix::D407,
|
||||
// CheckCodePrefix::D408,
|
||||
// CheckCodePrefix::D409,
|
||||
CheckCodePrefix::D410,
|
||||
CheckCodePrefix::D411,
|
||||
CheckCodePrefix::D412,
|
||||
// CheckCodePrefix::D413,
|
||||
CheckCodePrefix::D414,
|
||||
CheckCodePrefix::D415,
|
||||
CheckCodePrefix::D416,
|
||||
CheckCodePrefix::D417,
|
||||
CheckCodePrefix::D418,
|
||||
CheckCodePrefix::D419,
|
||||
],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Infer the enabled plugins based on user-provided options.
|
||||
///
|
||||
/// For example, if the user specified a `mypy-init-return` setting, we should
|
||||
/// infer that `flake8-annotations` is active.
|
||||
pub fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> Vec<Plugin> {
|
||||
let mut plugins = BTreeSet::new();
|
||||
for key in flake8.keys() {
|
||||
match key.as_str() {
|
||||
// flake8-docstrings
|
||||
"docstring-convention" | "docstring_convention" => {
|
||||
plugins.insert(Plugin::Flake8Docstrings);
|
||||
}
|
||||
// flake8-builtins
|
||||
"builtins-ignorelist" | "builtins_ignorelist" => {
|
||||
plugins.insert(Plugin::Flake8Builtins);
|
||||
}
|
||||
// flake8-quotes
|
||||
"quotes" | "inline-quotes" | "inline_quotes" => {
|
||||
plugins.insert(Plugin::Flake8Quotes);
|
||||
}
|
||||
"multiline-quotes" | "multiline_quotes" => {
|
||||
plugins.insert(Plugin::Flake8Quotes);
|
||||
}
|
||||
"docstring-quotes" | "docstring_quotes" => {
|
||||
plugins.insert(Plugin::Flake8Quotes);
|
||||
}
|
||||
"avoid-escape" | "avoid_escape" => {
|
||||
plugins.insert(Plugin::Flake8Quotes);
|
||||
}
|
||||
// flake8-annotations
|
||||
"suppress-none-returning" | "suppress_none_returning" => {
|
||||
plugins.insert(Plugin::Flake8Annotations);
|
||||
}
|
||||
"suppress-dummy-args" | "suppress_dummy_args" => {
|
||||
plugins.insert(Plugin::Flake8Annotations);
|
||||
}
|
||||
"allow-untyped-defs" | "allow_untyped_defs" => {
|
||||
plugins.insert(Plugin::Flake8Annotations);
|
||||
}
|
||||
"allow-untyped-nested" | "allow_untyped_nested" => {
|
||||
plugins.insert(Plugin::Flake8Annotations);
|
||||
}
|
||||
"mypy-init-return" | "mypy_init_return" => {
|
||||
plugins.insert(Plugin::Flake8Annotations);
|
||||
}
|
||||
"dispatch-decorators" | "dispatch_decorators" => {
|
||||
plugins.insert(Plugin::Flake8Annotations);
|
||||
}
|
||||
"overload-decorators" | "overload_decorators" => {
|
||||
plugins.insert(Plugin::Flake8Annotations);
|
||||
}
|
||||
"allow-star-arg-any" | "allow_star_arg_any" => {
|
||||
plugins.insert(Plugin::Flake8Annotations);
|
||||
}
|
||||
// pep8-naming
|
||||
"ignore-names" | "ignore_names" => {
|
||||
plugins.insert(Plugin::PEP8Naming);
|
||||
}
|
||||
"classmethod-decorators" | "classmethod_decorators" => {
|
||||
plugins.insert(Plugin::PEP8Naming);
|
||||
}
|
||||
"staticmethod-decorators" | "staticmethod_decorators" => {
|
||||
plugins.insert(Plugin::PEP8Naming);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Vec::from_iter(plugins)
|
||||
}
|
||||
|
||||
/// Infer the enabled plugins based on the referenced prefixes.
|
||||
///
|
||||
/// For example, if the user ignores `ANN101`, we should infer that
|
||||
/// `flake8-annotations` is active.
|
||||
pub fn infer_plugins_from_codes(codes: &BTreeSet<CheckCodePrefix>) -> Vec<Plugin> {
|
||||
[
|
||||
Plugin::Flake8Bugbear,
|
||||
Plugin::Flake8Builtins,
|
||||
Plugin::Flake8Comprehensions,
|
||||
Plugin::Flake8Docstrings,
|
||||
Plugin::Flake8Print,
|
||||
Plugin::Flake8Quotes,
|
||||
Plugin::Flake8Annotations,
|
||||
Plugin::PEP8Naming,
|
||||
Plugin::Pyupgrade,
|
||||
]
|
||||
.into_iter()
|
||||
.filter(|plugin| {
|
||||
for prefix in codes {
|
||||
if prefix
|
||||
.codes()
|
||||
.iter()
|
||||
.any(|code| plugin.default().codes().contains(code))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Resolve the set of enabled `CheckCodePrefix` values for the given plugins.
|
||||
pub fn resolve_select(
|
||||
flake8: &HashMap<String, Option<String>>,
|
||||
plugins: &[Plugin],
|
||||
) -> BTreeSet<CheckCodePrefix> {
|
||||
// Include default Pyflakes and pycodestyle checks.
|
||||
let mut select = BTreeSet::from([CheckCodePrefix::E, CheckCodePrefix::F, CheckCodePrefix::W]);
|
||||
|
||||
// Add prefix codes for every plugin.
|
||||
for plugin in plugins {
|
||||
select.extend(plugin.select(flake8));
|
||||
}
|
||||
|
||||
select
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::plugin::{infer_plugins_from_options, Plugin};
|
||||
|
||||
#[test]
|
||||
fn it_infers_plugins() {
|
||||
let actual = infer_plugins_from_options(&HashMap::from([(
|
||||
"inline-quotes".to_string(),
|
||||
Some("single".to_string()),
|
||||
)]));
|
||||
let expected = vec![Plugin::Flake8Quotes];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = infer_plugins_from_options(&HashMap::from([(
|
||||
"staticmethod-decorators".to_string(),
|
||||
Some("[]".to_string()),
|
||||
)]));
|
||||
let expected = vec![Plugin::PEP8Naming];
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
}
|
||||
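A minimal usage sketch for the inference helpers above, assuming the option keys mirror Flake8's setting names (illustrative only; the real call sites in the converter's entry point are not shown in this diff, and the sample option values are assumptions):

use std::collections::HashMap;

fn plugin_inference_example() {
    // Hypothetical Flake8 options as read from a setup.cfg [flake8] section.
    let flake8: HashMap<String, Option<String>> = HashMap::from([
        ("docstring-convention".to_string(), Some("numpy".to_string())),
        ("mypy-init-return".to_string(), Some("true".to_string())),
    ]);
    // The options alone imply flake8-docstrings and flake8-annotations.
    let plugins = crate::plugin::infer_plugins_from_options(&flake8);
    // Start from the E/F/W defaults and add each inferred plugin's prefixes.
    let select = crate::plugin::resolve_select(&flake8, &plugins);
    println!("{} plugins, {} prefixes selected", plugins.len(), select.len());
}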
37
output.py
@@ -1,37 +0,0 @@
|
||||
from collections import Counter
|
||||
|
||||
|
||||
def f() -> None:
|
||||
"""Docstring goes here."""
|
||||
for x in range(5):
|
||||
print(x)
|
||||
else:
|
||||
print("Nope!")
|
||||
|
||||
|
||||
a = {
|
||||
"a",
|
||||
"b",
|
||||
"c",
|
||||
"a",
|
||||
"b",
|
||||
"c",
|
||||
"a",
|
||||
"b",
|
||||
"c",
|
||||
"a",
|
||||
"b",
|
||||
"c",
|
||||
"a",
|
||||
"b",
|
||||
"c",
|
||||
"a",
|
||||
"b",
|
||||
"c",
|
||||
"a",
|
||||
"b",
|
||||
"c",
|
||||
}
|
||||
|
||||
cls(title=title, before_text=before_text, after_text=after_text, before_description=before_description, after_description=after_description, transform_type=transform_type)
|
||||
|
||||
@@ -32,3 +32,7 @@ build-backend = "maturin"
|
||||
bindings = "bin"
|
||||
sdist-include = ["Cargo.lock"]
|
||||
strip = true
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
known_third_party = ["fastapi", "pydantic", "starlette"]
|
||||
|
||||
20
resources/test/fixtures/B002.py
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
"""
|
||||
Should emit:
|
||||
B002 - on lines 15 and 20
|
||||
"""
|
||||
|
||||
|
||||
def this_is_all_fine(n):
|
||||
x = n + 1
|
||||
y = 1 + n
|
||||
z = +x + y
|
||||
return +z
|
||||
|
||||
|
||||
def this_is_buggy(n):
|
||||
x = ++n
|
||||
return x
|
||||
|
||||
|
||||
def this_is_buggy_too(n):
|
||||
return ++n
|
||||
18
resources/test/fixtures/B003.py
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
"""
|
||||
Should emit:
|
||||
B003 - on line 9
|
||||
"""
|
||||
|
||||
import os
|
||||
from os import environ
|
||||
|
||||
os.environ = {}
|
||||
environ = {} # that's fine, assigning a new meaning to the module-level name
|
||||
|
||||
|
||||
class Object:
|
||||
os = None
|
||||
|
||||
|
||||
o = Object()
|
||||
o.os.environ = {}
|
||||
12
resources/test/fixtures/B004.py
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
def this_is_a_bug():
|
||||
o = object()
|
||||
if hasattr(o, "__call__"):
|
||||
print("Ooh, callable! Or is it?")
|
||||
if getattr(o, "__call__", False):
|
||||
print("Ooh, callable! Or is it?")
|
||||
|
||||
|
||||
def this_is_fine():
|
||||
o = object()
|
||||
if callable(o):
|
||||
print("Ooh, this is actually callable.")
|
||||
26
resources/test/fixtures/B005.py
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
s = "qwe"
|
||||
s.strip(s) # no warning
|
||||
s.strip("we") # no warning
|
||||
s.strip(".facebook.com") # warning
|
||||
s.strip("e") # no warning
|
||||
s.strip("\n\t ") # no warning
|
||||
s.strip(r"\n\t ") # warning
|
||||
s.lstrip(s) # no warning
|
||||
s.lstrip("we") # no warning
|
||||
s.lstrip(".facebook.com") # warning
|
||||
s.lstrip("e") # no warning
|
||||
s.lstrip("\n\t ") # no warning
|
||||
s.lstrip(r"\n\t ") # warning
|
||||
s.rstrip(s) # no warning
|
||||
s.rstrip("we") # warning
|
||||
s.rstrip(".facebook.com") # warning
|
||||
s.rstrip("e") # no warning
|
||||
s.rstrip("\n\t ") # no warning
|
||||
s.rstrip(r"\n\t ") # warning
|
||||
|
||||
from somewhere import other_type, strip
|
||||
|
||||
strip("we") # no warning
|
||||
other_type().lstrip() # no warning
|
||||
other_type().rstrip(["a", "b", "c"]) # no warning
|
||||
other_type().strip("a", "b") # no warning
|
||||
187
resources/test/fixtures/B006_B008.py
vendored
Normal file
@@ -0,0 +1,187 @@
|
||||
import collections
|
||||
import datetime as dt
|
||||
import logging
|
||||
import operator
|
||||
import random
|
||||
import re
|
||||
import time
|
||||
import types
|
||||
from operator import attrgetter, itemgetter, methodcaller
|
||||
from types import MappingProxyType
|
||||
|
||||
|
||||
# B006
|
||||
# Allow immutable literals/calls/comprehensions
|
||||
def this_is_okay(value=(1, 2, 3)):
|
||||
...
|
||||
|
||||
|
||||
async def and_this_also(value=tuple()):
|
||||
pass
|
||||
|
||||
|
||||
def frozenset_also_okay(value=frozenset()):
|
||||
pass
|
||||
|
||||
|
||||
def mappingproxytype_okay(
|
||||
value=MappingProxyType({}), value2=types.MappingProxyType({})
|
||||
):
|
||||
pass
|
||||
|
||||
|
||||
def re_compile_ok(value=re.compile("foo")):
|
||||
pass
|
||||
|
||||
|
||||
def operators_ok(
|
||||
v=operator.attrgetter("foo"),
|
||||
v2=operator.itemgetter("foo"),
|
||||
v3=operator.methodcaller("foo"),
|
||||
):
|
||||
pass
|
||||
|
||||
|
||||
def operators_ok_unqualified(
|
||||
v=attrgetter("foo"),
|
||||
v2=itemgetter("foo"),
|
||||
v3=methodcaller("foo"),
|
||||
):
|
||||
pass
|
||||
|
||||
|
||||
def kwonlyargs_immutable(*, value=()):
|
||||
...
|
||||
|
||||
|
||||
# Flag mutable literals/comprehensions
|
||||
|
||||
|
||||
def this_is_wrong(value=[1, 2, 3]):
|
||||
...
|
||||
|
||||
|
||||
def this_is_also_wrong(value={}):
|
||||
...
|
||||
|
||||
|
||||
def and_this(value=set()):
|
||||
...
|
||||
|
||||
|
||||
def this_too(value=collections.OrderedDict()):
|
||||
...
|
||||
|
||||
|
||||
async def async_this_too(value=collections.defaultdict()):
|
||||
...
|
||||
|
||||
|
||||
def dont_forget_me(value=collections.deque()):
|
||||
...
|
||||
|
||||
|
||||
# N.B. we're also flagging the function call in the comprehension
|
||||
def list_comprehension_also_not_okay(default=[i**2 for i in range(3)]):
|
||||
pass
|
||||
|
||||
|
||||
def dict_comprehension_also_not_okay(default={i: i**2 for i in range(3)}):
|
||||
pass
|
||||
|
||||
|
||||
def set_comprehension_also_not_okay(default={i**2 for i in range(3)}):
|
||||
pass
|
||||
|
||||
|
||||
def kwonlyargs_mutable(*, value=[]):
|
||||
...
|
||||
|
||||
|
||||
# Recommended approach for mutable defaults
|
||||
def do_this_instead(value=None):
|
||||
if value is None:
|
||||
value = set()
|
||||
|
||||
|
||||
# B008
|
||||
# Flag function calls as default args (including if they are part of a sub-expression)
|
||||
def in_fact_all_calls_are_wrong(value=time.time()):
|
||||
...
|
||||
|
||||
|
||||
def f(when=dt.datetime.now() + dt.timedelta(days=7)):
|
||||
pass
|
||||
|
||||
|
||||
def can_even_catch_lambdas(a=(lambda x: x)()):
|
||||
...
|
||||
|
||||
|
||||
# Recommended approach for function calls as default args
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def do_this_instead_of_calls_in_defaults(logger=LOGGER):
|
||||
# That makes it more obvious that this one value is reused.
|
||||
...
|
||||
|
||||
|
||||
# Handle inf/infinity/nan special case
|
||||
def float_inf_okay(value=float("inf")):
|
||||
pass
|
||||
|
||||
|
||||
def float_infinity_okay(value=float("infinity")):
|
||||
pass
|
||||
|
||||
|
||||
def float_plus_infinity_okay(value=float("+infinity")):
|
||||
pass
|
||||
|
||||
|
||||
def float_minus_inf_okay(value=float("-inf")):
|
||||
pass
|
||||
|
||||
|
||||
def float_nan_okay(value=float("nan")):
|
||||
pass
|
||||
|
||||
|
||||
def float_minus_NaN_okay(value=float("-NaN")):
|
||||
pass
|
||||
|
||||
|
||||
def float_infinity_literal(value=float("1e999")):
|
||||
pass
|
||||
|
||||
|
||||
# But don't allow standard floats
|
||||
def float_int_is_wrong(value=float(3)):
|
||||
pass
|
||||
|
||||
|
||||
def float_str_not_inf_or_nan_is_wrong(value=float("3.14")):
|
||||
pass
|
||||
|
||||
|
||||
# B006 and B008
|
||||
# We should handle arbitrary nesting of these B008.
|
||||
def nested_combo(a=[float(3), dt.datetime.now()]):
|
||||
pass
|
||||
|
||||
|
||||
# Don't flag nested B006 since we can't guarantee that
|
||||
# it isn't made mutable by the outer operation.
|
||||
def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])):
|
||||
pass
|
||||
|
||||
|
||||
# B008-ception.
|
||||
def nested_b008(a=random.randint(0, dt.datetime.now().year)):
|
||||
pass
|
||||
|
||||
|
||||
# Ignore lambda contents since they are evaluated at call time.
|
||||
def foo(f=lambda x: print(x)):
|
||||
f(1)
|
||||
31
resources/test/fixtures/B007.py
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
for i in range(10):
|
||||
print(i)
|
||||
|
||||
print(i) # name no longer defined on Python 3; no warning yet
|
||||
|
||||
for i in range(10): # name not used within the loop; B007
|
||||
print(10)
|
||||
|
||||
print(i) # name no longer defined on Python 3; no warning yet
|
||||
|
||||
|
||||
for _ in range(10): # _ is okay for a throw-away variable
|
||||
print(10)
|
||||
|
||||
|
||||
for i in range(10):
|
||||
for j in range(10):
|
||||
for k in range(10): # k not used, i and j used transitively
|
||||
print(i + j)
|
||||
|
||||
|
||||
def strange_generator():
|
||||
for i in range(10):
|
||||
for j in range(10):
|
||||
for k in range(10):
|
||||
for l in range(10):
|
||||
yield i, (j, (k, l))
|
||||
|
||||
|
||||
for i, (j, (k, l)) in strange_generator(): # i, k not used
|
||||
print(j, l)
|
||||
36
resources/test/fixtures/B009_B010.py
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
"""
|
||||
Should emit:
|
||||
B009 - Line 18, 19, 20, 21, 22
|
||||
B010 - Line 33, 34, 35, 36
|
||||
"""
|
||||
|
||||
# Valid getattr usage
|
||||
getattr(foo, bar)
|
||||
getattr(foo, "bar", None)
|
||||
getattr(foo, "bar{foo}".format(foo="a"), None)
|
||||
getattr(foo, "bar{foo}".format(foo="a"))
|
||||
getattr(foo, bar, None)
|
||||
getattr(foo, "123abc")
|
||||
getattr(foo, r"123\abc")
|
||||
getattr(foo, "except")
|
||||
|
||||
# Invalid usage
|
||||
getattr(foo, "bar")
|
||||
getattr(foo, "_123abc")
|
||||
getattr(foo, "abc123")
|
||||
getattr(foo, r"abc123")
|
||||
_ = lambda x: getattr(x, "bar")
|
||||
|
||||
# Valid setattr usage
|
||||
setattr(foo, bar, None)
|
||||
setattr(foo, "bar{foo}".format(foo="a"), None)
|
||||
setattr(foo, "123abc", None)
|
||||
setattr(foo, r"123\abc", None)
|
||||
setattr(foo, "except", None)
|
||||
_ = lambda x: setattr(x, "bar", 1)
|
||||
|
||||
# Invalid usage
|
||||
setattr(foo, "bar", None)
|
||||
setattr(foo, "_123abc", None)
|
||||
setattr(foo, "abc123", None)
|
||||
setattr(foo, r"abc123", None)
|
||||
10
resources/test/fixtures/B011.py
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
"""
|
||||
Should emit:
|
||||
B011 - on line 8
|
||||
B011 - on line 10
|
||||
"""
|
||||
|
||||
assert 1 != 2
|
||||
assert False
|
||||
assert 1 != 2, "message"
|
||||
assert False, "message"
|
||||
8
resources/test/fixtures/B013.py
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
try:
|
||||
pass
|
||||
except (ValueError,):
|
||||
pass
|
||||
except AttributeError:
|
||||
pass
|
||||
except (ImportError, TypeError):
|
||||
pass
|
||||
76
resources/test/fixtures/B014.py
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
"""
|
||||
Should emit:
|
||||
B014 - on lines 11, 17, 28, 42, 49, 56, and 74.
|
||||
"""
|
||||
|
||||
import binascii
|
||||
import re
|
||||
|
||||
try:
|
||||
pass
|
||||
except (Exception, TypeError):
|
||||
# TypeError is a subclass of Exception, so it doesn't add anything
|
||||
pass
|
||||
|
||||
try:
|
||||
pass
|
||||
except (OSError, OSError) as err:
|
||||
# Duplicate exception types are useless
|
||||
pass
|
||||
|
||||
|
||||
class MyError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (MyError, MyError):
|
||||
# Detect duplicate non-builtin errors
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (MyError, Exception) as e:
|
||||
# Don't assume that we're all subclasses of Exception
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (MyError, BaseException) as e:
|
||||
# But we *can* assume that everything is a subclass of BaseException
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (re.error, re.error):
|
||||
# Duplicate exception types as attributes
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (IOError, EnvironmentError, OSError):
|
||||
# Detect if a primary exception and any of its aliases are present.
|
||||
#
|
||||
# Since Python 3.3, IOError, EnvironmentError, WindowsError, mmap.error,
|
||||
# socket.error and select.error are aliases of OSError. See PEP 3151 for
|
||||
# more info.
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (MyException, NotImplemented):
|
||||
# NotImplemented is not an exception, let's not crash on it.
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (ValueError, binascii.Error):
|
||||
# binascii.Error is a subclass of ValueError.
|
||||
pass
|
||||
27
resources/test/fixtures/B015.py
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
assert 1 == 1
|
||||
|
||||
1 == 1
|
||||
|
||||
assert 1 in (1, 2)
|
||||
|
||||
1 in (1, 2)
|
||||
|
||||
|
||||
if 1 == 2:
|
||||
pass
|
||||
|
||||
|
||||
def test():
|
||||
assert 1 in (1, 2)
|
||||
|
||||
1 in (1, 2)
|
||||
|
||||
|
||||
data = [x for x in [1, 2, 3] if x in (1, 2)]
|
||||
|
||||
|
||||
class TestClass:
|
||||
1 == 1
|
||||
|
||||
|
||||
print(1 == 1)
|
||||
11
resources/test/fixtures/B016.py
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
"""
|
||||
Should emit:
|
||||
B016 - on lines 6, 7, and 8
|
||||
"""
|
||||
|
||||
raise False
|
||||
raise 1
|
||||
raise "string"
|
||||
raise Exception(False)
|
||||
raise Exception(1)
|
||||
raise Exception("string")
|
||||
36
resources/test/fixtures/B017.py
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
"""
|
||||
Should emit:
|
||||
B017 - on line 20
|
||||
"""
|
||||
import asyncio
|
||||
import unittest
|
||||
|
||||
CONSTANT = True
|
||||
|
||||
|
||||
def something_else() -> None:
|
||||
for i in (1, 2, 3):
|
||||
print(i)
|
||||
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
|
||||
class Foobar(unittest.TestCase):
|
||||
def evil_raises(self) -> None:
|
||||
with self.assertRaises(Exception):
|
||||
raise Exception("Evil I say!")
|
||||
|
||||
def context_manager_raises(self) -> None:
|
||||
with self.assertRaises(Exception) as ex:
|
||||
raise Exception("Context manager is good")
|
||||
self.assertEqual("Context manager is good", str(ex.exception))
|
||||
|
||||
def regex_raises(self) -> None:
|
||||
with self.assertRaisesRegex(Exception, "Regex is good"):
|
||||
raise Exception("Regex is good")
|
||||
|
||||
def raises_with_absolute_reference(self):
|
||||
with self.assertRaises(asyncio.CancelledError):
|
||||
Foo()
|
||||
59
resources/test/fixtures/B018.py
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
class Foo1:
|
||||
"""abc"""
|
||||
|
||||
|
||||
class Foo2:
|
||||
"""abc"""
|
||||
|
||||
a = 2
|
||||
"str" # Str (no raise)
|
||||
f"{int}" # JoinedStr (no raise)
|
||||
1j # Number (complex)
|
||||
1 # Number (int)
|
||||
1.0 # Number (float)
|
||||
b"foo" # Binary
|
||||
True # NameConstant (True)
|
||||
False # NameConstant (False)
|
||||
None # NameConstant (None)
|
||||
[1, 2] # list
|
||||
{1, 2} # set
|
||||
{"foo": "bar"} # dict
|
||||
|
||||
|
||||
class Foo3:
|
||||
123
|
||||
a = 2
|
||||
"str"
|
||||
1
|
||||
|
||||
|
||||
def foo1():
|
||||
"""my docstring"""
|
||||
|
||||
|
||||
def foo2():
|
||||
"""my docstring"""
|
||||
a = 2
|
||||
"str" # Str (no raise)
|
||||
f"{int}" # JoinedStr (no raise)
|
||||
1j # Number (complex)
|
||||
1 # Number (int)
|
||||
1.0 # Number (float)
|
||||
b"foo" # Binary
|
||||
True # NameConstant (True)
|
||||
False # NameConstant (False)
|
||||
None # NameConstant (None)
|
||||
[1, 2] # list
|
||||
{1, 2} # set
|
||||
{"foo": "bar"} # dict
|
||||
|
||||
|
||||
def foo3():
|
||||
123
|
||||
a = 2
|
||||
"str"
|
||||
3
|
||||
|
||||
|
||||
def foo4():
|
||||
...
|
||||
108
resources/test/fixtures/B019.py
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
"""
|
||||
Should emit:
|
||||
B019 - on lines 73, 77, 81, 85, 89, 93, 97, 101
|
||||
"""
|
||||
import functools
|
||||
from functools import cache, cached_property, lru_cache
|
||||
|
||||
|
||||
def some_other_cache():
|
||||
...
|
||||
|
||||
|
||||
@functools.cache
|
||||
def compute_func(self, y):
|
||||
...
|
||||
|
||||
|
||||
class Foo:
|
||||
def __init__(self, x):
|
||||
self.x = x
|
||||
|
||||
def compute_method(self, y):
|
||||
...
|
||||
|
||||
@some_other_cache
|
||||
def user_cached_instance_method(self, y):
|
||||
...
|
||||
|
||||
@classmethod
|
||||
@functools.cache
|
||||
def cached_classmethod(cls, y):
|
||||
...
|
||||
|
||||
@classmethod
|
||||
@cache
|
||||
def other_cached_classmethod(cls, y):
|
||||
...
|
||||
|
||||
@classmethod
|
||||
@functools.lru_cache
|
||||
def lru_cached_classmethod(cls, y):
|
||||
...
|
||||
|
||||
@classmethod
|
||||
@lru_cache
|
||||
def other_lru_cached_classmethod(cls, y):
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
@functools.cache
|
||||
def cached_staticmethod(y):
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
@cache
|
||||
def other_cached_staticmethod(y):
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
@functools.lru_cache
|
||||
def lru_cached_staticmethod(y):
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
@lru_cache
|
||||
def other_lru_cached_staticmethod(y):
|
||||
...
|
||||
|
||||
@functools.cached_property
|
||||
def some_cached_property(self):
|
||||
...
|
||||
|
||||
@cached_property
|
||||
def some_other_cached_property(self):
|
||||
...
|
||||
|
||||
# Remaining methods should emit B019
|
||||
@functools.cache
|
||||
def cached_instance_method(self, y):
|
||||
...
|
||||
|
||||
@cache
|
||||
def another_cached_instance_method(self, y):
|
||||
...
|
||||
|
||||
@functools.cache()
|
||||
def called_cached_instance_method(self, y):
|
||||
...
|
||||
|
||||
@cache()
|
||||
def another_called_cached_instance_method(self, y):
|
||||
...
|
||||
|
||||
@functools.lru_cache
|
||||
def lru_cached_instance_method(self, y):
|
||||
...
|
||||
|
||||
@lru_cache
|
||||
def another_lru_cached_instance_method(self, y):
|
||||
...
|
||||
|
||||
@functools.lru_cache()
|
||||
def called_lru_cached_instance_method(self, y):
|
||||
...
|
||||
|
||||
@lru_cache()
|
||||
def another_called_lru_cached_instance_method(self, y):
|
||||
...
|
||||
38
resources/test/fixtures/B025.py
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
"""
|
||||
Should emit:
|
||||
B025 - on lines 15, 22, 31
|
||||
"""
|
||||
|
||||
import pickle
|
||||
|
||||
try:
|
||||
a = 1
|
||||
except ValueError:
|
||||
a = 2
|
||||
finally:
|
||||
a = 3
|
||||
|
||||
try:
|
||||
a = 1
|
||||
except ValueError:
|
||||
a = 2
|
||||
except ValueError:
|
||||
a = 2
|
||||
|
||||
try:
|
||||
a = 1
|
||||
except pickle.PickleError:
|
||||
a = 2
|
||||
except ValueError:
|
||||
a = 2
|
||||
except pickle.PickleError:
|
||||
a = 2
|
||||
|
||||
try:
|
||||
a = 1
|
||||
except (ValueError, TypeError):
|
||||
a = 2
|
||||
except ValueError:
|
||||
a = 2
|
||||
except (OSError, TypeError):
|
||||
a = 2
|
||||
21
resources/test/fixtures/B026.py
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
"""
|
||||
Should emit:
|
||||
B026 - on lines 16, 17, 18, 19, 20, 21
|
||||
"""
|
||||
|
||||
|
||||
def foo(bar, baz, bam):
|
||||
print(bar, baz, bam)
|
||||
|
||||
|
||||
bar_baz = ["bar", "baz"]
|
||||
|
||||
foo("bar", "baz", bam="bam")
|
||||
foo("bar", baz="baz", bam="bam")
|
||||
foo(bar="bar", baz="baz", bam="bam")
|
||||
foo(bam="bam", *["bar", "baz"])
|
||||
foo(bam="bam", *bar_baz)
|
||||
foo(baz="baz", bam="bam", *["bar"])
|
||||
foo(bar="bar", baz="baz", bam="bam", *[])
|
||||
foo(bam="bam", *["bar"], *["baz"])
|
||||
foo(*["bar"], bam="bam", *["baz"])
|
||||
4
resources/test/fixtures/C400.py
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
x = list(x for x in range(3))
|
||||
x = list(
|
||||
x for x in range(3)
|
||||
)
|
||||
4
resources/test/fixtures/C401.py
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
x = set(x for x in range(3))
|
||||
x = set(
|
||||
x for x in range(3)
|
||||
)
|
||||
5
resources/test/fixtures/C402.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
dict((x, x) for x in range(3))
|
||||
dict(
|
||||
(x, x) for x in range(3)
|
||||
)
|
||||
dict(((x, x) for x in range(3)), z=3)
|
||||
4
resources/test/fixtures/C403.py
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
s = set([x for x in range(3)])
|
||||
s = set(
|
||||
[x for x in range(3)]
|
||||
)
|
||||
2
resources/test/fixtures/C404.py
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
dict([(i, i) for i in range(3)])
|
||||
dict([(i, i) for i in range(3)], z=4)
|
||||
5
resources/test/fixtures/C405.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
s1 = set([1, 2])
|
||||
s2 = set((1, 2))
|
||||
s3 = set([])
|
||||
s4 = set(())
|
||||
s5 = set()
|
||||
5
resources/test/fixtures/C406.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
d1 = dict([(1, 2)])
|
||||
d2 = dict(((1, 2),))
|
||||
d3 = dict([])
|
||||
d4 = dict(())
|
||||
d5 = dict()
|
||||
5
resources/test/fixtures/C408.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
t = tuple()
|
||||
l = list()
|
||||
d1 = dict()
|
||||
d2 = dict(a=1)
|
||||
d3 = dict(**d2)
|
||||
10
resources/test/fixtures/C409.py
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
t1 = tuple([])
|
||||
t2 = tuple([1, 2])
|
||||
t3 = tuple((1, 2))
|
||||
t4 = tuple([
|
||||
1,
|
||||
2
|
||||
])
|
||||
t5 = tuple(
|
||||
(1, 2)
|
||||
)
|
||||
4
resources/test/fixtures/C410.py
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
l1 = list([1, 2])
|
||||
l2 = list((1, 2))
|
||||
l3 = list([])
|
||||
l4 = list(())
|
||||
2
resources/test/fixtures/C411.py
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
x = [1, 2, 3]
|
||||
list([i for i in x])
|
||||
6
resources/test/fixtures/C413.py
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
x = [2, 3, 1]
|
||||
list(x)
|
||||
list(sorted(x))
|
||||
reversed(sorted(x))
|
||||
reversed(sorted(x, key=lambda e: e))
|
||||
reversed(sorted(x, reverse=True))
|
||||
14
resources/test/fixtures/C414.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
x = [1, 2, 3]
|
||||
list(list(x))
|
||||
list(tuple(x))
|
||||
tuple(list(x))
|
||||
tuple(tuple(x))
|
||||
set(set(x))
|
||||
set(list(x))
|
||||
set(tuple(x))
|
||||
set(sorted(x))
|
||||
set(reversed(x))
|
||||
sorted(list(x))
|
||||
sorted(tuple(x))
|
||||
sorted(sorted(x))
|
||||
sorted(reversed(x))
|
||||
9
resources/test/fixtures/C415.py
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
lst = [2, 1, 3]
|
||||
a = set(lst[::-1])
|
||||
b = reversed(lst[::-1])
|
||||
c = sorted(lst[::-1])
|
||||
d = sorted(lst[::-1], reverse=True)
|
||||
e = set(lst[2:-1])
|
||||
f = set(lst[:1:-1])
|
||||
g = set(lst[::1])
|
||||
h = set(lst[::-2])
|
||||
6
resources/test/fixtures/C416.py
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
x = [1, 2, 3]
|
||||
[i for i in x]
|
||||
{i for i in x}
|
||||
|
||||
[i for i in x if i > 1]
|
||||
[i for i in x for j in x]
|
||||
6
resources/test/fixtures/C417.py
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
nums = [1, 2, 3]
|
||||
map(lambda x: x + 1, nums)
|
||||
map(lambda x: str(x), nums)
|
||||
list(map(lambda x: x * 2, nums))
|
||||
set(map(lambda x: x % 2 == 0, nums))
|
||||
dict(map(lambda v: (v, v**2), nums))
|
||||
534
resources/test/fixtures/D.py
vendored
Normal file
@@ -0,0 +1,534 @@
|
||||
# No docstring, so we can test D100
|
||||
from functools import wraps
|
||||
import os
|
||||
from .expected import Expectation
|
||||
from typing import overload
|
||||
|
||||
|
||||
expectation = Expectation()
|
||||
expect = expectation.expect
|
||||
|
||||
expect('class_', 'D101: Missing docstring in public class')
|
||||
|
||||
|
||||
class class_:
|
||||
|
||||
expect('meta', 'D419: Docstring is empty')
|
||||
|
||||
class meta:
|
||||
""""""
|
||||
|
||||
@expect('D102: Missing docstring in public method')
|
||||
def method(self=None):
|
||||
pass
|
||||
|
||||
def _ok_since_private(self=None):
|
||||
pass
|
||||
|
||||
@overload
|
||||
def overloaded_method(self, a: int) -> str:
|
||||
...
|
||||
|
||||
@overload
|
||||
def overloaded_method(self, a: str) -> str:
|
||||
"""Foo bar documentation."""
|
||||
...
|
||||
|
||||
def overloaded_method(a):
|
||||
"""Foo bar documentation."""
|
||||
return str(a)
|
||||
|
||||
expect('overloaded_method',
|
||||
"D418: Function/ Method decorated with @overload"
|
||||
" shouldn't contain a docstring")
|
||||
|
||||
@property
|
||||
def foo(self):
|
||||
"""The foo of the thing, which isn't in imperitive mood."""
|
||||
return "hello"
|
||||
|
||||
@expect('D102: Missing docstring in public method')
|
||||
def __new__(self=None):
|
||||
pass
|
||||
|
||||
@expect('D107: Missing docstring in __init__')
|
||||
def __init__(self=None):
|
||||
pass
|
||||
|
||||
@expect('D105: Missing docstring in magic method')
|
||||
def __str__(self=None):
|
||||
pass
|
||||
|
||||
@expect('D102: Missing docstring in public method')
|
||||
def __call__(self=None, x=None, y=None, z=None):
|
||||
pass
|
||||
|
||||
|
||||
@expect('D419: Docstring is empty')
|
||||
def function():
|
||||
""" """
|
||||
def ok_since_nested():
|
||||
pass
|
||||
|
||||
@expect('D419: Docstring is empty')
|
||||
def nested():
|
||||
''
|
||||
|
||||
|
||||
def function_with_nesting():
|
||||
"""Foo bar documentation."""
|
||||
@overload
|
||||
def nested_overloaded_func(a: int) -> str:
|
||||
...
|
||||
|
||||
@overload
|
||||
def nested_overloaded_func(a: str) -> str:
|
||||
"""Foo bar documentation."""
|
||||
...
|
||||
|
||||
def nested_overloaded_func(a):
|
||||
"""Foo bar documentation."""
|
||||
return str(a)
|
||||
|
||||
|
||||
expect('nested_overloaded_func',
|
||||
"D418: Function/ Method decorated with @overload"
|
||||
" shouldn't contain a docstring")
|
||||
|
||||
|
||||
@overload
|
||||
def overloaded_func(a: int) -> str:
|
||||
...
|
||||
|
||||
|
||||
@overload
|
||||
def overloaded_func(a: str) -> str:
|
||||
"""Foo bar documentation."""
|
||||
...
|
||||
|
||||
|
||||
def overloaded_func(a):
|
||||
"""Foo bar documentation."""
|
||||
return str(a)
|
||||
|
||||
|
||||
expect('overloaded_func',
|
||||
"D418: Function/ Method decorated with @overload"
|
||||
" shouldn't contain a docstring")
|
||||
|
||||
|
||||
@expect('D200: One-line docstring should fit on one line with quotes '
|
||||
'(found 3)')
|
||||
@expect('D212: Multi-line docstring summary should start at the first line')
|
||||
def asdlkfasd():
|
||||
"""
|
||||
Wrong.
|
||||
"""
|
||||
|
||||
|
||||
@expect('D201: No blank lines allowed before function docstring (found 1)')
|
||||
def leading_space():
|
||||
|
||||
"""Leading space."""
|
||||
|
||||
|
||||
@expect('D202: No blank lines allowed after function docstring (found 1)')
|
||||
def trailing_space():
|
||||
"""Leading space."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
@expect('D201: No blank lines allowed before function docstring (found 1)')
|
||||
@expect('D202: No blank lines allowed after function docstring (found 1)')
|
||||
def trailing_and_leading_space():
|
||||
|
||||
"""Trailing and leading space."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
expect('LeadingSpaceMissing',
|
||||
'D203: 1 blank line required before class docstring (found 0)')
|
||||
|
||||
|
||||
class LeadingSpaceMissing:
|
||||
"""Leading space missing."""
|
||||
|
||||
|
||||
expect('WithLeadingSpace',
|
||||
'D211: No blank lines allowed before class docstring (found 1)')
|
||||
|
||||
|
||||
class WithLeadingSpace:
|
||||
|
||||
"""With leading space."""
|
||||
|
||||
|
||||
expect('TrailingSpace',
|
||||
'D204: 1 blank line required after class docstring (found 0)')
|
||||
expect('TrailingSpace',
|
||||
'D211: No blank lines allowed before class docstring (found 1)')
|
||||
|
||||
|
||||
class TrailingSpace:
|
||||
|
||||
"""TrailingSpace."""
|
||||
pass
|
||||
|
||||
|
||||
expect('LeadingAndTrailingSpaceMissing',
|
||||
'D203: 1 blank line required before class docstring (found 0)')
|
||||
expect('LeadingAndTrailingSpaceMissing',
|
||||
'D204: 1 blank line required after class docstring (found 0)')
|
||||
|
||||
|
||||
class LeadingAndTrailingSpaceMissing:
|
||||
"""Leading and trailing space missing."""
|
||||
pass
|
||||
|
||||
|
||||
@expect('D205: 1 blank line required between summary line and description '
|
||||
'(found 0)')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def multi_line_zero_separating_blanks():
|
||||
"""Summary.
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D205: 1 blank line required between summary line and description '
|
||||
'(found 2)')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def multi_line_two_separating_blanks():
|
||||
"""Summary.
|
||||
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def multi_line_one_separating_blanks():
|
||||
"""Summary.
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D207: Docstring is under-indented')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def asdfsdf():
|
||||
"""Summary.
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D207: Docstring is under-indented')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def asdsdfsdffsdf():
|
||||
"""Summary.
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D208: Docstring is over-indented')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def asdfsdsdf24():
|
||||
"""Summary.
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D208: Docstring is over-indented')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def asdfsdsdfsdf24():
|
||||
"""Summary.
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D208: Docstring is over-indented')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def asdfsdfsdsdsdfsdf24():
|
||||
"""Summary.
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D209: Multi-line docstring closing quotes should be on a separate '
|
||||
'line')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def asdfljdf24():
|
||||
"""Summary.
|
||||
|
||||
Description."""
|
||||
|
||||
|
||||
@expect('D210: No whitespaces allowed surrounding docstring text')
|
||||
def endswith():
|
||||
"""Whitespace at the end. """
|
||||
|
||||
|
||||
@expect('D210: No whitespaces allowed surrounding docstring text')
|
||||
def around():
|
||||
""" Whitespace at everywhere. """
|
||||
|
||||
|
||||
@expect('D210: No whitespaces allowed surrounding docstring text')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def multiline():
|
||||
""" Whitespace at the beginning.
|
||||
|
||||
This is the end.
|
||||
"""
|
||||
|
||||
|
||||
@expect('D300: Use """triple double quotes""" (found \'\'\'-quotes)')
|
||||
def triple_single_quotes_raw():
|
||||
r'''Summary.'''
|
||||
|
||||
|
||||
@expect('D300: Use """triple double quotes""" (found \'\'\'-quotes)')
|
||||
def triple_single_quotes_raw_uppercase():
|
||||
R'''Summary.'''
|
||||
|
||||
|
||||
@expect('D300: Use """triple double quotes""" (found \'-quotes)')
|
||||
def single_quotes_raw():
|
||||
r'Summary.'
|
||||
|
||||
|
||||
@expect('D300: Use """triple double quotes""" (found \'-quotes)')
|
||||
def single_quotes_raw_uppercase():
|
||||
R'Summary.'
|
||||
|
||||
|
||||
@expect('D300: Use """triple double quotes""" (found \'-quotes)')
|
||||
@expect('D301: Use r""" if any backslashes in a docstring')
|
||||
def single_quotes_raw_uppercase_backslash():
|
||||
R'Sum\mary.'
|
||||
|
||||
|
||||
@expect('D301: Use r""" if any backslashes in a docstring')
|
||||
def double_quotes_backslash():
|
||||
"""Sum\\mary."""
|
||||
|
||||
|
||||
@expect('D301: Use r""" if any backslashes in a docstring')
|
||||
def double_quotes_backslash_uppercase():
|
||||
R"""Sum\\mary."""
|
||||
|
||||
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def exceptions_of_D301():
|
||||
"""Exclude some backslashes from D301.
|
||||
|
||||
In particular, line continuations \
|
||||
and unicode literals \u0394 and \N{GREEK CAPITAL LETTER DELTA}.
|
||||
They are considered to be intentionally unescaped.
|
||||
"""
|
||||
|
||||
|
||||
@expect("D400: First line should end with a period (not 'y')")
|
||||
@expect("D415: First line should end with a period, question mark, "
|
||||
"or exclamation point (not 'y')")
|
||||
def lwnlkjl():
|
||||
"""Summary"""
|
||||
|
||||
|
||||
@expect("D401: First line should be in imperative mood "
|
||||
"(perhaps 'Return', not 'Returns')")
|
||||
def liouiwnlkjl():
|
||||
"""Returns foo."""
|
||||
|
||||
|
||||
@expect("D401: First line should be in imperative mood; try rephrasing "
|
||||
"(found 'Constructor')")
|
||||
def sdgfsdg23245():
|
||||
"""Constructor for a foo."""
|
||||
|
||||
|
||||
@expect("D401: First line should be in imperative mood; try rephrasing "
|
||||
"(found 'Constructor')")
|
||||
def sdgfsdg23245777():
|
||||
"""Constructor."""
|
||||
|
||||
|
||||
@expect('D402: First line should not be the function\'s "signature"')
|
||||
def foobar():
|
||||
"""Signature: foobar()."""
|
||||
|
||||
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def new_209():
|
||||
"""First line.
|
||||
|
||||
More lines.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def old_209():
|
||||
"""One liner.
|
||||
|
||||
Multi-line comments. OK to have extra blank line
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect("D103: Missing docstring in public function")
|
||||
def oneliner_d102(): return
|
||||
|
||||
|
||||
@expect("D400: First line should end with a period (not 'r')")
|
||||
@expect("D415: First line should end with a period, question mark,"
|
||||
" or exclamation point (not 'r')")
|
||||
def oneliner_withdoc(): """One liner"""
|
||||
|
||||
|
||||
def ignored_decorator(func): # noqa: D400,D401,D415
|
||||
"""Runs something"""
|
||||
func()
|
||||
pass
|
||||
|
||||
|
||||
def decorator_for_test(func): # noqa: D400,D401,D415
|
||||
"""Runs something"""
|
||||
func()
|
||||
pass
|
||||
|
||||
|
||||
@ignored_decorator
|
||||
def oneliner_ignored_decorator(): """One liner"""
|
||||
|
||||
|
||||
@decorator_for_test
|
||||
@expect("D400: First line should end with a period (not 'r')")
|
||||
@expect("D415: First line should end with a period, question mark,"
|
||||
" or exclamation point (not 'r')")
|
||||
def oneliner_with_decorator_expecting_errors(): """One liner"""
|
||||
|
||||
|
||||
@decorator_for_test
|
||||
def valid_oneliner_with_decorator(): """One liner."""
|
||||
|
||||
|
||||
@expect("D207: Docstring is under-indented")
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def docstring_start_in_same_line(): """First Line.
|
||||
|
||||
Second Line
|
||||
"""
|
||||
|
||||
|
||||
def function_with_lambda_arg(x=lambda y: y):
|
||||
"""Wrap the given lambda."""
|
||||
|
||||
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def a_following_valid_function(x=None):
|
||||
"""Check for a bug where the previous function caused an assertion.
|
||||
|
||||
The assertion was caused in the next function, so this one is necessary.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
def outer_function():
|
||||
"""Do something."""
|
||||
def inner_function():
|
||||
"""Do inner something."""
|
||||
return 0
|
||||
|
||||
|
||||
@expect("D400: First line should end with a period (not 'g')")
|
||||
@expect("D401: First line should be in imperative mood "
|
||||
"(perhaps 'Run', not 'Runs')")
|
||||
@expect("D415: First line should end with a period, question mark, "
|
||||
"or exclamation point (not 'g')")
|
||||
def docstring_bad():
|
||||
"""Runs something"""
|
||||
pass
|
||||
|
||||
|
||||
def docstring_bad_ignore_all(): # noqa
|
||||
"""Runs something"""
|
||||
pass
|
||||
|
||||
|
||||
def docstring_bad_ignore_one(): # noqa: D400,D401,D415
|
||||
"""Runs something"""
|
||||
pass
|
||||
|
||||
|
||||
@expect("D401: First line should be in imperative mood "
|
||||
"(perhaps 'Run', not 'Runs')")
|
||||
def docstring_ignore_some_violations_but_catch_D401(): # noqa: E501,D400,D415
|
||||
"""Runs something"""
|
||||
pass
|
||||
|
||||
|
||||
@expect(
|
||||
"D401: First line should be in imperative mood "
|
||||
"(perhaps 'Initiate', not 'Initiates')"
|
||||
)
|
||||
def docstring_initiates():
|
||||
"""Initiates the process."""
|
||||
|
||||
|
||||
@expect(
|
||||
"D401: First line should be in imperative mood "
|
||||
"(perhaps 'Initialize', not 'Initializes')"
|
||||
)
|
||||
def docstring_initializes():
|
||||
"""Initializes the process."""
|
||||
|
||||
|
||||
@wraps(docstring_bad_ignore_one)
|
||||
def bad_decorated_function():
|
||||
"""Bad (E501) but decorated"""
|
||||
pass
|
||||
|
||||
|
||||
def valid_google_string(): # noqa: D400
|
||||
"""Test a valid something!"""
|
||||
|
||||
|
||||
@expect("D415: First line should end with a period, question mark, "
|
||||
"or exclamation point (not 'g')")
|
||||
def bad_google_string(): # noqa: D400
|
||||
"""Test a valid something"""
|
||||
|
||||
|
||||
# This is reproducing a bug where AttributeError is raised when parsing class
|
||||
# parameters as functions for Google / Numpy conventions.
|
||||
class Blah: # noqa: D203,D213
|
||||
"""A Blah.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
x : int
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, x):
|
||||
pass
|
||||
|
||||
|
||||
expect(os.path.normcase(__file__ if __file__[-1] != 'c' else __file__[:-1]),
|
||||
'D100: Missing docstring in public module')
|
||||
35  resources/test/fixtures/E501.py  vendored
@@ -14,3 +14,38 @@ Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor i

_ = "---------------------------------------------------------------------------AAAAAAA"
_ = "---------------------------------------------------------------------------亜亜亜亜亜亜亜"


def caller(string: str) -> None:
    assert string is not None


caller(
    """
    Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
    """,
)


caller(
    """
    Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
    """,  # noqa: E501
)

multiple_strings_per_line = {
    # OK
    "Lorem ipsum dolor": "sit amet",
    # E501 Line too long
    "Lorem ipsum dolor": "sit amet consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.",
    # E501 Line too long
    "Lorem ipsum dolor": """
    sit amet consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
    """,
    # OK
    "Lorem ipsum dolor": "sit amet consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.",  # noqa: E501
    # OK
    "Lorem ipsum dolor": """
    sit amet consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
    """,  # noqa: E501
}
@@ -28,7 +28,6 @@ from blah import ClassA, ClassB, ClassC
if TYPE_CHECKING:
    from models import Fruit, Nut, Vegetable


if TYPE_CHECKING:
    import shelve
    import importlib
@@ -61,7 +60,28 @@ Y = TypeVar("Y", bound="Dict")
Z = TypeVar("Z", "List", "Set")

a = list["Fruit"]
b = Union["Nut", None]
b = Union["""Nut""", None]
c = cast("Vegetable", b)

Field = lambda default=MISSING: field(default=default)


# Test: access a sub-importation via an alias.
import pyarrow as pa
import pyarrow.csv

print(pa.csv.read_csv("test.csv"))


# Test: referencing an import via TypeAlias.
import sys

import numpy as np

if sys.version_info >= (3, 10):
    from typing import TypeAlias
else:
    from typing_extensions import TypeAlias


CustomInt: TypeAlias = "np.int8 | np.int16"
5  resources/test/fixtures/F401_1.py  vendored  (new file)
@@ -0,0 +1,5 @@
"""Access a sub-importation via an alias."""
import pyarrow as pa
import pyarrow.csv

print(pa.csv.read_csv("test.csv"))
12  resources/test/fixtures/F401_2.py  vendored  (new file)
@@ -0,0 +1,12 @@
"""Test: referencing an import via TypeAlias."""
import sys

import numpy as np

if sys.version_info >= (3, 10):
    from typing import TypeAlias
else:
    from typing_extensions import TypeAlias


CustomInt: TypeAlias = "np.int8 | np.int16"
14  resources/test/fixtures/F401_3.py  vendored  (new file)
@@ -0,0 +1,14 @@
"""Test: referencing an import via TypeAlias (with future annotations)."""
from __future__ import annotations

import sys

import numpy as np

if sys.version_info >= (3, 10):
    from typing import TypeAlias
else:
    from typing_extensions import TypeAlias


CustomInt: TypeAlias = np.int8 | np.int16
14  resources/test/fixtures/F401_4.py  vendored  (new file)
@@ -0,0 +1,14 @@
"""Test: referencing an import via TypeAlias (with future annotations and quotes)."""
from __future__ import annotations

import sys

import numpy as np

if sys.version_info >= (3, 10):
    from typing import TypeAlias
else:
    from typing_extensions import TypeAlias


CustomInt: TypeAlias = "np.int8 | np.int16"
5  resources/test/fixtures/F401_5.py  vendored  (new file)
@@ -0,0 +1,5 @@
"""Test: removal of multi-segment and aliases imports."""
from a.b import c
from d.e import f as g
import h.i
import j.k as l
1  resources/test/fixtures/F404.py  vendored
@@ -1,4 +1,3 @@
from __future__ import print_function
"""Docstring"""
from __future__ import absolute_import
3  resources/test/fixtures/F632.py  vendored
@@ -3,3 +3,6 @@ if x is "abc":

if 123 is not y:
    pass

if "123" is x < 3:
    pass
@@ -122,3 +122,12 @@ class PEP593Test:
        dict["foo", "bar"],  # Expected to fail as undefined.
        123,
    ]


def in_ipython_notebook() -> bool:
    try:
        # autoimported by notebooks
        get_ipython()  # type: ignore[name-defined]
    except NameError:
        return False  # not in notebook
    return True
30  resources/test/fixtures/F821_1.py  vendored  (new file)
@@ -0,0 +1,30 @@
"""Test: typing module imports."""
from foo import cast

# OK
x = cast("Model")

from typing import cast


# F821 Undefined name `Model`
x = cast("Model", x)


import typing


# F821 Undefined name `Model`
x = typing.cast("Model", x)


from typing import Pattern

# F821 Undefined name `Model`
x = Pattern["Model"]


from typing.re import Match

# F821 Undefined name `Model`
x = Match["Model"]
18  resources/test/fixtures/F821_2.py  vendored  (new file)
@@ -0,0 +1,18 @@
"""Test: typing_extensions module imports."""
from foo import Literal

# F821 Undefined name `Model`
x: Literal["Model"]

from typing_extensions import Literal


# OK
x: Literal["Model"]


import typing_extensions


# OK
x: typing_extensions.Literal["Model"]
14  resources/test/fixtures/F821_3.py  vendored  (new file)
@@ -0,0 +1,14 @@
"""Test (edge case): accesses of object named `dict`."""

# OK
dict = {"parameters": {}}
dict["parameters"]["userId"] = "userId"
dict["parameters"]["itemId"] = "itemId"
del dict["parameters"]["itemId"]

# F821 Undefined name `key`
# F821 Undefined name `value`
x: dict["key", "value"]

# OK
x: dict[str, str]
3  resources/test/fixtures/M001.py  vendored
@@ -15,6 +15,9 @@ def f() -> None:
    # Invalid
    d = 1  # noqa: F841, E501

    # Invalid (and unimplemented)
    d = 1  # noqa: F841, W191


# Valid
_ = """Lorem ipsum dolor sit amet.
34  resources/test/fixtures/N801.py  vendored  (new file)
@@ -0,0 +1,34 @@
class bad:
    pass


class _bad:
    pass


class bad_class:
    pass


class Bad_Class:
    pass


class BAD_CLASS:
    pass


class Good:
    pass


class _Good:
    pass


class GoodClass:
    pass


class GOOD:
    pass
41  resources/test/fixtures/N802.py  vendored  (new file)
@@ -0,0 +1,41 @@
import unittest


def Bad():
    pass


def _Bad():
    pass


def BAD():
    pass


def BAD_FUNC():
    pass


def good():
    pass


def _good():
    pass


def good_func():
    pass


def tearDownModule():
    pass


class Test(unittest.TestCase):
    def tearDown(self):
        return super().tearDown()

    def testTest(self):
        assert True
7  resources/test/fixtures/N803.py  vendored  (new file)
@@ -0,0 +1,7 @@
def func(_, a, A):
    return _, a, A


class Class:
    def method(self, _, a, A):
        return _, a, A
43  resources/test/fixtures/N804.py  vendored  (new file)
@@ -0,0 +1,43 @@
from abc import ABCMeta


class Class:
    def bad_method(this):
        pass

    if False:

        def extra_bad_method(this):
            pass

    def good_method(self):
        pass

    @classmethod
    def class_method(cls):
        pass

    @staticmethod
    def static_method(x):
        return x

    def __init__(self):
        ...

    def __new__(cls, *args, **kwargs):
        ...

    def __init_subclass__(self, default_name, **kwargs):
        ...


class MetaClass(ABCMeta):
    def bad_method(self):
        pass

    def good_method(cls):
        pass


def func(x):
    return x
43  resources/test/fixtures/N805.py  vendored  (new file)
@@ -0,0 +1,43 @@
from abc import ABCMeta


class Class:
    def bad_method(this):
        pass

    if False:

        def extra_bad_method(this):
            pass

    def good_method(self):
        pass

    @classmethod
    def class_method(cls):
        pass

    @staticmethod
    def static_method(x):
        return x

    def __init__(self):
        ...

    def __new__(cls, *args, **kwargs):
        ...

    def __init_subclass__(self, default_name, **kwargs):
        ...


class MetaClass(ABCMeta):
    def bad_method(self):
        pass

    def good_method(cls):
        pass


def func(x):
    return x
5  resources/test/fixtures/N806.py  vendored  (new file)
@@ -0,0 +1,5 @@
def f():
    lower = 0
    Camel = 0
    CONSTANT = 0
    _ = 0
15  resources/test/fixtures/N807.py  vendored  (new file)
@@ -0,0 +1,15 @@
def __bad__():
    pass


def __good():
    pass


def good__():
    pass


class Class:
    def __good__(self):
        pass
3  resources/test/fixtures/N811.py  vendored  (new file)
@@ -0,0 +1,3 @@
import mod.CONST as const
from mod import CONSTANT as constant
from mod import ANOTHER_CONSTANT as another_constant
3  resources/test/fixtures/N812.py  vendored  (new file)
@@ -0,0 +1,3 @@
import modl.lowercase as Lower
from mod import lowercase as Lowercase
from mod import another_lowercase as AnotherLowercase
3  resources/test/fixtures/N813.py  vendored  (new file)
@@ -0,0 +1,3 @@
import mod.Camel as camel
from mod import CamelCase as camelcase
from mod import AnotherCamelCase as another_camelcase
3  resources/test/fixtures/N814.py  vendored  (new file)
@@ -0,0 +1,3 @@
import mod.Camel as CAMEL
from mod import CamelCase as CAMELCASE
from mod import AnotherCamelCase as ANOTHER_CAMELCASE
6  resources/test/fixtures/N815.py  vendored  (new file)
@@ -0,0 +1,6 @@
class C:
    lower = 0
    CONSTANT = 0
    mixedCase = 0
    _mixedCase = 0
    mixed_Case = 0
5  resources/test/fixtures/N816.py  vendored  (new file)
@@ -0,0 +1,5 @@
lower = 0
CONSTANT = 0
mixedCase = 0
_mixedCase = 0
mixed_Case = 0
2  resources/test/fixtures/N817.py  vendored  (new file)
@@ -0,0 +1,2 @@
import mod.CaMel as CM
from mod import CamelCase as CC
10  resources/test/fixtures/N818.py  vendored  (new file)
@@ -0,0 +1,10 @@
class Error(Exception):
    pass


class AnotherError(Exception):
    pass


class C(Exception):
    pass
3  resources/test/fixtures/R002.py  vendored
@@ -1,3 +0,0 @@
self.assertEquals (1, 2)
self.assertEquals(1, 2)
self.assertEqual(3, 4)
7  resources/test/fixtures/RUF001.py  vendored  (new file)
@@ -0,0 +1,7 @@
x = "𝐁ad string"


def f():
    """Here's a docstring with an unusual parenthesis: )"""
    # And here's a comment with an unusual punctuation mark: ᜵
    ...
7  resources/test/fixtures/RUF002.py  vendored  (new file)
@@ -0,0 +1,7 @@
x = "𝐁ad string"


def f():
    """Here's a docstring with an unusual parenthesis: )"""
    # And here's a comment with an unusual punctuation mark: ᜵
    ...
7  resources/test/fixtures/RUF003.py  vendored  (new file)
@@ -0,0 +1,7 @@
x = "𝐁ad string"


def f():
    """Here's a docstring with an unusual parenthesis: )"""
    # And here's a comment with an unusual punctuation mark: ᜵
    ...
15  resources/test/fixtures/U002.py  vendored  (new file)
@@ -0,0 +1,15 @@
from os.path import abspath

x = abspath(__file__)


import os


y = os.path.abspath(__file__)


from os import path


z = path.abspath(__file__)
5  resources/test/fixtures/U003.py  vendored  (new file)
@@ -0,0 +1,5 @@
type('')
type(b'')
type(0)
type(0.)
type(0j)
10  resources/test/fixtures/U005.py  vendored  (new file)
@@ -0,0 +1,10 @@
import unittest


class Suite(unittest.TestCase):
    def test(self) -> None:
        self.assertEquals (1, 2)
        self.assertEquals(1, 2)
        self.assertEqual(3, 4)
        self.failUnlessAlmostEqual(1, 1.1)
        self.assertNotRegexpMatches("a", "b")
12  resources/test/fixtures/U006.py  vendored  (new file)
@@ -0,0 +1,12 @@
from typing import List


def f(x: List[str]) -> None:
    ...


import typing


def f(x: typing.List[str]) -> None:
    ...
40  resources/test/fixtures/U007.py  vendored  (new file)
@@ -0,0 +1,40 @@
from typing import Optional


def f(x: Optional[str]) -> None:
    ...


import typing


def f(x: typing.Optional[str]) -> None:
    ...


from typing import Union


def f(x: Union[str, int, Union[float, bytes]]) -> None:
    ...


import typing


def f(x: typing.Union[str, int]) -> None:
    ...


from typing import Union


def f(x: "Union[str, int, Union[float, bytes]]") -> None:
    ...


import typing


def f(x: "typing.Union[str, int]") -> None:
    ...
Some files were not shown because too many files have changed in this diff.