Compare commits

..

2 Commits

Author SHA1 Message Date
Zanie
b3980c1113 Add must_use 2023-12-26 12:56:06 -06:00
Zanie
4f6e03ced8 Do not take explicit-preview-rules into account when selecting fixable rules 2023-12-26 12:43:35 -06:00
231 changed files with 4120 additions and 4734 deletions

View File

@@ -168,7 +168,7 @@ jobs:
cargo-fuzz:
runs-on: ubuntu-latest
needs: determine_changes
if: false # ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo fuzz"
steps:
- uses: actions/checkout@v4

247
.github/workflows/flake8-to-ruff.yaml vendored Normal file
View File

@@ -0,0 +1,247 @@
name: "[flake8-to-ruff] Release"
on: workflow_dispatch
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
PACKAGE_NAME: flake8-to-ruff
CRATE_NAME: flake8_to_ruff
PYTHON_VERSION: "3.11"
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
macos-x86_64:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --out dist --sdist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel - x86_64"
run: |
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
macos-universal:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels - universal2"
uses: PyO3/maturin-action@v1
with:
args: --release --target universal2-apple-darwin --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel - universal2"
run: |
pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
windows:
runs-on: windows-latest
strategy:
matrix:
target: [x64, x86]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.target }}
- name: "Install Rust toolchain"
run: rustup show
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
shell: bash
run: |
python -m pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
linux:
runs-on: ubuntu-latest
strategy:
matrix:
target: [x86_64, i686]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
if: matrix.target == 'x86_64'
run: |
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
linux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
target: [aarch64, armv7, s390x, ppc64le, ppc64]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- uses: uraimo/run-on-arch-action@v2
if: matrix.target != 'ppc64'
name: Install built wheel
with:
arch: ${{ matrix.target }}
distro: ubuntu20.04
githubToken: ${{ github.token }}
install: |
apt-get update
apt-get install -y --no-install-recommends python3 python3-pip
pip3 install -U pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
musllinux:
runs-on: ubuntu-latest
strategy:
matrix:
target:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
with:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add py3-pip
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
musllinux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
platform:
- target: aarch64-unknown-linux-musl
arch: aarch64
- target: armv7-unknown-linux-musleabihf
arch: armv7
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- uses: uraimo/run-on-arch-action@v2
name: Install built wheel
with:
arch: ${{ matrix.platform.arch }}
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add py3-pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
release:
name: Release
runs-on: ubuntu-latest
needs:
- macos-universal
- macos-x86_64
- windows
- linux
- linux-cross
- musllinux
- musllinux-cross
steps:
- uses: actions/download-artifact@v3
with:
name: wheels
- uses: actions/setup-python@v5
- name: "Publish to PyPi"
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.FLAKE8_TO_RUFF_TOKEN }}
run: |
pip install --upgrade twine
twine upload --skip-existing *

View File

@@ -1,72 +1,5 @@
# Changelog
## 0.1.11
### Preview features
- \[`pylint`\] Implement `super-without-brackets` (`W0245`) ([#9257](https://github.com/astral-sh/ruff/pull/9257))
### Bug fixes
- Check path string properly in `python -m ruff` invocations ([#9367](https://github.com/astral-sh/ruff/pull/9367))
### Documentation
- Tweak `relative-imports` message ([#9365](https://github.com/astral-sh/ruff/pull/9365))
- Add fix safety note for `yield-in-for-loop` ([#9364](https://github.com/astral-sh/ruff/pull/9364))
## 0.1.10
### Preview features
- Improve `dummy_implementations` preview style formatting ([#9240](https://github.com/astral-sh/ruff/pull/9240))
- Normalise Hex and unicode escape sequences in strings ([#9280](https://github.com/astral-sh/ruff/pull/9280))
- Parenthesize long type annotations in annotated assignments ([#9210](https://github.com/astral-sh/ruff/pull/9210))
- Parenthesize multi-context managers in `with` statements ([#9222](https://github.com/astral-sh/ruff/pull/9222))
- \[`flake8-pyi`\] Implement `generator-return-from-iter-method` (`PYI058`) ([#9313](https://github.com/astral-sh/ruff/pull/9313))
- \[`pylint`\] Implement `empty-comment` (`PLR2044`) ([#9174](https://github.com/astral-sh/ruff/pull/9174))
- \[`refurb`\] Implement `bit-count` (`FURB161`) ([#9265](https://github.com/astral-sh/ruff/pull/9265))
- \[`ruff`\] Add `never-union` rule to detect redundant `typing.NoReturn` and `typing.Never` ([#9217](https://github.com/astral-sh/ruff/pull/9217))
### CLI
- Add paths to TOML parse errors ([#9358](https://github.com/astral-sh/ruff/pull/9358))
- Add row and column numbers to formatter parse errors ([#9321](https://github.com/astral-sh/ruff/pull/9321))
- Improve responsiveness when invoked via Python ([#9315](https://github.com/astral-sh/ruff/pull/9315))
- Short rule messages should not end with a period ([#9345](https://github.com/astral-sh/ruff/pull/9345))
### Configuration
- Respect runtime-required decorators on functions ([#9317](https://github.com/astral-sh/ruff/pull/9317))
### Bug fixes
- Avoid `asyncio-dangling-task` for nonlocal and global bindings ([#9263](https://github.com/astral-sh/ruff/pull/9263))
- Escape trailing placeholders in rule documentation ([#9301](https://github.com/astral-sh/ruff/pull/9301))
- Fix continuation detection following multi-line strings ([#9332](https://github.com/astral-sh/ruff/pull/9332))
- Fix scoping for generators in named expressions in classes ([#9248](https://github.com/astral-sh/ruff/pull/9248))
- Port from obsolete wsl crate to is-wsl ([#9356](https://github.com/astral-sh/ruff/pull/9356))
- Remove special pre-visit for module docstrings ([#9261](https://github.com/astral-sh/ruff/pull/9261))
- Respect `__str__` definitions from super classes ([#9338](https://github.com/astral-sh/ruff/pull/9338))
- Respect `unused-noqa` via `per-file-ignores` ([#9300](https://github.com/astral-sh/ruff/pull/9300))
- Respect attribute chains when resolving builtin call paths ([#9309](https://github.com/astral-sh/ruff/pull/9309))
- Treat all `typing_extensions` members as typing aliases ([#9335](https://github.com/astral-sh/ruff/pull/9335))
- Use `Display` for formatter parse errors ([#9316](https://github.com/astral-sh/ruff/pull/9316))
- Wrap subscripted dicts in parens for f-string conversion ([#9238](https://github.com/astral-sh/ruff/pull/9238))
- \[`flake8-annotations`\] Avoid adding return types to stub methods ([#9277](https://github.com/astral-sh/ruff/pull/9277))
- \[`flake8-annotations`\] Respect mixed `return` and `raise` cases in return-type analysis ([#9310](https://github.com/astral-sh/ruff/pull/9310))
- \[`flake8-bandit`\] Don't report violations when `SafeLoader` is imported from `yaml.loader` (`S506`) ([#9299](https://github.com/astral-sh/ruff/pull/9299))
- \[`pylint`\] Avoid panic when comment is preceded by Unicode ([#9331](https://github.com/astral-sh/ruff/pull/9331))
- \[`pylint`\] Change `PLR0917` error message to match other `PLR09XX` messages ([#9308](https://github.com/astral-sh/ruff/pull/9308))
- \[`refurb`\] Avoid false positives for `math-constant` (`FURB152`) ([#9290](https://github.com/astral-sh/ruff/pull/9290))
### Documentation
- Expand target name for better rule documentation ([#9302](https://github.com/astral-sh/ruff/pull/9302))
- Fix typos found by codespell ([#9346](https://github.com/astral-sh/ruff/pull/9346))
- \[`perflint`\] Document `PERF102` fix un-safety ([#9351](https://github.com/astral-sh/ruff/pull/9351))
- \[`pyupgrade`\] Document `UP007` fix un-safety ([#9306](https://github.com/astral-sh/ruff/pull/9306))
## 0.1.9
### Breaking changes

159
Cargo.lock generated
View File

@@ -312,9 +312,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.4.12"
version = "4.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcfab8ba68f3668e89f6ff60f5b205cea56aa7b769451a59f34b8682f51c056d"
checksum = "ac495e00dcec98c83465d5ad66c5c4fabd652fd6686e7c6269b117e729a6f17b"
dependencies = [
"clap_builder",
"clap_derive",
@@ -322,9 +322,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.4.12"
version = "4.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb7fb5e4e979aec3be7791562fcba452f94ad85e954da024396433e0e25a79e9"
checksum = "c77ed9a32a62e6ca27175d00d29d05ca32e396ea1eb5fb01d8256b669cec7663"
dependencies = [
"anstream",
"anstyle",
@@ -442,6 +442,12 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "configparser"
version = "3.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0e56e414a2a52ab2a104f85cd40933c2fbc278b83637facf646ecf451b49237"
[[package]]
name = "console"
version = "0.15.7"
@@ -800,6 +806,30 @@ version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.1.9"
dependencies = [
"anyhow",
"clap",
"colored",
"configparser",
"itertools 0.11.0",
"log",
"once_cell",
"pep440_rs 0.4.0",
"pretty_assertions",
"regex",
"ruff_linter",
"ruff_workspace",
"rustc-hash",
"serde",
"serde_json",
"strum",
"strum_macros",
"toml",
]
[[package]]
name = "flate2"
version = "1.0.27"
@@ -1089,22 +1119,14 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "is-docker"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "928bae27f42bc99b60d9ac7334e3a21d10ad8f1835a4e12ec3ec0464765ed1b3"
dependencies = [
"once_cell",
]
[[package]]
name = "is-macro"
version = "0.3.4"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b75828adcb53122ef5ea649a39f50f82d94b754099bf6331b32e255e1891e8fb"
checksum = "bc74b7abae208af9314a406bd7dcc65091230b6e749c09e07a645885fecf34f9"
dependencies = [
"Inflector",
"pmutil 0.6.1",
"proc-macro2",
"quote",
"syn 2.0.40",
@@ -1121,16 +1143,6 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "is-wsl"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "173609498df190136aa7dea1a91db051746d339e18476eed5ca40521f02d7aa5"
dependencies = [
"is-docker",
"once_cell",
]
[[package]]
name = "itertools"
version = "0.10.5"
@@ -1142,9 +1154,9 @@ dependencies = [
[[package]]
name = "itertools"
version = "0.12.0"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0"
checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
dependencies = [
"either",
]
@@ -1688,6 +1700,17 @@ version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58"
[[package]]
name = "pmutil"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3894e5d549cccbe44afecf72922f277f603cd4bb0219c8342631ef18fffbe004"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "pmutil"
version = "0.6.1"
@@ -1781,9 +1804,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.73"
version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dd5e8a1f1029c43224ad5898e50140c2aebb1705f19e67c918ebf5b9e797fe1"
checksum = "75cb1540fadbd5b8fbccc4dddad2734eba435053f725621c070711a14bb5f4b8"
dependencies = [
"unicode-ident",
]
@@ -1980,23 +2003,24 @@ checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
[[package]]
name = "result-like"
version = "0.5.0"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abf7172fef6a7d056b5c26bf6c826570267562d51697f4982ff3ba4aec68a9df"
checksum = "ccc7ce6435c33898517a30e85578cd204cbb696875efb93dec19a2d31294f810"
dependencies = [
"result-like-derive",
]
[[package]]
name = "result-like-derive"
version = "0.5.0"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8d6574c02e894d66370cfc681e5d68fedbc9a548fb55b30a96b3f0ae22d0fe5"
checksum = "1fabf0a2e54f711c68c50d49f648a1a8a37adcb57353f518ac4df374f0788f42"
dependencies = [
"pmutil",
"pmutil 0.5.3",
"proc-macro2",
"quote",
"syn 2.0.40",
"syn 1.0.109",
"syn-ext",
]
[[package]]
@@ -2041,7 +2065,7 @@ dependencies = [
"filetime",
"glob",
"globset",
"itertools 0.12.0",
"itertools 0.11.0",
"regex",
"ruff_macros",
"seahash",
@@ -2049,8 +2073,9 @@ dependencies = [
[[package]]
name = "ruff_cli"
version = "0.1.11"
version = "0.1.9"
dependencies = [
"annotate-snippets 0.9.2",
"anyhow",
"argfile",
"assert_cmd",
@@ -2063,11 +2088,12 @@ dependencies = [
"clearscreen",
"colored",
"filetime",
"glob",
"ignore",
"insta",
"insta-cmd",
"is-macro",
"itertools 0.12.0",
"itertools 0.11.0",
"log",
"mimalloc",
"notify",
@@ -2076,11 +2102,14 @@ dependencies = [
"regex",
"ruff_cache",
"ruff_diagnostics",
"ruff_formatter",
"ruff_linter",
"ruff_macros",
"ruff_notebook",
"ruff_python_ast",
"ruff_python_formatter",
"ruff_python_stdlib",
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"ruff_workspace",
@@ -2088,12 +2117,14 @@ dependencies = [
"serde",
"serde_json",
"shellexpand",
"similar",
"strum",
"tempfile",
"test-case",
"thiserror",
"tikv-jemallocator",
"tracing",
"ureq",
"walkdir",
"wild",
]
@@ -2108,7 +2139,7 @@ dependencies = [
"imara-diff",
"indicatif",
"indoc",
"itertools 0.12.0",
"itertools 0.11.0",
"libcst",
"once_cell",
"pretty_assertions",
@@ -2122,6 +2153,7 @@ dependencies = [
"ruff_python_ast",
"ruff_python_codegen",
"ruff_python_formatter",
"ruff_python_literal",
"ruff_python_parser",
"ruff_python_stdlib",
"ruff_python_trivia",
@@ -2131,6 +2163,7 @@ dependencies = [
"serde_json",
"similar",
"strum",
"strum_macros",
"tempfile",
"toml",
"tracing",
@@ -2176,7 +2209,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.1.11"
version = "0.1.9"
dependencies = [
"aho-corasick",
"annotate-snippets 0.9.2",
@@ -2191,8 +2224,7 @@ dependencies = [
"imperative",
"insta",
"is-macro",
"is-wsl",
"itertools 0.12.0",
"itertools 0.11.0",
"libcst",
"log",
"memchr",
@@ -2238,13 +2270,14 @@ dependencies = [
"unicode-width",
"unicode_names2",
"url",
"wsl",
]
[[package]]
name = "ruff_macros"
version = "0.0.0"
dependencies = [
"itertools 0.12.0",
"itertools 0.11.0",
"proc-macro2",
"quote",
"ruff_python_trivia",
@@ -2257,7 +2290,7 @@ version = "0.0.0"
dependencies = [
"anyhow",
"insta",
"itertools 0.12.0",
"itertools 0.11.0",
"once_cell",
"ruff_diagnostics",
"ruff_source_file",
@@ -2277,7 +2310,9 @@ dependencies = [
"bitflags 2.4.1",
"insta",
"is-macro",
"itertools 0.12.0",
"itertools 0.11.0",
"memchr",
"once_cell",
"ruff_python_parser",
"ruff_python_trivia",
"ruff_source_file",
@@ -2308,7 +2343,7 @@ dependencies = [
"clap",
"countme",
"insta",
"itertools 0.12.0",
"itertools 0.11.0",
"memchr",
"once_cell",
"regex",
@@ -2337,6 +2372,7 @@ dependencies = [
name = "ruff_python_index"
version = "0.0.0"
dependencies = [
"itertools 0.11.0",
"ruff_python_ast",
"ruff_python_parser",
"ruff_python_trivia",
@@ -2351,7 +2387,7 @@ dependencies = [
"bitflags 2.4.1",
"hexf-parse",
"is-macro",
"itertools 0.12.0",
"itertools 0.11.0",
"lexical-parse-float",
"rand",
"unic-ucd-category",
@@ -2365,7 +2401,7 @@ dependencies = [
"bitflags 2.4.1",
"insta",
"is-macro",
"itertools 0.12.0",
"itertools 0.11.0",
"lalrpop",
"lalrpop-util",
"memchr",
@@ -2416,9 +2452,8 @@ name = "ruff_python_trivia"
version = "0.0.0"
dependencies = [
"insta",
"itertools 0.12.0",
"itertools 0.11.0",
"ruff_python_ast",
"ruff_python_index",
"ruff_python_parser",
"ruff_source_file",
"ruff_text_size",
@@ -2427,7 +2462,7 @@ dependencies = [
[[package]]
name = "ruff_shrinking"
version = "0.1.11"
version = "0.1.9"
dependencies = [
"anyhow",
"clap",
@@ -2470,6 +2505,7 @@ dependencies = [
"console_log",
"js-sys",
"log",
"ruff_diagnostics",
"ruff_formatter",
"ruff_linter",
"ruff_python_ast",
@@ -2498,7 +2534,7 @@ dependencies = [
"globset",
"ignore",
"is-macro",
"itertools 0.12.0",
"itertools 0.11.0",
"log",
"once_cell",
"path-absolutize",
@@ -2694,9 +2730,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.109"
version = "1.0.108"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb0652c533506ad7a2e353cce269330d6afd8bdfb6d75e0ace5b35aacbd7b9e9"
checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
dependencies = [
"itoa",
"ryu",
@@ -2869,6 +2905,15 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "syn-ext"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b86cb2b68c5b3c078cac02588bc23f3c04bb828c5d3aedd17980876ec6a7be6"
dependencies = [
"syn 1.0.109",
]
[[package]]
name = "tempfile"
version = "3.8.1"
@@ -3775,6 +3820,12 @@ dependencies = [
"memchr",
]
[[package]]
name = "wsl"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8dab7ac864710bdea6594becbea5b5050333cf34fefb0dc319567eb347950d4"
[[package]]
name = "yaml-rust"
version = "0.4.5"

View File

@@ -12,103 +12,48 @@ authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
license = "MIT"
[workspace.dependencies]
aho-corasick = { version = "1.1.2" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.76" }
argfile = { version = "0.1.6" }
assert_cmd = { version = "2.0.8" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.4.1" }
cachedir = { version = "0.3.1" }
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
clap = { version = "4.4.12", features = ["derive"] }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "2.0.0" }
codspeed-criterion-compat = { version = "2.3.3", default-features = false }
clap = { version = "4.4.7", features = ["derive"] }
colored = { version = "2.1.0" }
configparser = { version = "3.0.3" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version ="3.0.1"}
criterion = { version = "0.5.1", default-features = false }
dirs = { version = "5.0.0" }
drop_bomb = { version = "0.1.5" }
env_logger = { version ="0.10.1"}
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
fs-err = { version ="2.11.0"}
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
hexf-parse = { version ="0.2.1"}
ignore = { version = "0.4.21" }
imara-diff ={ version = "0.1.5"}
imperative = { version = "1.0.4" }
indicatif ={ version = "0.17.7"}
indoc ={ version = "2.0.4"}
insta = { version = "1.34.0", feature = ["filters", "glob"] }
insta-cmd = { version = "0.4.0" }
is-macro = { version = "0.3.4" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.12.0" }
js-sys = { version = "0.3.66" }
lalrpop-util = { version = "0.20.0", default-features = false }
lexical-parse-float = { version = "0.8.0", features = ["format"] }
is-macro = { version = "0.3.1" }
itertools = { version = "0.11.0" }
libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
memchr = { version = "2.6.4" }
mimalloc = { version ="0.1.39"}
natord = { version = "1.0.9" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
path-absolutize = { version = "3.1.1" }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.4.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.73" }
pyproject-toml = { version = "0.8.1" }
quick-junit = { version = "0.3.5" }
proc-macro2 = { version = "1.0.71" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.8.0" }
regex = { version = "1.10.2" }
result-like = { version = "0.5.0" }
rustc-hash = { version = "1.1.0" }
schemars = { version = "0.8.16" }
seahash = { version ="4.1.0"}
semver = { version = "1.0.20" }
serde = { version = "1.0.193", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.3" }
serde_json = { version = "1.0.109" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.4.0", default-features = false, features = ["macros"] }
serde_json = { version = "1.0.108" }
shellexpand = { version = "3.0.0" }
shlex = { version ="1.2.0"}
similar = { version = "2.3.0", features = ["inline"] }
smallvec = { version = "1.11.2" }
static_assertions = "1.1.0"
strum = { version = "0.25.0", features = ["strum_macros"] }
strum_macros = { version = "0.25.3" }
syn = { version = "2.0.40" }
tempfile = { version ="3.8.1"}
test-case = { version = "3.3.1" }
thiserror = { version = "1.0.51" }
tikv-jemallocator = { version ="0.5.0"}
toml = { version = "0.8.8" }
tracing = { version = "0.1.40" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version ="0.9"}
unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.1" }
ureq = { version = "2.9.1" }
url = { version = "2.5.0" }
unicode-width = { version = "0.1.11" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.84" }
wasm-bindgen-test = { version = "0.3.39" }
wild = { version = "2" }
wsl = { version = "0.1.0" }
[workspace.lints.rust]
unsafe_code = "warn"

View File

@@ -150,7 +150,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.11
rev: v0.1.9
hooks:
# Run the linter.
- id: ruff
@@ -432,7 +432,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [PyTorch](https://github.com/pytorch/pytorch)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
- [PyVista](https://github.com/pyvista/pyvista)
- [Reflex](https://github.com/reflex-dev/reflex)
- [River](https://github.com/online-ml/river)
- [Rippling](https://rippling.com)

View File

@@ -0,0 +1,39 @@
[package]
name = "flake8-to-ruff"
version = "0.1.9"
description = """
Convert Flake8 configuration files to Ruff configuration files.
"""
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }
[dependencies]
ruff_linter = { path = "../ruff_linter", default-features = false }
ruff_workspace = { path = "../ruff_workspace" }
anyhow = { workspace = true }
clap = { workspace = true }
colored = { workspace = true }
configparser = { version = "3.0.3" }
itertools = { workspace = true }
log = { workspace = true }
once_cell = { workspace = true }
pep440_rs = { version = "0.4.0", features = ["serde"] }
regex = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
toml = { workspace = true }
[dev-dependencies]
pretty_assertions = "1.3.0"
[lints]
workspace = true

View File

@@ -0,0 +1,99 @@
# flake8-to-ruff
Convert existing Flake8 configuration files (`setup.cfg`, `tox.ini`, or `.flake8`) for use with
[Ruff](https://github.com/astral-sh/ruff).
Generates a Ruff-compatible `pyproject.toml` section.
## Installation and Usage
### Installation
Available as [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) on PyPI:
```shell
pip install flake8-to-ruff
```
### Usage
To run `flake8-to-ruff`:
```shell
flake8-to-ruff path/to/setup.cfg
flake8-to-ruff path/to/tox.ini
flake8-to-ruff path/to/.flake8
```
`flake8-to-ruff` will print the relevant `pyproject.toml` sections to standard output, like so:
```toml
[tool.ruff]
exclude = [
'.svn',
'CVS',
'.bzr',
'.hg',
'.git',
'__pycache__',
'.tox',
'.idea',
'.mypy_cache',
'.venv',
'node_modules',
'_state_machine.py',
'test_fstring.py',
'bad_coding2.py',
'badsyntax_*.py',
]
select = [
'A',
'E',
'F',
'Q',
]
ignore = []
[tool.ruff.flake8-quotes]
inline-quotes = 'single'
[tool.ruff.pep8-naming]
ignore-names = [
'foo',
'bar',
]
```
### Plugins
`flake8-to-ruff` will attempt to infer any activated plugins based on the settings provided in your
configuration file.
For example, if your `.flake8` file includes a `docstring-convention` property, `flake8-to-ruff`
will enable the appropriate [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
checks.
Alternatively, you can manually specify plugins on the command-line:
```shell
flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
```
## Limitations
1. Ruff only supports a subset of the Flake8 configuration options. `flake8-to-ruff` will warn on and
ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
configuration options that don't exist in Flake8.)
1. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
codes from unsupported plugins. (See the
[documentation](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8) for the complete
list of supported plugins.)
## License
MIT
## Contributing
Contributions are welcome and hugely appreciated. To get started, check out the
[contributing guidelines](https://github.com/astral-sh/ruff/blob/main/CONTRIBUTING.md).

View File

@@ -0,0 +1,65 @@
[build-system]
requires = [
# The minimum setuptools version is specific to the PEP 517 backend,
# and may be stricter than the version required in `setup.cfg`
"setuptools>=40.6.0,!=60.9.0",
"wheel",
# Must be kept in sync with the `install_requirements` in `setup.cfg`
"cffi>=1.12; platform_python_implementation != 'PyPy'",
"setuptools-rust>=0.11.4",
]
build-backend = "setuptools.build_meta"
[tool.black]
line-length = 79
target-version = ["py36"]
[tool.pytest.ini_options]
addopts = "-r s --capture=no --strict-markers --benchmark-disable"
markers = [
"skip_fips: this test is not executed in FIPS mode",
"supported: parametrized test requiring only_if and skip_message",
]
[tool.mypy]
show_error_codes = true
check_untyped_defs = true
no_implicit_reexport = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_unused_configs = true
strict_equality = true
[[tool.mypy.overrides]]
module = [
"pretend"
]
ignore_missing_imports = true
[tool.coverage.run]
branch = true
relative_files = true
source = [
"cryptography",
"tests/",
]
[tool.coverage.paths]
source = [
"src/cryptography",
"*.tox/*/lib*/python*/site-packages/cryptography",
"*.tox\\*\\Lib\\site-packages\\cryptography",
"*.tox/pypy/site-packages/cryptography",
]
tests =[
"tests/",
"*tests\\",
]
[tool.coverage.report]
exclude_lines = [
"@abc.abstractmethod",
"@abc.abstractproperty",
"@typing.overload",
"if typing.TYPE_CHECKING",
]

View File

@@ -0,0 +1,91 @@
[metadata]
name = cryptography
version = attr: cryptography.__version__
description = cryptography is a package which provides cryptographic recipes and primitives to Python developers.
long_description = file: README.rst
long_description_content_type = text/x-rst
license = BSD-3-Clause OR Apache-2.0
url = https://github.com/pyca/cryptography
author = The Python Cryptographic Authority and individual contributors
author_email = cryptography-dev@python.org
project_urls =
Documentation=https://cryptography.io/
Source=https://github.com/pyca/cryptography/
Issues=https://github.com/pyca/cryptography/issues
Changelog=https://cryptography.io/en/latest/changelog/
classifiers =
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: Apache Software License
License :: OSI Approved :: BSD License
Natural Language :: English
Operating System :: MacOS :: MacOS X
Operating System :: POSIX
Operating System :: POSIX :: BSD
Operating System :: POSIX :: Linux
Operating System :: Microsoft :: Windows
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: Implementation :: PyPy
Topic :: Security :: Cryptography
[options]
python_requires = >=3.6
include_package_data = True
zip_safe = False
package_dir =
=src
packages = find:
# `install_requires` must be kept in sync with `pyproject.toml`
install_requires =
cffi >=1.12
[options.packages.find]
where = src
exclude =
_cffi_src
_cffi_src.*
[options.extras_require]
test =
pytest>=6.2.0
pytest-benchmark
pytest-cov
pytest-subtests
pytest-xdist
pretend
iso8601
pytz
hypothesis>=1.11.4,!=3.79.2
docs =
sphinx >= 1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0
sphinx_rtd_theme
docstest =
pyenchant >= 1.6.11
twine >= 1.12.0
sphinxcontrib-spelling >= 4.0.1
sdist =
setuptools_rust >= 0.11.4
pep8test =
black
flake8
flake8-import-order
pep8-naming
# This extra is for OpenSSH private keys that use bcrypt KDF
# Versions: v3.1.3 - ignore_few_rounds, v3.1.5 - abi3
ssh =
bcrypt >= 3.1.5
[flake8]
ignore = E203,E211,W503,W504,N818
exclude = .tox,*.egg,.git,_build,.hypothesis
select = E,W,F,N,I
application-import-names = cryptography,cryptography_vectors,tests

View File

@@ -0,0 +1,19 @@
[flake8]
# Ignore style and complexity
# E: style errors
# W: style warnings
# C: complexity
# D: docstring warnings (unused pydocstyle extension)
# F841: local variable assigned but never used
ignore = E, C, W, D, F841
builtins = c, get_config
exclude =
.cache,
.github,
docs,
jupyterhub/alembic*,
onbuild,
scripts,
share,
tools,
setup.py

View File

@@ -0,0 +1,43 @@
[flake8]
# Exclude the grpc generated code
exclude = ./manim/grpc/gen/*
max-complexity = 15
max-line-length = 88
statistics = True
# Prevents some flake8-rst-docstrings errors
rst-roles = attr,class,func,meth,mod,obj,ref,doc,exc
rst-directives = manim, SEEALSO, seealso
docstring-convention=numpy
select = A,A00,B,B9,C4,C90,D,E,F,F,PT,RST,SIM,W
# General Compatibility
extend-ignore = E203, W503, D202, D212, D213, D404
# Misc
F401, F403, F405, F841, E501, E731, E402, F811, F821,
# Plug-in: flake8-builtins
A001, A002, A003,
# Plug-in: flake8-bugbear
B006, B007, B008, B009, B010, B903, B950,
# Plug-in: flake8-simplify
SIM105, SIM106, SIM119,
# Plug-in: flake8-comprehensions
C901
# Plug-in: flake8-pytest-style
PT001, PT004, PT006, PT011, PT018, PT022, PT023,
# Plug-in: flake8-docstrings
D100, D101, D102, D103, D104, D105, D106, D107,
D200, D202, D204, D205, D209,
D301,
D400, D401, D402, D403, D405, D406, D407, D409, D411, D412, D414,
# Plug-in: flake8-rst-docstrings
RST201, RST203, RST210, RST212, RST213, RST215,
RST301, RST303,

View File

@@ -0,0 +1,36 @@
[flake8]
min_python_version = 3.7.0
max-line-length = 88
ban-relative-imports = true
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
format-greedy = 1
inline-quotes = double
enable-extensions = TC, TC1
type-checking-strict = true
eradicate-whitelist-extend = ^-.*;
extend-ignore =
# E203: Whitespace before ':' (pycqa/pycodestyle#373)
E203,
# SIM106: Handle error-cases first
SIM106,
# ANN101: Missing type annotation for self in method
ANN101,
# ANN102: Missing type annotation for cls in classmethod
ANN102,
# PIE781: assign-and-return
PIE781,
# PIE798 no-unnecessary-class: Consider using a module for namespacing instead
PIE798,
per-file-ignores =
# TC002: Move third-party import '...' into a type-checking block
__init__.py:TC002,
# ANN201: Missing return type annotation for public function
tests/test_*:ANN201
tests/**/test_*:ANN201
extend-exclude =
# Frozen and not subject to change in this repo:
get-poetry.py,
install-poetry.py,
# External to the project's coding standards:
tests/fixtures/*,
tests/**/fixtures/*,

View File

@@ -0,0 +1,19 @@
[flake8]
max-line-length=120
docstring-convention=all
import-order-style=pycharm
application_import_names=bot,tests
exclude=.cache,.venv,.git,constants.py
extend-ignore=
B311,W503,E226,S311,T000,E731
# Missing Docstrings
D100,D104,D105,D107,
# Docstring Whitespace
D203,D212,D214,D215,
# Docstring Quotes
D301,D302,
# Docstring Content
D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
# Type Annotations
ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
per-file-ignores=tests/*:D,ANN

View File

@@ -0,0 +1,6 @@
[flake8]
ignore = E203, E501, W503
per-file-ignores =
requests/__init__.py:E402, F401
requests/compat.py:E402, F401
tests/compat.py:F401

View File

@@ -0,0 +1,34 @@
[project]
name = "flake8-to-ruff"
keywords = ["automation", "flake8", "pycodestyle", "pyflakes", "pylint", "clippy"]
classifiers = [
  "Development Status :: 3 - Alpha",
  "Environment :: Console",
  "Intended Audience :: Developers",
  "License :: OSI Approved :: MIT License",
  "Operating System :: OS Independent",
  "Programming Language :: Python",
  "Programming Language :: Python :: 3.7",
  "Programming Language :: Python :: 3.8",
  "Programming Language :: Python :: 3.9",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: 3 :: Only",
  "Topic :: Software Development :: Libraries :: Python Modules",
  "Topic :: Software Development :: Quality Assurance",
]
# PEP 621 defines `authors` as an array of tables; the legacy setuptools-style
# `author` / `author_email` keys are not part of the `[project]` schema and
# are ignored by PEP 621 build backends such as maturin.
authors = [{ name = "Charlie Marsh", email = "charlie.r.marsh@gmail.com" }]
description = "Convert existing Flake8 configuration to Ruff."
requires-python = ">=3.7"

[project.urls]
repository = "https://github.com/astral-sh/ruff#subdirectory=crates/flake8_to_ruff"

[build-system]
requires = ["maturin>=1.0,<2.0"]
build-backend = "maturin"

[tool.maturin]
bindings = "bin"
strip = true

View File

@@ -0,0 +1,13 @@
//! Extract Black configuration settings from a pyproject.toml.
use ruff_linter::line_width::LineLength;
use ruff_linter::settings::types::PythonVersion;
use serde::{Deserialize, Serialize};
/// The subset of Black's configuration that informs the conversion.
///
/// Deserialized from the `[tool.black]` table of a `pyproject.toml`; both
/// kebab-case and snake_case key spellings are accepted via serde aliases.
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Black {
    /// Black's `line-length` setting, if present.
    #[serde(alias = "line-length", alias = "line_length")]
    pub(crate) line_length: Option<LineLength>,
    /// Black's `target-version` setting (a list of Python versions), if present.
    #[serde(alias = "target-version", alias = "target_version")]
    pub(crate) target_version: Option<Vec<PythonVersion>>,
}

View File

@@ -0,0 +1,687 @@
use std::collections::{HashMap, HashSet};
use std::str::FromStr;
use itertools::Itertools;
use ruff_linter::line_width::LineLength;
use ruff_linter::registry::Linter;
use ruff_linter::rule_selector::RuleSelector;
use ruff_linter::rules::flake8_pytest_style::types::{
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
};
use ruff_linter::rules::flake8_quotes::settings::Quote;
use ruff_linter::rules::flake8_tidy_imports::settings::Strictness;
use ruff_linter::rules::pydocstyle::settings::Convention;
use ruff_linter::settings::types::PythonVersion;
use ruff_linter::settings::DEFAULT_SELECTORS;
use ruff_linter::warn_user;
use ruff_workspace::options::{
Flake8AnnotationsOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, Flake8ErrMsgOptions,
Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, LintCommonOptions,
LintOptions, McCabeOptions, Options, Pep8NamingOptions, PydocstyleOptions,
};
use ruff_workspace::pyproject::Pyproject;
use super::external_config::ExternalConfig;
use super::plugin::Plugin;
use super::{parser, plugin};
pub(crate) fn convert(
config: &HashMap<String, HashMap<String, Option<String>>>,
external_config: &ExternalConfig,
plugins: Option<Vec<Plugin>>,
) -> Pyproject {
// Extract the Flake8 section.
let flake8 = config
.get("flake8")
.expect("Unable to find flake8 section in INI file");
// Extract all referenced rule code prefixes, to power plugin inference.
let mut referenced_codes: HashSet<RuleSelector> = HashSet::default();
for (key, value) in flake8 {
if let Some(value) = value {
match key.as_str() {
"select" | "ignore" | "extend-select" | "extend_select" | "extend-ignore"
| "extend_ignore" => {
referenced_codes.extend(parser::parse_prefix_codes(value.as_ref()));
}
"per-file-ignores" | "per_file_ignores" => {
if let Ok(per_file_ignores) =
parser::parse_files_to_codes_mapping(value.as_ref())
{
for (_, codes) in parser::collect_per_file_ignores(per_file_ignores) {
referenced_codes.extend(codes);
}
}
}
_ => {}
}
}
}
// Infer plugins, if not provided.
let plugins = plugins.unwrap_or_else(|| {
let from_options = plugin::infer_plugins_from_options(flake8);
if !from_options.is_empty() {
#[allow(clippy::print_stderr)]
{
eprintln!("Inferred plugins from settings: {from_options:#?}");
}
}
let from_codes = plugin::infer_plugins_from_codes(&referenced_codes);
if !from_codes.is_empty() {
#[allow(clippy::print_stderr)]
{
eprintln!("Inferred plugins from referenced codes: {from_codes:#?}");
}
}
from_options.into_iter().chain(from_codes).collect()
});
// Check if the user has specified a `select`. If not, we'll add our own
// default `select`, and populate it based on user plugins.
let mut select = flake8
.get("select")
.and_then(|value| {
value
.as_ref()
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
})
.unwrap_or_else(|| resolve_select(&plugins));
let mut ignore: HashSet<RuleSelector> = flake8
.get("ignore")
.and_then(|value| {
value
.as_ref()
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
})
.unwrap_or_default();
// Parse each supported option.
let mut options = Options::default();
let mut lint_options = LintCommonOptions::default();
let mut flake8_annotations = Flake8AnnotationsOptions::default();
let mut flake8_bugbear = Flake8BugbearOptions::default();
let mut flake8_builtins = Flake8BuiltinsOptions::default();
let mut flake8_errmsg = Flake8ErrMsgOptions::default();
let mut flake8_pytest_style = Flake8PytestStyleOptions::default();
let mut flake8_quotes = Flake8QuotesOptions::default();
let mut flake8_tidy_imports = Flake8TidyImportsOptions::default();
let mut mccabe = McCabeOptions::default();
let mut pep8_naming = Pep8NamingOptions::default();
let mut pydocstyle = PydocstyleOptions::default();
for (key, value) in flake8 {
if let Some(value) = value {
match key.as_str() {
// flake8
"builtins" => {
options.builtins = Some(parser::parse_strings(value.as_ref()));
}
"max-line-length" | "max_line_length" => match LineLength::from_str(value) {
Ok(line_length) => options.line_length = Some(line_length),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
"select" => {
// No-op (handled above).
select.extend(parser::parse_prefix_codes(value.as_ref()));
}
"ignore" => {
// No-op (handled above).
}
"extend-select" | "extend_select" => {
// Unlike Flake8, use a single explicit `select`.
select.extend(parser::parse_prefix_codes(value.as_ref()));
}
"extend-ignore" | "extend_ignore" => {
// Unlike Flake8, use a single explicit `ignore`.
ignore.extend(parser::parse_prefix_codes(value.as_ref()));
}
"exclude" => {
options.exclude = Some(parser::parse_strings(value.as_ref()));
}
"extend-exclude" | "extend_exclude" => {
options.extend_exclude = Some(parser::parse_strings(value.as_ref()));
}
"per-file-ignores" | "per_file_ignores" => {
match parser::parse_files_to_codes_mapping(value.as_ref()) {
Ok(per_file_ignores) => {
lint_options.per_file_ignores =
Some(parser::collect_per_file_ignores(per_file_ignores));
}
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-bugbear
"extend-immutable-calls" | "extend_immutable_calls" => {
flake8_bugbear.extend_immutable_calls =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-builtins
"builtins-ignorelist" | "builtins_ignorelist" => {
flake8_builtins.builtins_ignorelist =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-annotations
"suppress-none-returning" | "suppress_none_returning" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.suppress_none_returning = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"suppress-dummy-args" | "suppress_dummy_args" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.suppress_dummy_args = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"mypy-init-return" | "mypy_init_return" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.mypy_init_return = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"allow-star-arg-any" | "allow_star_arg_any" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.allow_star_arg_any = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-quotes
"quotes" | "inline-quotes" | "inline_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.inline_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.inline_quotes = Some(Quote::Double),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"multiline-quotes" | "multiline_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.multiline_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.multiline_quotes = Some(Quote::Double),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"docstring-quotes" | "docstring_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.docstring_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.docstring_quotes = Some(Quote::Double),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"avoid-escape" | "avoid_escape" => match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_quotes.avoid_escape = Some(bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
// pep8-naming
"ignore-names" | "ignore_names" => {
pep8_naming.ignore_names = Some(parser::parse_strings(value.as_ref()));
}
"classmethod-decorators" | "classmethod_decorators" => {
pep8_naming.classmethod_decorators =
Some(parser::parse_strings(value.as_ref()));
}
"staticmethod-decorators" | "staticmethod_decorators" => {
pep8_naming.staticmethod_decorators =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-tidy-imports
"ban-relative-imports" | "ban_relative_imports" => match value.trim() {
"true" => flake8_tidy_imports.ban_relative_imports = Some(Strictness::All),
"parents" => {
flake8_tidy_imports.ban_relative_imports = Some(Strictness::Parents);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
// flake8-docstrings
"docstring-convention" => match value.trim() {
"google" => pydocstyle.convention = Some(Convention::Google),
"numpy" => pydocstyle.convention = Some(Convention::Numpy),
"pep257" => pydocstyle.convention = Some(Convention::Pep257),
"all" => pydocstyle.convention = None,
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
// mccabe
"max-complexity" | "max_complexity" => match value.parse::<usize>() {
Ok(max_complexity) => mccabe.max_complexity = Some(max_complexity),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
// flake8-errmsg
"errmsg-max-string-length" | "errmsg_max_string_length" => {
match value.parse::<usize>() {
Ok(max_string_length) => {
flake8_errmsg.max_string_length = Some(max_string_length);
}
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-pytest-style
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_pytest_style.fixture_parentheses = Some(!bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
match value.trim() {
"csv" => {
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::Csv);
}
"tuple" => {
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::Tuple);
}
"list" => {
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::List);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
match value.trim() {
"tuple" => {
flake8_pytest_style.parametrize_values_type =
Some(ParametrizeValuesType::Tuple);
}
"list" => {
flake8_pytest_style.parametrize_values_type =
Some(ParametrizeValuesType::List);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
match value.trim() {
"tuple" => {
flake8_pytest_style.parametrize_values_row_type =
Some(ParametrizeValuesRowType::Tuple);
}
"list" => {
flake8_pytest_style.parametrize_values_row_type =
Some(ParametrizeValuesRowType::List);
}
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
flake8_pytest_style.raises_require_match_for =
Some(parser::parse_strings(value.as_ref()));
}
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_pytest_style.mark_parentheses = Some(!bool),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// Unknown
_ => {
warn_user!("Skipping unsupported property: {}", key);
}
}
}
}
// Deduplicate and sort.
lint_options.select = Some(
select
.into_iter()
.sorted_by_key(RuleSelector::prefix_and_code)
.collect(),
);
lint_options.ignore = Some(
ignore
.into_iter()
.sorted_by_key(RuleSelector::prefix_and_code)
.collect(),
);
if flake8_annotations != Flake8AnnotationsOptions::default() {
lint_options.flake8_annotations = Some(flake8_annotations);
}
if flake8_bugbear != Flake8BugbearOptions::default() {
lint_options.flake8_bugbear = Some(flake8_bugbear);
}
if flake8_builtins != Flake8BuiltinsOptions::default() {
lint_options.flake8_builtins = Some(flake8_builtins);
}
if flake8_errmsg != Flake8ErrMsgOptions::default() {
lint_options.flake8_errmsg = Some(flake8_errmsg);
}
if flake8_pytest_style != Flake8PytestStyleOptions::default() {
lint_options.flake8_pytest_style = Some(flake8_pytest_style);
}
if flake8_quotes != Flake8QuotesOptions::default() {
lint_options.flake8_quotes = Some(flake8_quotes);
}
if flake8_tidy_imports != Flake8TidyImportsOptions::default() {
lint_options.flake8_tidy_imports = Some(flake8_tidy_imports);
}
if mccabe != McCabeOptions::default() {
lint_options.mccabe = Some(mccabe);
}
if pep8_naming != Pep8NamingOptions::default() {
lint_options.pep8_naming = Some(pep8_naming);
}
if pydocstyle != PydocstyleOptions::default() {
lint_options.pydocstyle = Some(pydocstyle);
}
// Extract any settings from the existing `pyproject.toml`.
if let Some(black) = &external_config.black {
if let Some(line_length) = &black.line_length {
options.line_length = Some(*line_length);
}
if let Some(target_version) = &black.target_version {
if let Some(target_version) = target_version.iter().min() {
options.target_version = Some(*target_version);
}
}
}
if let Some(isort) = &external_config.isort {
if let Some(src_paths) = &isort.src_paths {
match options.src.as_mut() {
Some(src) => {
src.extend_from_slice(src_paths);
}
None => {
options.src = Some(src_paths.clone());
}
}
}
}
if let Some(project) = &external_config.project {
if let Some(requires_python) = &project.requires_python {
if options.target_version.is_none() {
options.target_version =
PythonVersion::get_minimum_supported_version(requires_python);
}
}
}
if lint_options != LintCommonOptions::default() {
options.lint = Some(LintOptions {
common: lint_options,
..LintOptions::default()
});
}
// Create the pyproject.toml.
Pyproject::new(options)
}
/// Resolve the set of enabled `RuleSelector` values for the given plugins:
/// the default selectors, plus one selector per plugin's associated linter.
fn resolve_select(plugins: &[Plugin]) -> HashSet<RuleSelector> {
    let defaults = DEFAULT_SELECTORS.iter().cloned();
    let from_plugins = plugins.iter().map(|plugin| Linter::from(plugin).into());
    defaults.chain(from_plugins).collect()
}
// Unit tests for `convert`: each test feeds a small in-memory `flake8`
// section (and, where relevant, external config) and compares the resulting
// `Pyproject` against a hand-built expectation.
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use std::str::FromStr;

    use anyhow::Result;
    use itertools::Itertools;
    use pep440_rs::VersionSpecifiers;
    use pretty_assertions::assert_eq;

    use ruff_linter::line_width::LineLength;
    use ruff_linter::registry::Linter;
    use ruff_linter::rule_selector::RuleSelector;
    use ruff_linter::rules::flake8_quotes;
    use ruff_linter::rules::pydocstyle::settings::Convention;
    use ruff_linter::settings::types::PythonVersion;
    use ruff_workspace::options::{
        Flake8QuotesOptions, LintCommonOptions, LintOptions, Options, PydocstyleOptions,
    };
    use ruff_workspace::pyproject::Pyproject;

    use crate::converter::DEFAULT_SELECTORS;
    use crate::pep621::Project;
    use crate::ExternalConfig;

    use super::super::plugin::Plugin;
    use super::convert;

    /// Build the `LintCommonOptions` that `convert` produces for an empty
    /// Flake8 config: the default selectors plus the given plugin selectors,
    /// sorted, with an empty `ignore`.
    fn lint_default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> LintCommonOptions {
        LintCommonOptions {
            ignore: Some(vec![]),
            select: Some(
                DEFAULT_SELECTORS
                    .iter()
                    .cloned()
                    .chain(plugins)
                    .sorted_by_key(RuleSelector::prefix_and_code)
                    .collect(),
            ),
            ..LintCommonOptions::default()
        }
    }

    #[test]
    fn it_converts_empty() {
        let actual = convert(
            &HashMap::from([("flake8".to_string(), HashMap::default())]),
            &ExternalConfig::default(),
            None,
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_converts_dashes() {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("max-line-length".to_string(), Some("100".to_string()))]),
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            line_length: Some(LineLength::try_from(100).unwrap()),
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_converts_underscores() {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("max_line_length".to_string(), Some("100".to_string()))]),
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            line_length: Some(LineLength::try_from(100).unwrap()),
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_ignores_parse_errors() {
        // An unparseable value is warned about and skipped, not fatal.
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("max_line_length".to_string(), Some("abc".to_string()))]),
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_converts_plugin_options() {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: LintCommonOptions {
                    flake8_quotes: Some(Flake8QuotesOptions {
                        inline_quotes: Some(flake8_quotes::settings::Quote::Single),
                        multiline_quotes: None,
                        docstring_quotes: None,
                        avoid_escape: None,
                    }),
                    ..lint_default_options([])
                },
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_converts_docstring_conventions() {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([(
                    "docstring-convention".to_string(),
                    Some("numpy".to_string()),
                )]),
            )]),
            &ExternalConfig::default(),
            Some(vec![Plugin::Flake8Docstrings]),
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: LintCommonOptions {
                    pydocstyle: Some(PydocstyleOptions {
                        convention: Some(Convention::Numpy),
                        ignore_decorators: None,
                        property_decorators: None,
                    }),
                    ..lint_default_options([Linter::Pydocstyle.into()])
                },
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_infers_plugins_if_omitted() {
        // With `plugins: None`, the presence of `inline-quotes` should pull
        // in the flake8-quotes selector.
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
            )]),
            &ExternalConfig::default(),
            None,
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: LintCommonOptions {
                    flake8_quotes: Some(Flake8QuotesOptions {
                        inline_quotes: Some(flake8_quotes::settings::Quote::Single),
                        multiline_quotes: None,
                        docstring_quotes: None,
                        avoid_escape: None,
                    }),
                    ..lint_default_options([Linter::Flake8Quotes.into()])
                },
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_converts_project_requires_python() -> Result<()> {
        // The minimum of `requires-python` becomes Ruff's `target-version`.
        let actual = convert(
            &HashMap::from([("flake8".to_string(), HashMap::default())]),
            &ExternalConfig {
                project: Some(&Project {
                    requires_python: Some(VersionSpecifiers::from_str(">=3.8.16, <3.11")?),
                }),
                ..ExternalConfig::default()
            },
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            target_version: Some(PythonVersion::Py38),
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
        Ok(())
    }
}

View File

@@ -0,0 +1,10 @@
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;
/// References to the non-Flake8 configuration sources that inform the
/// conversion: Black and isort settings (from `[tool.*]` tables) and the
/// PEP 621 `[project]` table.
#[derive(Default)]
pub(crate) struct ExternalConfig<'a> {
    /// The `[tool.black]` settings, if present.
    pub(crate) black: Option<&'a Black>,
    /// The `[tool.isort]` settings, if present.
    pub(crate) isort: Option<&'a Isort>,
    /// The `[project]` table, if present.
    pub(crate) project: Option<&'a Project>,
}

View File

@@ -0,0 +1,10 @@
//! Extract isort configuration settings from a pyproject.toml.
use serde::{Deserialize, Serialize};
/// The [isort configuration](https://pycqa.github.io/isort/docs/configuration/config_files.html).
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Isort {
    /// isort's `src_paths` setting; both kebab-case and snake_case key
    /// spellings are accepted via serde aliases.
    #[serde(alias = "src-paths", alias = "src_paths")]
    pub(crate) src_paths: Option<Vec<String>>,
}

View File

@@ -0,0 +1,80 @@
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.
mod black;
mod converter;
mod external_config;
mod isort;
mod parser;
mod pep621;
mod plugin;
mod pyproject;
use std::path::PathBuf;
use anyhow::Result;
use clap::Parser;
use configparser::ini::Ini;
use crate::converter::convert;
use crate::external_config::ExternalConfig;
use crate::plugin::Plugin;
use crate::pyproject::parse;
use ruff_linter::logging::{set_up_logging, LogLevel};
// Command-line arguments for `flake8-to-ruff`. Note that the `///` doc
// comments on the fields below are user-facing: clap's derive API turns them
// into `--help` text.
#[derive(Parser)]
#[command(
    about = "Convert existing Flake8 configuration to Ruff.",
    long_about = None
)]
struct Args {
    /// Path to the Flake8 configuration file (e.g., `setup.cfg`, `tox.ini`, or
    /// `.flake8`).
    #[arg(required = true)]
    file: PathBuf,
    /// Optional path to a `pyproject.toml` file, used to ensure compatibility
    /// with Black.
    #[arg(long)]
    pyproject: Option<PathBuf>,
    /// List of plugins to enable.
    #[arg(long, value_delimiter = ',')]
    plugin: Option<Vec<Plugin>>,
}
/// Entry point: read a Flake8 INI file (and optionally a `pyproject.toml`),
/// then print the equivalent Ruff `pyproject.toml` configuration to stdout.
fn main() -> Result<()> {
    set_up_logging(&LogLevel::Default)?;

    let args = Args::parse();

    // Read the INI file. Case-sensitive keys, with multiline values enabled
    // (settings like `per-file-ignores` commonly span multiple lines).
    let mut ini = Ini::new_cs();
    ini.set_multiline(true);
    let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;

    // Read the pyproject.toml file.
    let pyproject = args.pyproject.map(parse).transpose()?;
    let external_config = pyproject
        .as_ref()
        .and_then(|pyproject| pyproject.tool.as_ref())
        .map(|tool| ExternalConfig {
            black: tool.black.as_ref(),
            isort: tool.isort.as_ref(),
            ..Default::default()
        })
        .unwrap_or_default();
    // `[project]` lives at the top level of the pyproject (not under
    // `[tool]`), so it's attached in a second step.
    let external_config = ExternalConfig {
        project: pyproject
            .as_ref()
            .and_then(|pyproject| pyproject.project.as_ref()),
        ..external_config
    };

    // Create Ruff's pyproject.toml section.
    let pyproject = convert(&config, &external_config, args.plugin);
    #[allow(clippy::print_stdout)]
    {
        println!("{}", toml::to_string_pretty(&pyproject)?);
    }

    Ok(())
}

View File

@@ -0,0 +1,391 @@
use std::str::FromStr;
use anyhow::{bail, Result};
use once_cell::sync::Lazy;
use regex::Regex;
use rustc_hash::FxHashMap;
use ruff_linter::settings::types::PatternPrefixPair;
use ruff_linter::{warn_user, RuleSelector};
// Splits list-valued settings on commas and/or whitespace.
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
/// Parse a comma-separated list of `RuleSelector` values (e.g.,
/// "F401,E501"). Unrecognized codes are warned about and skipped.
pub(crate) fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
    COMMA_SEPARATED_LIST_RE
        .split(value)
        .map(str::trim)
        .filter(|code| !code.is_empty())
        .filter_map(|code| match RuleSelector::from_str(code) {
            Ok(selector) => Some(selector),
            Err(_) => {
                warn_user!("Unsupported prefix code: {code}");
                None
            }
        })
        .collect()
}
/// Parse a comma-separated list of strings (e.g., "__init__.py,__main__.py"),
/// trimming each entry and dropping empty ones.
pub(crate) fn parse_strings(value: &str) -> Vec<String> {
    let mut parts: Vec<String> = Vec::new();
    for part in COMMA_SEPARATED_LIST_RE.split(value) {
        let trimmed = part.trim();
        if !trimmed.is_empty() {
            parts.push(trimmed.to_string());
        }
    }
    parts
}
/// Parse an INI-style boolean.
///
/// Accepts the values Python's `configparser.getboolean` (and therefore
/// Flake8) recognizes — `1`/`yes`/`true`/`on` and `0`/`no`/`false`/`off` —
/// case-insensitively. Previously only lowercase `true`/`false` were
/// accepted, rejecting otherwise-valid Flake8 settings like `True`.
pub(crate) fn parse_bool(value: &str) -> Result<bool> {
    match value.trim().to_lowercase().as_str() {
        "1" | "yes" | "true" | "on" => Ok(true),
        "0" | "no" | "false" | "off" => Ok(false),
        _ => bail!("Unexpected boolean value: {value}"),
    }
}
/// A single token produced by `tokenize_files_to_codes_mapping`.
#[derive(Debug)]
struct Token {
    /// The kind of token.
    token_name: TokenType,
    /// The (trimmed) source text of the token.
    src: String,
}
/// The kinds of tokens recognized in a Flake8 `files-to-codes` mapping.
#[derive(Debug, Copy, Clone)]
enum TokenType {
    /// A rule code or prefix (uppercase letters, optional digits; e.g. `E501`).
    Code,
    /// A filename or glob pattern.
    File,
    /// The `:` separating filenames from codes.
    Colon,
    /// A `,` separator.
    Comma,
    /// A run of whitespace.
    Ws,
    /// End of input (synthesized by the tokenizer).
    Eof,
}
/// Parser state for one group of a `files-to-codes` mapping: the filenames
/// collected to the left of the current `:`, and the codes to its right.
struct State {
    /// Whether a separator (comma/whitespace) was just seen.
    seen_sep: bool,
    /// Whether the `:` dividing filenames from codes has been seen.
    seen_colon: bool,
    filenames: Vec<String>,
    codes: Vec<String>,
}
impl State {
    /// Create a fresh state, positioned as if a separator was just seen.
    const fn new() -> Self {
        Self {
            seen_sep: true,
            seen_colon: false,
            filenames: vec![],
            codes: vec![],
        }
    }

    /// Generate the list of `StrRuleCodePair` pairs for the current
    /// state: the cross product of every parseable code with every filename.
    /// Unrecognized codes are warned about and skipped.
    fn parse(&self) -> Vec<PatternPrefixPair> {
        let mut codes: Vec<PatternPrefixPair> = vec![];
        for code in &self.codes {
            if let Ok(code) = RuleSelector::from_str(code) {
                for filename in &self.filenames {
                    codes.push(PatternPrefixPair {
                        pattern: filename.clone(),
                        prefix: code.clone(),
                    });
                }
            } else {
                warn_user!("Unsupported prefix code: {code}");
            }
        }
        codes
    }
}
/// Tokenize the raw 'files-to-codes' mapping.
///
/// Scans left to right, at each position taking the first pattern (in order)
/// that matches at the current offset; a synthetic `Eof` token is appended.
fn tokenize_files_to_codes_mapping(value: &str) -> Vec<Token> {
    let mut tokens = vec![];
    let mut i = 0;
    while i < value.len() {
        // Pattern order matters: `Code` must precede `File`, since any code
        // would also match the more general `File` pattern.
        for (token_re, token_name) in [
            (
                Regex::new(r"([A-Z]+[0-9]*)(?:$|\s|,)").unwrap(),
                TokenType::Code,
            ),
            (Regex::new(r"([^\s:,]+)").unwrap(), TokenType::File),
            (Regex::new(r"(\s*:\s*)").unwrap(), TokenType::Colon),
            (Regex::new(r"(\s*,\s*)").unwrap(), TokenType::Comma),
            (Regex::new(r"(\s+)").unwrap(), TokenType::Ws),
        ] {
            if let Some(cap) = token_re.captures(&value[i..]) {
                let mat = cap.get(1).unwrap();
                // Only accept a match anchored at the current position.
                if mat.start() == 0 {
                    tokens.push(Token {
                        token_name,
                        src: mat.as_str().trim().to_string(),
                    });
                    i += mat.end();
                    break;
                }
            }
        }
    }
    tokens.push(Token {
        token_name: TokenType::Eof,
        src: String::new(),
    });
    tokens
}
/// Parse a 'files-to-codes' mapping, mimicking Flake8's internal logic.
/// See: <https://github.com/PyCQA/flake8/blob/7dfe99616fc2f07c0017df2ba5fa884158f3ea8a/src/flake8/utils.py#L45>
///
/// A state machine over the token stream: filenames accumulate until a `:`,
/// then codes accumulate; a new filename (or end of input) flushes the
/// current group into the output via `State::parse`.
pub(crate) fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
    if value.trim().is_empty() {
        return Ok(vec![]);
    }
    let mut codes: Vec<PatternPrefixPair> = vec![];
    let mut state = State::new();
    for token in tokenize_files_to_codes_mapping(value) {
        if matches!(token.token_name, TokenType::Comma | TokenType::Ws) {
            state.seen_sep = true;
        } else if !state.seen_colon {
            // Before the colon: collecting filenames.
            if matches!(token.token_name, TokenType::Colon) {
                state.seen_colon = true;
                state.seen_sep = true;
            } else if state.seen_sep && matches!(token.token_name, TokenType::File) {
                state.filenames.push(token.src);
                state.seen_sep = false;
            } else {
                bail!("Unexpected token: {:?}", token.token_name);
            }
        } else {
            // After the colon: collecting codes. A `File` token (or EOF)
            // ends the current group.
            if matches!(token.token_name, TokenType::Eof) {
                codes.extend(state.parse());
                state = State::new();
            } else if state.seen_sep && matches!(token.token_name, TokenType::Code) {
                state.codes.push(token.src);
                state.seen_sep = false;
            } else if state.seen_sep && matches!(token.token_name, TokenType::File) {
                codes.extend(state.parse());
                state = State::new();
                state.filenames.push(token.src);
                state.seen_sep = false;
            } else {
                bail!("Unexpected token: {:?}", token.token_name);
            }
        }
    }
    Ok(codes)
}
/// Group a list of `PatternPrefixPair` structs into an `FxHashMap` keyed by
/// pattern, with each pattern mapped to all of its rule selectors.
pub(crate) fn collect_per_file_ignores(
    pairs: Vec<PatternPrefixPair>,
) -> FxHashMap<String, Vec<RuleSelector>> {
    pairs
        .into_iter()
        .fold(FxHashMap::default(), |mut per_file_ignores, pair| {
            per_file_ignores
                .entry(pair.pattern)
                .or_default()
                .push(pair.prefix);
            per_file_ignores
        })
}
#[cfg(test)]
mod tests {
    use anyhow::Result;

    use ruff_linter::codes;
    use ruff_linter::registry::Linter;
    use ruff_linter::settings::types::PatternPrefixPair;
    use ruff_linter::RuleSelector;

    use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};

    // `parse_prefix_codes` must tolerate empty and whitespace-only input,
    // trailing commas, and both `,` and `, `-separated code lists.
    #[test]
    fn it_parses_prefix_codes() {
        let actual = parse_prefix_codes("");
        let expected: Vec<RuleSelector> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes(" ");
        let expected: Vec<RuleSelector> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401");
        let expected = vec![codes::Pyflakes::_401.into()];
        assert_eq!(actual, expected);

        // A trailing comma yields no extra (empty) selector.
        let actual = parse_prefix_codes("F401,");
        let expected = vec![codes::Pyflakes::_401.into()];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401,E501");
        let expected = vec![
            codes::Pyflakes::_401.into(),
            codes::Pycodestyle::E501.into(),
        ];
        assert_eq!(actual, expected);

        // Whitespace after the comma is ignored.
        let actual = parse_prefix_codes("F401, E501");
        let expected = vec![
            codes::Pyflakes::_401.into(),
            codes::Pycodestyle::E501.into(),
        ];
        assert_eq!(actual, expected);
    }

    // `parse_strings` splits comma-separated filename lists, ignoring
    // surrounding whitespace and trailing commas.
    #[test]
    fn it_parses_strings() {
        let actual = parse_strings("");
        let expected: Vec<String> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_strings(" ");
        let expected: Vec<String> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py");
        let expected = vec!["__init__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py,");
        let expected = vec!["__init__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py,__main__.py");
        let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py, __main__.py");
        let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
        assert_eq!(actual, expected);
    }

    // `parse_files_to_codes_mapping` is exercised with `per-file-ignores`
    // values taken from real-world flake8 configurations.
    #[test]
    fn it_parse_files_to_codes_mapping() -> Result<()> {
        let actual = parse_files_to_codes_mapping("")?;
        let expected: Vec<PatternPrefixPair> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_files_to_codes_mapping(" ")?;
        let expected: Vec<PatternPrefixPair> = vec![];
        assert_eq!(actual, expected);

        // Ex) locust
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
locust/test/*: F841
examples/*: F841
*.pyi: E302,E704"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "locust/test/*".to_string(),
                prefix: codes::Pyflakes::_841.into(),
            },
            PatternPrefixPair {
                pattern: "examples/*".to_string(),
                prefix: codes::Pyflakes::_841.into(),
            },
        ];
        assert_eq!(actual, expected);

        // Ex) celery — a single code ("D") applied to several patterns
        // expands to one pair per pattern.
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
t/*,setup.py,examples/*,docs/*,extra/*:
D,"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "t/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "setup.py".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "examples/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "docs/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "extra/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
        ];
        assert_eq!(actual, expected);

        // Ex) scrapy — multiple codes per pattern expand to one pair per
        // (pattern, code) combination.
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
scrapy/__init__.py:E402
scrapy/core/downloader/handlers/http.py:F401
scrapy/http/__init__.py:F401
scrapy/linkextractors/__init__.py:E402,F401
scrapy/selector/__init__.py:F401
scrapy/spiders/__init__.py:E402,F401
scrapy/utils/url.py:F403,F405
tests/test_loader.py:E741"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "scrapy/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/http/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/linkextractors/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/linkextractors/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/selector/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/spiders/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/spiders/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/utils/url.py".to_string(),
                prefix: codes::Pyflakes::_403.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/utils/url.py".to_string(),
                prefix: codes::Pyflakes::_405.into(),
            },
            PatternPrefixPair {
                pattern: "tests/test_loader.py".to_string(),
                prefix: codes::Pycodestyle::E741.into(),
            },
        ];
        assert_eq!(actual, expected);

        Ok(())
    }
}

View File

@@ -0,0 +1,10 @@
//! Extract PEP 621 configuration settings from a pyproject.toml.
use pep440_rs::VersionSpecifiers;
use serde::{Deserialize, Serialize};
/// The `[project]` table of a `pyproject.toml`, reduced to the PEP 621
/// field consumed here.
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Project {
    // Accept both the standard kebab-case spelling and a snake_case variant.
    #[serde(alias = "requires-python", alias = "requires_python")]
    pub(crate) requires_python: Option<VersionSpecifiers>,
}

View File

@@ -0,0 +1,368 @@
use std::collections::{BTreeSet, HashMap, HashSet};
use std::fmt;
use std::str::FromStr;
use anyhow::anyhow;
use ruff_linter::registry::Linter;
use ruff_linter::rule_selector::PreviewOptions;
use ruff_linter::RuleSelector;
/// A flake8 plugin for which a Ruff equivalent exists (see the
/// `From<&Plugin> for Linter` impl below).
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Plugin {
    Flake82020,
    Flake8Annotations,
    Flake8Bandit,
    Flake8BlindExcept,
    Flake8BooleanTrap,
    Flake8Bugbear,
    Flake8Builtins,
    Flake8Commas,
    Flake8Comprehensions,
    Flake8Datetimez,
    Flake8Debugger,
    Flake8Docstrings,
    Flake8Eradicate,
    Flake8ErrMsg,
    Flake8Executable,
    Flake8ImplicitStrConcat,
    Flake8ImportConventions,
    Flake8NoPep420,
    Flake8Pie,
    Flake8Print,
    Flake8PytestStyle,
    Flake8Quotes,
    Flake8Return,
    Flake8Simplify,
    Flake8TidyImports,
    Flake8TypeChecking,
    Flake8UnusedArguments,
    Flake8UsePathlib,
    McCabe,
    PEP8Naming,
    PandasVet,
    Pyupgrade,
    Tryceratops,
}
impl FromStr for Plugin {
type Err = anyhow::Error;
fn from_str(string: &str) -> Result<Self, Self::Err> {
match string {
"flake8-2020" => Ok(Plugin::Flake82020),
"flake8-annotations" => Ok(Plugin::Flake8Annotations),
"flake8-bandit" => Ok(Plugin::Flake8Bandit),
"flake8-blind-except" => Ok(Plugin::Flake8BlindExcept),
"flake8-boolean-trap" => Ok(Plugin::Flake8BooleanTrap),
"flake8-bugbear" => Ok(Plugin::Flake8Bugbear),
"flake8-builtins" => Ok(Plugin::Flake8Builtins),
"flake8-commas" => Ok(Plugin::Flake8Commas),
"flake8-comprehensions" => Ok(Plugin::Flake8Comprehensions),
"flake8-datetimez" => Ok(Plugin::Flake8Datetimez),
"flake8-debugger" => Ok(Plugin::Flake8Debugger),
"flake8-docstrings" => Ok(Plugin::Flake8Docstrings),
"flake8-eradicate" => Ok(Plugin::Flake8Eradicate),
"flake8-errmsg" => Ok(Plugin::Flake8ErrMsg),
"flake8-executable" => Ok(Plugin::Flake8Executable),
"flake8-implicit-str-concat" => Ok(Plugin::Flake8ImplicitStrConcat),
"flake8-import-conventions" => Ok(Plugin::Flake8ImportConventions),
"flake8-no-pep420" => Ok(Plugin::Flake8NoPep420),
"flake8-pie" => Ok(Plugin::Flake8Pie),
"flake8-print" => Ok(Plugin::Flake8Print),
"flake8-pytest-style" => Ok(Plugin::Flake8PytestStyle),
"flake8-quotes" => Ok(Plugin::Flake8Quotes),
"flake8-return" => Ok(Plugin::Flake8Return),
"flake8-simplify" => Ok(Plugin::Flake8Simplify),
"flake8-tidy-imports" => Ok(Plugin::Flake8TidyImports),
"flake8-type-checking" => Ok(Plugin::Flake8TypeChecking),
"flake8-unused-arguments" => Ok(Plugin::Flake8UnusedArguments),
"flake8-use-pathlib" => Ok(Plugin::Flake8UsePathlib),
"mccabe" => Ok(Plugin::McCabe),
"pep8-naming" => Ok(Plugin::PEP8Naming),
"pandas-vet" => Ok(Plugin::PandasVet),
"pyupgrade" => Ok(Plugin::Pyupgrade),
"tryceratops" => Ok(Plugin::Tryceratops),
_ => Err(anyhow!("Unknown plugin: {string}")),
}
}
}
impl fmt::Debug for Plugin {
    /// Render the plugin as its PyPI package name (the inverse of `FromStr`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            Plugin::Flake82020 => "flake8-2020",
            Plugin::Flake8Annotations => "flake8-annotations",
            Plugin::Flake8Bandit => "flake8-bandit",
            Plugin::Flake8BlindExcept => "flake8-blind-except",
            Plugin::Flake8BooleanTrap => "flake8-boolean-trap",
            Plugin::Flake8Bugbear => "flake8-bugbear",
            Plugin::Flake8Builtins => "flake8-builtins",
            Plugin::Flake8Commas => "flake8-commas",
            Plugin::Flake8Comprehensions => "flake8-comprehensions",
            Plugin::Flake8Datetimez => "flake8-datetimez",
            Plugin::Flake8Debugger => "flake8-debugger",
            Plugin::Flake8Docstrings => "flake8-docstrings",
            Plugin::Flake8Eradicate => "flake8-eradicate",
            Plugin::Flake8ErrMsg => "flake8-errmsg",
            Plugin::Flake8Executable => "flake8-executable",
            Plugin::Flake8ImplicitStrConcat => "flake8-implicit-str-concat",
            Plugin::Flake8ImportConventions => "flake8-import-conventions",
            Plugin::Flake8NoPep420 => "flake8-no-pep420",
            Plugin::Flake8Pie => "flake8-pie",
            Plugin::Flake8Print => "flake8-print",
            Plugin::Flake8PytestStyle => "flake8-pytest-style",
            Plugin::Flake8Quotes => "flake8-quotes",
            Plugin::Flake8Return => "flake8-return",
            Plugin::Flake8Simplify => "flake8-simplify",
            Plugin::Flake8TidyImports => "flake8-tidy-imports",
            Plugin::Flake8TypeChecking => "flake8-type-checking",
            Plugin::Flake8UnusedArguments => "flake8-unused-arguments",
            Plugin::Flake8UsePathlib => "flake8-use-pathlib",
            Plugin::McCabe => "mccabe",
            Plugin::PEP8Naming => "pep8-naming",
            Plugin::PandasVet => "pandas-vet",
            Plugin::Pyupgrade => "pyupgrade",
            Plugin::Tryceratops => "tryceratops",
        };
        f.write_str(name)
    }
}
impl From<&Plugin> for Linter {
    /// Map a flake8 plugin to the Ruff linter that implements its rules.
    /// Most mappings are one-to-one by name; exceptions are noted inline.
    fn from(plugin: &Plugin) -> Self {
        match plugin {
            Plugin::Flake82020 => Linter::Flake82020,
            Plugin::Flake8Annotations => Linter::Flake8Annotations,
            Plugin::Flake8Bandit => Linter::Flake8Bandit,
            Plugin::Flake8BlindExcept => Linter::Flake8BlindExcept,
            Plugin::Flake8BooleanTrap => Linter::Flake8BooleanTrap,
            Plugin::Flake8Bugbear => Linter::Flake8Bugbear,
            Plugin::Flake8Builtins => Linter::Flake8Builtins,
            Plugin::Flake8Commas => Linter::Flake8Commas,
            Plugin::Flake8Comprehensions => Linter::Flake8Comprehensions,
            Plugin::Flake8Datetimez => Linter::Flake8Datetimez,
            Plugin::Flake8Debugger => Linter::Flake8Debugger,
            // flake8-docstrings wraps pydocstyle, which Ruff implements directly.
            Plugin::Flake8Docstrings => Linter::Pydocstyle,
            // flake8-eradicate wraps eradicate, which Ruff implements directly.
            Plugin::Flake8Eradicate => Linter::Eradicate,
            Plugin::Flake8ErrMsg => Linter::Flake8ErrMsg,
            Plugin::Flake8Executable => Linter::Flake8Executable,
            Plugin::Flake8ImplicitStrConcat => Linter::Flake8ImplicitStrConcat,
            Plugin::Flake8ImportConventions => Linter::Flake8ImportConventions,
            Plugin::Flake8NoPep420 => Linter::Flake8NoPep420,
            Plugin::Flake8Pie => Linter::Flake8Pie,
            Plugin::Flake8Print => Linter::Flake8Print,
            Plugin::Flake8PytestStyle => Linter::Flake8PytestStyle,
            Plugin::Flake8Quotes => Linter::Flake8Quotes,
            Plugin::Flake8Return => Linter::Flake8Return,
            Plugin::Flake8Simplify => Linter::Flake8Simplify,
            Plugin::Flake8TidyImports => Linter::Flake8TidyImports,
            Plugin::Flake8TypeChecking => Linter::Flake8TypeChecking,
            Plugin::Flake8UnusedArguments => Linter::Flake8UnusedArguments,
            Plugin::Flake8UsePathlib => Linter::Flake8UsePathlib,
            Plugin::McCabe => Linter::McCabe,
            Plugin::PEP8Naming => Linter::PEP8Naming,
            Plugin::PandasVet => Linter::PandasVet,
            Plugin::Pyupgrade => Linter::Pyupgrade,
            Plugin::Tryceratops => Linter::Tryceratops,
        }
    }
}
/// Infer the enabled plugins based on user-provided options.
///
/// For example, if the user specified a `mypy-init-return` setting, we should
/// infer that `flake8-annotations` is active.
///
/// Both the kebab-case and snake_case spellings of each option are accepted.
/// Returns the plugins in a deterministic (sorted) order.
pub(crate) fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> Vec<Plugin> {
    // `BTreeSet` both deduplicates and sorts the inferred plugins.
    let mut plugins = BTreeSet::new();
    for key in flake8.keys() {
        match key.as_str() {
            // flake8-annotations
            "suppress-none-returning" | "suppress_none_returning" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "suppress-dummy-args" | "suppress_dummy_args" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "allow-untyped-defs" | "allow_untyped_defs" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "allow-untyped-nested" | "allow_untyped_nested" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "mypy-init-return" | "mypy_init_return" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "dispatch-decorators" | "dispatch_decorators" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "overload-decorators" | "overload_decorators" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "allow-star-arg-any" | "allow_star_arg_any" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            // flake8-bugbear
            "extend-immutable-calls" | "extend_immutable_calls" => {
                plugins.insert(Plugin::Flake8Bugbear);
            }
            // flake8-builtins
            "builtins-ignorelist" | "builtins_ignorelist" => {
                plugins.insert(Plugin::Flake8Builtins);
            }
            // flake8-docstrings
            "docstring-convention" | "docstring_convention" => {
                plugins.insert(Plugin::Flake8Docstrings);
            }
            // flake8-eradicate
            "eradicate-aggressive" | "eradicate_aggressive" => {
                plugins.insert(Plugin::Flake8Eradicate);
            }
            "eradicate-whitelist" | "eradicate_whitelist" => {
                plugins.insert(Plugin::Flake8Eradicate);
            }
            "eradicate-whitelist-extend" | "eradicate_whitelist_extend" => {
                plugins.insert(Plugin::Flake8Eradicate);
            }
            // flake8-pytest-style
            // (Fixed: the snake_case variant previously carried a trailing
            // space and could never match a real option key.)
            "pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            // flake8-quotes
            "quotes" | "inline-quotes" | "inline_quotes" => {
                plugins.insert(Plugin::Flake8Quotes);
            }
            "multiline-quotes" | "multiline_quotes" => {
                plugins.insert(Plugin::Flake8Quotes);
            }
            "docstring-quotes" | "docstring_quotes" => {
                plugins.insert(Plugin::Flake8Quotes);
            }
            "avoid-escape" | "avoid_escape" => {
                plugins.insert(Plugin::Flake8Quotes);
            }
            // flake8-tidy-imports
            "ban-relative-imports" | "ban_relative_imports" => {
                plugins.insert(Plugin::Flake8TidyImports);
            }
            "banned-modules" | "banned_modules" => {
                plugins.insert(Plugin::Flake8TidyImports);
            }
            // mccabe
            "max-complexity" | "max_complexity" => {
                plugins.insert(Plugin::McCabe);
            }
            // pep8-naming
            "ignore-names" | "ignore_names" => {
                plugins.insert(Plugin::PEP8Naming);
            }
            "classmethod-decorators" | "classmethod_decorators" => {
                plugins.insert(Plugin::PEP8Naming);
            }
            "staticmethod-decorators" | "staticmethod_decorators" => {
                plugins.insert(Plugin::PEP8Naming);
            }
            // flake8-errmsg
            "max-string-length" | "max_string_length" => {
                plugins.insert(Plugin::Flake8ErrMsg);
            }
            // Unknown options don't imply any plugin.
            _ => {}
        }
    }
    Vec::from_iter(plugins)
}
/// Infer the enabled plugins based on the referenced prefixes.
///
/// For example, if the user ignores `ANN101`, we should infer that
/// `flake8-annotations` is active.
pub(crate) fn infer_plugins_from_codes(selectors: &HashSet<RuleSelector>) -> Vec<Plugin> {
    // Ignore cases in which we've knowingly changed rule prefixes.
    [
        Plugin::Flake82020,
        Plugin::Flake8Annotations,
        Plugin::Flake8Bandit,
        // Plugin::Flake8BlindExcept,
        Plugin::Flake8BooleanTrap,
        Plugin::Flake8Bugbear,
        Plugin::Flake8Builtins,
        // Plugin::Flake8Commas,
        Plugin::Flake8Comprehensions,
        Plugin::Flake8Datetimez,
        Plugin::Flake8Debugger,
        Plugin::Flake8Docstrings,
        // Plugin::Flake8Eradicate,
        Plugin::Flake8ErrMsg,
        Plugin::Flake8Executable,
        Plugin::Flake8ImplicitStrConcat,
        // Plugin::Flake8ImportConventions,
        Plugin::Flake8NoPep420,
        Plugin::Flake8Pie,
        Plugin::Flake8Print,
        Plugin::Flake8PytestStyle,
        Plugin::Flake8Quotes,
        Plugin::Flake8Return,
        Plugin::Flake8Simplify,
        // Plugin::Flake8TidyImports,
        // Plugin::Flake8TypeChecking,
        Plugin::Flake8UnusedArguments,
        // Plugin::Flake8UsePathlib,
        Plugin::McCabe,
        Plugin::PEP8Naming,
        Plugin::PandasVet,
        Plugin::Tryceratops,
    ]
    .into_iter()
    .filter(|plugin| {
        // Keep a plugin if any selector selects at least one rule belonging
        // to the corresponding linter.
        selectors.iter().any(|selector| {
            selector
                .rules(&PreviewOptions::default())
                .any(|rule| Linter::from(plugin).rules().any(|r| r == rule))
        })
    })
    .collect()
}
#[cfg(test)]
mod tests {
    use std::collections::HashMap;

    use super::{infer_plugins_from_options, Plugin};

    // Setting a plugin-specific option (in either spelling) should be enough
    // to infer that the corresponding plugin is in use.
    #[test]
    fn it_infers_plugins() {
        let actual = infer_plugins_from_options(&HashMap::from([(
            "inline-quotes".to_string(),
            Some("single".to_string()),
        )]));
        let expected = vec![Plugin::Flake8Quotes];
        assert_eq!(actual, expected);

        let actual = infer_plugins_from_options(&HashMap::from([(
            "staticmethod-decorators".to_string(),
            Some("[]".to_string()),
        )]));
        let expected = vec![Plugin::PEP8Naming];
        assert_eq!(actual, expected);
    }
}

View File

@@ -0,0 +1,26 @@
use std::path::Path;
use anyhow::Result;
use serde::{Deserialize, Serialize};
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;
/// The `[tool.*]` tables of a `pyproject.toml` that we read: `[tool.black]`
/// and `[tool.isort]`, each optional.
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct Tools {
    pub(crate) black: Option<Black>,
    pub(crate) isort: Option<Isort>,
}
/// Top-level structure of a `pyproject.toml`, limited to the tables we
/// consume (`[tool]` and `[project]`).
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct Pyproject {
    pub(crate) tool: Option<Tools>,
    pub(crate) project: Option<Project>,
}
/// Read the file at `path` and deserialize it as a [`Pyproject`].
///
/// Returns an error if the file cannot be read or is not valid TOML.
pub(crate) fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
    let contents = std::fs::read_to_string(path)?;
    Ok(toml::from_str::<Pyproject>(&contents)?)
}

View File

@@ -31,17 +31,17 @@ name = "formatter"
harness = false
[dependencies]
once_cell = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
url = { workspace = true }
ureq = { workspace = true }
criterion = { workspace = true, default-features = false }
codspeed-criterion-compat = { workspace = true, default-features = false, optional = true}
once_cell.workspace = true
serde.workspace = true
serde_json.workspace = true
url = "2.5.0"
ureq = "2.9.1"
criterion = { version = "0.5.1", default-features = false }
codspeed-criterion-compat = { version="2.3.3", default-features = false, optional = true}
[dev-dependencies]
ruff_linter = { path = "../ruff_linter" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_linter.path = "../ruff_linter"
ruff_python_ast.path = "../ruff_python_ast"
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_python_index = { path = "../ruff_python_index" }
ruff_python_parser = { path = "../ruff_python_parser" }
@@ -53,7 +53,7 @@ workspace = true
codspeed = ["codspeed-criterion-compat"]
[target.'cfg(target_os = "windows")'.dev-dependencies]
mimalloc = { workspace = true }
mimalloc = "0.1.39"
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dev-dependencies]
tikv-jemallocator = { workspace = true }
tikv-jemallocator = "0.5.0"

View File

@@ -65,7 +65,7 @@ fn benchmark_formatter(criterion: &mut Criterion) {
let comment_ranges = comment_ranges.finish();
// Parse the AST.
let module = parse_tokens(tokens, case.code(), Mode::Module)
let module = parse_tokens(tokens, case.code(), Mode::Module, "<filename>")
.expect("Input to be a valid python program");
b.iter(|| {

View File

@@ -58,7 +58,8 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
let tokens = lexer::lex(case.code(), Mode::Module).collect::<Vec<_>>();
// Parse the source.
let ast = parse_program_tokens(tokens.clone(), case.code(), false).unwrap();
let ast =
parse_program_tokens(tokens.clone(), case.code(), case.name(), false).unwrap();
b.iter(|| {
let path = case.path();

View File

@@ -60,7 +60,7 @@ fn benchmark_parser(criterion: &mut Criterion<WallTime>) {
&case,
|b, case| {
b.iter(|| {
let parsed = parse_suite(case.code()).unwrap();
let parsed = parse_suite(case.code(), case.name()).unwrap();
let mut visitor = CountVisitor { count: 0 };
visitor.visit_body(&parsed);

View File

@@ -16,7 +16,7 @@ glob = { workspace = true }
globset = { workspace = true }
regex = { workspace = true }
filetime = { workspace = true }
seahash = { workspace = true }
seahash = "4.1.0"
[dev-dependencies]
ruff_macros = { path = "../ruff_macros" }

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_cli"
version = "0.1.11"
version = "0.1.9"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -15,60 +15,67 @@ readme = "../../README.md"
name = "ruff"
[dependencies]
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
ruff_cache = { path = "../ruff_cache" }
ruff_diagnostics = { path = "../ruff_diagnostics" }
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
ruff_macros = { path = "../ruff_macros" }
ruff_formatter = { path = "../ruff_formatter" }
ruff_notebook = { path = "../ruff_notebook" }
ruff_macros = { path = "../ruff_macros" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_source_file = { path = "../ruff_source_file" }
ruff_text_size = { path = "../ruff_text_size" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_workspace = { path = "../ruff_workspace" }
ruff_text_size = { path = "../ruff_text_size" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { workspace = true }
argfile = { workspace = true }
bincode = { workspace = true }
argfile = { version = "0.1.6" }
bincode = { version = "1.3.3" }
bitflags = { workspace = true }
cachedir = { workspace = true }
cachedir = { version = "0.3.1" }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "env"] }
clap_complete_command = { workspace = true }
clearscreen = { workspace = true }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "2.0.0" }
colored = { workspace = true }
filetime = { workspace = true }
glob = { workspace = true }
ignore = { workspace = true }
is-macro = { workspace = true }
itertools = { workspace = true }
log = { workspace = true }
notify = { workspace = true }
notify = { version = "6.1.1" }
path-absolutize = { workspace = true, features = ["once_cell_cache"] }
rayon = { workspace = true }
rayon = { version = "1.8.0" }
regex = { workspace = true }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
shellexpand = { workspace = true }
similar = { workspace = true }
strum = { workspace = true, features = [] }
thiserror = { workspace = true }
tracing = { workspace = true, features = ["log"] }
walkdir = { workspace = true }
wild = { workspace = true }
walkdir = { version = "2.3.2" }
wild = { version = "2" }
[dev-dependencies]
assert_cmd = { workspace = true }
assert_cmd = { version = "2.0.8" }
# Avoid writing colored snapshots when running tests from the terminal
colored = { workspace = true, features = ["no-color"]}
insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { workspace = true }
tempfile = { workspace = true }
insta-cmd = { version = "0.4.0" }
tempfile = "3.8.1"
test-case = { workspace = true }
ureq = { version = "2.9.1", features = [] }
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true }
mimalloc = "0.1.39"
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
tikv-jemallocator = { workspace = true }
tikv-jemallocator = "0.5.0"
[lints]
workspace = true

View File

@@ -17,7 +17,7 @@ use tracing::debug;
use ruff_diagnostics::SourceMap;
use ruff_linter::fs;
use ruff_linter::logging::{DisplayParseError, LogLevel};
use ruff_linter::logging::LogLevel;
use ruff_linter::registry::Rule;
use ruff_linter::rules::flake8_quotes::settings::Quote;
use ruff_linter::source_kind::{SourceError, SourceKind};
@@ -244,7 +244,7 @@ pub(crate) fn format_path(
// Extract the sources from the file.
let unformatted = match SourceKind::from_path(path, source_type) {
Ok(Some(source_kind)) => source_kind,
// Non-Python Jupyter notebook.
// Non Python Jupyter notebook
Ok(None) => return Ok(FormatResult::Skipped),
Err(err) => {
return Err(FormatCommandError::Read(Some(path.to_path_buf()), err));
@@ -321,22 +321,12 @@ pub(crate) fn format_source(
path: Option<&Path>,
settings: &FormatterSettings,
) -> Result<FormattedSource, FormatCommandError> {
match &source_kind {
match source_kind {
SourceKind::Python(unformatted) => {
let options = settings.to_format_options(source_type, unformatted);
let formatted = format_module_source(unformatted, options).map_err(|err| {
if let FormatModuleError::ParseError(err) = err {
DisplayParseError::from_source_kind(
err,
path.map(Path::to_path_buf),
source_kind,
)
.into()
} else {
FormatCommandError::Format(path.map(Path::to_path_buf), err)
}
})?;
let formatted = format_module_source(unformatted, options)
.map_err(|err| FormatCommandError::Format(path.map(Path::to_path_buf), err))?;
let formatted = formatted.into_code();
if formatted.len() == unformatted.len() && formatted == *unformatted {
@@ -362,19 +352,8 @@ pub(crate) fn format_source(
let unformatted = &notebook.source_code()[range];
// Format the cell.
let formatted =
format_module_source(unformatted, options.clone()).map_err(|err| {
if let FormatModuleError::ParseError(err) = err {
DisplayParseError::from_source_kind(
err,
path.map(Path::to_path_buf),
source_kind,
)
.into()
} else {
FormatCommandError::Format(path.map(Path::to_path_buf), err)
}
})?;
let formatted = format_module_source(unformatted, options.clone())
.map_err(|err| FormatCommandError::Format(path.map(Path::to_path_buf), err))?;
// If the cell is unchanged, skip it.
let formatted = formatted.as_code();
@@ -429,13 +408,11 @@ pub(crate) fn format_source(
pub(crate) enum FormatResult {
/// The file was formatted.
Formatted,
/// The file was formatted, [`SourceKind`] contains the formatted code
Diff {
unformatted: SourceKind,
formatted: SourceKind,
},
/// The file was unchanged, as the formatted contents matched the existing contents.
Unchanged,
@@ -584,7 +561,6 @@ impl<'a> FormatResults<'a> {
#[derive(Error, Debug)]
pub(crate) enum FormatCommandError {
Ignore(#[from] ignore::Error),
Parse(#[from] DisplayParseError),
Panic(Option<PathBuf>, PanicError),
Read(Option<PathBuf>, SourceError),
Format(Option<PathBuf>, FormatModuleError),
@@ -602,7 +578,6 @@ impl FormatCommandError {
None
}
}
Self::Parse(err) => err.path(),
Self::Panic(path, _)
| Self::Read(path, _)
| Self::Format(path, _)
@@ -636,9 +611,6 @@ impl Display for FormatCommandError {
)
}
}
Self::Parse(err) => {
write!(f, "{err}")
}
Self::Read(path, err) => {
if let Some(path) = path {
write!(

View File

@@ -10,6 +10,7 @@ use colored::Colorize;
use log::{debug, error, warn};
use rustc_hash::FxHashMap;
use crate::cache::{Cache, FileCacheKey, LintCacheData};
use ruff_diagnostics::Diagnostic;
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult, ParseSource};
use ruff_linter::logging::DisplayParseError;
@@ -23,12 +24,10 @@ use ruff_linter::{fs, IOError, SyntaxError};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::imports::ImportMap;
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
use ruff_source_file::{LineIndex, SourceCode, SourceFileBuilder};
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::Settings;
use crate::cache::{Cache, FileCacheKey, LintCacheData};
#[derive(Debug, Default, PartialEq)]
pub(crate) struct Diagnostics {
pub(crate) messages: Vec<Message>,
@@ -357,10 +356,17 @@ pub(crate) fn lint_path(
}
}
if let Some(error) = parse_error {
if let Some(err) = parse_error {
error!(
"{}",
DisplayParseError::from_source_kind(error, Some(path.to_path_buf()), &source_kind,)
DisplayParseError::new(
err,
SourceCode::new(
source_kind.source_code(),
&LineIndex::from_source_text(source_kind.source_code())
),
&source_kind,
)
);
}

View File

@@ -328,32 +328,6 @@ OTHER = "OTHER"
Ok(())
}
#[test]
fn syntax_error() -> Result<()> {
let tempdir = TempDir::new()?;
fs::write(
tempdir.path().join("main.py"),
r#"
from module import =
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.current_dir(tempdir.path())
.args(["format", "--no-cache", "--isolated", "--check"])
.arg("main.py"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse main.py:2:20: Unexpected token '='
"###);
Ok(())
}
#[test]
fn messages() -> Result<()> {
let tempdir = TempDir::new()?;
@@ -672,8 +646,7 @@ format = "json"
----- stderr -----
ruff failed
Cause: Failed to parse [RUFF-TOML-PATH]
Cause: TOML parse error at line 2, column 10
Cause: Failed to parse `[RUFF-TOML-PATH]`: TOML parse error at line 2, column 10
|
2 | format = "json"
| ^^^^^^

View File

@@ -1047,10 +1047,7 @@ fn unreadable_pyproject_toml() -> Result<()> {
err.chain()
.map(std::string::ToString::to_string)
.collect::<Vec<_>>(),
vec![
format!("Failed to read {}/pyproject.toml", tempdir.path().display()),
"Permission denied (os error 13)".to_string()
],
vec!["Permission denied (os error 13)".to_string()],
);
Ok(())
}

View File

@@ -19,6 +19,7 @@ ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_codegen = { path = "../ruff_python_codegen" }
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_notebook = { path = "../ruff_notebook" }
ruff_python_literal = { path = "../ruff_python_literal" }
ruff_python_parser = { path = "../ruff_python_parser" }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
@@ -27,27 +28,28 @@ ruff_workspace = { path = "../ruff_workspace", features = ["schemars"]}
anyhow = { workspace = true }
clap = { workspace = true }
ignore = { workspace = true }
imara-diff = { workspace = true }
indicatif = { workspace = true }
indicatif = "0.17.7"
itertools = { workspace = true }
libcst = { workspace = true }
once_cell = { workspace = true }
pretty_assertions = { workspace = true }
rayon = { workspace = true }
pretty_assertions = { version = "1.3.0" }
rayon = "1.8.0"
regex = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
similar = { workspace = true }
strum = { workspace = true }
tempfile = { workspace = true }
strum_macros = { workspace = true }
tempfile = "3.8.1"
toml = { workspace = true, features = ["parse"] }
tracing = { workspace = true }
tracing-indicatif = { workspace = true }
tracing-subscriber = { workspace = true, features = ["env-filter"] }
imara-diff = "0.1.5"
[dev-dependencies]
indoc = { workspace = true }
indoc = "2.0.4"
[features]
# Turn off rayon for profiling

View File

@@ -35,7 +35,6 @@ use ruff_linter::settings::types::{FilePattern, FilePatternSet};
use ruff_python_formatter::{
format_module_source, FormatModuleError, MagicTrailingComma, PreviewMode, PyFormatOptions,
};
use ruff_python_parser::ParseError;
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile, Resolver};
/// Find files that ruff would check so we can format them. Adapted from `ruff_cli`.
@@ -743,11 +742,11 @@ enum CheckFileError {
reformatted: String,
},
/// The input file was already invalid (not a bug)
SyntaxErrorInInput(ParseError),
SyntaxErrorInInput(FormatModuleError),
/// The formatter introduced a syntax error
SyntaxErrorInOutput {
formatted: String,
error: ParseError,
error: FormatModuleError,
},
/// The formatter failed (bug)
FormatError(FormatError),
@@ -797,7 +796,7 @@ fn format_dev_file(
let start = Instant::now();
let printed = match format_module_source(&content, options.clone()) {
Ok(printed) => printed,
Err(FormatModuleError::ParseError(err)) => {
Err(err @ (FormatModuleError::LexError(_) | FormatModuleError::ParseError(_))) => {
return Err(CheckFileError::SyntaxErrorInInput(err));
}
Err(FormatModuleError::FormatError(err)) => {
@@ -824,7 +823,7 @@ fn format_dev_file(
if stability_check {
let reformatted = match format_module_source(formatted, options) {
Ok(reformatted) => reformatted,
Err(FormatModuleError::ParseError(err)) => {
Err(err @ (FormatModuleError::LexError(_) | FormatModuleError::ParseError(_))) => {
return Err(CheckFileError::SyntaxErrorInOutput {
formatted: formatted.to_string(),
error: err,

View File

@@ -3,7 +3,6 @@
//! Used for <https://docs.astral.sh/ruff/rules/>.
use itertools::Itertools;
use std::borrow::Cow;
use strum::IntoEnumIterator;
use ruff_diagnostics::FixAvailability;
@@ -38,16 +37,6 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
let rule_name = rule.as_ref();
// If the message ends in a bracketed expression (like: "Use {replacement}"), escape the
// brackets. Otherwise, it'll be interpreted as an HTML attribute via the `attr_list`
// plugin. (Above, we'd convert to "Use {replacement\}".)
let message = rule.message_formats()[0];
let message = if let Some(prefix) = message.strip_suffix('}') {
Cow::Owned(format!("{prefix}\\}}"))
} else {
Cow::Borrowed(message)
};
#[allow(clippy::or_fun_call)]
table_out.push_str(&format!(
"| {0}{1} {{ #{0}{1} }} | {2} | {3} | {4} |",
@@ -57,7 +46,7 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
.is_some()
.then_some(format_args!("[{rule_name}](rules/{rule_name}.md)"))
.unwrap_or(format_args!("{rule_name}")),
message,
rule.message_formats()[0],
status_token,
));
table_out.push('\n');

View File

@@ -24,7 +24,11 @@ pub(crate) fn main(args: &Args) -> Result<()> {
args.file.display()
)
})?;
let python_ast = parse(source_kind.source_code(), source_type.as_mode())?;
let python_ast = parse(
source_kind.source_code(),
source_type.as_mode(),
&args.file.to_string_lossy(),
)?;
println!("{python_ast:#?}");
Ok(())
}

View File

@@ -22,7 +22,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
println!("{}", ruff_notebook::round_trip(path)?);
} else {
let contents = fs::read_to_string(&args.file)?;
println!("{}", round_trip(&contents)?);
println!("{}", round_trip(&contents, &args.file.to_string_lossy())?);
}
Ok(())
}

View File

@@ -15,7 +15,7 @@ ruff_cache = { path = "../ruff_cache" }
ruff_macros = { path = "../ruff_macros" }
ruff_text_size = { path = "../ruff_text_size" }
drop_bomb = { workspace = true }
drop_bomb = { version = "0.1.5" }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }

View File

@@ -16,7 +16,7 @@ license = { workspace = true }
ruff_macros = { path = "../ruff_macros" }
[dev-dependencies]
static_assertions = { workspace = true }
static_assertions = "1.1.0"
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.1.11"
version = "0.1.9"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -29,38 +29,37 @@ ruff_python_parser = { path = "../ruff_python_parser" }
ruff_source_file = { path = "../ruff_source_file", features = ["serde"] }
ruff_text_size = { path = "../ruff_text_size" }
aho-corasick = { workspace = true }
annotate-snippets = { workspace = true, features = ["color"] }
aho-corasick = { version = "1.1.2" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { workspace = true }
bitflags = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "string"], optional = true }
colored = { workspace = true }
fern = { workspace = true }
fern = { version = "0.6.1" }
glob = { workspace = true }
globset = { workspace = true }
imperative = { workspace = true }
imperative = { version = "1.0.4" }
is-macro = { workspace = true }
is-wsl = { workspace = true }
itertools = { workspace = true }
libcst = { workspace = true }
log = { workspace = true }
memchr = { workspace = true }
natord = { workspace = true }
natord = { version = "1.0.9" }
once_cell = { workspace = true }
path-absolutize = { workspace = true, features = [
"once_cell_cache",
"use_unix_paths_on_wasm",
] }
pathdiff = { workspace = true }
pep440_rs = { workspace = true, features = ["serde"] }
pyproject-toml = { workspace = true }
quick-junit = { workspace = true }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.4.0", features = ["serde"] }
pyproject-toml = { version = "0.8.1" }
quick-junit = { version = "0.3.5" }
regex = { workspace = true }
result-like = { workspace = true }
result-like = { version = "0.4.6" }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }
semver = { workspace = true }
semver = { version = "1.0.20" }
serde = { workspace = true }
serde_json = { workspace = true }
similar = { workspace = true }
@@ -69,18 +68,19 @@ strum = { workspace = true }
strum_macros = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
typed-arena = { workspace = true }
typed-arena = { version = "2.0.2" }
unicode-width = { workspace = true }
unicode_names2 = { workspace = true }
url = { workspace = true }
url = { version = "2.2.2" }
wsl = { version = "0.1.0" }
[dev-dependencies]
insta = { workspace = true }
pretty_assertions = { workspace = true }
pretty_assertions = "1.3.0"
test-case = { workspace = true }
# Disable colored output in tests
colored = { workspace = true, features = ["no-color"] }
tempfile = { workspace = true }
tempfile = "3.8.1"
[features]
default = []

View File

@@ -229,38 +229,3 @@ def overloaded(i: "str") -> "str":
def overloaded(i):
return i
def func(x: int):
if not x:
return 1
raise ValueError
def func(x: int):
if not x:
return 1
else:
return 2
raise ValueError
def func():
try:
raise ValueError
except:
return 2
def func():
try:
return 1
except:
pass
def func(x: int):
for _ in range(3):
if x > 0:
return 1
raise ValueError

View File

@@ -2,7 +2,7 @@ import json
import yaml
from yaml import CSafeLoader
from yaml import SafeLoader
from yaml.loader import SafeLoader as NewSafeLoader
from yaml import SafeLoader as NewSafeLoader
def test_yaml_load():
@@ -29,8 +29,3 @@ yaml.load("{}", yaml.SafeLoader)
yaml.load("{}", CSafeLoader)
yaml.load("{}", yaml.CSafeLoader)
yaml.load("{}", NewSafeLoader)
yaml.load("{}", Loader=SafeLoader)
yaml.load("{}", Loader=yaml.SafeLoader)
yaml.load("{}", Loader=CSafeLoader)
yaml.load("{}", Loader=yaml.CSafeLoader)
yaml.load("{}", Loader=NewSafeLoader)

View File

@@ -165,19 +165,3 @@ class AbstractTestModel5(models.Model):
def my_beautiful_method(self):
return 2
# Subclass with its own __str__
class SubclassTestModel1(TestModel1):
def __str__(self):
return self.new_field
# Subclass with inherited __str__
class SubclassTestModel2(TestModel4):
pass
# Subclass without __str__
class SubclassTestModel3(TestModel1):
pass

View File

@@ -1,82 +0,0 @@
import collections.abc
import typing
from collections.abc import AsyncGenerator, Generator
from typing import Any
class IteratorReturningSimpleGenerator1:
def __iter__(self) -> Generator: # PYI058 (use `Iterator`)
return (x for x in range(42))
class IteratorReturningSimpleGenerator2:
def __iter__(self, /) -> collections.abc.Generator[str, Any, None]: # PYI058 (use `Iterator`)
"""Fully documented, because I'm a runtime function!"""
yield from "abcdefg"
return None
class IteratorReturningSimpleGenerator3:
def __iter__(self, /) -> collections.abc.Generator[str, None, typing.Any]: # PYI058 (use `Iterator`)
yield "a"
yield "b"
yield "c"
return
class AsyncIteratorReturningSimpleAsyncGenerator1:
def __aiter__(self) -> typing.AsyncGenerator: pass # PYI058 (Use `AsyncIterator`)
class AsyncIteratorReturningSimpleAsyncGenerator2:
def __aiter__(self, /) -> collections.abc.AsyncGenerator[str, Any]: ... # PYI058 (Use `AsyncIterator`)
class AsyncIteratorReturningSimpleAsyncGenerator3:
def __aiter__(self, /) -> collections.abc.AsyncGenerator[str, None]: pass # PYI058 (Use `AsyncIterator`)
class CorrectIterator:
def __iter__(self) -> Iterator[str]: ... # OK
class CorrectAsyncIterator:
def __aiter__(self) -> collections.abc.AsyncIterator[int]: ... # OK
class Fine:
def __iter__(self) -> typing.Self: ... # OK
class StrangeButWeWontComplainHere:
def __aiter__(self) -> list[bytes]: ... # OK
def __iter__(self) -> Generator: ... # OK (not in class scope)
def __aiter__(self) -> AsyncGenerator: ... # OK (not in class scope)
class IteratorReturningComplexGenerator:
def __iter__(self) -> Generator[str, int, bytes]: ... # OK
class AsyncIteratorReturningComplexAsyncGenerator:
def __aiter__(self) -> AsyncGenerator[str, int]: ... # OK
class ClassWithInvalidAsyncAiterMethod:
async def __aiter__(self) -> AsyncGenerator: ... # OK
class IteratorWithUnusualParameters1:
def __iter__(self, foo) -> Generator: ... # OK
class IteratorWithUnusualParameters2:
def __iter__(self, *, bar) -> Generator: ... # OK
class IteratorWithUnusualParameters3:
def __iter__(self, *args) -> Generator: ... # OK
class IteratorWithUnusualParameters4:
def __iter__(self, **kwargs) -> Generator: ... # OK
class IteratorWithIterMethodThatReturnsThings:
def __iter__(self) -> Generator: # OK
yield
return 42
class IteratorWithIterMethodThatReceivesThingsFromSend:
def __iter__(self) -> Generator: # OK
x = yield 42
class IteratorWithNonTrivialIterBody:
def __iter__(self) -> Generator: # OK
foo, bar, baz = (1, 2, 3)
yield foo
yield bar
yield baz

View File

@@ -1,58 +0,0 @@
import collections.abc
import typing
from collections.abc import AsyncGenerator, Generator
from typing import Any
class IteratorReturningSimpleGenerator1:
def __iter__(self) -> Generator: ... # PYI058 (use `Iterator`)
class IteratorReturningSimpleGenerator2:
def __iter__(self, /) -> collections.abc.Generator[str, Any, None]: ... # PYI058 (use `Iterator`)
class IteratorReturningSimpleGenerator3:
def __iter__(self, /) -> collections.abc.Generator[str, None, typing.Any]: ... # PYI058 (use `Iterator`)
class AsyncIteratorReturningSimpleAsyncGenerator1:
def __aiter__(self) -> typing.AsyncGenerator: ... # PYI058 (Use `AsyncIterator`)
class AsyncIteratorReturningSimpleAsyncGenerator2:
def __aiter__(self, /) -> collections.abc.AsyncGenerator[str, Any]: ... # PYI058 (Use `AsyncIterator`)
class AsyncIteratorReturningSimpleAsyncGenerator3:
def __aiter__(self, /) -> collections.abc.AsyncGenerator[str, None]: ... # PYI058 (Use `AsyncIterator`)
class CorrectIterator:
def __iter__(self) -> Iterator[str]: ... # OK
class CorrectAsyncIterator:
def __aiter__(self) -> collections.abc.AsyncIterator[int]: ... # OK
class Fine:
def __iter__(self) -> typing.Self: ... # OK
class StrangeButWeWontComplainHere:
def __aiter__(self) -> list[bytes]: ... # OK
def __iter__(self) -> Generator: ... # OK (not in class scope)
def __aiter__(self) -> AsyncGenerator: ... # OK (not in class scope)
class IteratorReturningComplexGenerator:
def __iter__(self) -> Generator[str, int, bytes]: ... # OK
class AsyncIteratorReturningComplexAsyncGenerator:
def __aiter__(self) -> AsyncGenerator[str, int]: ... # OK
class ClassWithInvalidAsyncAiterMethod:
async def __aiter__(self) -> AsyncGenerator: ... # OK
class IteratorWithUnusualParameters1:
def __iter__(self, foo) -> Generator: ... # OK
class IteratorWithUnusualParameters2:
def __iter__(self, *, bar) -> Generator: ... # OK
class IteratorWithUnusualParameters3:
def __iter__(self, *args) -> Generator: ... # OK
class IteratorWithUnusualParameters4:
def __iter__(self, **kwargs) -> Generator: ... # OK

View File

@@ -159,7 +159,7 @@ async with asyncio.timeout(1):
async with trio.move_on_at(1):
pass
# Do not suppress combination, if a context manager is already combined with another.
# Do not supress combination, if a context manager is already combined with another.
async with asyncio.timeout(1), A():
async with B():
pass

View File

@@ -4,8 +4,6 @@ import datetime
from array import array
from dataclasses import dataclass
from uuid import UUID # TCH003
from collections.abc import Sequence
from pydantic import validate_call
import attrs
from attrs import frozen
@@ -24,8 +22,3 @@ class B:
@dataclass
class C:
x: UUID
@validate_call(config={'arbitrary_types_allowed': True})
def test(user: Sequence):
...

View File

@@ -1,16 +0,0 @@
# See: https://github.com/astral-sh/ruff/issues/9323
class Chassis(RobotModuleTemplate):
"""底盘信息推送控制
"""\
"""""" \
\
"abc\
" \
\

View File

@@ -1,5 +0,0 @@
"""Test for attribute accesses on builtins."""
a = type({}).__dict__['fromkeys']
b = dict.__dict__['fromkeys']
assert a is b

View File

@@ -1,58 +0,0 @@
# this line has a non-empty comment and is OK
# this line is also OK, but the three following lines are not
#
#
#
# this non-empty comment has trailing whitespace and is OK
# Many codebases use multiple `#` characters on a single line to visually
# separate sections of code, so we don't consider these empty comments.
##########################################
# trailing `#` characters are not considered empty comments ###
def foo(): # this comment is OK, the one below is not
pass #
# the lines below have no comments and are OK
def bar():
pass
# "Empty comments" are common in block comments
# to add legibility. For example:
#
# The above line's "empty comment" is likely
# intentional and is considered OK.
# lines in multi-line strings/comments whose last non-whitespace character is a `#`
# do not count as empty comments
"""
The following lines are all fine:
#
#
#
"""
# These should be removed, despite being an empty "block comment".
#
#
# These should also be removed.
x = 1
#
##
#
# This should be removed.
α = 1
α#

View File

@@ -1,33 +0,0 @@
class Animal:
@staticmethod
def speak():
return f"This animal says something."
class BadDog(Animal):
@staticmethod
def speak():
original_speak = super.speak() # PLW0245
return f"{original_speak} But as a dog, it barks!"
class GoodDog(Animal):
@staticmethod
def speak():
original_speak = super().speak() # OK
return f"{original_speak} But as a dog, it barks!"
class FineDog(Animal):
@staticmethod
def speak():
super = "super"
original_speak = super.speak() # OK
return f"{original_speak} But as a dog, it barks!"
def super_without_class() -> None:
super.blah() # OK
super.blah() # OK

View File

@@ -39,7 +39,7 @@ class Foo:
TCLS = typing.TypeVar["TCLS"]
y: typing.TypeAlias = list[TCLS]
# UP040 won't add generics in fix
# UP040 wont add generics in fix
T = typing.TypeVar(*args)
x: typing.TypeAlias = list[T]

View File

@@ -10,36 +10,6 @@ r = 3.15 # OK
r = 3.141 # FURB152
r = 3.142 # FURB152
r = 3.1415 # FURB152
r = 3.1416 # FURB152
r = 3.141592 # FURB152
r = 3.141593 # FURB152
r = 3.14159265 # FURB152
r = 3.141592653589793238462643383279 # FURB152
r = 3.14159266 # OK
e = 2.7 # OK
e = 2.718 # FURB152
e = 2.7182 # FURB152
e = 2.7183 # FURB152
e = 2.719 # OK
e = 2.71824 # OK
e = 2.71820001 # OK
e = 2.718200000000001 # OK
e = 2.7182000000000001 # FURB152

View File

@@ -1,22 +0,0 @@
x = 10
def ten() -> int:
return 10
count = bin(x).count("1") # FURB161
count = bin(10).count("1") # FURB161
count = bin(0b1010).count("1") # FURB161
count = bin(0xA).count("1") # FURB161
count = bin(0o12).count("1") # FURB161
count = bin(0x10 + 0x1000).count("1") # FURB161
count = bin(ten()).count("1") # FURB161
count = bin((10)).count("1") # FURB161
count = bin("10" "15").count("1") # FURB161
count = x.bit_count() # OK
count = (10).bit_count() # OK
count = 0b1010.bit_count() # OK
count = 0xA.bit_count() # OK
count = 0o12.bit_count() # OK
count = (0x10 + 0x1000).bit_count() # OK
count = ten().bit_count() # OK

View File

@@ -1,5 +0,0 @@
from typing_extensions import Optional
def f(arg: Optional[int] = None):
pass

View File

@@ -1,2 +0,0 @@
import os
import foo # noqa: F401

View File

@@ -438,12 +438,6 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
}
}
}
if checker.enabled(Rule::SuperWithoutBrackets) {
pylint::rules::super_without_brackets(checker, func);
}
if checker.enabled(Rule::BitCount) {
refurb::rules::bit_count(checker, call);
}
if checker.enabled(Rule::TypeOfPrimitive) {
pyupgrade::rules::type_of_primitive(checker, expr, func, args);
}

View File

@@ -154,9 +154,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
parameters,
);
}
if checker.enabled(Rule::GeneratorReturnFromIterMethod) {
flake8_pyi::rules::bad_generator_return_type(function_def, checker);
}
if checker.enabled(Rule::CustomTypeVarReturnType) {
flake8_pyi::rules::custom_type_var_return_type(
checker,

View File

@@ -28,29 +28,21 @@ pub(super) enum AnnotationContext {
impl AnnotationContext {
pub(super) fn from_model(semantic: &SemanticModel, settings: &LinterSettings) -> Self {
// If the annotation is in a class scope (e.g., an annotated assignment for a
// class field) or a function scope, and that class or function is marked as
// runtime-required, treat the annotation as runtime-required.
match semantic.current_scope().kind {
ScopeKind::Class(class_def)
if flake8_type_checking::helpers::runtime_required_class(
// class field), and that class is marked as annotation as runtime-required.
if semantic
.current_scope()
.kind
.as_class()
.is_some_and(|class_def| {
flake8_type_checking::helpers::runtime_required_class(
class_def,
&settings.flake8_type_checking.runtime_required_base_classes,
&settings.flake8_type_checking.runtime_required_decorators,
semantic,
) =>
{
return Self::RuntimeRequired
}
ScopeKind::Function(function_def)
if flake8_type_checking::helpers::runtime_required_function(
function_def,
&settings.flake8_type_checking.runtime_required_decorators,
semantic,
) =>
{
return Self::RuntimeRequired
}
_ => {}
)
})
{
return Self::RuntimeRequired;
}
// If `__future__` annotations are enabled, then annotations are never evaluated

View File

@@ -11,12 +11,11 @@ use ruff_source_file::Locator;
use crate::noqa;
use crate::noqa::{Directive, FileExemption, NoqaDirectives, NoqaMapping};
use crate::registry::{AsRule, Rule, RuleSet};
use crate::registry::{AsRule, Rule};
use crate::rule_redirects::get_redirect_target;
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA};
use crate::settings::LinterSettings;
#[allow(clippy::too_many_arguments)]
pub(crate) fn check_noqa(
diagnostics: &mut Vec<Diagnostic>,
path: &Path,
@@ -24,7 +23,6 @@ pub(crate) fn check_noqa(
comment_ranges: &CommentRanges,
noqa_line_for: &NoqaMapping,
analyze_directives: bool,
per_file_ignores: &RuleSet,
settings: &LinterSettings,
) -> Vec<usize> {
// Identify any codes that are globally exempted (within the current file).
@@ -121,7 +119,7 @@ pub(crate) fn check_noqa(
let mut unknown_codes = vec![];
let mut unmatched_codes = vec![];
let mut valid_codes = vec![];
let mut self_ignore = per_file_ignores.contains(Rule::UnusedNOQA);
let mut self_ignore = false;
for code in directive.codes() {
let code = get_redirect_target(code).unwrap_or(code);
if Rule::UnusedNOQA.noqa_code() == code {

View File

@@ -1,5 +1,4 @@
//! Lint rules based on checking physical lines.
use ruff_diagnostics::Diagnostic;
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;

View File

@@ -40,10 +40,6 @@ pub(crate) fn check_tokens(
pygrep_hooks::rules::blanket_type_ignore(&mut diagnostics, indexer, locator);
}
if settings.rules.enabled(Rule::EmptyComment) {
pylint::rules::empty_comments(&mut diagnostics, indexer, locator);
}
if settings.rules.any_enabled(&[
Rule::AmbiguousUnicodeCharacterString,
Rule::AmbiguousUnicodeCharacterDocstring,

View File

@@ -265,7 +265,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Pylint, "R1733") => (RuleGroup::Preview, rules::pylint::rules::UnnecessaryDictIndexLookup),
(Pylint, "R1736") => (RuleGroup::Preview, rules::pylint::rules::UnnecessaryListIndexLookup),
(Pylint, "R2004") => (RuleGroup::Stable, rules::pylint::rules::MagicValueComparison),
(Pylint, "R2044") => (RuleGroup::Preview, rules::pylint::rules::EmptyComment),
(Pylint, "R5501") => (RuleGroup::Stable, rules::pylint::rules::CollapsibleElseIf),
(Pylint, "R6201") => (RuleGroup::Preview, rules::pylint::rules::LiteralMembership),
#[allow(deprecated)]
@@ -275,7 +274,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Pylint, "W0127") => (RuleGroup::Stable, rules::pylint::rules::SelfAssigningVariable),
(Pylint, "W0129") => (RuleGroup::Stable, rules::pylint::rules::AssertOnStringLiteral),
(Pylint, "W0131") => (RuleGroup::Stable, rules::pylint::rules::NamedExprWithoutContext),
(Pylint, "W0245") => (RuleGroup::Preview, rules::pylint::rules::SuperWithoutBrackets),
(Pylint, "W0406") => (RuleGroup::Stable, rules::pylint::rules::ImportSelf),
(Pylint, "W0602") => (RuleGroup::Stable, rules::pylint::rules::GlobalVariableNotAssigned),
(Pylint, "W0604") => (RuleGroup::Preview, rules::pylint::rules::GlobalAtModuleLevel),
@@ -749,7 +747,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Flake8Pyi, "053") => (RuleGroup::Stable, rules::flake8_pyi::rules::StringOrBytesTooLong),
(Flake8Pyi, "055") => (RuleGroup::Stable, rules::flake8_pyi::rules::UnnecessaryTypeUnion),
(Flake8Pyi, "056") => (RuleGroup::Stable, rules::flake8_pyi::rules::UnsupportedMethodCallOnAll),
(Flake8Pyi, "058") => (RuleGroup::Preview, rules::flake8_pyi::rules::GeneratorReturnFromIterMethod),
// flake8-pytest-style
(Flake8PytestStyle, "001") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestFixtureIncorrectParenthesesStyle),
@@ -967,7 +964,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Refurb, "145") => (RuleGroup::Preview, rules::refurb::rules::SliceCopy),
(Refurb, "148") => (RuleGroup::Preview, rules::refurb::rules::UnnecessaryEnumerate),
(Refurb, "152") => (RuleGroup::Preview, rules::refurb::rules::MathConstant),
(Refurb, "161") => (RuleGroup::Preview, rules::refurb::rules::BitCount),
(Refurb, "163") => (RuleGroup::Preview, rules::refurb::rules::RedundantLogBase),
(Refurb, "168") => (RuleGroup::Preview, rules::refurb::rules::IsinstanceTypeNone),
(Refurb, "169") => (RuleGroup::Preview, rules::refurb::rules::TypeNoneComparison),

View File

@@ -397,13 +397,13 @@ mod tests {
#[test]
fn find_semicolon() -> Result<()> {
let contents = "x = 1";
let program = parse_suite(contents)?;
let program = parse_suite(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = Locator::new(contents);
assert_eq!(trailing_semicolon(stmt.end(), &locator), None);
let contents = "x = 1; y = 1";
let program = parse_suite(contents)?;
let program = parse_suite(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = Locator::new(contents);
assert_eq!(
@@ -412,7 +412,7 @@ mod tests {
);
let contents = "x = 1 ; y = 1";
let program = parse_suite(contents)?;
let program = parse_suite(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = Locator::new(contents);
assert_eq!(
@@ -425,7 +425,7 @@ x = 1 \
; y = 1
"
.trim();
let program = parse_suite(contents)?;
let program = parse_suite(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = Locator::new(contents);
assert_eq!(

View File

@@ -333,7 +333,7 @@ mod tests {
#[test]
fn start_of_file() -> Result<()> {
fn insert(contents: &str) -> Result<Insertion> {
let program = parse_suite(contents)?;
let program = parse_suite(contents, "<filename>")?;
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, Mode::Module);
let locator = Locator::new(contents);
let stylist = Stylist::from_tokens(&tokens, &locator);

View File

@@ -31,7 +31,7 @@ use crate::fix::{fix_file, FixResult};
use crate::logging::DisplayParseError;
use crate::message::Message;
use crate::noqa::add_noqa;
use crate::registry::{AsRule, Rule, RuleSet};
use crate::registry::{AsRule, Rule};
use crate::rules::pycodestyle;
use crate::settings::types::UnsafeFixes;
use crate::settings::{flags, LinterSettings};
@@ -145,7 +145,7 @@ pub fn check_path(
.any(|rule_code| rule_code.lint_source().is_imports());
if use_ast || use_imports || use_doc_lines {
// Parse, if the AST wasn't pre-provided provided.
match tokens.into_ast_source(source_kind, source_type) {
match tokens.into_ast_source(source_kind, source_type, path) {
Ok(python_ast) => {
let cell_offsets = source_kind.as_ipy_notebook().map(Notebook::cell_offsets);
if use_ast {
@@ -213,14 +213,12 @@ pub fn check_path(
}
// Ignore diagnostics based on per-file-ignores.
let per_file_ignores = if !diagnostics.is_empty() && !settings.per_file_ignores.is_empty() {
fs::ignores_from_path(path, &settings.per_file_ignores)
} else {
RuleSet::empty()
if !diagnostics.is_empty() && !settings.per_file_ignores.is_empty() {
let ignores = fs::ignores_from_path(path, &settings.per_file_ignores);
if !ignores.is_empty() {
diagnostics.retain(|diagnostic| !ignores.contains(diagnostic.kind.rule()));
}
};
if !per_file_ignores.is_empty() {
diagnostics.retain(|diagnostic| !per_file_ignores.contains(diagnostic.kind.rule()));
}
// Enforce `noqa` directives.
if (noqa.into() && !diagnostics.is_empty())
@@ -236,7 +234,6 @@ pub fn check_path(
indexer.comment_ranges(),
&directives.noqa_line_for,
error.is_none(),
&per_file_ignores,
settings,
);
if noqa.into() {
@@ -336,15 +333,10 @@ pub fn add_noqa_to_path(
);
// Log any parse errors.
if let Some(error) = error {
if let Some(err) = error {
error!(
"{}",
DisplayParseError::from_source_code(
error,
Some(path.to_path_buf()),
&locator.to_source_code(),
source_kind,
)
DisplayParseError::new(err, locator.to_source_code(), source_kind)
);
}
@@ -692,11 +684,13 @@ impl<'a> TokenSource<'a> {
self,
source_kind: &SourceKind,
source_type: PySourceType,
path: &Path,
) -> Result<AstSource<'a>, ParseError> {
match self {
Self::Tokens(tokens) => Ok(AstSource::Ast(ruff_python_parser::parse_program_tokens(
tokens,
source_kind.source_code(),
&path.to_string_lossy(),
source_type.is_ipynb(),
)?)),
Self::Precomputed { ast, .. } => Ok(AstSource::Precomputed(ast)),

View File

@@ -1,5 +1,5 @@
use std::fmt::{Display, Formatter, Write};
use std::path::{Path, PathBuf};
use std::path::Path;
use std::sync::Mutex;
use anyhow::Result;
@@ -9,7 +9,7 @@ use log::Level;
use once_cell::sync::Lazy;
use ruff_python_parser::{ParseError, ParseErrorType};
use ruff_source_file::{LineIndex, OneIndexed, SourceCode, SourceLocation};
use ruff_source_file::{OneIndexed, SourceCode, SourceLocation};
use crate::fs;
use crate::source_kind::SourceKind;
@@ -136,116 +136,70 @@ pub fn set_up_logging(level: &LogLevel) -> Result<()> {
Ok(())
}
/// A wrapper around [`ParseError`] to translate byte offsets to user-facing
/// source code locations (typically, line and column numbers).
#[derive(Debug)]
pub struct DisplayParseError {
pub struct DisplayParseError<'a> {
error: ParseError,
path: Option<PathBuf>,
location: ErrorLocation,
source_code: SourceCode<'a, 'a>,
source_kind: &'a SourceKind,
}
impl DisplayParseError {
/// Create a [`DisplayParseError`] from a [`ParseError`] and a [`SourceKind`].
pub fn from_source_kind(
impl<'a> DisplayParseError<'a> {
pub fn new(
error: ParseError,
path: Option<PathBuf>,
source_kind: &SourceKind,
source_code: SourceCode<'a, 'a>,
source_kind: &'a SourceKind,
) -> Self {
Self::from_source_code(
error,
path,
&SourceCode::new(
source_kind.source_code(),
&LineIndex::from_source_text(source_kind.source_code()),
),
source_kind,
)
}
/// Create a [`DisplayParseError`] from a [`ParseError`] and a [`SourceCode`].
pub fn from_source_code(
error: ParseError,
path: Option<PathBuf>,
source_code: &SourceCode,
source_kind: &SourceKind,
) -> Self {
// Translate the byte offset to a location in the originating source.
let location =
if let Some(jupyter_index) = source_kind.as_ipy_notebook().map(Notebook::index) {
let source_location = source_code.source_location(error.offset);
ErrorLocation::Cell(
jupyter_index
.cell(source_location.row)
.unwrap_or(OneIndexed::MIN),
SourceLocation {
row: jupyter_index
.cell_row(source_location.row)
.unwrap_or(OneIndexed::MIN),
column: source_location.column,
},
)
} else {
ErrorLocation::File(source_code.source_location(error.offset))
};
Self {
error,
path,
location,
source_code,
source_kind,
}
}
/// Return the path of the file in which the error occurred.
pub fn path(&self) -> Option<&Path> {
self.path.as_deref()
}
}
impl std::error::Error for DisplayParseError {}
impl Display for DisplayParseError<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{header} {path}{colon}",
header = "Failed to parse".bold(),
path = fs::relativize_path(Path::new(&self.error.source_path)).bold(),
colon = ":".cyan(),
)?;
impl Display for DisplayParseError {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
if let Some(path) = self.path.as_ref() {
write!(
f,
"{header} {path}{colon}",
header = "Failed to parse".bold(),
path = fs::relativize_path(path).bold(),
colon = ":".cyan(),
)?;
} else {
write!(
f,
"{header}{colon}",
header = "Failed to parse".bold(),
colon = ":".cyan(),
)?;
}
match &self.location {
ErrorLocation::File(location) => {
let source_location = self.source_code.source_location(self.error.offset);
// If we're working on a Jupyter notebook, translate the positions
// with respect to the cell and row in the cell. This is the same
// format as the `TextEmitter`.
let error_location =
if let Some(jupyter_index) = self.source_kind.as_ipy_notebook().map(Notebook::index) {
write!(
f,
"{row}{colon}{column}{colon} {inner}",
row = location.row,
column = location.column,
"cell {cell}{colon}",
cell = jupyter_index
.cell(source_location.row)
.unwrap_or(OneIndexed::MIN),
colon = ":".cyan(),
inner = &DisplayParseErrorType(&self.error.error)
)
}
ErrorLocation::Cell(cell, location) => {
write!(
f,
"{cell}{colon}{row}{colon}{column}{colon} {inner}",
cell = cell,
row = location.row,
column = location.column,
colon = ":".cyan(),
inner = &DisplayParseErrorType(&self.error.error)
)
}
}
)?;
SourceLocation {
row: jupyter_index
.cell_row(source_location.row)
.unwrap_or(OneIndexed::MIN),
column: source_location.column,
}
} else {
source_location
};
write!(
f,
"{row}{colon}{column}{colon} {inner}",
row = error_location.row,
column = error_location.column,
colon = ":".cyan(),
inner = &DisplayParseErrorType(&self.error.error)
)
}
}
@@ -283,14 +237,6 @@ impl Display for DisplayParseErrorType<'_> {
}
}
#[derive(Debug)]
enum ErrorLocation {
/// The error occurred in a Python file.
File(SourceLocation),
/// The error occurred in a Jupyter cell.
Cell(OneIndexed, SourceLocation),
}
/// Truncates the display text before the first newline character to avoid line breaks.
struct TruncateAtNewline<'a>(&'a dyn Display);

View File

@@ -265,7 +265,6 @@ impl Rule {
| Rule::BlanketNOQA
| Rule::BlanketTypeIgnore
| Rule::CommentedOutCode
| Rule::EmptyComment
| Rule::ExtraneousParentheses
| Rule::InvalidCharacterBackspace
| Rule::InvalidCharacterEsc

View File

@@ -243,6 +243,17 @@ pub struct PreviewOptions {
pub require_explicit: bool,
}
impl PreviewOptions {
/// Return a copy with the same preview mode setting but require explicit disabled.
#[must_use]
pub fn without_require_explicit(&self) -> Self {
Self {
mode: self.mode,
require_explicit: false,
}
}
}
#[cfg(feature = "schemars")]
mod schema {
use itertools::Itertools;

View File

@@ -74,7 +74,7 @@ pub(crate) fn comment_contains_code(line: &str, task_tags: &[String]) -> bool {
}
// Finally, compile the source code.
parse_suite(line).is_ok()
parse_suite(line, "<filename>").is_ok()
}
#[cfg(test)]

View File

@@ -3,11 +3,10 @@ use rustc_hash::FxHashSet;
use ruff_diagnostics::Edit;
use ruff_python_ast::helpers::{
pep_604_union, typing_optional, typing_union, ReturnStatementVisitor,
pep_604_union, typing_optional, typing_union, ReturnStatementVisitor, Terminal,
};
use ruff_python_ast::visitor::Visitor;
use ruff_python_ast::{self as ast, Expr, ExprContext};
use ruff_python_semantic::analyze::terminal::Terminal;
use ruff_python_semantic::analyze::type_inference::{NumberLike, PythonType, ResolvedPythonType};
use ruff_python_semantic::analyze::visibility;
use ruff_python_semantic::{Definition, SemanticModel};
@@ -62,7 +61,7 @@ pub(crate) fn auto_return_type(function: &ast::StmtFunctionDef) -> Option<AutoPy
let terminal = Terminal::from_function(function);
// If every control flow path raises an exception, return `NoReturn`.
if terminal == Terminal::Raise {
if terminal == Some(Terminal::Raise) {
return Some(AutoPythonType::Never);
}
@@ -89,7 +88,7 @@ pub(crate) fn auto_return_type(function: &ast::StmtFunctionDef) -> Option<AutoPy
// if x > 0:
// return 1
// ```
if terminal == Terminal::ConditionalReturn || terminal == Terminal::None {
if terminal.is_none() {
return_type = return_type.union(ResolvedPythonType::Atom(PythonType::None));
}

View File

@@ -579,99 +579,4 @@ auto_return_type.py:210:5: ANN201 [*] Missing return type annotation for public
212 212 | raise ValueError
213 213 | else:
auto_return_type.py:234:5: ANN201 [*] Missing return type annotation for public function `func`
|
234 | def func(x: int):
| ^^^^ ANN201
235 | if not x:
236 | return 1
|
= help: Add return type annotation: `int`
Unsafe fix
231 231 | return i
232 232 |
233 233 |
234 |-def func(x: int):
234 |+def func(x: int) -> int:
235 235 | if not x:
236 236 | return 1
237 237 | raise ValueError
auto_return_type.py:240:5: ANN201 [*] Missing return type annotation for public function `func`
|
240 | def func(x: int):
| ^^^^ ANN201
241 | if not x:
242 | return 1
|
= help: Add return type annotation: `int`
Unsafe fix
237 237 | raise ValueError
238 238 |
239 239 |
240 |-def func(x: int):
240 |+def func(x: int) -> int:
241 241 | if not x:
242 242 | return 1
243 243 | else:
auto_return_type.py:248:5: ANN201 [*] Missing return type annotation for public function `func`
|
248 | def func():
| ^^^^ ANN201
249 | try:
250 | raise ValueError
|
= help: Add return type annotation: `int | None`
Unsafe fix
245 245 | raise ValueError
246 246 |
247 247 |
248 |-def func():
248 |+def func() -> int | None:
249 249 | try:
250 250 | raise ValueError
251 251 | except:
auto_return_type.py:255:5: ANN201 [*] Missing return type annotation for public function `func`
|
255 | def func():
| ^^^^ ANN201
256 | try:
257 | return 1
|
= help: Add return type annotation: `int | None`
Unsafe fix
252 252 | return 2
253 253 |
254 254 |
255 |-def func():
255 |+def func() -> int | None:
256 256 | try:
257 257 | return 1
258 258 | except:
auto_return_type.py:262:5: ANN201 [*] Missing return type annotation for public function `func`
|
262 | def func(x: int):
| ^^^^ ANN201
263 | for _ in range(3):
264 | if x > 0:
|
= help: Add return type annotation: `int`
Unsafe fix
259 259 | pass
260 260 |
261 261 |
262 |-def func(x: int):
262 |+def func(x: int) -> int:
263 263 | for _ in range(3):
264 264 | if x > 0:
265 265 | return 1

View File

@@ -642,117 +642,4 @@ auto_return_type.py:210:5: ANN201 [*] Missing return type annotation for public
212 212 | raise ValueError
213 213 | else:
auto_return_type.py:234:5: ANN201 [*] Missing return type annotation for public function `func`
|
234 | def func(x: int):
| ^^^^ ANN201
235 | if not x:
236 | return 1
|
= help: Add return type annotation: `int`
Unsafe fix
231 231 | return i
232 232 |
233 233 |
234 |-def func(x: int):
234 |+def func(x: int) -> int:
235 235 | if not x:
236 236 | return 1
237 237 | raise ValueError
auto_return_type.py:240:5: ANN201 [*] Missing return type annotation for public function `func`
|
240 | def func(x: int):
| ^^^^ ANN201
241 | if not x:
242 | return 1
|
= help: Add return type annotation: `int`
Unsafe fix
237 237 | raise ValueError
238 238 |
239 239 |
240 |-def func(x: int):
240 |+def func(x: int) -> int:
241 241 | if not x:
242 242 | return 1
243 243 | else:
auto_return_type.py:248:5: ANN201 [*] Missing return type annotation for public function `func`
|
248 | def func():
| ^^^^ ANN201
249 | try:
250 | raise ValueError
|
= help: Add return type annotation: `Optional[int]`
Unsafe fix
214 214 | return 1
215 215 |
216 216 |
217 |-from typing import overload
217 |+from typing import overload, Optional
218 218 |
219 219 |
220 220 | @overload
--------------------------------------------------------------------------------
245 245 | raise ValueError
246 246 |
247 247 |
248 |-def func():
248 |+def func() -> Optional[int]:
249 249 | try:
250 250 | raise ValueError
251 251 | except:
auto_return_type.py:255:5: ANN201 [*] Missing return type annotation for public function `func`
|
255 | def func():
| ^^^^ ANN201
256 | try:
257 | return 1
|
= help: Add return type annotation: `Optional[int]`
Unsafe fix
214 214 | return 1
215 215 |
216 216 |
217 |-from typing import overload
217 |+from typing import overload, Optional
218 218 |
219 219 |
220 220 | @overload
--------------------------------------------------------------------------------
252 252 | return 2
253 253 |
254 254 |
255 |-def func():
255 |+def func() -> Optional[int]:
256 256 | try:
257 257 | return 1
258 258 | except:
auto_return_type.py:262:5: ANN201 [*] Missing return type annotation for public function `func`
|
262 | def func(x: int):
| ^^^^ ANN201
263 | for _ in range(3):
264 | if x > 0:
|
= help: Add return type annotation: `int`
Unsafe fix
259 259 | pass
260 260 |
261 261 |
262 |-def func(x: int):
262 |+def func(x: int) -> int:
263 263 | for _ in range(3):
264 264 | if x > 0:
265 265 | return 1

View File

@@ -28,7 +28,7 @@ use crate::checkers::ast::Checker;
/// from paramiko import client
///
/// ssh_client = client.SSHClient()
/// ssh_client.set_missing_host_key_policy(client.RejectPolicy)
/// ssh_client.set_missing_host_key_policy()
/// ```
///
/// ## References

View File

@@ -70,11 +70,7 @@ pub(crate) fn unsafe_yaml_load(checker: &mut Checker, call: &ast::ExprCall) {
.semantic()
.resolve_call_path(loader_arg)
.is_some_and(|call_path| {
matches!(
call_path.as_slice(),
["yaml", "SafeLoader" | "CSafeLoader"]
| ["yaml", "loader", "SafeLoader" | "CSafeLoader"]
)
matches!(call_path.as_slice(), ["yaml", "SafeLoader" | "CSafeLoader"])
})
{
let loader = match loader_arg {

View File

@@ -191,11 +191,7 @@ fn match_annotation_to_complex_bool(annotation: &Expr, semantic: &SemanticModel)
}
// Ex) `typing.Union[bool, int]`
Expr::Subscript(ast::ExprSubscript { value, slice, .. }) => {
let call_path = semantic.resolve_call_path(value);
if call_path
.as_ref()
.is_some_and(|call_path| semantic.match_typing_call_path(call_path, "Union"))
{
if semantic.match_typing_expr(value, "Union") {
if let Expr::Tuple(ast::ExprTuple { elts, .. }) = slice.as_ref() {
elts.iter()
.any(|elt| match_annotation_to_complex_bool(elt, semantic))
@@ -203,10 +199,7 @@ fn match_annotation_to_complex_bool(annotation: &Expr, semantic: &SemanticModel)
// Union with a single type is an invalid type annotation
false
}
} else if call_path
.as_ref()
.is_some_and(|call_path| semantic.match_typing_call_path(call_path, "Optional"))
{
} else if semantic.match_typing_expr(value, "Optional") {
match_annotation_to_complex_bool(slice, semantic)
} else {
false

View File

@@ -53,7 +53,7 @@ impl Violation for UnnecessaryComprehensionAnyAll {
#[derive_message_formats]
fn message(&self) -> String {
format!("Unnecessary list comprehension")
format!("Unnecessary list comprehension.")
}
fn fix_title(&self) -> Option<String> {

View File

@@ -1,7 +1,7 @@
---
source: crates/ruff_linter/src/rules/flake8_comprehensions/mod.rs
---
C419.py:1:5: C419 [*] Unnecessary list comprehension
C419.py:1:5: C419 [*] Unnecessary list comprehension.
|
1 | any([x.id for x in bar])
| ^^^^^^^^^^^^^^^^^^^ C419
@@ -17,7 +17,7 @@ C419.py:1:5: C419 [*] Unnecessary list comprehension
3 3 | any( # first comment
4 4 | [x.id for x in bar], # second comment
C419.py:2:5: C419 [*] Unnecessary list comprehension
C419.py:2:5: C419 [*] Unnecessary list comprehension.
|
1 | any([x.id for x in bar])
2 | all([x.id for x in bar])
@@ -35,7 +35,7 @@ C419.py:2:5: C419 [*] Unnecessary list comprehension
4 4 | [x.id for x in bar], # second comment
5 5 | ) # third comment
C419.py:4:5: C419 [*] Unnecessary list comprehension
C419.py:4:5: C419 [*] Unnecessary list comprehension.
|
2 | all([x.id for x in bar])
3 | any( # first comment
@@ -56,7 +56,7 @@ C419.py:4:5: C419 [*] Unnecessary list comprehension
6 6 | all( # first comment
7 7 | [x.id for x in bar], # second comment
C419.py:7:5: C419 [*] Unnecessary list comprehension
C419.py:7:5: C419 [*] Unnecessary list comprehension.
|
5 | ) # third comment
6 | all( # first comment
@@ -77,7 +77,7 @@ C419.py:7:5: C419 [*] Unnecessary list comprehension
9 9 | any({x.id for x in bar})
10 10 |
C419.py:9:5: C419 [*] Unnecessary list comprehension
C419.py:9:5: C419 [*] Unnecessary list comprehension.
|
7 | [x.id for x in bar], # second comment
8 | ) # third comment
@@ -98,7 +98,7 @@ C419.py:9:5: C419 [*] Unnecessary list comprehension
11 11 | # OK
12 12 | all(x.id for x in bar)
C419.py:24:5: C419 [*] Unnecessary list comprehension
C419.py:24:5: C419 [*] Unnecessary list comprehension.
|
22 | # Special comment handling
23 | any(
@@ -133,7 +133,7 @@ C419.py:24:5: C419 [*] Unnecessary list comprehension
31 30 | )
32 31 |
C419.py:35:5: C419 [*] Unnecessary list comprehension
C419.py:35:5: C419 [*] Unnecessary list comprehension.
|
33 | # Weird case where the function call, opening bracket, and comment are all
34 | # on the same line.

View File

@@ -4,14 +4,14 @@ use ruff_python_semantic::{analyze, SemanticModel};
/// Return `true` if a Python class appears to be a Django model, based on its base classes.
pub(super) fn is_model(class_def: &ast::StmtClassDef, semantic: &SemanticModel) -> bool {
analyze::class::any_call_path(class_def, semantic, &|call_path| {
analyze::class::any_over_body(class_def, semantic, &|call_path| {
matches!(call_path.as_slice(), ["django", "db", "models", "Model"])
})
}
/// Return `true` if a Python class appears to be a Django model form, based on its base classes.
pub(super) fn is_model_form(class_def: &ast::StmtClassDef, semantic: &SemanticModel) -> bool {
analyze::class::any_call_path(class_def, semantic, &|call_path| {
analyze::class::any_over_body(class_def, semantic, &|call_path| {
matches!(
call_path.as_slice(),
["django", "forms", "ModelForm"] | ["django", "forms", "models", "ModelForm"]

View File

@@ -3,7 +3,7 @@ use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::is_const_true;
use ruff_python_ast::identifier::Identifier;
use ruff_python_ast::{self as ast, Expr, Stmt};
use ruff_python_semantic::{analyze, SemanticModel};
use ruff_python_semantic::SemanticModel;
use crate::checkers::ast::Checker;
@@ -55,11 +55,9 @@ pub(crate) fn model_without_dunder_str(checker: &mut Checker, class_def: &ast::S
if !is_non_abstract_model(class_def, checker.semantic()) {
return;
}
if has_dunder_method(class_def, checker.semantic()) {
if has_dunder_method(class_def) {
return;
}
checker.diagnostics.push(Diagnostic::new(
DjangoModelWithoutDunderStr,
class_def.identifier(),
@@ -67,12 +65,10 @@ pub(crate) fn model_without_dunder_str(checker: &mut Checker, class_def: &ast::S
}
/// Returns `true` if the class has `__str__` method.
fn has_dunder_method(class_def: &ast::StmtClassDef, semantic: &SemanticModel) -> bool {
analyze::class::any_super_class(class_def, semantic, &|class_def| {
class_def.body.iter().any(|val| match val {
Stmt::FunctionDef(ast::StmtFunctionDef { name, .. }) => name == "__str__",
_ => false,
})
fn has_dunder_method(class_def: &ast::StmtClassDef) -> bool {
class_def.body.iter().any(|val| match val {
Stmt::FunctionDef(ast::StmtFunctionDef { name, .. }) => name == "__str__",
_ => false,
})
}

View File

@@ -1,9 +1,8 @@
use ruff_python_ast::{self as ast, Expr, Stmt};
use ruff_python_ast::{self as ast, Arguments, Expr, Stmt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::is_const_true;
use ruff_python_semantic::SemanticModel;
use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
@@ -49,7 +48,7 @@ impl Violation for DjangoNullableModelStringField {
#[derive_message_formats]
fn message(&self) -> String {
let DjangoNullableModelStringField { field_name } = self;
format!("Avoid using `null=True` on string-based fields such as `{field_name}`")
format!("Avoid using `null=True` on string-based fields such as {field_name}")
}
}
@@ -59,7 +58,7 @@ pub(crate) fn nullable_model_string_field(checker: &mut Checker, body: &[Stmt])
let Stmt::Assign(ast::StmtAssign { value, .. }) = statement else {
continue;
};
if let Some(field_name) = is_nullable_field(value, checker.semantic()) {
if let Some(field_name) = is_nullable_field(checker, value) {
checker.diagnostics.push(Diagnostic::new(
DjangoNullableModelStringField {
field_name: field_name.to_string(),
@@ -70,12 +69,22 @@ pub(crate) fn nullable_model_string_field(checker: &mut Checker, body: &[Stmt])
}
}
fn is_nullable_field<'a>(value: &'a Expr, semantic: &'a SemanticModel) -> Option<&'a str> {
let call = value.as_call_expr()?;
fn is_nullable_field<'a>(checker: &'a Checker, value: &'a Expr) -> Option<&'a str> {
let Expr::Call(ast::ExprCall {
func,
arguments: Arguments { keywords, .. },
..
}) = value
else {
return None;
};
let Some(valid_field_name) = helpers::get_model_field_name(func, checker.semantic()) else {
return None;
};
let field_name = helpers::get_model_field_name(&call.func, semantic)?;
if !matches!(
field_name,
valid_field_name,
"CharField" | "TextField" | "SlugField" | "EmailField" | "FilePathField" | "URLField"
) {
return None;
@@ -84,7 +93,7 @@ fn is_nullable_field<'a>(value: &'a Expr, semantic: &'a SemanticModel) -> Option
let mut null_key = false;
let mut blank_key = false;
let mut unique_key = false;
for keyword in &call.arguments.keywords {
for keyword in keywords {
let Some(argument) = &keyword.arg else {
continue;
};
@@ -104,6 +113,5 @@ fn is_nullable_field<'a>(value: &'a Expr, semantic: &'a SemanticModel) -> Option
if !null_key {
return None;
}
Some(field_name)
Some(valid_field_name)
}

View File

@@ -1,7 +1,7 @@
---
source: crates/ruff_linter/src/rules/flake8_django/mod.rs
---
DJ001.py:7:17: DJ001 Avoid using `null=True` on string-based fields such as `CharField`
DJ001.py:7:17: DJ001 Avoid using `null=True` on string-based fields such as CharField
|
6 | class IncorrectModel(models.Model):
7 | charfield = models.CharField(max_length=255, null=True)
@@ -10,7 +10,7 @@ DJ001.py:7:17: DJ001 Avoid using `null=True` on string-based fields such as `Cha
9 | slugfield = models.SlugField(max_length=255, null=True)
|
DJ001.py:8:17: DJ001 Avoid using `null=True` on string-based fields such as `TextField`
DJ001.py:8:17: DJ001 Avoid using `null=True` on string-based fields such as TextField
|
6 | class IncorrectModel(models.Model):
7 | charfield = models.CharField(max_length=255, null=True)
@@ -20,7 +20,7 @@ DJ001.py:8:17: DJ001 Avoid using `null=True` on string-based fields such as `Tex
10 | emailfield = models.EmailField(max_length=255, null=True)
|
DJ001.py:9:17: DJ001 Avoid using `null=True` on string-based fields such as `SlugField`
DJ001.py:9:17: DJ001 Avoid using `null=True` on string-based fields such as SlugField
|
7 | charfield = models.CharField(max_length=255, null=True)
8 | textfield = models.TextField(max_length=255, null=True)
@@ -30,7 +30,7 @@ DJ001.py:9:17: DJ001 Avoid using `null=True` on string-based fields such as `Slu
11 | filepathfield = models.FilePathField(max_length=255, null=True)
|
DJ001.py:10:18: DJ001 Avoid using `null=True` on string-based fields such as `EmailField`
DJ001.py:10:18: DJ001 Avoid using `null=True` on string-based fields such as EmailField
|
8 | textfield = models.TextField(max_length=255, null=True)
9 | slugfield = models.SlugField(max_length=255, null=True)
@@ -40,7 +40,7 @@ DJ001.py:10:18: DJ001 Avoid using `null=True` on string-based fields such as `Em
12 | urlfield = models.URLField(max_length=255, null=True)
|
DJ001.py:11:21: DJ001 Avoid using `null=True` on string-based fields such as `FilePathField`
DJ001.py:11:21: DJ001 Avoid using `null=True` on string-based fields such as FilePathField
|
9 | slugfield = models.SlugField(max_length=255, null=True)
10 | emailfield = models.EmailField(max_length=255, null=True)
@@ -49,7 +49,7 @@ DJ001.py:11:21: DJ001 Avoid using `null=True` on string-based fields such as `Fi
12 | urlfield = models.URLField(max_length=255, null=True)
|
DJ001.py:12:16: DJ001 Avoid using `null=True` on string-based fields such as `URLField`
DJ001.py:12:16: DJ001 Avoid using `null=True` on string-based fields such as URLField
|
10 | emailfield = models.EmailField(max_length=255, null=True)
11 | filepathfield = models.FilePathField(max_length=255, null=True)
@@ -57,7 +57,7 @@ DJ001.py:12:16: DJ001 Avoid using `null=True` on string-based fields such as `UR
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ DJ001
|
DJ001.py:16:17: DJ001 Avoid using `null=True` on string-based fields such as `CharField`
DJ001.py:16:17: DJ001 Avoid using `null=True` on string-based fields such as CharField
|
15 | class IncorrectModelWithAlias(DjangoModel):
16 | charfield = DjangoModel.CharField(max_length=255, null=True)
@@ -66,7 +66,7 @@ DJ001.py:16:17: DJ001 Avoid using `null=True` on string-based fields such as `Ch
18 | slugfield = models.SlugField(max_length=255, null=True)
|
DJ001.py:17:17: DJ001 Avoid using `null=True` on string-based fields such as `CharField`
DJ001.py:17:17: DJ001 Avoid using `null=True` on string-based fields such as CharField
|
15 | class IncorrectModelWithAlias(DjangoModel):
16 | charfield = DjangoModel.CharField(max_length=255, null=True)
@@ -76,7 +76,7 @@ DJ001.py:17:17: DJ001 Avoid using `null=True` on string-based fields such as `Ch
19 | emailfield = models.EmailField(max_length=255, null=True)
|
DJ001.py:18:17: DJ001 Avoid using `null=True` on string-based fields such as `SlugField`
DJ001.py:18:17: DJ001 Avoid using `null=True` on string-based fields such as SlugField
|
16 | charfield = DjangoModel.CharField(max_length=255, null=True)
17 | textfield = SmthCharField(max_length=255, null=True)
@@ -86,7 +86,7 @@ DJ001.py:18:17: DJ001 Avoid using `null=True` on string-based fields such as `Sl
20 | filepathfield = models.FilePathField(max_length=255, null=True)
|
DJ001.py:19:18: DJ001 Avoid using `null=True` on string-based fields such as `EmailField`
DJ001.py:19:18: DJ001 Avoid using `null=True` on string-based fields such as EmailField
|
17 | textfield = SmthCharField(max_length=255, null=True)
18 | slugfield = models.SlugField(max_length=255, null=True)
@@ -96,7 +96,7 @@ DJ001.py:19:18: DJ001 Avoid using `null=True` on string-based fields such as `Em
21 | urlfield = models.URLField(max_length=255, null=True)
|
DJ001.py:20:21: DJ001 Avoid using `null=True` on string-based fields such as `FilePathField`
DJ001.py:20:21: DJ001 Avoid using `null=True` on string-based fields such as FilePathField
|
18 | slugfield = models.SlugField(max_length=255, null=True)
19 | emailfield = models.EmailField(max_length=255, null=True)
@@ -105,7 +105,7 @@ DJ001.py:20:21: DJ001 Avoid using `null=True` on string-based fields such as `Fi
21 | urlfield = models.URLField(max_length=255, null=True)
|
DJ001.py:21:16: DJ001 Avoid using `null=True` on string-based fields such as `URLField`
DJ001.py:21:16: DJ001 Avoid using `null=True` on string-based fields such as URLField
|
19 | emailfield = models.EmailField(max_length=255, null=True)
20 | filepathfield = models.FilePathField(max_length=255, null=True)
@@ -113,7 +113,7 @@ DJ001.py:21:16: DJ001 Avoid using `null=True` on string-based fields such as `UR
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ DJ001
|
DJ001.py:25:17: DJ001 Avoid using `null=True` on string-based fields such as `CharField`
DJ001.py:25:17: DJ001 Avoid using `null=True` on string-based fields such as CharField
|
24 | class IncorrectModelWithoutSuperclass:
25 | charfield = DjangoModel.CharField(max_length=255, null=True)
@@ -122,7 +122,7 @@ DJ001.py:25:17: DJ001 Avoid using `null=True` on string-based fields such as `Ch
27 | slugfield = models.SlugField(max_length=255, null=True)
|
DJ001.py:26:17: DJ001 Avoid using `null=True` on string-based fields such as `CharField`
DJ001.py:26:17: DJ001 Avoid using `null=True` on string-based fields such as CharField
|
24 | class IncorrectModelWithoutSuperclass:
25 | charfield = DjangoModel.CharField(max_length=255, null=True)
@@ -132,7 +132,7 @@ DJ001.py:26:17: DJ001 Avoid using `null=True` on string-based fields such as `Ch
28 | emailfield = models.EmailField(max_length=255, null=True)
|
DJ001.py:27:17: DJ001 Avoid using `null=True` on string-based fields such as `SlugField`
DJ001.py:27:17: DJ001 Avoid using `null=True` on string-based fields such as SlugField
|
25 | charfield = DjangoModel.CharField(max_length=255, null=True)
26 | textfield = SmthCharField(max_length=255, null=True)
@@ -142,7 +142,7 @@ DJ001.py:27:17: DJ001 Avoid using `null=True` on string-based fields such as `Sl
29 | filepathfield = models.FilePathField(max_length=255, null=True)
|
DJ001.py:28:18: DJ001 Avoid using `null=True` on string-based fields such as `EmailField`
DJ001.py:28:18: DJ001 Avoid using `null=True` on string-based fields such as EmailField
|
26 | textfield = SmthCharField(max_length=255, null=True)
27 | slugfield = models.SlugField(max_length=255, null=True)
@@ -152,7 +152,7 @@ DJ001.py:28:18: DJ001 Avoid using `null=True` on string-based fields such as `Em
30 | urlfield = models.URLField(max_length=255, null=True)
|
DJ001.py:29:21: DJ001 Avoid using `null=True` on string-based fields such as `FilePathField`
DJ001.py:29:21: DJ001 Avoid using `null=True` on string-based fields such as FilePathField
|
27 | slugfield = models.SlugField(max_length=255, null=True)
28 | emailfield = models.EmailField(max_length=255, null=True)
@@ -161,7 +161,7 @@ DJ001.py:29:21: DJ001 Avoid using `null=True` on string-based fields such as `Fi
30 | urlfield = models.URLField(max_length=255, null=True)
|
DJ001.py:30:16: DJ001 Avoid using `null=True` on string-based fields such as `URLField`
DJ001.py:30:16: DJ001 Avoid using `null=True` on string-based fields such as URLField
|
28 | emailfield = models.EmailField(max_length=255, null=True)
29 | filepathfield = models.FilePathField(max_length=255, null=True)

View File

@@ -23,12 +23,4 @@ DJ008.py:36:7: DJ008 Model does not define `__str__` method
37 | new_field = models.CharField(max_length=10)
|
DJ008.py:182:7: DJ008 Model does not define `__str__` method
|
181 | # Subclass without __str__
182 | class SubclassTestModel3(TestModel1):
| ^^^^^^^^^^^^^^^^^^ DJ008
183 | pass
|

View File

@@ -3,6 +3,7 @@
use std::path::Path;
use ruff_text_size::{Ranged, TextRange};
use wsl;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -44,7 +45,7 @@ impl Violation for ShebangMissingExecutableFile {
pub(crate) fn shebang_missing_executable_file(filepath: &Path) -> Option<Diagnostic> {
// WSL supports Windows file systems, which do not have executable bits.
// Instead, everything is executable. Therefore, we skip this rule on WSL.
if is_wsl::is_wsl() {
if wsl::is_wsl() {
return None;
}
if let Ok(true) = is_executable(filepath) {

View File

@@ -3,6 +3,7 @@
use std::path::Path;
use ruff_text_size::{Ranged, TextRange};
use wsl;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -44,7 +45,7 @@ impl Violation for ShebangNotExecutable {
pub(crate) fn shebang_not_executable(filepath: &Path, range: TextRange) -> Option<Diagnostic> {
// WSL supports Windows file systems, which do not have executable bits.
// Instead, everything is executable. Therefore, we skip this rule on WSL.
if is_wsl::is_wsl() {
if wsl::is_wsl() {
return None;
}

View File

@@ -39,8 +39,6 @@ mod tests {
#[test_case(Rule::EllipsisInNonEmptyClassBody, Path::new("PYI013.pyi"))]
#[test_case(Rule::FutureAnnotationsInStub, Path::new("PYI044.py"))]
#[test_case(Rule::FutureAnnotationsInStub, Path::new("PYI044.pyi"))]
#[test_case(Rule::GeneratorReturnFromIterMethod, Path::new("PYI058.py"))]
#[test_case(Rule::GeneratorReturnFromIterMethod, Path::new("PYI058.pyi"))]
#[test_case(Rule::IterMethodReturnIterable, Path::new("PYI045.py"))]
#[test_case(Rule::IterMethodReturnIterable, Path::new("PYI045.pyi"))]
#[test_case(Rule::NoReturnArgumentAnnotationInStub, Path::new("PYI050.py"))]

View File

@@ -1,191 +0,0 @@
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast as ast;
use ruff_python_ast::helpers::map_subscript;
use ruff_python_ast::identifier::Identifier;
use ruff_python_semantic::SemanticModel;
use crate::checkers::ast::Checker;
/// ## What it does
/// Checks for simple `__iter__` methods that return `Generator`, and for
/// simple `__aiter__` methods that return `AsyncGenerator`.
///
/// ## Why is this bad?
/// Using `(Async)Iterator` for these methods is simpler and more elegant. More
/// importantly, it also reflects the fact that the precise kind of iterator
/// returned from an `__iter__` method is usually an implementation detail that
/// could change at any time. Type annotations help define a contract for a
/// function; implementation details should not leak into that contract.
///
/// For example:
/// ```python
/// from collections.abc import AsyncGenerator, Generator
/// from typing import Any
///
///
/// class CustomIterator:
/// def __iter__(self) -> Generator:
/// yield from range(42)
///
///
/// class CustomIterator2:
/// def __iter__(self) -> Generator[str, Any, None]:
/// yield from "abcdefg"
/// ```
///
/// Use instead:
/// ```python
/// from collections.abc import Iterator
///
///
/// class CustomIterator:
/// def __iter__(self) -> Iterator:
/// yield from range(42)
///
///
/// class CustomIterator2:
/// def __iter__(self) -> Iterator[str]:
/// yield from "abdefg"
/// ```
#[violation]
pub struct GeneratorReturnFromIterMethod {
better_return_type: String,
method_name: String,
}
impl Violation for GeneratorReturnFromIterMethod {
#[derive_message_formats]
fn message(&self) -> String {
let GeneratorReturnFromIterMethod {
better_return_type,
method_name,
} = self;
format!("Use `{better_return_type}` as the return value for simple `{method_name}` methods")
}
}
/// PYI058
pub(crate) fn bad_generator_return_type(
function_def: &ast::StmtFunctionDef,
checker: &mut Checker,
) {
if function_def.is_async {
return;
}
let name = function_def.name.as_str();
let better_return_type = match name {
"__iter__" => "Iterator",
"__aiter__" => "AsyncIterator",
_ => return,
};
let semantic = checker.semantic();
if !semantic.current_scope().kind.is_class() {
return;
}
let parameters = &function_def.parameters;
if !parameters.kwonlyargs.is_empty()
|| parameters.kwarg.is_some()
|| parameters.vararg.is_some()
{
return;
}
if (parameters.args.len() + parameters.posonlyargs.len()) != 1 {
return;
}
let returns = match &function_def.returns {
Some(returns) => returns.as_ref(),
_ => return,
};
if !semantic
.resolve_call_path(map_subscript(returns))
.is_some_and(|call_path| {
matches!(
(name, call_path.as_slice()),
(
"__iter__",
["typing" | "typing_extensions", "Generator"]
| ["collections", "abc", "Generator"]
) | (
"__aiter__",
["typing" | "typing_extensions", "AsyncGenerator"]
| ["collections", "abc", "AsyncGenerator"]
)
)
})
{
return;
};
// `Generator` allows three type parameters; `AsyncGenerator` allows two.
// If type parameters are present,
// Check that all parameters except the first one are either `typing.Any` or `None`;
// if not, don't emit the diagnostic
if let ast::Expr::Subscript(ast::ExprSubscript { slice, .. }) = returns {
let ast::Expr::Tuple(ast::ExprTuple { elts, .. }) = slice.as_ref() else {
return;
};
if matches!(
(name, &elts[..]),
("__iter__", [_, _, _]) | ("__aiter__", [_, _])
) {
if !&elts.iter().skip(1).all(|elt| is_any_or_none(elt, semantic)) {
return;
}
} else {
return;
}
};
// For .py files (runtime Python!),
// only emit the lint if it's a simple __(a)iter__ implementation
// -- for more complex function bodies,
// it's more likely we'll be emitting a false positive here
if !checker.source_type.is_stub() {
let mut yield_encountered = false;
for stmt in &function_def.body {
match stmt {
ast::Stmt::Pass(_) => continue,
ast::Stmt::Return(ast::StmtReturn { value, .. }) => {
if let Some(ret_val) = value {
if yield_encountered
&& !matches!(ret_val.as_ref(), ast::Expr::NoneLiteral(_))
{
return;
}
}
}
ast::Stmt::Expr(ast::StmtExpr { value, .. }) => match value.as_ref() {
ast::Expr::StringLiteral(_) | ast::Expr::EllipsisLiteral(_) => continue,
ast::Expr::Yield(_) | ast::Expr::YieldFrom(_) => {
yield_encountered = true;
continue;
}
_ => return,
},
_ => return,
}
}
};
checker.diagnostics.push(Diagnostic::new(
GeneratorReturnFromIterMethod {
better_return_type: better_return_type.to_string(),
method_name: name.to_string(),
},
function_def.identifier(),
));
}
fn is_any_or_none(expr: &ast::Expr, semantic: &SemanticModel) -> bool {
semantic.match_typing_expr(expr, "Any") || matches!(expr, ast::Expr::NoneLiteral(_))
}

View File

@@ -244,12 +244,7 @@ fn non_none_annotation_element<'a>(
) -> Option<&'a Expr> {
// E.g., `typing.Union` or `typing.Optional`
if let Expr::Subscript(ExprSubscript { value, slice, .. }) = annotation {
let call_path = semantic.resolve_call_path(value);
if call_path
.as_ref()
.is_some_and(|value| semantic.match_typing_call_path(value, "Optional"))
{
if semantic.match_typing_expr(value, "Optional") {
return if slice.is_none_literal_expr() {
None
} else {
@@ -257,10 +252,7 @@ fn non_none_annotation_element<'a>(
};
}
if !call_path
.as_ref()
.is_some_and(|value| semantic.match_typing_call_path(value, "Union"))
{
if !semantic.match_typing_expr(value, "Union") {
return None;
}

View File

@@ -14,7 +14,7 @@ use crate::checkers::ast::Checker;
/// the `from __future__ import annotations` import statement has no effect
/// and should be omitted.
///
/// ## References
/// ## Resources
/// - [Static Typing with Python: Type Stubs](https://typing.readthedocs.io/en/latest/source/stubs.html)
#[violation]
pub struct FutureAnnotationsInStub;

View File

@@ -1,5 +1,4 @@
pub(crate) use any_eq_ne_annotation::*;
pub(crate) use bad_generator_return_type::*;
pub(crate) use bad_version_info_comparison::*;
pub(crate) use collections_named_tuple::*;
pub(crate) use complex_assignment_in_stub::*;
@@ -37,7 +36,6 @@ pub(crate) use unsupported_method_call_on_all::*;
pub(crate) use unused_private_type_definition::*;
mod any_eq_ne_annotation;
mod bad_generator_return_type;
mod bad_version_info_comparison;
mod collections_named_tuple;
mod complex_assignment_in_stub;

View File

@@ -8,15 +8,6 @@ PYI050.py:13:24: PYI050 Prefer `typing.Never` over `NoReturn` for argument annot
14 | ...
|
PYI050.py:18:10: PYI050 Prefer `typing.Never` over `NoReturn` for argument annotations
|
17 | def foo_no_return_typing_extensions(
18 | arg: typing_extensions.NoReturn,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI050
19 | ):
20 | ...
|
PYI050.py:23:44: PYI050 Prefer `typing.Never` over `NoReturn` for argument annotations
|
23 | def foo_no_return_kwarg(arg: int, *, arg2: NoReturn):

View File

@@ -11,16 +11,6 @@ PYI050.pyi:6:24: PYI050 Prefer `typing.Never` over `NoReturn` for argument annot
8 | arg: typing_extensions.NoReturn,
|
PYI050.pyi:8:10: PYI050 Prefer `typing.Never` over `NoReturn` for argument annotations
|
6 | def foo_no_return(arg: NoReturn): ... # Error: PYI050
7 | def foo_no_return_typing_extensions(
8 | arg: typing_extensions.NoReturn,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI050
9 | ): ... # Error: PYI050
10 | def foo_no_return_kwarg(arg: int, *, arg2: NoReturn): ... # Error: PYI050
|
PYI050.pyi:10:44: PYI050 Prefer `typing.Never` over `NoReturn` for argument annotations
|
8 | arg: typing_extensions.NoReturn,

View File

@@ -1,57 +0,0 @@
---
source: crates/ruff_linter/src/rules/flake8_pyi/mod.rs
---
PYI058.py:7:9: PYI058 Use `Iterator` as the return value for simple `__iter__` methods
|
6 | class IteratorReturningSimpleGenerator1:
7 | def __iter__(self) -> Generator: # PYI058 (use `Iterator`)
| ^^^^^^^^ PYI058
8 | return (x for x in range(42))
|
PYI058.py:11:9: PYI058 Use `Iterator` as the return value for simple `__iter__` methods
|
10 | class IteratorReturningSimpleGenerator2:
11 | def __iter__(self, /) -> collections.abc.Generator[str, Any, None]: # PYI058 (use `Iterator`)
| ^^^^^^^^ PYI058
12 | """Fully documented, because I'm a runtime function!"""
13 | yield from "abcdefg"
|
PYI058.py:17:9: PYI058 Use `Iterator` as the return value for simple `__iter__` methods
|
16 | class IteratorReturningSimpleGenerator3:
17 | def __iter__(self, /) -> collections.abc.Generator[str, None, typing.Any]: # PYI058 (use `Iterator`)
| ^^^^^^^^ PYI058
18 | yield "a"
19 | yield "b"
|
PYI058.py:24:9: PYI058 Use `AsyncIterator` as the return value for simple `__aiter__` methods
|
23 | class AsyncIteratorReturningSimpleAsyncGenerator1:
24 | def __aiter__(self) -> typing.AsyncGenerator: pass # PYI058 (Use `AsyncIterator`)
| ^^^^^^^^^ PYI058
25 |
26 | class AsyncIteratorReturningSimpleAsyncGenerator2:
|
PYI058.py:27:9: PYI058 Use `AsyncIterator` as the return value for simple `__aiter__` methods
|
26 | class AsyncIteratorReturningSimpleAsyncGenerator2:
27 | def __aiter__(self, /) -> collections.abc.AsyncGenerator[str, Any]: ... # PYI058 (Use `AsyncIterator`)
| ^^^^^^^^^ PYI058
28 |
29 | class AsyncIteratorReturningSimpleAsyncGenerator3:
|
PYI058.py:30:9: PYI058 Use `AsyncIterator` as the return value for simple `__aiter__` methods
|
29 | class AsyncIteratorReturningSimpleAsyncGenerator3:
30 | def __aiter__(self, /) -> collections.abc.AsyncGenerator[str, None]: pass # PYI058 (Use `AsyncIterator`)
| ^^^^^^^^^ PYI058
31 |
32 | class CorrectIterator:
|

View File

@@ -1,58 +0,0 @@
---
source: crates/ruff_linter/src/rules/flake8_pyi/mod.rs
---
PYI058.pyi:7:9: PYI058 Use `Iterator` as the return value for simple `__iter__` methods
|
6 | class IteratorReturningSimpleGenerator1:
7 | def __iter__(self) -> Generator: ... # PYI058 (use `Iterator`)
| ^^^^^^^^ PYI058
8 |
9 | class IteratorReturningSimpleGenerator2:
|
PYI058.pyi:10:9: PYI058 Use `Iterator` as the return value for simple `__iter__` methods
|
9 | class IteratorReturningSimpleGenerator2:
10 | def __iter__(self, /) -> collections.abc.Generator[str, Any, None]: ... # PYI058 (use `Iterator`)
| ^^^^^^^^ PYI058
11 |
12 | class IteratorReturningSimpleGenerator3:
|
PYI058.pyi:13:9: PYI058 Use `Iterator` as the return value for simple `__iter__` methods
|
12 | class IteratorReturningSimpleGenerator3:
13 | def __iter__(self, /) -> collections.abc.Generator[str, None, typing.Any]: ... # PYI058 (use `Iterator`)
| ^^^^^^^^ PYI058
14 |
15 | class AsyncIteratorReturningSimpleAsyncGenerator1:
|
PYI058.pyi:16:9: PYI058 Use `AsyncIterator` as the return value for simple `__aiter__` methods
|
15 | class AsyncIteratorReturningSimpleAsyncGenerator1:
16 | def __aiter__(self) -> typing.AsyncGenerator: ... # PYI058 (Use `AsyncIterator`)
| ^^^^^^^^^ PYI058
17 |
18 | class AsyncIteratorReturningSimpleAsyncGenerator2:
|
PYI058.pyi:19:9: PYI058 Use `AsyncIterator` as the return value for simple `__aiter__` methods
|
18 | class AsyncIteratorReturningSimpleAsyncGenerator2:
19 | def __aiter__(self, /) -> collections.abc.AsyncGenerator[str, Any]: ... # PYI058 (Use `AsyncIterator`)
| ^^^^^^^^^ PYI058
20 |
21 | class AsyncIteratorReturningSimpleAsyncGenerator3:
|
PYI058.pyi:22:9: PYI058 Use `AsyncIterator` as the return value for simple `__aiter__` methods
|
21 | class AsyncIteratorReturningSimpleAsyncGenerator3:
22 | def __aiter__(self, /) -> collections.abc.AsyncGenerator[str, None]: ... # PYI058 (Use `AsyncIterator`)
| ^^^^^^^^^ PYI058
23 |
24 | class CorrectIterator:
|

View File

@@ -322,7 +322,7 @@ SIM117.py:126:1: SIM117 [*] Use a single `with` statement with multiple contexts
SIM117.py:163:1: SIM117 [*] Use a single `with` statement with multiple contexts instead of nested `with` statements
|
162 | # Do not suppress combination, if a context manager is already combined with another.
162 | # Do not supress combination, if a context manager is already combined with another.
163 | / async with asyncio.timeout(1), A():
164 | | async with B():
| |___________________^ SIM117
@@ -333,7 +333,7 @@ SIM117.py:163:1: SIM117 [*] Use a single `with` statement with multiple contexts
Unsafe fix
160 160 | pass
161 161 |
162 162 | # Do not suppress combination, if a context manager is already combined with another.
162 162 | # Do not supress combination, if a context manager is already combined with another.
163 |-async with asyncio.timeout(1), A():
164 |- async with B():
165 |- pass

View File

@@ -56,10 +56,8 @@ impl Violation for RelativeImports {
#[derive_message_formats]
fn message(&self) -> String {
match self.strictness {
Strictness::Parents => {
format!("Prefer absolute imports over relative imports from parent modules")
}
Strictness::All => format!("Prefer absolute imports over relative imports"),
Strictness::Parents => format!("Relative imports from parent modules are banned"),
Strictness::All => format!("Relative imports are banned"),
}
}

View File

@@ -1,7 +1,7 @@
---
source: crates/ruff_linter/src/rules/flake8_tidy_imports/mod.rs
---
TID252.py:7:1: TID252 Prefer absolute imports over relative imports
TID252.py:7:1: TID252 Relative imports are banned
|
6 | # TID252
7 | from . import sibling
@@ -11,7 +11,7 @@ TID252.py:7:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:8:1: TID252 Prefer absolute imports over relative imports
TID252.py:8:1: TID252 Relative imports are banned
|
6 | # TID252
7 | from . import sibling
@@ -22,7 +22,7 @@ TID252.py:8:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:9:1: TID252 Prefer absolute imports over relative imports
TID252.py:9:1: TID252 Relative imports are banned
|
7 | from . import sibling
8 | from .sibling import example
@@ -33,7 +33,7 @@ TID252.py:9:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:10:1: TID252 Prefer absolute imports over relative imports
TID252.py:10:1: TID252 Relative imports are banned
|
8 | from .sibling import example
9 | from .. import parent
@@ -44,7 +44,7 @@ TID252.py:10:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:11:1: TID252 Prefer absolute imports over relative imports
TID252.py:11:1: TID252 Relative imports are banned
|
9 | from .. import parent
10 | from ..parent import example
@@ -55,7 +55,7 @@ TID252.py:11:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:12:1: TID252 Prefer absolute imports over relative imports
TID252.py:12:1: TID252 Relative imports are banned
|
10 | from ..parent import example
11 | from ... import grandparent
@@ -66,7 +66,7 @@ TID252.py:12:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:13:1: TID252 Prefer absolute imports over relative imports
TID252.py:13:1: TID252 Relative imports are banned
|
11 | from ... import grandparent
12 | from ...grandparent import example
@@ -77,7 +77,7 @@ TID252.py:13:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:14:1: TID252 Prefer absolute imports over relative imports
TID252.py:14:1: TID252 Relative imports are banned
|
12 | from ...grandparent import example
13 | from .parent import hello
@@ -90,7 +90,7 @@ TID252.py:14:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:17:1: TID252 Prefer absolute imports over relative imports
TID252.py:17:1: TID252 Relative imports are banned
|
15 | parent import \
16 | hello_world
@@ -104,7 +104,7 @@ TID252.py:17:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:21:1: TID252 Prefer absolute imports over relative imports
TID252.py:21:1: TID252 Relative imports are banned
|
19 | import \
20 | world_hello
@@ -115,7 +115,7 @@ TID252.py:21:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:22:1: TID252 Prefer absolute imports over relative imports
TID252.py:22:1: TID252 Relative imports are banned
|
20 | world_hello
21 | from ..... import ultragrantparent
@@ -126,7 +126,7 @@ TID252.py:22:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:23:1: TID252 Prefer absolute imports over relative imports
TID252.py:23:1: TID252 Relative imports are banned
|
21 | from ..... import ultragrantparent
22 | from ...... import ultragrantparent
@@ -137,7 +137,7 @@ TID252.py:23:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:24:1: TID252 Prefer absolute imports over relative imports
TID252.py:24:1: TID252 Relative imports are banned
|
22 | from ...... import ultragrantparent
23 | from ....... import ultragrantparent
@@ -148,7 +148,7 @@ TID252.py:24:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:25:1: TID252 Prefer absolute imports over relative imports
TID252.py:25:1: TID252 Relative imports are banned
|
23 | from ....... import ultragrantparent
24 | from ......... import ultragrantparent
@@ -159,7 +159,7 @@ TID252.py:25:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:26:1: TID252 Prefer absolute imports over relative imports
TID252.py:26:1: TID252 Relative imports are banned
|
24 | from ......... import ultragrantparent
25 | from ........................... import ultragrantparent
@@ -170,7 +170,7 @@ TID252.py:26:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:27:1: TID252 Prefer absolute imports over relative imports
TID252.py:27:1: TID252 Relative imports are banned
|
25 | from ........................... import ultragrantparent
26 | from .....parent import ultragrantparent
@@ -180,7 +180,7 @@ TID252.py:27:1: TID252 Prefer absolute imports over relative imports
|
= help: Replace relative imports with absolute imports
TID252.py:28:1: TID252 Prefer absolute imports over relative imports
TID252.py:28:1: TID252 Relative imports are banned
|
26 | from .....parent import ultragrantparent
27 | from .........parent import ultragrantparent

Some files were not shown because too many files have changed in this diff Show More