Compare commits


1 commit

Author: David Peter
SHA1: 9220598fc8
Message: [ty] Experiment: half-baked typing.TypeAlias support
Date: 2025-05-20 11:33:59 +02:00
1362 changed files with 18919 additions and 52385 deletions
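For context, the feature this experiment targets is PEP 613's explicit alias marker. A minimal sketch of the semantics a type checker such as ty has to support (standard `typing` only; nothing here is taken from this diff):

```python
from typing import TypeAlias, Union

# PEP 613 explicit alias: a checker must resolve `IntOrStr` to the
# annotation on the right, not treat it as a plain module-level variable.
IntOrStr: TypeAlias = Union[int, str]

def double(value: IntOrStr) -> IntOrStr:
    # The alias participates in checking exactly like the spelled-out type.
    return value * 2

print(double(2))     # 4
print(double("ab"))  # abab
```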

View File

@@ -5,4 +5,3 @@
[rules]
possibly-unresolved-reference = "warn"
unused-ignore-comment = "warn"
division-by-zero = "warn"

View File

@@ -79,7 +79,7 @@ jobs:
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Build and push by digest
id: build
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
with:
context: .
platforms: ${{ matrix.platform }}
@@ -231,7 +231,7 @@ jobs:
${{ env.TAG_PATTERNS }}
- name: Build and push
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
with:
context: .
platforms: linux/amd64,linux/arm64

View File

@@ -237,13 +237,13 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-insta
- name: ty mdtests (GitHub annotations)
@@ -295,13 +295,13 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-insta
- name: "Run tests"
@@ -324,7 +324,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-nextest
- name: "Run tests"
@@ -380,7 +380,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- name: "Build"
run: cargo build --release --locked
@@ -405,13 +405,13 @@ jobs:
MSRV: ${{ steps.msrv.outputs.value }}
run: rustup default "${MSRV}"
- name: "Install mold"
uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-insta
- name: "Run tests"
@@ -437,7 +437,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@e8c9cc3599f6c4063d143083205f98ca25d91677 # v1.12.6
uses: cargo-bins/cargo-binstall@5cbf019d8cb9b9d5b086218c41458ea35d817691 # v1.12.5
with:
tool: cargo-fuzz@0.11.2
- name: "Install cargo-fuzz"
@@ -459,7 +459,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download Ruff binary to test
id: download-cached-binary
@@ -660,7 +660,7 @@ jobs:
branch: ${{ github.event.pull_request.base.ref }}
workflow: "ci.yaml"
check_artifacts: true
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- name: Fuzz
env:
FORCE_COLOR: 1
@@ -690,7 +690,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@e8c9cc3599f6c4063d143083205f98ca25d91677 # v1.12.6
- uses: cargo-bins/cargo-binstall@5cbf019d8cb9b9d5b086218c41458ea35d817691 # v1.12.5
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear
@@ -730,7 +730,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
@@ -773,7 +773,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
@@ -910,7 +910,7 @@ jobs:
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-codspeed

View File

@@ -34,11 +34,11 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Build ruff
# A debug build means the script runs slower once it gets started,

View File

@@ -11,7 +11,6 @@ on:
- "crates/ruff_python_parser"
- ".github/workflows/mypy_primer.yaml"
- ".github/workflows/mypy_primer_comment.yaml"
- "Cargo.lock"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
@@ -37,7 +36,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:

View File

@@ -22,7 +22,7 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels-*

View File

@@ -80,7 +80,7 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.12
rev: v0.11.10
hooks:
- id: ruff-format
- id: ruff
@@ -98,7 +98,7 @@ repos:
# zizmor detects security vulnerabilities in GitHub Actions workflows.
# Additional configuration for the tool is found in `.github/zizmor.yml`
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.9.0
rev: v1.7.0
hooks:
- id: zizmor

View File

@@ -1,58 +1,5 @@
# Changelog
## 0.11.12
### Preview features
- \[`airflow`\] Revise fix titles (`AIR3`) ([#18215](https://github.com/astral-sh/ruff/pull/18215))
- \[`pylint`\] Implement `missing-maxsplit-arg` (`PLC0207`) ([#17454](https://github.com/astral-sh/ruff/pull/17454))
- \[`pyupgrade`\] New rule `UP050` (`useless-class-metaclass-type`) ([#18334](https://github.com/astral-sh/ruff/pull/18334))
- \[`flake8-use-pathlib`\] Replace `os.symlink` with `Path.symlink_to` (`PTH211`) ([#18337](https://github.com/astral-sh/ruff/pull/18337))
### Bug fixes
- \[`flake8-bugbear`\] Ignore `__debug__` attribute in `B010` ([#18357](https://github.com/astral-sh/ruff/pull/18357))
- \[`flake8-async`\] Fix `anyio.sleep` argument name (`ASYNC115`, `ASYNC116`) ([#18262](https://github.com/astral-sh/ruff/pull/18262))
- \[`refurb`\] Fix `FURB129` autofix generating invalid syntax ([#18235](https://github.com/astral-sh/ruff/pull/18235))
### Rule changes
- \[`flake8-implicit-str-concat`\] Add autofix for `ISC003` ([#18256](https://github.com/astral-sh/ruff/pull/18256))
- \[`pycodestyle`\] Improve the diagnostic message for `E712` ([#18328](https://github.com/astral-sh/ruff/pull/18328))
- \[`flake8-2020`\] Fix diagnostic message for `!=` comparisons (`YTT201`) ([#18293](https://github.com/astral-sh/ruff/pull/18293))
- \[`pyupgrade`\] Make fix unsafe if it deletes comments (`UP010`) ([#18291](https://github.com/astral-sh/ruff/pull/18291))
### Documentation
- Simplify rules table to improve readability ([#18297](https://github.com/astral-sh/ruff/pull/18297))
- Update editor integrations link in README ([#17977](https://github.com/astral-sh/ruff/pull/17977))
- \[`flake8-bugbear`\] Add fix safety section (`B006`) ([#17652](https://github.com/astral-sh/ruff/pull/17652))
## 0.11.11
### Preview features
- \[`airflow`\] Add autofixes for `AIR302` and `AIR312` ([#17942](https://github.com/astral-sh/ruff/pull/17942))
- \[`airflow`\] Move rules from `AIR312` to `AIR302` ([#17940](https://github.com/astral-sh/ruff/pull/17940))
- \[`airflow`\] Update `AIR301` and `AIR311` with the latest Airflow implementations ([#17985](https://github.com/astral-sh/ruff/pull/17985))
- \[`flake8-simplify`\] Enable fix in preview mode (`SIM117`) ([#18208](https://github.com/astral-sh/ruff/pull/18208))
### Bug fixes
- Fix inconsistent formatting of match-case on `[]` and `_` ([#18147](https://github.com/astral-sh/ruff/pull/18147))
- \[`pylint`\] Fix `PLW1514` not recognizing the `encoding` positional argument of `codecs.open` ([#18109](https://github.com/astral-sh/ruff/pull/18109))
### CLI
- Add full option name in formatter warning ([#18217](https://github.com/astral-sh/ruff/pull/18217))
### Documentation
- Fix rendering of admonition in docs ([#18163](https://github.com/astral-sh/ruff/pull/18163))
- \[`flake8-print`\] Improve print/pprint docs for `T201` and `T203` ([#18130](https://github.com/astral-sh/ruff/pull/18130))
- \[`flake8-simplify`\] Add fix safety section (`SIM110`,`SIM210`) ([#18114](https://github.com/astral-sh/ruff/pull/18114),[#18100](https://github.com/astral-sh/ruff/pull/18100))
- \[`pylint`\] Fix docs example that produced different output (`PLW0603`) ([#18216](https://github.com/astral-sh/ruff/pull/18216))
## 0.11.10
### Preview features
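As an aside, the `missing-maxsplit-arg` rule (PLC0207) added in 0.11.12 above is easy to picture. A hedged Python sketch of the pattern it is documented to flag — splitting a whole string when only the first or last element is used — with the exact diagnostics being an assumption, not taken from this changelog:

```python
version = "3.12.0-beta.1"

# Likely flagged: the full string is split even though only the
# first element is ever used.
major = version.split(".")[0]

# Suggested shape: bound the split (or use rsplit for the last element).
major = version.split(".", maxsplit=1)[0]
suffix = version.rsplit(".", maxsplit=1)[-1]
```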

Cargo.lock (generated)
View File

@@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
version = 3
[[package]]
name = "adler2"
@@ -8,18 +8,6 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
[[package]]
name = "ahash"
version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
name = "aho-corasick"
version = "1.1.3"
@@ -354,9 +342,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.39"
version = "4.5.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd60e63e9be68e5fb56422e397cf9baddded06dae1d2e523401542383bc72a9f"
checksum = "ed93b9805f8ba930df42c2590f05453d5ec36cbb85d018868a5b24d31f6ac000"
dependencies = [
"clap_builder",
"clap_derive",
@@ -364,9 +352,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.39"
version = "4.5.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89cc6392a1f72bbeb820d71f32108f61fdaf18bc526e1d23954168a67759ef51"
checksum = "379026ff283facf611b0ea629334361c4211d1b12ee01024eec1591133b04120"
dependencies = [
"anstream",
"anstyle",
@@ -1118,10 +1106,6 @@ name = "hashbrown"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
dependencies = [
"ahash",
"allocator-api2",
]
[[package]]
name = "hashbrown"
@@ -1514,9 +1498,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jiff"
version = "0.2.14"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a194df1107f33c79f4f93d02c80798520551949d59dfad22b6157048a88cca93"
checksum = "f02000660d30638906021176af16b17498bd0d12813dbfe7b276d8bc7f3c0806"
dependencies = [
"jiff-static",
"jiff-tzdb-platform",
@@ -1529,9 +1513,9 @@ dependencies = [
[[package]]
name = "jiff-static"
version = "0.2.14"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c6e1db7ed32c6c71b759497fae34bf7933636f75a251b9e736555da426f6442"
checksum = "f3c30758ddd7188629c6713fc45d1188af4f44c90582311d0c8d8c9907f60c48"
dependencies = [
"proc-macro2",
"quote",
@@ -1613,9 +1597,9 @@ checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
[[package]]
name = "libcst"
version = "1.8.0"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ac076e37f8fe6bcddbb6c3282897e6e9498b254907ccbfc806dc8f9f1491f02"
checksum = "ad9e315e3f679e61b9095ffd5e509de78b8a4ea3bba9d772f6fb243209f808d4"
dependencies = [
"annotate-snippets",
"libcst_derive",
@@ -1623,14 +1607,14 @@ dependencies = [
"paste",
"peg",
"regex",
"thiserror 2.0.12",
"thiserror 1.0.69",
]
[[package]]
name = "libcst_derive"
version = "1.8.0"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cf4a12c744a301b216c4f0cb73542709ab15e6dadbb06966ac05864109d05da"
checksum = "bfa96ed35d0dccc67cf7ba49350cb86de3dcb1d072a7ab28f99117f19d874953"
dependencies = [
"quote",
"syn",
@@ -2501,7 +2485,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.11.12"
version = "0.11.10"
dependencies = [
"anyhow",
"argfile",
@@ -2658,6 +2642,7 @@ dependencies = [
"rayon",
"regex",
"ruff",
"ruff_diagnostics",
"ruff_formatter",
"ruff_linter",
"ruff_notebook",
@@ -2687,7 +2672,9 @@ dependencies = [
name = "ruff_diagnostics"
version = "0.0.0"
dependencies = [
"anyhow",
"is-macro",
"log",
"ruff_text_size",
"serde",
]
@@ -2738,7 +2725,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.11.12"
version = "0.11.10"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3074,7 +3061,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.11.12"
version = "0.11.10"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -3094,7 +3081,6 @@ dependencies = [
"ruff_workspace",
"serde",
"serde-wasm-bindgen",
"uuid",
"wasm-bindgen",
"wasm-bindgen-test",
]
@@ -3193,14 +3179,13 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.22.0"
source = "git+https://github.com/carljm/salsa.git?rev=0f6d406f6c309964279baef71588746b8c67b4a3#0f6d406f6c309964279baef71588746b8c67b4a3"
version = "0.21.1"
source = "git+https://github.com/salsa-rs/salsa.git?rev=7edce6e248f35c8114b4b021cdb474a3fb2813b3#7edce6e248f35c8114b4b021cdb474a3fb2813b3"
dependencies = [
"boxcar",
"compact_str",
"crossbeam-queue",
"dashmap 6.1.0",
"hashbrown 0.14.5",
"hashbrown 0.15.3",
"hashlink",
"indexmap",
@@ -3217,14 +3202,15 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.22.0"
source = "git+https://github.com/carljm/salsa.git?rev=0f6d406f6c309964279baef71588746b8c67b4a3#0f6d406f6c309964279baef71588746b8c67b4a3"
version = "0.21.1"
source = "git+https://github.com/salsa-rs/salsa.git?rev=7edce6e248f35c8114b4b021cdb474a3fb2813b3#7edce6e248f35c8114b4b021cdb474a3fb2813b3"
[[package]]
name = "salsa-macros"
version = "0.22.0"
source = "git+https://github.com/carljm/salsa.git?rev=0f6d406f6c309964279baef71588746b8c67b4a3#0f6d406f6c309964279baef71588746b8c67b4a3"
version = "0.21.1"
source = "git+https://github.com/salsa-rs/salsa.git?rev=7edce6e248f35c8114b4b021cdb474a3fb2813b3#7edce6e248f35c8114b4b021cdb474a3fb2813b3"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn",
@@ -3888,7 +3874,6 @@ dependencies = [
"countme",
"crossbeam",
"ctrlc",
"dunce",
"filetime",
"indicatif",
"insta",
@@ -4018,7 +4003,6 @@ dependencies = [
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.1.1",
"salsa",
"serde",
"serde_json",
"shellexpand",
@@ -4087,7 +4071,6 @@ dependencies = [
"ty_ide",
"ty_project",
"ty_python_semantic",
"uuid",
"wasm-bindgen",
"wasm-bindgen-test",
]
@@ -4257,9 +4240,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "uuid"
version = "1.17.0"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d"
checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9"
dependencies = [
"getrandom 0.3.3",
"js-sys",
@@ -4270,9 +4253,9 @@ dependencies = [
[[package]]
name = "uuid-macro-internal"
version = "1.17.0"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b682e8c381995ea03130e381928e0e005b7c9eb483c6c8682f50e07b33c2b7"
checksum = "72dcd78c4f979627a754f5522cea6e6a25e55139056535fe6e69c506cd64a862"
dependencies = [
"proc-macro2",
"quote",

View File

@@ -129,7 +129,7 @@ regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/carljm/salsa.git", rev = "0f6d406f6c309964279baef71588746b8c67b4a3" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "7edce6e248f35c8114b4b021cdb474a3fb2813b3" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -179,6 +179,7 @@ uuid = { version = "1.6.1", features = [
"v4",
"fast-rng",
"macro-diagnostics",
"js",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
@@ -187,7 +188,7 @@ wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }
[workspace.metadata.cargo-shear]
ignored = ["getrandom", "ruff_options_metadata", "uuid"]
ignored = ["getrandom", "ruff_options_metadata"]
[workspace.lints.rust]

View File

@@ -34,7 +34,8 @@ An extremely fast Python linter and code formatter, written in Rust.
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/editors) for [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)
Ruff aims to be orders of magnitude faster than alternative tools while integrating more
@@ -148,8 +149,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.11.12/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.11.12/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.11.10/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.11.10/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +183,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.11.12
rev: v0.11.10
hooks:
# Run the linter.
- id: ruff

View File

@@ -4,10 +4,6 @@ extend-exclude = [
"crates/ty_vendored/vendor/**/*",
"**/resources/**/*",
"**/snapshots/**/*",
# Completion tests tend to have a lot of incomplete
# words naturally. It's annoying to have to make all
# of them actually words. So just ignore typos here.
"crates/ty_ide/src/completion.rs",
]
[default.extend-words]

View File

@@ -1,7 +1,6 @@
doc-valid-idents = [
"..",
"CodeQL",
"CPython",
"FastAPI",
"IPython",
"LangChain",
@@ -15,7 +14,7 @@ doc-valid-idents = [
"SNMPv1",
"SNMPv2",
"SNMPv3",
"PyFlakes",
"PyFlakes"
]
ignore-interior-mutability = [

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.11.12"
version = "0.11.10"
publish = true
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -349,6 +349,7 @@ impl FileCache {
.iter()
.map(|msg| {
Message::diagnostic(
msg.rule.into(),
msg.body.clone(),
msg.suggestion.clone(),
msg.range,
@@ -356,7 +357,6 @@ impl FileCache {
msg.parent,
file.clone(),
msg.noqa_offset,
msg.rule,
)
})
.collect()

View File

@@ -12,7 +12,7 @@ use rayon::prelude::*;
use rustc_hash::FxHashMap;
use ruff_db::panic::catch_unwind;
use ruff_linter::OldDiagnostic;
use ruff_diagnostics::Diagnostic;
use ruff_linter::message::Message;
use ruff_linter::package::PackageRoot;
use ruff_linter::registry::Rule;
@@ -131,7 +131,8 @@ pub(crate) fn check(
Diagnostics::new(
vec![Message::from_diagnostic(
OldDiagnostic::new(IOError { message }, TextRange::default(), &dummy),
Diagnostic::new(IOError { message }, TextRange::default()),
dummy,
None,
)],
FxHashMap::default(),

View File

@@ -822,11 +822,11 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
rule_names.sort();
if let [rule] = rule_names.as_slice() {
warn_user_once!(
"The following rule may cause conflicts when used with the formatter: {rule}. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `lint.select` or `lint.extend-select` configuration, or adding it to the `lint.ignore` configuration."
"The following rule may cause conflicts when used with the formatter: {rule}. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `select` or `extend-select` configuration, or adding it to the `ignore` configuration."
);
} else {
warn_user_once!(
"The following rules may cause conflicts when used with the formatter: {}. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `lint.select` or `lint.extend-select` configuration, or adding them to the `lint.ignore` configuration.",
"The following rules may cause conflicts when used with the formatter: {}. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `select` or `extend-select` configuration, or adding them to the `ignore` configuration.",
rule_names.join(", ")
);
}
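The fix this warning recommends amounts to a small configuration change. A hedged `pyproject.toml` sketch — the rule code `COM812` is taken from the test snapshots further below, and the `select` list is invented purely for illustration:

```toml
[tool.ruff.lint]
# Hypothetical selection; "COM" pulls in COM812 (missing-trailing-comma),
# which conflicts with the formatter.
select = ["E", "F", "COM"]
# Per the warning: either drop the rule from `lint.select`/`lint.extend-select`,
# or add it to `lint.ignore`, as done here.
ignore = ["COM812"]
```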

View File

@@ -6,7 +6,7 @@ use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use strum::IntoEnumIterator;
use ruff_linter::FixAvailability;
use ruff_diagnostics::FixAvailability;
use ruff_linter::registry::{Linter, Rule, RuleNamespace};
use crate::args::HelpFormat;

View File

@@ -12,7 +12,7 @@ use colored::Colorize;
use log::{debug, warn};
use rustc_hash::FxHashMap;
use ruff_linter::OldDiagnostic;
use ruff_diagnostics::Diagnostic;
use ruff_linter::codes::Rule;
use ruff_linter::linter::{FixTable, FixerResult, LinterResult, ParseSource, lint_fix, lint_only};
use ruff_linter::message::Message;
@@ -64,13 +64,13 @@ impl Diagnostics {
let source_file = SourceFileBuilder::new(name, "").finish();
Self::new(
vec![Message::from_diagnostic(
OldDiagnostic::new(
Diagnostic::new(
IOError {
message: err.to_string(),
},
TextRange::default(),
&source_file,
),
source_file,
None,
)],
FxHashMap::default(),
@@ -235,7 +235,7 @@ pub(crate) fn lint_path(
};
let source_file =
SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
lint_pyproject_toml(&source_file, settings)
lint_pyproject_toml(source_file, settings)
} else {
vec![]
};
@@ -396,7 +396,7 @@ pub(crate) fn lint_stdin(
}
return Ok(Diagnostics {
messages: lint_pyproject_toml(&source_file, &settings.linter),
messages: lint_pyproject_toml(source_file, &settings.linter),
fixed: FixMap::from_iter([(fs::relativize_path(path), FixTable::default())]),
notebook_indexes: FxHashMap::default(),
});

View File

@@ -566,7 +566,7 @@ fn venv() -> Result<()> {
----- stderr -----
ruff failed
Cause: Invalid search path settings
Cause: Failed to discover the site-packages directory: Invalid `--python` argument: `none` does not point to a Python executable or a directory on disk
Cause: Failed to discover the site-packages directory: Invalid `--python` argument: `none` could not be canonicalized
");
});

View File

@@ -862,7 +862,7 @@ if condition:
print('Should change quotes')
----- stderr -----
warning: The following rule may cause conflicts when used with the formatter: `COM812`. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `lint.select` or `lint.extend-select` configuration, or adding it to the `lint.ignore` configuration.
warning: The following rule may cause conflicts when used with the formatter: `COM812`. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `select` or `extend-select` configuration, or adding it to the `ignore` configuration.
"#);
Ok(())
}
@@ -999,7 +999,7 @@ def say_hy(name: str):
1 file reformatted
----- stderr -----
warning: The following rule may cause conflicts when used with the formatter: `COM812`. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `lint.select` or `lint.extend-select` configuration, or adding it to the `lint.ignore` configuration.
warning: The following rule may cause conflicts when used with the formatter: `COM812`. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `select` or `extend-select` configuration, or adding it to the `ignore` configuration.
warning: The `format.indent-style="tab"` option is incompatible with `W191`, which lints against all uses of tabs. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `"space"`.
warning: The `lint.flake8-implicit-str-concat.allow-multiline = false` option is incompatible with the formatter unless `ISC001` is enabled. We recommend enabling `ISC001` or setting `allow-multiline=true`.
warning: The `format.indent-style="tab"` option is incompatible with `D206`, with requires space-based indentation. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `"space"`.
@@ -1059,7 +1059,7 @@ def say_hy(name: str):
print(f"Hy {name}")
----- stderr -----
warning: The following rule may cause conflicts when used with the formatter: `COM812`. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `lint.select` or `lint.extend-select` configuration, or adding it to the `lint.ignore` configuration.
warning: The following rule may cause conflicts when used with the formatter: `COM812`. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `select` or `extend-select` configuration, or adding it to the `ignore` configuration.
warning: The `format.indent-style="tab"` option is incompatible with `W191`, which lints against all uses of tabs. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `"space"`.
warning: The `format.indent-style="tab"` option is incompatible with `D206`, with requires space-based indentation. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `"space"`.
warning: The `flake8-quotes.inline-quotes="single"` option is incompatible with the formatter's `format.quote-style="double"`. We recommend disabling `Q000` and `Q003` when using the formatter, which enforces a consistent quote style. Alternatively, set both options to either `"single"` or `"double"`.
@@ -1199,7 +1199,7 @@ def say_hy(name: str):
----- stderr -----
warning: `incorrect-blank-line-before-class` (D203) and `no-blank-line-before-class` (D211) are incompatible. Ignoring `incorrect-blank-line-before-class`.
warning: `multi-line-summary-first-line` (D212) and `multi-line-summary-second-line` (D213) are incompatible. Ignoring `multi-line-summary-second-line`.
warning: The following rule may cause conflicts when used with the formatter: `COM812`. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `lint.select` or `lint.extend-select` configuration, or adding it to the `lint.ignore` configuration.
warning: The following rule may cause conflicts when used with the formatter: `COM812`. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `select` or `extend-select` configuration, or adding it to the `ignore` configuration.
");
Ok(())
}

View File

@@ -78,7 +78,7 @@ fn setup_tomllib_case() -> Case {
let src_root = SystemPath::new("/src");
let mut metadata = ProjectMetadata::discover(src_root, &system).unwrap();
metadata.apply_options(Options {
metadata.apply_cli_options(Options {
environment: Some(EnvironmentOptions {
python_version: Some(RangedValue::cli(PythonVersion::PY312)),
..EnvironmentOptions::default()
@@ -131,7 +131,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
fn setup() -> Case {
let case = setup_tomllib_case();
let result: Vec<_> = case.db.check();
let result: Vec<_> = case.db.check().unwrap();
assert_diagnostics(&case.db, &result, EXPECTED_TOMLLIB_DIAGNOSTICS);
@@ -159,7 +159,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
None,
);
let result = db.check();
let result = db.check().unwrap();
assert_eq!(result.len(), EXPECTED_TOMLLIB_DIAGNOSTICS.len());
}
@@ -179,7 +179,7 @@ fn benchmark_cold(criterion: &mut Criterion) {
setup_tomllib_case,
|case| {
let Case { db, .. } = case;
let result: Vec<_> = db.check();
let result: Vec<_> = db.check().unwrap();
assert_diagnostics(db, &result, EXPECTED_TOMLLIB_DIAGNOSTICS);
},
@@ -224,7 +224,7 @@ fn setup_micro_case(code: &str) -> Case {
let src_root = SystemPath::new("/src");
let mut metadata = ProjectMetadata::discover(src_root, &system).unwrap();
metadata.apply_options(Options {
metadata.apply_cli_options(Options {
environment: Some(EnvironmentOptions {
python_version: Some(RangedValue::cli(PythonVersion::PY312)),
..EnvironmentOptions::default()
@@ -293,7 +293,7 @@ fn benchmark_many_string_assignments(criterion: &mut Criterion) {
},
|case| {
let Case { db, .. } = case;
let result = db.check();
let result = db.check().unwrap();
assert_eq!(result.len(), 0);
},
BatchSize::SmallInput,
@@ -339,7 +339,7 @@ fn benchmark_many_tuple_assignments(criterion: &mut Criterion) {
},
|case| {
let Case { db, .. } = case;
let result = db.check();
let result = db.check().unwrap();
assert_eq!(result.len(), 0);
},
BatchSize::SmallInput,

View File

@@ -275,12 +275,7 @@ impl fmt::Debug for Files {
impl std::panic::RefUnwindSafe for Files {}
/// A file that's either stored on the host system's file system or in the vendored file system.
///
/// # Ordering
/// Ordering is based on the file's salsa-assigned id and not on its values.
/// The id may change between runs.
#[salsa::input]
#[derive(PartialOrd, Ord)]
pub struct File {
/// The path of the file (immutable).
#[returns(ref)]

View File

@@ -1,4 +1,3 @@
use std::any::Any;
use std::backtrace::BacktraceStatus;
use std::cell::Cell;
use std::panic::Location;
@@ -25,25 +24,17 @@ impl Payload {
None
}
}
pub fn downcast_ref<R: Any>(&self) -> Option<&R> {
self.0.downcast_ref::<R>()
}
}
impl std::fmt::Display for PanicError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "panicked at")?;
writeln!(f, "panicked at")?;
if let Some(location) = &self.location {
write!(f, " {location}")?;
}
if let Some(payload) = self.payload.as_str() {
write!(f, ":\n{payload}")?;
}
if let Some(query_trace) = self.salsa_backtrace.as_ref() {
let _ = writeln!(f, "{query_trace}");
}
if let Some(backtrace) = &self.backtrace {
match backtrace.status() {
BacktraceStatus::Disabled => {
@@ -58,7 +49,6 @@ impl std::fmt::Display for PanicError {
_ => {}
}
}
Ok(())
}
}

View File

@@ -596,13 +596,6 @@ impl AsRef<SystemPath> for Utf8PathBuf {
}
}
impl AsRef<SystemPath> for camino::Utf8Component<'_> {
#[inline]
fn as_ref(&self) -> &SystemPath {
SystemPath::new(self.as_str())
}
}
impl AsRef<SystemPath> for str {
#[inline]
fn as_ref(&self) -> &SystemPath {
@@ -633,22 +626,6 @@ impl Deref for SystemPathBuf {
}
}
impl<P: AsRef<SystemPath>> FromIterator<P> for SystemPathBuf {
fn from_iter<I: IntoIterator<Item = P>>(iter: I) -> Self {
let mut buf = SystemPathBuf::new();
buf.extend(iter);
buf
}
}
impl<P: AsRef<SystemPath>> Extend<P> for SystemPathBuf {
fn extend<I: IntoIterator<Item = P>>(&mut self, iter: I) {
for path in iter {
self.push(path);
}
}
}
impl std::fmt::Debug for SystemPath {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)

View File

@@ -13,12 +13,12 @@ pub fn assert_function_query_was_not_run<Db, Q, QDb, I, R>(
Q: Fn(QDb, I) -> R,
I: salsa::plumbing::AsId + std::fmt::Debug + Copy,
{
let id = input.as_id();
let id = input.as_id().as_u32();
let (query_name, will_execute_event) = find_will_execute_event(db, query, input, events);
db.attach(|_| {
if let Some(will_execute_event) = will_execute_event {
panic!("Expected query {query_name}({id:?}) not to have run but it did: {will_execute_event:?}\n\n{events:#?}");
panic!("Expected query {query_name}({id}) not to have run but it did: {will_execute_event:?}\n\n{events:#?}");
}
});
}
@@ -65,7 +65,7 @@ pub fn assert_function_query_was_run<Db, Q, QDb, I, R>(
Q: Fn(QDb, I) -> R,
I: salsa::plumbing::AsId + std::fmt::Debug + Copy,
{
let id = input.as_id();
let id = input.as_id().as_u32();
let (query_name, will_execute_event) = find_will_execute_event(db, query, input, events);
db.attach(|_| {
@@ -224,7 +224,7 @@ fn query_was_not_run() {
}
#[test]
#[should_panic(expected = "Expected query len(Id(0)) not to have run but it did:")]
#[should_panic(expected = "Expected query len(0) not to have run but it did:")]
fn query_was_not_run_fails_if_query_was_run() {
use crate::tests::TestDb;
use salsa::prelude::*;
@@ -287,7 +287,7 @@ fn const_query_was_not_run_fails_if_query_was_run() {
}
#[test]
#[should_panic(expected = "Expected query len(Id(0)) to have run but it did not:")]
#[should_panic(expected = "Expected query len(0) to have run but it did not:")]
fn query_was_run_fails_if_query_was_not_run() {
use crate::tests::TestDb;
use salsa::prelude::*;

View File

@@ -14,6 +14,7 @@ license = { workspace = true }
ty = { workspace = true }
ty_project = { workspace = true, features = ["schemars"] }
ruff = { workspace = true }
ruff_diagnostics = { workspace = true }
ruff_formatter = { workspace = true }
ruff_linter = { workspace = true, features = ["schemars"] }
ruff_notebook = { workspace = true }

View File

@@ -10,7 +10,7 @@ use itertools::Itertools;
use regex::{Captures, Regex};
use strum::IntoEnumIterator;
use ruff_linter::FixAvailability;
use ruff_diagnostics::FixAvailability;
use ruff_linter::registry::{Linter, Rule, RuleNamespace};
use ruff_options_metadata::{OptionEntry, OptionsMetadata};
use ruff_workspace::options::Options;

View File

@@ -8,7 +8,7 @@ use std::borrow::Cow;
use std::fmt::Write;
use strum::IntoEnumIterator;
use ruff_linter::FixAvailability;
use ruff_diagnostics::FixAvailability;
use ruff_linter::registry::{Linter, Rule, RuleNamespace};
use ruff_linter::upstream_categories::UpstreamCategoryAndPrefix;
use ruff_options_metadata::OptionsMetadata;
@@ -18,43 +18,44 @@ const FIX_SYMBOL: &str = "🛠️";
const PREVIEW_SYMBOL: &str = "🧪";
const REMOVED_SYMBOL: &str = "";
const WARNING_SYMBOL: &str = "⚠️";
const STABLE_SYMBOL: &str = "✔️";
const SPACER: &str = "&nbsp;&nbsp;&nbsp;&nbsp;";
/// Style for the rule's fixability and status icons.
const SYMBOL_STYLE: &str = "style='width: 1em; display: inline-block;'";
/// Style for the container wrapping the fixability and status icons.
const SYMBOLS_CONTAINER: &str = "style='display: flex; gap: 0.5rem; justify-content: end;'";
fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>, linter: &Linter) {
table_out.push_str("| Code | Name | Message | |");
table_out.push_str("| Code | Name | Message | |");
table_out.push('\n');
table_out.push_str("| ---- | ---- | ------- | -: |");
table_out.push_str("| ---- | ---- | ------- | ------: |");
table_out.push('\n');
for rule in rules {
let status_token = match rule.group() {
RuleGroup::Removed => {
format!(
"<span {SYMBOL_STYLE} title='Rule has been removed'>{REMOVED_SYMBOL}</span>"
)
format!("<span title='Rule has been removed'>{REMOVED_SYMBOL}</span>")
}
RuleGroup::Deprecated => {
format!(
"<span {SYMBOL_STYLE} title='Rule has been deprecated'>{WARNING_SYMBOL}</span>"
)
format!("<span title='Rule has been deprecated'>{WARNING_SYMBOL}</span>")
}
RuleGroup::Preview => {
format!("<span {SYMBOL_STYLE} title='Rule is in preview'>{PREVIEW_SYMBOL}</span>")
format!("<span title='Rule is in preview'>{PREVIEW_SYMBOL}</span>")
}
RuleGroup::Stable => {
// A full opacity checkmark is a bit aggressive for indicating stable
format!("<span title='Rule is stable' style='opacity: 0.6'>{STABLE_SYMBOL}</span>")
}
RuleGroup::Stable => format!("<span {SYMBOL_STYLE}></span>"),
};
let fix_token = match rule.fixable() {
FixAvailability::Always | FixAvailability::Sometimes => {
format!("<span {SYMBOL_STYLE} title='Automatic fix available'>{FIX_SYMBOL}</span>")
format!("<span title='Automatic fix available'>{FIX_SYMBOL}</span>")
}
FixAvailability::None => {
format!(
"<span title='Automatic fix not available' style='opacity: 0.1' aria-hidden='true'>{FIX_SYMBOL}</span>"
)
}
FixAvailability::None => format!("<span {SYMBOL_STYLE}></span>"),
};
let tokens = format!("{status_token} {fix_token}");
let rule_name = rule.as_ref();
// If the message ends in a bracketed expression (like: "Use {replacement}"), escape the
@@ -81,14 +82,15 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
#[expect(clippy::or_fun_call)]
let _ = write!(
table_out,
"| {ss}{prefix}{code}{se} {{ #{prefix}{code} }} | {ss}{explanation}{se} | {ss}{message}{se} | <div {SYMBOLS_CONTAINER}>{status_token}{fix_token}</div>|",
prefix = linter.common_prefix(),
code = linter.code_for_rule(rule).unwrap(),
explanation = rule
.explanation()
"| {ss}{0}{1}{se} {{ #{0}{1} }} | {ss}{2}{se} | {ss}{3}{se} | {ss}{4}{se} |",
linter.common_prefix(),
linter.code_for_rule(rule).unwrap(),
rule.explanation()
.is_some()
.then_some(format_args!("[{rule_name}](rules/{rule_name}.md)"))
.unwrap_or(format_args!("{rule_name}")),
message,
tokens,
);
table_out.push('\n');
}
@@ -102,6 +104,12 @@ pub(crate) fn generate() -> String {
table_out.push_str("### Legend");
table_out.push('\n');
let _ = write!(
&mut table_out,
"{SPACER}{STABLE_SYMBOL}{SPACER} The rule is stable."
);
table_out.push_str("<br />");
let _ = write!(
&mut table_out,
"{SPACER}{PREVIEW_SYMBOL}{SPACER} The rule is unstable and is in [\"preview\"](faq.md#what-is-preview)."
@@ -124,8 +132,7 @@ pub(crate) fn generate() -> String {
&mut table_out,
"{SPACER}{FIX_SYMBOL}{SPACER} The rule is automatically fixable by the `--fix` command-line option."
);
table_out.push_str("\n\n");
table_out.push_str("All rules not marked as preview, deprecated or removed are stable.");
table_out.push_str("<br />");
table_out.push('\n');
for linter in Linter::iter() {

View File

@@ -80,7 +80,6 @@ fn generate() -> String {
let mut parents = Vec::new();
output.push_str("<!-- WARNING: This file is auto-generated (cargo dev generate-all). Edit the doc comments in 'crates/ty/src/args.rs' if you want to change anything here. -->\n\n");
output.push_str("# CLI Reference\n\n");
generate_command(&mut output, &ty, &mut parents);

View File

@@ -25,10 +25,6 @@ pub(crate) fn main(args: &Args) -> anyhow::Result<()> {
let file_name = "crates/ty/docs/configuration.md";
let markdown_path = PathBuf::from(ROOT_DIR).join(file_name);
output.push_str(
"<!-- WARNING: This file is auto-generated (cargo dev generate-all). Update the doc comments on the 'Options' struct in 'crates/ty_project/src/metadata/options.rs' if you want to change anything here. -->\n\n",
);
generate_set(
&mut output,
Set::Toplevel(Options::metadata()),

View File

@@ -56,10 +56,6 @@ fn generate_markdown() -> String {
let mut output = String::new();
let _ = writeln!(
&mut output,
"<!-- WARNING: This file is auto-generated (cargo dev generate-all). Edit the lint-declarations in 'crates/ty_python_semantic/src/types/diagnostic.rs' if you want to change anything here. -->\n"
);
let _ = writeln!(&mut output, "# Rules\n");
let mut lints: Vec<_> = registry.lints().iter().collect();

View File

@@ -16,5 +16,7 @@ doctest = false
[dependencies]
ruff_text_size = { workspace = true }
anyhow = { workspace = true }
log = { workspace = true }
is-macro = { workspace = true }
serde = { workspace = true, optional = true, features = [] }

View File

@@ -1,15 +1,14 @@
use anyhow::Result;
use log::debug;
use ruff_source_file::SourceFile;
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::registry::AsRule;
use crate::violation::Violation;
use crate::{Fix, codes::Rule};
use crate::{Fix, Violation};
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct OldDiagnostic {
pub struct Diagnostic {
/// The identifier of the diagnostic, used to align the diagnostic with a rule.
pub name: &'static str,
/// The message body to display to the user, to explain the diagnostic.
pub body: String,
/// The message to display to the user, to explain the suggested fix.
@@ -17,27 +16,17 @@ pub struct OldDiagnostic {
pub range: TextRange,
pub fix: Option<Fix>,
pub parent: Option<TextSize>,
pub(crate) rule: Rule,
pub(crate) file: SourceFile,
}
impl OldDiagnostic {
// TODO(brent) We temporarily allow this to avoid updating all of the call sites to add
// references. I expect this method to go away or change significantly with the rest of the
// diagnostic refactor, but if it still exists in this form at the end of the refactor, we
// should just update the call sites.
#[expect(clippy::needless_pass_by_value)]
pub fn new<T: Violation>(kind: T, range: TextRange, file: &SourceFile) -> Self {
impl Diagnostic {
pub fn new<T: Violation>(kind: T, range: TextRange) -> Self {
Self {
name: T::rule_name(),
body: Violation::message(&kind),
suggestion: Violation::fix_title(&kind),
range,
fix: None,
parent: None,
rule: T::rule(),
file: file.clone(),
}
}
@@ -61,7 +50,7 @@ impl OldDiagnostic {
pub fn try_set_fix(&mut self, func: impl FnOnce() -> Result<Fix>) {
match func() {
Ok(fix) => self.fix = Some(fix),
Err(err) => debug!("Failed to create fix for {}: {}", self.rule, err),
Err(err) => debug!("Failed to create fix for {}: {}", self.name, err),
}
}
@@ -72,7 +61,7 @@ impl OldDiagnostic {
match func() {
Ok(None) => {}
Ok(Some(fix)) => self.fix = Some(fix),
Err(err) => debug!("Failed to create fix for {}: {}", self.rule, err),
Err(err) => debug!("Failed to create fix for {}: {}", self.name, err),
}
}
@@ -91,13 +80,7 @@ impl OldDiagnostic {
}
}
impl AsRule for OldDiagnostic {
fn rule(&self) -> Rule {
self.rule
}
}
impl Ranged for OldDiagnostic {
impl Ranged for Diagnostic {
fn range(&self) -> TextRange {
self.range
}

View File

@@ -1,7 +1,11 @@
pub use diagnostic::Diagnostic;
pub use edit::Edit;
pub use fix::{Applicability, Fix, IsolationLevel};
pub use source_map::{SourceMap, SourceMarker};
pub use violation::{AlwaysFixableViolation, FixAvailability, Violation, ViolationMetadata};
mod diagnostic;
mod edit;
mod fix;
mod source_map;
mod violation;

View File

@@ -1,7 +1,5 @@
use std::fmt::{Debug, Display};
use crate::codes::Rule;
#[derive(Debug, Copy, Clone)]
pub enum FixAvailability {
Sometimes,
@@ -20,8 +18,8 @@ impl Display for FixAvailability {
}
pub trait ViolationMetadata {
/// Returns the rule for this violation
fn rule() -> Rule;
/// Returns the rule name of this violation
fn rule_name() -> &'static str;
/// Returns an explanation of what this violation catches,
/// why it's bad, and what users should do instead.

View File

@@ -9,8 +9,8 @@ use ruff_db::{Db as SourceDb, Upcast};
use ruff_python_ast::PythonVersion;
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
use ty_python_semantic::{
Db, Program, ProgramSettings, PythonPath, PythonPlatform, PythonVersionSource,
PythonVersionWithSource, SearchPathSettings, SysPrefixPathOrigin, default_lint_registry,
Db, Program, ProgramSettings, PythonPath, PythonPlatform, SearchPathSettings,
default_lint_registry,
};
static EMPTY_VENDORED: std::sync::LazyLock<VendoredFileSystem> = std::sync::LazyLock::new(|| {
@@ -37,18 +37,14 @@ impl ModuleDb {
) -> Result<Self> {
let mut search_paths = SearchPathSettings::new(src_roots);
if let Some(venv_path) = venv_path {
search_paths.python_path =
PythonPath::sys_prefix(venv_path, SysPrefixPathOrigin::PythonCliFlag);
search_paths.python_path = PythonPath::from_cli_flag(venv_path);
}
let db = Self::default();
Program::from_settings(
&db,
ProgramSettings {
python_version: Some(PythonVersionWithSource {
version: python_version,
source: PythonVersionSource::default(),
}),
python_version,
python_platform: PythonPlatform::default(),
search_paths,
},

View File

@@ -19,20 +19,19 @@ impl<'a> Resolver<'a> {
pub(crate) fn resolve(&self, import: CollectedImport) -> Option<&'a FilePath> {
match import {
CollectedImport::Import(import) => {
let module = resolve_module(self.db, &import)?;
Some(module.file()?.path(self.db))
resolve_module(self.db, &import).map(|module| module.file().path(self.db))
}
CollectedImport::ImportFrom(import) => {
// Attempt to resolve the member (e.g., given `from foo import bar`, look for `foo.bar`).
let parent = import.parent();
let module = resolve_module(self.db, &import).or_else(|| {
// Attempt to resolve the module (e.g., given `from foo import bar`, look for `foo`).
resolve_module(self.db, &import)
.map(|module| module.file().path(self.db))
.or_else(|| {
// Attempt to resolve the module (e.g., given `from foo import bar`, look for `foo`).
resolve_module(self.db, &parent?)
})?;
Some(module.file()?.path(self.db))
resolve_module(self.db, &parent?).map(|module| module.file().path(self.db))
})
}
}
}

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.11.12"
version = "0.11.10"
publish = false
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -10,10 +10,22 @@ from airflow import (
PY312,
)
from airflow.api_connexion.security import requires_access
from airflow.configuration import (
as_dict,
get,
getboolean,
getfloat,
getint,
has_option,
remove_option,
set,
)
from airflow.contrib.aws_athena_hook import AWSAthenaHook
from airflow.datasets import DatasetAliasEvent
from airflow.hooks.base_hook import BaseHook
from airflow.operators.subdag import SubDagOperator
from airflow.secrets.local_filesystem import LocalFilesystemBackend
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.triggers.external_task import TaskStateTrigger
from airflow.utils import dates
from airflow.utils.dag_cycle_tester import test_cycle
@@ -28,10 +40,13 @@ from airflow.utils.dates import (
)
from airflow.utils.db import create_session
from airflow.utils.decorators import apply_defaults
from airflow.utils.file import mkdirs
from airflow.utils.file import TemporaryDirectory, mkdirs
from airflow.utils.helpers import chain as helper_chain
from airflow.utils.helpers import cross_downstream as helper_cross_downstream
from airflow.utils.log import secrets_masker
from airflow.utils.state import SHUTDOWN, terminating_states
from airflow.utils.trigger_rule import TriggerRule
from airflow.www.auth import has_access, has_access_dataset
from airflow.www.auth import has_access
from airflow.www.utils import get_sensitive_variables_fields, should_hide_value_for_key
# airflow root
@@ -40,6 +55,11 @@ PY36, PY37, PY38, PY39, PY310, PY311, PY312
# airflow.api_connexion.security
requires_access
# airflow.configuration
get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
# airflow.contrib.*
AWSAthenaHook()
@@ -48,6 +68,10 @@ AWSAthenaHook()
DatasetAliasEvent()
# airflow.hooks
BaseHook()
# airflow.operators.subdag.*
SubDagOperator()
@@ -57,6 +81,10 @@ SubDagOperator()
LocalFilesystemBackend()
# airflow.sensors.base_sensor_operator
BaseSensorOperator()
# airflow.triggers.external_task
TaskStateTrigger()
@@ -86,8 +114,15 @@ create_session
apply_defaults
# airflow.utils.file
TemporaryDirectory()
mkdirs
# airflow.utils.helpers
helper_chain
helper_cross_downstream
# airflow.utils.log
secrets_masker
# airflow.utils.state
SHUTDOWN
@@ -100,8 +135,37 @@ TriggerRule.NONE_FAILED_OR_SKIPPED
# airflow.www.auth
has_access
has_access_dataset
# airflow.www.utils
get_sensitive_variables_fields
should_hide_value_for_key
# airflow.operators.python
from airflow.operators.python import get_current_context
get_current_context()
# airflow.providers.mysql
from airflow.providers.mysql.datasets.mysql import sanitize_uri
sanitize_uri
# airflow.providers.postgres
from airflow.providers.postgres.datasets.postgres import sanitize_uri
sanitize_uri
# airflow.providers.trino
from airflow.providers.trino.datasets.trino import sanitize_uri
sanitize_uri
# airflow.notifications.basenotifier
from airflow.notifications.basenotifier import BaseNotifier
BaseNotifier()
# airflow.auth.manager
from airflow.auth.managers.base_auth_manager import BaseAuthManager
BaseAuthManager()

View File

@@ -3,6 +3,7 @@ from __future__ import annotations
from airflow.api_connexion.security import requires_access_dataset
from airflow.auth.managers.models.resource_details import (
DatasetDetails,
)
from airflow.datasets.manager import (
DatasetManager,
@@ -11,13 +12,15 @@ from airflow.datasets.manager import (
)
from airflow.lineage.hook import DatasetLineageInfo
from airflow.metrics.validators import AllowListValidator, BlockListValidator
from airflow.secrets.local_filesystem import load_connections
from airflow.secrets.local_filesystm import load_connections
from airflow.security.permissions import RESOURCE_DATASET
from airflow.www.auth import has_access_dataset
requires_access_dataset()
DatasetDetails()
DatasetManager()
dataset_manager()
resolve_dataset_manager()
@@ -31,6 +34,7 @@ load_connections()
RESOURCE_DATASET
has_access_dataset()
from airflow.listeners.spec.dataset import (
on_dataset_changed,
@@ -39,76 +43,3 @@ from airflow.listeners.spec.dataset import (
on_dataset_created()
on_dataset_changed()
# airflow.operators.python
from airflow.operators.python import get_current_context
get_current_context()
# airflow.providers.mysql
from airflow.providers.mysql.datasets.mysql import sanitize_uri
sanitize_uri
# airflow.providers.postgres
from airflow.providers.postgres.datasets.postgres import sanitize_uri
sanitize_uri
# airflow.providers.trino
from airflow.providers.trino.datasets.trino import sanitize_uri
sanitize_uri
# airflow.notifications.basenotifier
from airflow.notifications.basenotifier import BaseNotifier
BaseNotifier()
# airflow.auth.manager
from airflow.auth.managers.base_auth_manager import BaseAuthManager
BaseAuthManager()
from airflow.configuration import (
as_dict,
get,
getboolean,
getfloat,
getint,
has_option,
remove_option,
set,
)
# airflow.configuration
get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
from airflow.hooks.base_hook import BaseHook
# airflow.hooks
BaseHook()
from airflow.sensors.base_sensor_operator import BaseSensorOperator
# airflow.sensors.base_sensor_operator
BaseSensorOperator()
BaseHook()
from airflow.utils.helpers import chain as helper_chain
from airflow.utils.helpers import cross_downstream as helper_cross_downstream
# airflow.utils.helpers
helper_chain
helper_cross_downstream
# airflow.utils.file
from airflow.utils.file import TemporaryDirectory
TemporaryDirectory()
from airflow.utils.log import secrets_masker
# airflow.utils.log
secrets_masker

View File

@@ -1,54 +1,54 @@
from __future__ import annotations
from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities
from airflow.providers.openlineage.utils.utils import (
DatasetInfo,
translate_airflow_dataset,
)
from airflow.secrets.local_filesystem import load_connections
from airflow.security.permissions import RESOURCE_DATASET
AvpEntities.DATASET
# airflow.providers.openlineage.utils.utils
DatasetInfo()
translate_airflow_dataset()
# airflow.secrets.local_filesystem
load_connections()
# airflow.security.permissions
RESOURCE_DATASET
from airflow.providers.amazon.aws.datasets.s3 import (
convert_dataset_to_openlineage as s3_convert_dataset_to_openlineage,
)
from airflow.providers.amazon.aws.datasets.s3 import create_dataset as s3_create_dataset
s3_create_dataset()
s3_convert_dataset_to_openlineage()
from airflow.providers.common.io.dataset.file import (
convert_dataset_to_openlineage as io_convert_dataset_to_openlineage,
)
from airflow.providers.common.io.dataset.file import create_dataset as io_create_dataset
io_create_dataset()
io_convert_dataset_to_openlineage()
# # airflow.providers.google.datasets.bigquery
from airflow.providers.google.datasets.bigquery import (
create_dataset as bigquery_create_dataset,
)
bigquery_create_dataset()
# airflow.providers.google.datasets.gcs
from airflow.providers.google.datasets.gcs import (
convert_dataset_to_openlineage as gcs_convert_dataset_to_openlineage,
)
from airflow.providers.google.datasets.gcs import create_dataset as gcs_create_dataset
from airflow.providers.openlineage.utils.utils import (
DatasetInfo,
translate_airflow_dataset,
)
AvpEntities.DATASET
s3_create_dataset()
s3_convert_dataset_to_openlineage()
io_create_dataset()
io_convert_dataset_to_openlineage()
# airflow.providers.google.datasets.bigquery
bigquery_create_dataset()
# airflow.providers.google.datasets.gcs
gcs_create_dataset()
gcs_convert_dataset_to_openlineage()
# airflow.providers.openlineage.utils.utils
DatasetInfo()
translate_airflow_dataset()
#
# airflow.secrets.local_filesystem
load_connections()
#
# airflow.security.permissions
RESOURCE_DATASET
# airflow.timetables
DatasetTriggeredTimetable()
#
# airflow.www.auth
has_access_dataset

View File

@@ -5,30 +5,35 @@ from airflow.hooks.S3_hook import (
provide_bucket_name,
)
from airflow.operators.gcs_to_s3 import GCSToS3Operator
from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
from airflow.operators.google_api_to_s3_transfer import (
GoogleApiToS3Operator,
GoogleApiToS3Transfer,
)
from airflow.operators.redshift_to_s3_operator import (
RedshiftToS3Operator,
RedshiftToS3Transfer,
)
from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
from airflow.operators.s3_to_redshift_operator import (
S3ToRedshiftOperator,
S3ToRedshiftTransfer,
)
from airflow.sensors.s3_key_sensor import S3KeySensor
S3Hook()
provide_bucket_name()
GCSToS3Operator()
GoogleApiToS3Operator()
RedshiftToS3Operator()
S3FileTransformOperator()
S3ToRedshiftOperator()
S3KeySensor()
from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Transfer
GoogleApiToS3Transfer()
from airflow.operators.redshift_to_s3_operator import RedshiftToS3Transfer
RedshiftToS3Operator()
RedshiftToS3Transfer()
from airflow.operators.s3_to_redshift_operator import S3ToRedshiftTransfer
S3FileTransformOperator()
S3ToRedshiftOperator()
S3ToRedshiftTransfer()
S3KeySensor()


@@ -4,13 +4,10 @@ from airflow.hooks.dbapi import (
ConnectorProtocol,
DbApiHook,
)
ConnectorProtocol()
DbApiHook()
from airflow.hooks.dbapi_hook import DbApiHook
from airflow.operators.check_operator import SQLCheckOperator
ConnectorProtocol()
DbApiHook()
SQLCheckOperator()
@@ -117,11 +114,16 @@ from airflow.sensors.sql_sensor import SqlSensor
SqlSensor()
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
from airflow.operators.jdbc_operator import JdbcOperator
from airflow.operators.mssql_operator import MsSqlOperator
from airflow.operators.mysql_operator import MySqlOperator
from airflow.operators.oracle_operator import OracleOperator
from airflow.operators.postgres_operator import PostgresOperator
from airflow.operators.sqlite_operator import SqliteOperator
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
JdbcOperator()
MsSqlOperator()
MySqlOperator()
OracleOperator()
PostgresOperator()
SqliteOperator()


@@ -12,59 +12,55 @@ from airflow.macros.hive import (
)
from airflow.operators.hive_operator import HiveOperator
from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
from airflow.operators.hive_to_mysql import HiveToMySqlOperator
from airflow.operators.hive_to_mysql import (
HiveToMySqlOperator,
HiveToMySqlTransfer,
)
from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
HIVE_QUEUE_PRIORITIES
HiveCliHook()
HiveMetastoreHook()
HiveServer2Hook()
from airflow.operators.mssql_to_hive import (
MsSqlToHiveOperator,
MsSqlToHiveTransfer,
)
from airflow.operators.mysql_to_hive import (
MySqlToHiveOperator,
MySqlToHiveTransfer,
)
from airflow.operators.s3_to_hive_operator import (
S3ToHiveOperator,
S3ToHiveTransfer,
)
from airflow.sensors.hive_partition_sensor import HivePartitionSensor
from airflow.sensors.metastore_partition_sensor import MetastorePartitionSensor
from airflow.sensors.named_hive_partition_sensor import NamedHivePartitionSensor
closest_ds_partition()
max_partition()
HiveCliHook()
HiveMetastoreHook()
HiveServer2Hook()
HIVE_QUEUE_PRIORITIES
HiveOperator()
HiveStatsCollectionOperator()
HiveToMySqlOperator()
HiveToSambaOperator()
from airflow.operators.hive_to_mysql import HiveToMySqlTransfer
HiveToMySqlTransfer()
from airflow.operators.mysql_to_hive import MySqlToHiveOperator
MySqlToHiveOperator()
from airflow.operators.mysql_to_hive import MySqlToHiveTransfer
MySqlToHiveTransfer()
from airflow.operators.mssql_to_hive import MsSqlToHiveOperator
HiveToSambaOperator()
MsSqlToHiveOperator()
from airflow.operators.mssql_to_hive import MsSqlToHiveTransfer
MsSqlToHiveTransfer()
from airflow.operators.s3_to_hive_operator import S3ToHiveOperator
MySqlToHiveOperator()
MySqlToHiveTransfer()
S3ToHiveOperator()
from airflow.operators.s3_to_hive_operator import S3ToHiveTransfer
S3ToHiveTransfer()
from airflow.sensors.hive_partition_sensor import HivePartitionSensor
HivePartitionSensor()
from airflow.sensors.metastore_partition_sensor import MetastorePartitionSensor
MetastorePartitionSensor()
from airflow.sensors.named_hive_partition_sensor import NamedHivePartitionSensor
NamedHivePartitionSensor()


@@ -16,7 +16,14 @@ from airflow.kubernetes.kube_client import (
from airflow.kubernetes.kubernetes_helper_functions import (
add_pod_suffix,
annotations_for_logging_task_metadata,
annotations_to_key,
create_pod_id,
get_logs_task_metadata,
rand_str,
)
from airflow.kubernetes.pod import (
Port,
Resources,
)
ALL_NAMESPACES
@@ -30,13 +37,21 @@ _enable_tcp_keepalive()
get_kube_client()
add_pod_suffix()
annotations_for_logging_task_metadata()
create_pod_id()
annotations_for_logging_task_metadata()
annotations_to_key()
get_logs_task_metadata()
rand_str()
Port()
Resources()
from airflow.kubernetes.pod_generator import (
PodDefaults,
PodGenerator,
PodGeneratorDeprecated,
add_pod_suffix,
datetime_to_label_safe_datestring,
extend_object_field,
@@ -46,16 +61,18 @@ from airflow.kubernetes.pod_generator import (
rand_str,
)
PodDefaults()
PodGenerator()
add_pod_suffix()
datetime_to_label_safe_datestring()
extend_object_field()
label_safe_datestring_to_datetime()
make_safe_label_value()
merge_objects()
PodGenerator()
PodDefaults()
PodGeneratorDeprecated()
add_pod_suffix()
rand_str()
from airflow.kubernetes.pod_generator_deprecated import (
PodDefaults,
PodGenerator,
@@ -73,6 +90,7 @@ make_safe_label_value()
PodLauncher()
PodStatus()
from airflow.kubernetes.pod_launcher_deprecated import (
PodDefaults,
PodLauncher,
@@ -97,17 +115,3 @@ K8SModel()
Secret()
Volume()
VolumeMount()
from airflow.kubernetes.kubernetes_helper_functions import (
annotations_to_key,
get_logs_task_metadata,
rand_str,
)
annotations_to_key()
get_logs_task_metadata()
rand_str()
from airflow.kubernetes.pod_generator import PodGeneratorDeprecated
PodGeneratorDeprecated()


@@ -5,6 +5,10 @@ from airflow.operators.dagrun_operator import (
TriggerDagRunLink,
TriggerDagRunOperator,
)
from airflow.operators.dummy import (
DummyOperator,
EmptyOperator,
)
from airflow.operators.latest_only_operator import LatestOnlyOperator
from airflow.operators.python_operator import (
BranchPythonOperator,
@@ -15,12 +19,15 @@ from airflow.operators.python_operator import (
from airflow.sensors.external_task_sensor import (
ExternalTaskMarker,
ExternalTaskSensor,
ExternalTaskSensorLink,
)
BashOperator()
TriggerDagRunLink()
TriggerDagRunOperator()
DummyOperator()
EmptyOperator()
LatestOnlyOperator()
@@ -31,48 +38,25 @@ ShortCircuitOperator()
ExternalTaskMarker()
ExternalTaskSensor()
ExternalTaskSensorLink()
from airflow.operators.dummy_operator import (
DummyOperator,
EmptyOperator,
)
DummyOperator()
EmptyOperator()
from airflow.hooks.subprocess import SubprocessResult
SubprocessResult()
from airflow.hooks.subprocess import working_directory
working_directory()
from airflow.operators.datetime import target_times_as_dates
target_times_as_dates()
from airflow.operators.trigger_dagrun import TriggerDagRunLink
TriggerDagRunLink()
from airflow.sensors.external_task import ExternalTaskSensorLink
ExternalTaskSensorLink()
from airflow.sensors.time_delta import WaitSensor
WaitSensor()
from airflow.operators.dummy import DummyOperator
DummyOperator()
from airflow.operators.dummy import EmptyOperator
EmptyOperator()
from airflow.operators.dummy_operator import DummyOperator
DummyOperator()
from airflow.operators.dummy_operator import EmptyOperator
EmptyOperator()
from airflow.sensors.external_task_sensor import ExternalTaskSensorLink
ExternalTaskSensorLink()
WaitSensor()


@@ -9,12 +9,19 @@ from airflow.datasets import (
expand_alias_to_datasets,
)
from airflow.datasets.metadata import Metadata
from airflow.decorators import (
dag,
setup,
task,
task_group,
)
from airflow.decorators import dag, setup, task, task_group, teardown
from airflow.io.path import ObjectStoragePath
from airflow.io.storage import attach
from airflow.models import DAG as DAGFromModel
from airflow.models import (
Connection,
Variable,
)
from airflow.models.baseoperator import chain, chain_linear, cross_downstream
from airflow.models.baseoperatorlink import BaseOperatorLink
from airflow.models.dag import DAG as DAGFromDag
from airflow.timetables.datasets import DatasetOrTimeSchedule
from airflow.utils.dag_parsing_context import get_parsing_context
# airflow
DatasetFromRoot()
@@ -32,22 +39,9 @@ dag()
task()
task_group()
setup()
from airflow.decorators import teardown
from airflow.io.path import ObjectStoragePath
from airflow.io.storage import attach
from airflow.models import DAG as DAGFromModel
from airflow.models import (
Connection,
Variable,
)
from airflow.models.baseoperator import chain, chain_linear, cross_downstream
from airflow.models.baseoperatorlink import BaseOperatorLink
from airflow.models.dag import DAG as DAGFromDag
# airflow.decorators
teardown()
# # airflow.io
# airflow.io
ObjectStoragePath()
attach()
@@ -66,9 +60,6 @@ BaseOperatorLink()
# airflow.models.dag
DAGFromDag()
from airflow.timetables.datasets import DatasetOrTimeSchedule
from airflow.utils.dag_parsing_context import get_parsing_context
# airflow.timetables.datasets
DatasetOrTimeSchedule()


@@ -7,71 +7,49 @@ from airflow.operators.bash import BashOperator
from airflow.operators.datetime import BranchDateTimeOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.latest_only import LatestOnlyOperator
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.operators.weekday import BranchDayOfWeekOperator
from airflow.sensors.date_time import DateTimeSensor
FSHook()
PackageIndexHook()
SubprocessHook()
BashOperator()
BranchDateTimeOperator()
TriggerDagRunOperator()
EmptyOperator()
LatestOnlyOperator()
BranchDayOfWeekOperator()
DateTimeSensor()
from airflow.operators.python import (
BranchPythonOperator,
PythonOperator,
PythonVirtualenvOperator,
ShortCircuitOperator,
)
from airflow.sensors.date_time import DateTimeSensorAsync
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.operators.weekday import BranchDayOfWeekOperator
from airflow.sensors.date_time import DateTimeSensor, DateTimeSensorAsync
from airflow.sensors.external_task import (
ExternalTaskMarker,
ExternalTaskSensor,
)
from airflow.sensors.time_sensor import (
TimeSensor,
TimeSensorAsync,
)
from airflow.sensors.filesystem import FileSensor
BranchPythonOperator()
PythonOperator()
PythonVirtualenvOperator()
ShortCircuitOperator()
DateTimeSensorAsync()
ExternalTaskMarker()
ExternalTaskSensor()
FileSensor()
TimeSensor()
TimeSensorAsync()
from airflow.sensors.time_delta import (
TimeDeltaSensor,
TimeDeltaSensorAsync,
)
from airflow.sensors.time_delta import TimeDeltaSensor, TimeDeltaSensorAsync
from airflow.sensors.time_sensor import TimeSensor, TimeSensorAsync
from airflow.sensors.weekday import DayOfWeekSensor
from airflow.triggers.external_task import (
DagStateTrigger,
WorkflowTrigger,
)
from airflow.triggers.external_task import DagStateTrigger, WorkflowTrigger
from airflow.triggers.file import FileTrigger
from airflow.triggers.temporal import (
DateTimeTrigger,
TimeDeltaTrigger,
)
from airflow.triggers.temporal import DateTimeTrigger, TimeDeltaTrigger
TimeDeltaSensor()
TimeDeltaSensorAsync()
FSHook()
PackageIndexHook()
SubprocessHook()
BashOperator()
BranchDateTimeOperator()
TriggerDagRunOperator()
EmptyOperator()
LatestOnlyOperator()
(
BranchPythonOperator(),
PythonOperator(),
PythonVirtualenvOperator(),
ShortCircuitOperator(),
)
BranchDayOfWeekOperator()
DateTimeSensor(), DateTimeSensorAsync()
ExternalTaskMarker(), ExternalTaskSensor()
FileSensor()
TimeSensor(), TimeSensorAsync()
TimeDeltaSensor(), TimeDeltaSensorAsync()
DayOfWeekSensor()
DagStateTrigger()
WorkflowTrigger()
DagStateTrigger(), WorkflowTrigger()
FileTrigger()
DateTimeTrigger()
TimeDeltaTrigger()
DateTimeTrigger(), TimeDeltaTrigger()


@@ -178,38 +178,3 @@ async def unknown_1(other: str = Depends(unknown_unresolved)): ...
async def unknown_2(other: str = Depends(unknown_not_function)): ...
@app.get("/things/{thing_id}")
async def unknown_3(other: str = Depends(unknown_imported)): ...
# Class dependencies
from pydantic import BaseModel
from dataclasses import dataclass
class PydanticParams(BaseModel):
my_id: int
class InitParams:
def __init__(self, my_id: int):
self.my_id = my_id
# Errors
@app.get("/{id}")
async def get_id_pydantic_full(
params: Annotated[PydanticParams, Depends(PydanticParams)],
): ...
@app.get("/{id}")
async def get_id_pydantic_short(params: Annotated[PydanticParams, Depends()]): ...
@app.get("/{id}")
async def get_id_init_not_annotated(params = Depends(InitParams)): ...
# No errors
@app.get("/{my_id}")
async def get_id_pydantic_full(
params: Annotated[PydanticParams, Depends(PydanticParams)],
): ...
@app.get("/{my_id}")
async def get_id_pydantic_short(params: Annotated[PydanticParams, Depends()]): ...
@app.get("/{my_id}")
async def get_id_init_not_annotated(params = Depends(InitParams)): ...


@@ -145,23 +145,3 @@ def func():
sleep = 10
anyio.run(main)
async def test_anyio_async115_helpers():
import anyio
await anyio.sleep(delay=1) # OK
await anyio.sleep(seconds=1) # OK
await anyio.sleep(delay=0) # ASYNC115
await anyio.sleep(seconds=0) # OK
async def test_trio_async115_helpers():
import trio
await trio.sleep(seconds=1) # OK
await trio.sleep(delay=1) # OK
await trio.sleep(seconds=0) # ASYNC115
await trio.sleep(delay=0) # OK
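
For context on the ASYNC115 cases above: a zero-length sleep is the conventional idiom for yielding to the event loop, and both trio and anyio expose an explicit checkpoint API for exactly that. A minimal sketch of the suggested rewrite (not part of this diff):

    import trio

    async def yield_to_scheduler():
        # Instead of `await trio.sleep(0)` (ASYNC115):
        await trio.lowlevel.checkpoint()
        # The anyio equivalent is `await anyio.lowlevel.checkpoint()`.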


@@ -108,23 +108,3 @@ async def import_from_anyio():
# catch from import
await sleep(86401) # error: 116, "async"
async def test_anyio_async116_helpers():
import anyio
await anyio.sleep(delay=1) # OK
await anyio.sleep(seconds=1) # OK
await anyio.sleep(delay=86401) # ASYNC116
await anyio.sleep(seconds=86401) # OK
async def test_trio_async116_helpers():
import trio
await trio.sleep(seconds=1) # OK
await trio.sleep(delay=1) # OK
await trio.sleep(seconds=86401) # ASYNC116
await trio.sleep(delay=86401) # OK
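
For context on the ASYNC116 cases above: the rule treats sleeps longer than 24 hours (86400 seconds) as "sleep forever" in disguise, and both libraries provide an explicit API for that intent. A minimal sketch (not part of this diff):

    import trio

    async def wait_indefinitely():
        # Instead of `await trio.sleep(86401)` (ASYNC116):
        await trio.sleep_forever()
        # The anyio equivalent is `await anyio.sleep_forever()`.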


@@ -22,8 +22,3 @@ def my_func():
# Implicit string concatenation
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0"
# t-strings - all ok
t"0.0.0.0"
"0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0" t"0.0.0.0{expr}0.0.0.0"


@@ -40,7 +40,3 @@ with tempfile.TemporaryDirectory(dir="/dev/shm") as d:
with TemporaryDirectory(dir="/tmp") as d:
pass
# ok (runtime error from t-string)
with open(t"/foo/bar", "w") as f:
f.write("def")


@@ -169,13 +169,3 @@ query60 = f"""
# https://github.com/astral-sh/ruff/issues/17967
query61 = f"SELECT * FROM table" # skip expressionless f-strings
# t-strings
query62 = t"SELECT * FROM table"
query63 = t"""
SELECT *,
foo
FROM ({user_input}) raw
"""
query64 = f"update {t"{table}"} set var = {t"{var}"}"
query65 = t"update {f"{table}"} set var = {f"{var}"}"


@@ -67,6 +67,3 @@ getattr(self.
import builtins
builtins.getattr(foo, "bar")
# Regression test for: https://github.com/astral-sh/ruff/issues/18353
setattr(foo, "__debug__", 0)


@@ -91,99 +91,3 @@ _ = "\8""0" # fix should be "\80"
_ = "\12""8" # fix should be "\128"
_ = "\12""foo" # fix should be "\12foo"
_ = "\12" "" # fix should be "\12"
# Mixed literal + non-literal scenarios
_ = (
"start" +
variable +
"end"
)
_ = (
f"format" +
func_call() +
"literal"
)
_ = (
rf"raw_f{x}" +
r"raw_normal"
)
# Different prefix combinations
_ = (
u"unicode" +
r"raw"
)
_ = (
rb"raw_bytes" +
b"normal_bytes"
)
_ = (
b"bytes" +
b"with_bytes"
)
# Repeated concatenation
_ = ("a" +
"b" +
"c" +
"d" + "e"
)
_ = ("a"
+ "b"
+ "c"
+ "d"
+ "e"
)
_ = (
"start" +
variable + # comment
"end"
)
_ = (
"start" +
variable
# leading comment
+ "end"
)
_ = (
"first"
+ "second" # extra spaces around +
)
_ = (
"first" + # trailing spaces before +
"second"
)
_ = ((
"deep" +
"nesting"
))
_ = (
"contains + plus" +
"another string"
)
_ = (
"start"
# leading comment
+ "end"
)
_ = (
"start" +
# leading comment
"end"
)
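
For context on the cases above: the explicit-string-concatenation rule (ISC003) prefers implicit concatenation of adjacent string literals over joining them with `+`; the `+` only stays where a non-literal operand is involved. A minimal sketch (variable names illustrative):

    variable = "mid"
    joined = (
        "start"
        "end"
    )  # adjacent literals concatenate implicitly; no `+` needed
    mixed = "start" + variable + "end"  # `+` remains necessary around non-literals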


@@ -72,5 +72,3 @@ def not_warnings_dot_deprecated(
@not_warnings_dot_deprecated("Not warnings.deprecated, so this one *should* lead to PYI053 in a stub!")
def not_a_deprecated_function() -> None: ...
baz: str = t"51 character stringgggggggggggggggggggggggggggggggg"


@@ -80,7 +80,3 @@ x: TypeAlias = Literal["fooooooooooooooooooooooooooooooooooooooooooooooooooooooo
# Ok
y: TypeAlias = Annotated[int, "metadataaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"]
ttoo: str = t"50 character stringggggggggggggggggggggggggggggggg" # OK
tbar: str = t"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053


@@ -39,27 +39,3 @@ f'\'normal\' {f'nested'} normal' # Q003
f'\'normal\' {f'nested'} "double quotes"'
f'\'normal\' {f'\'nested\' {'other'} normal'} "double quotes"' # Q003
f'\'normal\' {f'\'nested\' {'other'} "double quotes"'} normal' # Q003
# Same as above, but with t-strings
t'This is a \'string\'' # Q003
t'This is \\ a \\\'string\'' # Q003
t'"This" is a \'string\''
f"This is a 'string'"
f"\"This\" is a 'string'"
fr'This is a \'string\''
fR'This is a \'string\''
foo = (
t'This is a'
t'\'string\'' # Q003
)
t'\'foo\' {'nested'}' # Q003
t'\'foo\' {t'nested'}' # Q003
t'\'foo\' {t'\'nested\''} \'\'' # Q003
t'normal {t'nested'} normal'
t'\'normal\' {t'nested'} normal' # Q003
t'\'normal\' {t'nested'} "double quotes"'
t'\'normal\' {t'\'nested\' {'other'} normal'} "double quotes"' # Q003
t'\'normal\' {t'\'nested\' {'other'} "double quotes"'} normal' # Q003


@@ -37,25 +37,3 @@ f"\"normal\" {f"nested"} normal" # Q003
f"\"normal\" {f"nested"} 'single quotes'"
f"\"normal\" {f"\"nested\" {"other"} normal"} 'single quotes'" # Q003
f"\"normal\" {f"\"nested\" {"other"} 'single quotes'"} normal" # Q003
# Same as above, but with t-strings
t"This is a \"string\""
t"'This' is a \"string\""
f'This is a "string"'
f'\'This\' is a "string"'
fr"This is a \"string\""
fR"This is a \"string\""
foo = (
t"This is a"
t"\"string\""
)
t"\"foo\" {"foo"}" # Q003
t"\"foo\" {t"foo"}" # Q003
t"\"foo\" {t"\"foo\""} \"\"" # Q003
t"normal {t"nested"} normal"
t"\"normal\" {t"nested"} normal" # Q003
t"\"normal\" {t"nested"} 'single quotes'"
t"\"normal\" {t"\"nested\" {"other"} normal"} 'single quotes'" # Q003
t"\"normal\" {t"\"nested\" {"other"} 'single quotes'"} normal" # Q003


@@ -1,15 +0,0 @@
import os
from pathlib import Path
os.symlink("usr/bin/python", "tmp/python")
os.symlink(b"usr/bin/python", b"tmp/python")
Path("tmp/python").symlink_to("usr/bin/python") # Ok
os.symlink("usr/bin/python", "tmp/python", target_is_directory=True)
os.symlink(b"usr/bin/python", b"tmp/python", target_is_directory=True)
Path("tmp/python").symlink_to("usr/bin/python", target_is_directory=True) # Ok
fd = os.open(".", os.O_RDONLY)
os.symlink("source.txt", "link.txt", dir_fd=fd) # Ok: dir_fd is not supported by pathlib
os.close(fd)


@@ -266,15 +266,3 @@ def f():
result = list() # this should be replaced with a comprehension
for i in values:
result.append(i + 1) # PERF401
def f():
src = [1]
dst = []
for i in src:
if True if True else False:
dst.append(i)
for i in src:
if lambda: 0:
dst.append(i)
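
For context on the PERF401 cases above: the rule suggests replacing an append-in-loop with a list comprehension (or `list.extend`). A minimal sketch of the rewrite for the flagged pattern:

    values = [1, 2, 3]
    result = [i + 1 for i in values]  # replaces `list()` plus per-item `append`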


@@ -151,16 +151,3 @@ def foo():
result = {}
for idx, name in indices, fruit:
result[name] = idx # PERF403
def foo():
src = (("x", 1),)
dst = {}
for k, v in src:
if True if True else False:
dst[k] = v
for k, v in src:
if lambda: 0:
dst[k] = v
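
For context on the PERF403 cases above: the rule suggests a dict comprehension when a loop does nothing but assign key/value pairs. A sketch of the rewrite, using `enumerate` for illustration:

    fruit = ["apple", "banana"]
    result = {name: idx for idx, name in enumerate(fruit)}  # replaces `{}` plus per-item assignment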


@@ -1,4 +1,4 @@
# Same as `W605_0.py` but using f-strings and t-strings instead.
# Same as `W605_0.py` but using f-strings instead.
#: W605:1:10
regex = f'\.png$'
@@ -66,72 +66,3 @@ s = f"TOTAL: {total}\nOK: {ok}\INCOMPLETE: {incomplete}\n"
# Debug text (should trigger)
t = f"{'\InHere'=}"
#: W605:1:10
regex = t'\.png$'
#: W605:2:1
regex = t'''
\.png$
'''
#: W605:2:6
f(
t'\_'
)
#: W605:4:6
t"""
multi-line
literal
with \_ somewhere
in the middle
"""
#: W605:1:38
value = t'new line\nand invalid escape \_ here'
#: Okay
regex = fr'\.png$'
regex = t'\\.png$'
regex = fr'''
\.png$
'''
regex = fr'''
\\.png$
'''
s = t'\\'
regex = t'\w' # noqa
regex = t'''
\w
''' # noqa
regex = t'\\\_'
value = t'\{{1}}'
value = t'\{1}'
value = t'{1:\}'
value = t"{t"\{1}"}"
value = rt"{t"\{1}"}"
# Okay
value = rt'\{{1}}'
value = rt'\{1}'
value = rt'{1:\}'
value = t"{rt"\{1}"}"
# Regression tests for https://github.com/astral-sh/ruff/issues/10434
t"{{}}+-\d"
t"\n{{}}+-\d+"
t"\n{{}}<7D>+-\d+"
# See https://github.com/astral-sh/ruff/issues/11491
total = 10
ok = 7
incomplete = 3
s = t"TOTAL: {total}\nOK: {ok}\INCOMPLETE: {incomplete}\n"
# Debug text (should trigger)
t = t"{'\InHere'=}"


@@ -9,11 +9,3 @@ class Foo:
yield 3
yield from 3
await f()
def _():
# Invalid yield scopes; but not outside a function
type X[T: (yield 1)] = int
type Y = (yield 2)
# Valid yield scope
yield 3


@@ -1,184 +0,0 @@
SEQ = "1,2,3"
class Foo(str):
class_str = "1,2,3"
def split(self, sep=None, maxsplit=-1) -> list[str]:
return super().split(sep, maxsplit)
class Bar():
split = "1,2,3"
# Errors
## Test split called directly on string literal
"1,2,3".split(",")[0] # [missing-maxsplit-arg]
"1,2,3".split(",")[-1] # [missing-maxsplit-arg]
"1,2,3".rsplit(",")[0] # [missing-maxsplit-arg]
"1,2,3".rsplit(",")[-1] # [missing-maxsplit-arg]
## Test split called on string variable
SEQ.split(",")[0] # [missing-maxsplit-arg]
SEQ.split(",")[-1] # [missing-maxsplit-arg]
SEQ.rsplit(",")[0] # [missing-maxsplit-arg]
SEQ.rsplit(",")[-1] # [missing-maxsplit-arg]
## Test split called on class attribute
Foo.class_str.split(",")[0] # [missing-maxsplit-arg]
Foo.class_str.split(",")[-1] # [missing-maxsplit-arg]
Foo.class_str.rsplit(",")[0] # [missing-maxsplit-arg]
Foo.class_str.rsplit(",")[-1] # [missing-maxsplit-arg]
## Test split called on sliced string
"1,2,3"[::-1].split(",")[0] # [missing-maxsplit-arg]
"1,2,3"[::-1][::-1].split(",")[0] # [missing-maxsplit-arg]
SEQ[:3].split(",")[0] # [missing-maxsplit-arg]
Foo.class_str[1:3].split(",")[-1] # [missing-maxsplit-arg]
"1,2,3"[::-1].rsplit(",")[0] # [missing-maxsplit-arg]
SEQ[:3].rsplit(",")[0] # [missing-maxsplit-arg]
Foo.class_str[1:3].rsplit(",")[-1] # [missing-maxsplit-arg]
## Test sep given as named argument
"1,2,3".split(sep=",")[0] # [missing-maxsplit-arg]
"1,2,3".split(sep=",")[-1] # [missing-maxsplit-arg]
"1,2,3".rsplit(sep=",")[0] # [missing-maxsplit-arg]
"1,2,3".rsplit(sep=",")[-1] # [missing-maxsplit-arg]
## Special cases
"1,2,3".split("\n")[0] # [missing-maxsplit-arg]
"1,2,3".split("split")[-1] # [missing-maxsplit-arg]
"1,2,3".rsplit("rsplit")[0] # [missing-maxsplit-arg]
## Test class attribute named split
Bar.split.split(",")[0] # [missing-maxsplit-arg]
Bar.split.split(",")[-1] # [missing-maxsplit-arg]
Bar.split.rsplit(",")[0] # [missing-maxsplit-arg]
Bar.split.rsplit(",")[-1] # [missing-maxsplit-arg]
## Test unpacked dict literal kwargs
"1,2,3".split(**{"sep": ","})[0] # [missing-maxsplit-arg]
# OK
## Test not accessing the first or last element
### Test split called directly on string literal
"1,2,3".split(",")[1]
"1,2,3".split(",")[-2]
"1,2,3".rsplit(",")[1]
"1,2,3".rsplit(",")[-2]
### Test split called on string variable
SEQ.split(",")[1]
SEQ.split(",")[-2]
SEQ.rsplit(",")[1]
SEQ.rsplit(",")[-2]
### Test split called on class attribute
Foo.class_str.split(",")[1]
Foo.class_str.split(",")[-2]
Foo.class_str.rsplit(",")[1]
Foo.class_str.rsplit(",")[-2]
### Test split called on sliced string
"1,2,3"[::-1].split(",")[1]
SEQ[:3].split(",")[1]
Foo.class_str[1:3].split(",")[-2]
"1,2,3"[::-1].rsplit(",")[1]
SEQ[:3].rsplit(",")[1]
Foo.class_str[1:3].rsplit(",")[-2]
### Test sep given as named argument
"1,2,3".split(sep=",")[1]
"1,2,3".split(sep=",")[-2]
"1,2,3".rsplit(sep=",")[1]
"1,2,3".rsplit(sep=",")[-2]
## Test varying maxsplit argument
### str.split() tests
"1,2,3".split(sep=",", maxsplit=1)[-1]
"1,2,3".split(sep=",", maxsplit=1)[0]
"1,2,3".split(sep=",", maxsplit=2)[-1]
"1,2,3".split(sep=",", maxsplit=2)[0]
"1,2,3".split(sep=",", maxsplit=2)[1]
### str.rsplit() tests
"1,2,3".rsplit(sep=",", maxsplit=1)[-1]
"1,2,3".rsplit(sep=",", maxsplit=1)[0]
"1,2,3".rsplit(sep=",", maxsplit=2)[-1]
"1,2,3".rsplit(sep=",", maxsplit=2)[0]
"1,2,3".rsplit(sep=",", maxsplit=2)[1]
## Test user-defined split
Foo("1,2,3").split(",")[0]
Foo("1,2,3").split(",")[-1]
Foo("1,2,3").rsplit(",")[0]
Foo("1,2,3").rsplit(",")[-1]
## Test split called on sliced list
["1", "2", "3"][::-1].split(",")[0]
## Test class attribute named split
Bar.split[0]
Bar.split[-1]
Bar.split[0]
Bar.split[-1]
## Test unpacked dict literal kwargs
"1,2,3".split(",", **{"maxsplit": 1})[0]
"1,2,3".split(**{"sep": ",", "maxsplit": 1})[0]
# TODO
## Test variable split result index
## TODO: These require the ability to resolve a variable name to a value
# Errors
result_index = 0
"1,2,3".split(",")[result_index] # TODO: [missing-maxsplit-arg]
result_index = -1
"1,2,3".split(",")[result_index] # TODO: [missing-maxsplit-arg]
# OK
result_index = 1
"1,2,3".split(",")[result_index]
result_index = -2
"1,2,3".split(",")[result_index]
## Test split result index modified in loop
## TODO: These require the ability to recognize being in a loop where:
## - the result of split called on a string is indexed by a variable
## - the variable index above is modified
# OK
result_index = 0
for j in range(3):
print(SEQ.split(",")[result_index])
result_index = result_index + 1
## Test accessor
## TODO: These require the ability to get the return type of a method
## (possibly via `typing::is_string`)
class Baz():
def __init__(self):
self.my_str = "1,2,3"
def get_string(self) -> str:
return self.my_str
# Errors
Baz().get_string().split(",")[0] # TODO: [missing-maxsplit-arg]
Baz().get_string().split(",")[-1] # TODO: [missing-maxsplit-arg]
# OK
Baz().get_string().split(",")[1]
Baz().get_string().split(",")[-2]
## Test unpacked dict instance kwargs
## TODO: These require the ability to resolve a dict variable name to a value
# Errors
kwargs_without_maxsplit = {"seq": ","}
"1,2,3".split(**kwargs_without_maxsplit)[0] # TODO: [missing-maxsplit-arg]
# OK
kwargs_with_maxsplit = {"maxsplit": 1}
"1,2,3".split(",", **kwargs_with_maxsplit)[0] # TODO: false positive
kwargs_with_maxsplit = {"sep": ",", "maxsplit": 1}
"1,2,3".split(**kwargs_with_maxsplit)[0] # TODO: false positive


@@ -12,4 +12,3 @@ if True:
if True:
from __future__ import generator_stop
from __future__ import invalid_module, generators
from __future__ import generators # comment


@@ -1,84 +0,0 @@
class A:
...
class A(metaclass=type):
...
class A(
metaclass=type
):
...
class A(
metaclass=type
#
):
...
class A(
#
metaclass=type
):
...
class A(
metaclass=type,
#
):
...
class A(
#
metaclass=type,
#
):
...
class B(A, metaclass=type):
...
class B(
A,
metaclass=type,
):
...
class B(
A,
# comment
metaclass=type,
):
...
def foo():
class A(metaclass=type):
...
class A(
metaclass=type # comment
,
):
...
type = str
class Foo(metaclass=type):
...
import builtins
class A(metaclass=builtins.type):
...
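
For context on the fixture above: `metaclass=type` merely restates the default, so the useless-class-metaclass-type rule's fix drops the keyword (and the parentheses when nothing else remains); the `type = str` rebinding and `builtins.type` cases probe whether the name really resolves to the builtin. A minimal sketch:

    class A:  # was: class A(metaclass=type)
        ...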


@@ -43,6 +43,7 @@ def func():
import builtins
with builtins.open("FURB129.py") as f:
for line in f.readlines():
pass
@@ -50,6 +51,7 @@ with builtins.open("FURB129.py") as f:
from builtins import open as o
with o("FURB129.py") as f:
for line in f.readlines():
pass
@@ -87,18 +89,3 @@ with open("FURB129.py") as f:
pass
for _not_line in f.readline():
pass
# https://github.com/astral-sh/ruff/issues/18231
with open("furb129.py") as f:
for line in (f).readlines():
pass
with open("furb129.py") as f:
[line for line in (f).readlines()]
with open("furb129.py") as f:
for line in (((f))).readlines():
pass
for line in(f).readlines():
pass
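
For context on the FURB129 cases above: file objects already iterate line by line, and `readlines()` needlessly materializes the whole file first. A minimal sketch of the fix:

    with open("FURB129.py") as f:
        for line in f:  # iterate the file object directly
            pass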


@@ -1,53 +0,0 @@
# Errors.
if 1 in set([1]):
print("Single-element set")
if 1 in set((1,)):
print("Single-element set")
if 1 in set({1}):
print("Single-element set")
if 1 in frozenset([1]):
print("Single-element set")
if 1 in frozenset((1,)):
print("Single-element set")
if 1 in frozenset({1}):
print("Single-element set")
if 1 in set(set([1])):
print('Recursive solution')
# Non-errors.
if 1 in set((1, 2)):
pass
if 1 in set([1, 2]):
pass
if 1 in set({1, 2}):
pass
if 1 in frozenset((1, 2)):
pass
if 1 in frozenset([1, 2]):
pass
if 1 in frozenset({1, 2}):
pass
if 1 in set(1,):
pass
if 1 in set(1,2):
pass
if 1 in set((x for x in range(2))):
pass
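
For context on the fixture above: a membership test against a single-element container reduces to a plain comparison, which is the suggested rewrite for the flagged cases. A minimal sketch:

    x = 1
    if x == 1:  # was: if x in set([1])
        print("Single-element set")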


@@ -1,6 +1,6 @@
use ruff_diagnostics::{Diagnostic, Fix};
use ruff_text_size::Ranged;
use crate::Fix;
use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::rules::{
@@ -38,64 +38,96 @@ pub(crate) fn bindings(checker: &Checker) {
.dummy_variable_rgx
.is_match(binding.name(checker.source()))
{
checker
.report_diagnostic(
pyflakes::rules::UnusedVariable {
name: binding.name(checker.source()).to_string(),
},
binding.range(),
)
.try_set_fix(|| {
pyflakes::fixes::remove_exception_handler_assignment(
binding,
checker.locator,
)
let mut diagnostic = Diagnostic::new(
pyflakes::rules::UnusedVariable {
name: binding.name(checker.source()).to_string(),
},
binding.range(),
);
diagnostic.try_set_fix(|| {
pyflakes::fixes::remove_exception_handler_assignment(binding, checker.locator)
.map(Fix::safe_edit)
});
});
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::InvalidAllFormat) {
pylint::rules::invalid_all_format(checker, binding);
if let Some(diagnostic) = pylint::rules::invalid_all_format(binding) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::InvalidAllObject) {
pylint::rules::invalid_all_object(checker, binding);
if let Some(diagnostic) = pylint::rules::invalid_all_object(binding) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::NonAsciiName) {
pylint::rules::non_ascii_name(checker, binding);
if let Some(diagnostic) = pylint::rules::non_ascii_name(binding, checker.locator) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::UnconventionalImportAlias) {
flake8_import_conventions::rules::unconventional_import_alias(
if let Some(diagnostic) = flake8_import_conventions::rules::unconventional_import_alias(
checker,
binding,
&checker.settings.flake8_import_conventions.aliases,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::UnaliasedCollectionsAbcSetImport) {
flake8_pyi::rules::unaliased_collections_abc_set_import(checker, binding);
if let Some(diagnostic) =
flake8_pyi::rules::unaliased_collections_abc_set_import(checker, binding)
{
checker.report_diagnostic(diagnostic);
}
}
if !checker.source_type.is_stub() && checker.enabled(Rule::UnquotedTypeAlias) {
flake8_type_checking::rules::unquoted_type_alias(checker, binding);
if let Some(diagnostics) =
flake8_type_checking::rules::unquoted_type_alias(checker, binding)
{
checker.report_diagnostics(diagnostics);
}
}
if checker.enabled(Rule::UnsortedDunderSlots) {
ruff::rules::sort_dunder_slots(checker, binding);
if let Some(diagnostic) = ruff::rules::sort_dunder_slots(checker, binding) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::UsedDummyVariable) {
ruff::rules::used_dummy_variable(checker, binding, binding_id);
if let Some(diagnostic) = ruff::rules::used_dummy_variable(checker, binding, binding_id)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::AssignmentInAssert) {
ruff::rules::assignment_in_assert(checker, binding);
if let Some(diagnostic) = ruff::rules::assignment_in_assert(checker, binding) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::PytestUnittestRaisesAssertion) {
flake8_pytest_style::rules::unittest_raises_assertion_binding(checker, binding);
if let Some(diagnostic) =
flake8_pytest_style::rules::unittest_raises_assertion_binding(checker, binding)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::ForLoopWrites) {
refurb::rules::for_loop_writes_binding(checker, binding);
if let Some(diagnostic) = refurb::rules::for_loop_writes_binding(checker, binding) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::CustomTypeVarForSelf) {
flake8_pyi::rules::custom_type_var_instead_of_self(checker, binding);
if let Some(diagnostic) =
flake8_pyi::rules::custom_type_var_instead_of_self(checker, binding)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::PrivateTypeParameter) {
pyupgrade::rules::private_type_parameter(checker, binding);
if let Some(diagnostic) = pyupgrade::rules::private_type_parameter(checker, binding) {
checker.report_diagnostic(diagnostic);
}
}
}
}


@@ -1,9 +1,9 @@
use ruff_diagnostics::{Diagnostic, Fix};
use ruff_python_semantic::analyze::visibility;
use ruff_python_semantic::{Binding, BindingKind, Imported, ResolvedReference, ScopeKind};
use ruff_text_size::Ranged;
use rustc_hash::FxHashMap;
use crate::Fix;
use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::fix;
@@ -112,12 +112,12 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
.map(|id| checker.semantic.reference(*id))
.all(ResolvedReference::is_load)
{
checker.report_diagnostic(
checker.report_diagnostic(Diagnostic::new(
pylint::rules::GlobalVariableNotAssigned {
name: (*name).to_string(),
},
binding.range(),
);
));
}
}
}
@@ -146,12 +146,12 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
if scope.kind.is_generator() {
continue;
}
checker.report_diagnostic(
checker.report_diagnostic(Diagnostic::new(
pylint::rules::RedefinedArgumentFromLocal {
name: name.to_string(),
},
binding.range(),
);
));
}
}
}
@@ -186,13 +186,13 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
continue;
}
checker.report_diagnostic(
checker.report_diagnostic(Diagnostic::new(
pyflakes::rules::ImportShadowedByLoopVar {
name: name.to_string(),
row: checker.compute_source_row(shadowed.start()),
},
binding.range(),
);
));
}
}
}
@@ -331,7 +331,7 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
// Create diagnostics for each statement.
for (source, entries) in &redefinitions {
for (shadowed, binding) in entries {
let mut diagnostic = checker.report_diagnostic(
let mut diagnostic = Diagnostic::new(
pyflakes::rules::RedefinedWhileUnused {
name: binding.name(checker.source()).to_string(),
row: checker.compute_source_row(shadowed.start()),
@@ -346,6 +346,8 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
if let Some(fix) = source.as_ref().and_then(|source| fixes.get(source)) {
diagnostic.set_fix(fix.clone());
}
checker.report_diagnostic(diagnostic);
}
}
}


@@ -137,7 +137,11 @@ pub(crate) fn definitions(checker: &mut Checker) {
&checker.semantic,
)
}) {
flake8_annotations::rules::definition(checker, definition, *visibility);
checker.report_diagnostics(flake8_annotations::rules::definition(
checker,
definition,
*visibility,
));
}
overloaded_name =
flake8_annotations::helpers::overloaded_name(definition, &checker.semantic);


@@ -17,7 +17,14 @@ pub(crate) fn except_handler(except_handler: &ExceptHandler, checker: &Checker)
range: _,
}) => {
if checker.enabled(Rule::BareExcept) {
pycodestyle::rules::bare_except(checker, type_.as_deref(), body, except_handler);
if let Some(diagnostic) = pycodestyle::rules::bare_except(
type_.as_deref(),
body,
except_handler,
checker.locator,
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::RaiseWithoutFromInsideExcept) {
flake8_bugbear::rules::raise_without_from_inside_except(


@@ -1,6 +1,8 @@
use ruff_python_ast::{self as ast, Arguments, Expr, ExprContext, Operator};
use ruff_python_literal::cformat::{CFormatError, CFormatErrorType};
use ruff_diagnostics::Diagnostic;
use ruff_python_ast::types::Node;
use ruff_python_semantic::ScopeKind;
use ruff_python_semantic::analyze::typing;
@@ -176,9 +178,6 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
if checker.enabled(Rule::Airflow3Removal) {
airflow::rules::airflow_3_removal_expr(checker, expr);
}
if checker.enabled(Rule::MissingMaxsplitArg) {
pylint::rules::missing_maxsplit_arg(checker, value, slice, expr);
}
pandas_vet::rules::subscript(checker, value, expr);
}
Expr::Tuple(ast::ExprTuple {
@@ -196,13 +195,14 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
let check_too_many_expressions = checker.enabled(Rule::ExpressionsInStarAssignment);
let check_two_starred_expressions =
checker.enabled(Rule::MultipleStarredExpressions);
pyflakes::rules::starred_expressions(
checker,
if let Some(diagnostic) = pyflakes::rules::starred_expressions(
elts,
check_too_many_expressions,
check_two_starred_expressions,
expr.range(),
);
) {
checker.report_diagnostic(diagnostic);
}
}
}
Expr::Name(ast::ExprName { id, ctx, range }) => {
@@ -527,12 +527,12 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
match pyflakes::format::FormatSummary::try_from(string_value.to_str()) {
Err(e) => {
if checker.enabled(Rule::StringDotFormatInvalidFormat) {
checker.report_diagnostic(
checker.report_diagnostic(Diagnostic::new(
pyflakes::rules::StringDotFormatInvalidFormat {
message: pyflakes::format::error_to_string(&e),
},
location,
);
));
}
}
Ok(summary) => {
@@ -936,7 +936,9 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
pylint::rules::repeated_keyword_argument(checker, call);
}
if checker.enabled(Rule::PytestPatchWithLambda) {
flake8_pytest_style::rules::patch_with_lambda(checker, call);
if let Some(diagnostic) = flake8_pytest_style::rules::patch_with_lambda(call) {
checker.report_diagnostic(diagnostic);
}
}
if checker.any_enabled(&[
Rule::PytestParametrizeNamesWrongType,
@@ -1039,7 +1041,6 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
Rule::OsPathGetctime,
Rule::Glob,
Rule::OsListdir,
Rule::OsSymlink,
]) {
flake8_use_pathlib::rules::replaceable_by_pathlib(checker, call);
}
@@ -1284,22 +1285,22 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
..
}) => {
if checker.enabled(Rule::PercentFormatUnsupportedFormatCharacter) {
checker.report_diagnostic(
checker.report_diagnostic(Diagnostic::new(
pyflakes::rules::PercentFormatUnsupportedFormatCharacter {
char: c,
},
location,
);
));
}
}
Err(e) => {
if checker.enabled(Rule::PercentFormatInvalidFormat) {
checker.report_diagnostic(
checker.report_diagnostic(Diagnostic::new(
pyflakes::rules::PercentFormatInvalidFormat {
message: e.to_string(),
},
location,
);
));
}
}
Ok(summary) => {
@@ -1363,7 +1364,13 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
op: Operator::Add, ..
}) => {
if checker.enabled(Rule::ExplicitStringConcatenation) {
flake8_implicit_str_concat::rules::explicit(checker, expr);
if let Some(diagnostic) = flake8_implicit_str_concat::rules::explicit(
expr,
checker.locator,
checker.settings,
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::CollectionLiteralConcatenation) {
ruff::rules::collection_literal_concatenation(checker, expr);


@@ -1,3 +1,4 @@
use ruff_diagnostics::Diagnostic;
use ruff_python_ast::helpers;
use ruff_python_ast::types::Node;
use ruff_python_ast::{self as ast, Expr, Stmt};
@@ -38,12 +39,12 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
if !checker.semantic.scope_id.is_global() {
for name in names {
if checker.semantic.nonlocal(name).is_none() {
checker.report_diagnostic(
checker.report_diagnostic(Diagnostic::new(
pylint::rules::NonlocalWithoutBinding {
name: name.to_string(),
},
name.range(),
);
));
}
}
}
@@ -54,20 +55,22 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
Stmt::Break(_) => {
if checker.enabled(Rule::BreakOutsideLoop) {
pyflakes::rules::break_outside_loop(
checker,
if let Some(diagnostic) = pyflakes::rules::break_outside_loop(
stmt,
&mut checker.semantic.current_statements().skip(1),
);
) {
checker.report_diagnostic(diagnostic);
}
}
}
Stmt::Continue(_) => {
if checker.enabled(Rule::ContinueOutsideLoop) {
pyflakes::rules::continue_outside_loop(
checker,
if let Some(diagnostic) = pyflakes::rules::continue_outside_loop(
stmt,
&mut checker.semantic.current_statements().skip(1),
);
) {
checker.report_diagnostic(diagnostic);
}
}
}
Stmt::FunctionDef(
@@ -95,7 +98,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
fastapi::rules::fastapi_unused_path_parameter(checker, function_def);
}
if checker.enabled(Rule::AmbiguousFunctionName) {
pycodestyle::rules::ambiguous_function_name(checker, name);
if let Some(diagnostic) = pycodestyle::rules::ambiguous_function_name(name) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::InvalidBoolReturnType) {
pylint::rules::invalid_bool_return(checker, function_def);
@@ -116,14 +121,15 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::invalid_str_return(checker, function_def);
}
if checker.enabled(Rule::InvalidFunctionName) {
pep8_naming::rules::invalid_function_name(
checker,
if let Some(diagnostic) = pep8_naming::rules::invalid_function_name(
stmt,
name,
decorator_list,
&checker.settings.pep8_naming.ignore_names,
&checker.semantic,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.source_type.is_stub() {
if checker.enabled(Rule::PassStatementStubBody) {
@@ -173,13 +179,14 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
flake8_pyi::rules::pep_484_positional_parameter(checker, function_def);
}
if checker.enabled(Rule::DunderFunctionName) {
pep8_naming::rules::dunder_function_name(
checker,
if let Some(diagnostic) = pep8_naming::rules::dunder_function_name(
checker.semantic.current_scope(),
stmt,
name,
&checker.settings.pep8_naming.ignore_names,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::GlobalStatement) {
pylint::rules::global_statement(checker, name);
@@ -224,13 +231,14 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
);
}
if checker.enabled(Rule::ComplexStructure) {
mccabe::rules::function_is_too_complex(
checker,
if let Some(diagnostic) = mccabe::rules::function_is_too_complex(
stmt,
name,
body,
checker.settings.mccabe.max_complexity,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::HardcodedPasswordDefault) {
flake8_bandit::rules::hardcoded_password_default(checker, parameters);
@@ -250,28 +258,31 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::too_many_positional_arguments(checker, function_def);
}
if checker.enabled(Rule::TooManyReturnStatements) {
pylint::rules::too_many_return_statements(
checker,
if let Some(diagnostic) = pylint::rules::too_many_return_statements(
stmt,
body,
checker.settings.pylint.max_returns,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::TooManyBranches) {
pylint::rules::too_many_branches(
checker,
if let Some(diagnostic) = pylint::rules::too_many_branches(
stmt,
body,
checker.settings.pylint.max_branches,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::TooManyStatements) {
pylint::rules::too_many_statements(
checker,
if let Some(diagnostic) = pylint::rules::too_many_statements(
stmt,
body,
checker.settings.pylint.max_statements,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.any_enabled(&[
Rule::PytestFixtureIncorrectParenthesesStyle,
@@ -428,9 +439,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
if checker.enabled(Rule::UselessObjectInheritance) {
pyupgrade::rules::useless_object_inheritance(checker, class_def);
}
if checker.enabled(Rule::UselessClassMetaclassType) {
pyupgrade::rules::useless_class_metaclass_type(checker, class_def);
}
if checker.enabled(Rule::ReplaceStrEnum) {
if checker.target_version() >= PythonVersion::PY311 {
pyupgrade::rules::replace_str_enum(checker, class_def);
@@ -440,24 +448,28 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pyupgrade::rules::unnecessary_class_parentheses(checker, class_def);
}
if checker.enabled(Rule::AmbiguousClassName) {
pycodestyle::rules::ambiguous_class_name(checker, name);
if let Some(diagnostic) = pycodestyle::rules::ambiguous_class_name(name) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::InvalidClassName) {
pep8_naming::rules::invalid_class_name(
checker,
if let Some(diagnostic) = pep8_naming::rules::invalid_class_name(
stmt,
name,
&checker.settings.pep8_naming.ignore_names,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::ErrorSuffixOnExceptionName) {
pep8_naming::rules::error_suffix_on_exception_name(
checker,
if let Some(diagnostic) = pep8_naming::rules::error_suffix_on_exception_name(
stmt,
arguments.as_deref(),
name,
&checker.settings.pep8_naming.ignore_names,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if !checker.source_type.is_stub() {
if checker.any_enabled(&[
@@ -596,7 +608,11 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
if checker.enabled(Rule::Debugger) {
flake8_debugger::rules::debugger_import(checker, stmt, None, &alias.name);
if let Some(diagnostic) =
flake8_debugger::rules::debugger_import(stmt, None, &alias.name)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::BannedApi) {
flake8_tidy_imports::rules::banned_api(
@@ -619,74 +635,94 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::manual_from_import(checker, stmt, alias, names);
}
if checker.enabled(Rule::ImportSelf) {
pylint::rules::import_self(checker, alias, checker.module.qualified_name());
if let Some(diagnostic) =
pylint::rules::import_self(alias, checker.module.qualified_name())
{
checker.report_diagnostic(diagnostic);
}
}
if let Some(asname) = &alias.asname {
let name = alias.name.split('.').next_back().unwrap();
if checker.enabled(Rule::ConstantImportedAsNonConstant) {
pep8_naming::rules::constant_imported_as_non_constant(
checker,
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
if let Some(diagnostic) =
pep8_naming::rules::constant_imported_as_non_constant(
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::LowercaseImportedAsNonLowercase) {
pep8_naming::rules::lowercase_imported_as_non_lowercase(
checker,
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
if let Some(diagnostic) =
pep8_naming::rules::lowercase_imported_as_non_lowercase(
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::CamelcaseImportedAsLowercase) {
pep8_naming::rules::camelcase_imported_as_lowercase(
checker,
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
if let Some(diagnostic) =
pep8_naming::rules::camelcase_imported_as_lowercase(
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::CamelcaseImportedAsConstant) {
pep8_naming::rules::camelcase_imported_as_constant(
checker,
if let Some(diagnostic) = pep8_naming::rules::camelcase_imported_as_constant(
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::CamelcaseImportedAsAcronym) {
pep8_naming::rules::camelcase_imported_as_acronym(
if let Some(diagnostic) = pep8_naming::rules::camelcase_imported_as_acronym(
name, asname, alias, stmt, checker,
);
) {
checker.report_diagnostic(diagnostic);
}
}
}
if checker.enabled(Rule::BannedImportAlias) {
if let Some(asname) = &alias.asname {
flake8_import_conventions::rules::banned_import_alias(
checker,
stmt,
&alias.name,
asname,
&checker.settings.flake8_import_conventions.banned_aliases,
);
if let Some(diagnostic) =
flake8_import_conventions::rules::banned_import_alias(
stmt,
&alias.name,
asname,
&checker.settings.flake8_import_conventions.banned_aliases,
)
{
checker.report_diagnostic(diagnostic);
}
}
}
if checker.enabled(Rule::PytestIncorrectPytestImport) {
flake8_pytest_style::rules::import(
checker,
if let Some(diagnostic) = flake8_pytest_style::rules::import(
stmt,
&alias.name,
alias.asname.as_deref(),
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::BuiltinImportShadowing) {
flake8_builtins::rules::builtin_import_shadowing(checker, alias);
@@ -798,7 +834,11 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
if checker.enabled(Rule::PytestIncorrectPytestImport) {
flake8_pytest_style::rules::import_from(checker, stmt, module, level);
if let Some(diagnostic) =
flake8_pytest_style::rules::import_from(stmt, module, level)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.source_type.is_stub() {
if checker.enabled(Rule::FutureAnnotationsInStub) {
@@ -813,98 +853,119 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
} else if &alias.name == "*" {
if checker.enabled(Rule::UndefinedLocalWithNestedImportStarUsage) {
if !matches!(checker.semantic.current_scope().kind, ScopeKind::Module) {
checker.report_diagnostic(
checker.report_diagnostic(Diagnostic::new(
pyflakes::rules::UndefinedLocalWithNestedImportStarUsage {
name: helpers::format_import_from(level, module).to_string(),
},
stmt.range(),
);
));
}
}
if checker.enabled(Rule::UndefinedLocalWithImportStar) {
checker.report_diagnostic(
checker.report_diagnostic(Diagnostic::new(
pyflakes::rules::UndefinedLocalWithImportStar {
name: helpers::format_import_from(level, module).to_string(),
},
stmt.range(),
);
));
}
}
if checker.enabled(Rule::RelativeImports) {
flake8_tidy_imports::rules::banned_relative_import(
if let Some(diagnostic) = flake8_tidy_imports::rules::banned_relative_import(
checker,
stmt,
level,
module,
checker.module.qualified_name(),
checker.settings.flake8_tidy_imports.ban_relative_imports,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::Debugger) {
flake8_debugger::rules::debugger_import(checker, stmt, module, &alias.name);
if let Some(diagnostic) =
flake8_debugger::rules::debugger_import(stmt, module, &alias.name)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::BannedImportAlias) {
if let Some(asname) = &alias.asname {
let qualified_name =
helpers::format_import_from_member(level, module, &alias.name);
flake8_import_conventions::rules::banned_import_alias(
checker,
stmt,
&qualified_name,
asname,
&checker.settings.flake8_import_conventions.banned_aliases,
);
if let Some(diagnostic) =
flake8_import_conventions::rules::banned_import_alias(
stmt,
&qualified_name,
asname,
&checker.settings.flake8_import_conventions.banned_aliases,
)
{
checker.report_diagnostic(diagnostic);
}
}
}
if let Some(asname) = &alias.asname {
if checker.enabled(Rule::ConstantImportedAsNonConstant) {
pep8_naming::rules::constant_imported_as_non_constant(
checker,
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
if let Some(diagnostic) =
pep8_naming::rules::constant_imported_as_non_constant(
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::LowercaseImportedAsNonLowercase) {
pep8_naming::rules::lowercase_imported_as_non_lowercase(
checker,
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
if let Some(diagnostic) =
pep8_naming::rules::lowercase_imported_as_non_lowercase(
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::CamelcaseImportedAsLowercase) {
pep8_naming::rules::camelcase_imported_as_lowercase(
checker,
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
if let Some(diagnostic) =
pep8_naming::rules::camelcase_imported_as_lowercase(
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::CamelcaseImportedAsConstant) {
pep8_naming::rules::camelcase_imported_as_constant(
checker,
if let Some(diagnostic) = pep8_naming::rules::camelcase_imported_as_constant(
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::CamelcaseImportedAsAcronym) {
pep8_naming::rules::camelcase_imported_as_acronym(
if let Some(diagnostic) = pep8_naming::rules::camelcase_imported_as_acronym(
&alias.name,
asname,
alias,
stmt,
checker,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if !checker.source_type.is_stub() {
if checker.enabled(Rule::UselessImportAlias) {
@@ -917,21 +978,23 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
}
if checker.enabled(Rule::ImportSelf) {
pylint::rules::import_from_self(
checker,
if let Some(diagnostic) = pylint::rules::import_from_self(
level,
module,
names,
checker.module.qualified_name(),
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::BannedImportFrom) {
flake8_import_conventions::rules::banned_import_from(
checker,
if let Some(diagnostic) = flake8_import_conventions::rules::banned_import_from(
stmt,
&helpers::format_import_from(level, module),
&checker.settings.flake8_import_conventions.banned_from,
);
) {
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::ByteStringUsage) {
flake8_pyi::rules::bytestring_import(checker, import_from);
@@ -1146,7 +1209,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
) => {
if !checker.semantic.in_type_checking_block() {
if checker.enabled(Rule::Assert) {
flake8_bandit::rules::assert_used(checker, stmt);
checker.report_diagnostic(flake8_bandit::rules::assert_used(stmt));
}
}
if checker.enabled(Rule::AssertTuple) {
@@ -1343,7 +1406,11 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
}
if checker.enabled(Rule::DefaultExceptNotLast) {
pyflakes::rules::default_except_not_last(checker, handlers, checker.locator);
if let Some(diagnostic) =
pyflakes::rules::default_except_not_last(handlers, checker.locator)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.any_enabled(&[
Rule::DuplicateHandlerException,
@@ -1438,7 +1505,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
);
}
if checker.enabled(Rule::PandasDfVariableName) {
pandas_vet::rules::assignment_to_df(checker, targets);
if let Some(diagnostic) = pandas_vet::rules::assignment_to_df(targets) {
checker.report_diagnostic(diagnostic);
}
}
if checker
.settings
@@ -1632,7 +1701,11 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::named_expr_without_context(checker, value);
}
if checker.enabled(Rule::AsyncioDanglingTask) {
ruff::rules::asyncio_dangling_task(checker, value, checker.semantic());
if let Some(diagnostic) =
ruff::rules::asyncio_dangling_task(value, checker.semantic())
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::RepeatedAppend) {
refurb::rules::repeated_append(checker, stmt);


@@ -1,3 +1,4 @@
use ruff_diagnostics::Diagnostic;
use ruff_python_semantic::Exceptions;
use ruff_python_stdlib::builtins::version_builtin_was_added;
@@ -14,12 +15,12 @@ pub(crate) fn unresolved_references(checker: &Checker) {
for reference in checker.semantic.unresolved_references() {
if reference.is_wildcard_import() {
if checker.enabled(Rule::UndefinedLocalWithImportStarUsage) {
checker.report_diagnostic(
checker.report_diagnostic(Diagnostic::new(
pyflakes::rules::UndefinedLocalWithImportStarUsage {
name: reference.name(checker.source()).to_string(),
},
reference.range(),
);
));
}
} else {
if checker.enabled(Rule::UndefinedName) {
@@ -41,13 +42,13 @@ pub(crate) fn unresolved_references(checker: &Checker) {
let symbol_name = reference.name(checker.source());
checker.report_diagnostic(
checker.report_diagnostic(Diagnostic::new(
pyflakes::rules::UndefinedName {
name: symbol_name.to_string(),
minor_version_builtin_added: version_builtin_was_added(symbol_name),
},
reference.range(),
);
));
}
}
}


@@ -26,9 +26,12 @@ use std::path::Path;
use itertools::Itertools;
use log::debug;
use ruff_python_parser::semantic_errors::{
SemanticSyntaxChecker, SemanticSyntaxContext, SemanticSyntaxError, SemanticSyntaxErrorKind,
};
use rustc_hash::{FxHashMap, FxHashSet};
use ruff_diagnostics::IsolationLevel;
use ruff_diagnostics::{Diagnostic, Edit, IsolationLevel};
use ruff_notebook::{CellOffsets, NotebookIndex};
use ruff_python_ast::helpers::{collect_import_from_member, is_docstring_stmt, to_module_path};
use ruff_python_ast::identifier::Identifier;
@@ -37,15 +40,12 @@ use ruff_python_ast::str::Quote;
use ruff_python_ast::visitor::{Visitor, walk_except_handler, walk_pattern};
use ruff_python_ast::{
self as ast, AnyParameterRef, ArgOrKeyword, Comprehension, ElifElseClause, ExceptHandler, Expr,
ExprContext, InterpolatedStringElement, Keyword, MatchCase, ModModule, Parameter, Parameters,
Pattern, PythonVersion, Stmt, Suite, UnaryOp,
ExprContext, FStringElement, Keyword, MatchCase, ModModule, Parameter, Parameters, Pattern,
PythonVersion, Stmt, Suite, UnaryOp,
};
use ruff_python_ast::{PySourceType, helpers, str, visitor};
use ruff_python_codegen::{Generator, Stylist};
use ruff_python_index::Indexer;
use ruff_python_parser::semantic_errors::{
SemanticSyntaxChecker, SemanticSyntaxContext, SemanticSyntaxError, SemanticSyntaxErrorKind,
};
use ruff_python_parser::typing::{AnnotationKind, ParsedAnnotation, parse_type_annotation};
use ruff_python_parser::{ParseError, Parsed, Tokens};
use ruff_python_semantic::all::{DunderAllDefinition, DunderAllFlags};
@@ -57,7 +57,7 @@ use ruff_python_semantic::{
};
use ruff_python_stdlib::builtins::{MAGIC_GLOBALS, python_builtins};
use ruff_python_trivia::CommentRanges;
use ruff_source_file::{OneIndexed, SourceFile, SourceRow};
use ruff_source_file::{OneIndexed, SourceRow};
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::checkers::ast::annotation::AnnotationContext;
@@ -66,14 +66,13 @@ use crate::importer::{ImportRequest, Importer, ResolutionError};
use crate::noqa::NoqaMapping;
use crate::package::PackageRoot;
use crate::preview::{is_semantic_errors_enabled, is_undefined_export_in_dunder_init_enabled};
use crate::registry::{AsRule, Rule};
use crate::registry::Rule;
use crate::rules::pyflakes::rules::{
LateFutureImport, ReturnOutsideFunction, YieldOutsideFunction,
};
use crate::rules::pylint::rules::{AwaitOutsideAsync, LoadBeforeGlobalDeclaration};
use crate::rules::{flake8_pyi, flake8_type_checking, pyflakes, pyupgrade};
use crate::settings::{LinterSettings, TargetVersion, flags};
use crate::{Edit, OldDiagnostic, Violation};
use crate::{Locator, docstrings, noqa};
mod analyze;
@@ -224,6 +223,8 @@ pub(crate) struct Checker<'a> {
visit: deferred::Visit<'a>,
/// A set of deferred nodes to be analyzed after the AST traversal (e.g., `for` loops).
analyze: deferred::Analyze,
/// The cumulative set of diagnostics computed across all lint rules.
diagnostics: RefCell<Vec<Diagnostic>>,
/// The list of names already seen by flake8-bugbear diagnostics, to avoid duplicate violations.
flake8_bugbear_seen: RefCell<FxHashSet<TextRange>>,
/// The end offset of the last visited statement.
@@ -237,7 +238,6 @@ pub(crate) struct Checker<'a> {
semantic_checker: SemanticSyntaxChecker,
/// Errors collected by the `semantic_checker`.
semantic_errors: RefCell<Vec<SemanticSyntaxError>>,
context: &'a LintContext<'a>,
}
impl<'a> Checker<'a> {
@@ -258,7 +258,6 @@ impl<'a> Checker<'a> {
cell_offsets: Option<&'a CellOffsets>,
notebook_index: Option<&'a NotebookIndex>,
target_version: TargetVersion,
context: &'a LintContext<'a>,
) -> Checker<'a> {
let semantic = SemanticModel::new(&settings.typing_modules, path, module);
Self {
@@ -279,6 +278,7 @@ impl<'a> Checker<'a> {
semantic,
visit: deferred::Visit::default(),
analyze: deferred::Analyze::default(),
diagnostics: RefCell::default(),
flake8_bugbear_seen: RefCell::default(),
cell_offsets,
notebook_index,
@@ -287,7 +287,6 @@ impl<'a> Checker<'a> {
target_version,
semantic_checker: SemanticSyntaxChecker::new(),
semantic_errors: RefCell::default(),
context,
}
}
}
@@ -338,7 +337,6 @@ impl<'a> Checker<'a> {
ast::BytesLiteralFlags::empty().with_quote_style(self.preferred_quote())
}
// TODO(dylan) add similar method for t-strings
/// Return the default f-string flags a generated `FString` node should use, given where we are
/// in the AST.
pub(crate) fn default_fstring_flags(&self) -> ast::FStringFlags {
@@ -381,30 +379,19 @@ impl<'a> Checker<'a> {
self.indexer.comment_ranges()
}
/// Return a [`DiagnosticGuard`] for reporting a diagnostic.
///
/// The guard derefs to a [`Diagnostic`], so it can be used to further modify the diagnostic
/// before it is added to the collection in the checker on `Drop`.
pub(crate) fn report_diagnostic<'chk, T: Violation>(
&'chk self,
kind: T,
range: TextRange,
) -> DiagnosticGuard<'chk, 'a> {
self.context.report_diagnostic(kind, range)
/// Push a new [`Diagnostic`] to the collection in the [`Checker`]
pub(crate) fn report_diagnostic(&self, diagnostic: Diagnostic) {
let mut diagnostics = self.diagnostics.borrow_mut();
diagnostics.push(diagnostic);
}
/// Return a [`DiagnosticGuard`] for reporting a diagnostic if the corresponding rule is
/// enabled.
///
/// Prefer [`Checker::report_diagnostic`] in general because the conversion from a `Diagnostic`
/// to a `Rule` is somewhat expensive.
pub(crate) fn report_diagnostic_if_enabled<'chk, T: Violation>(
&'chk self,
kind: T,
range: TextRange,
) -> Option<DiagnosticGuard<'chk, 'a>> {
self.context
.report_diagnostic_if_enabled(kind, range, self.settings)
/// Extend the collection of [`Diagnostic`] objects in the [`Checker`]
pub(crate) fn report_diagnostics<I>(&self, diagnostics: I)
where
I: IntoIterator<Item = Diagnostic>,
{
let mut checker_diagnostics = self.diagnostics.borrow_mut();
checker_diagnostics.extend(diagnostics);
}
/// Adds a [`TextRange`] to the set of ranges of variable names
@@ -530,9 +517,9 @@ impl<'a> Checker<'a> {
}
/// Push `diagnostic` if the checker is not in a `@no_type_check` context.
pub(crate) fn report_type_diagnostic<T: Violation>(&self, kind: T, range: TextRange) {
pub(crate) fn report_type_diagnostic(&self, diagnostic: Diagnostic) {
if !self.semantic.in_no_type_check() {
self.report_diagnostic(kind, range);
self.report_diagnostic(diagnostic);
}
}
@@ -617,7 +604,7 @@ impl SemanticSyntaxContext for Checker<'_> {
match error.kind {
SemanticSyntaxErrorKind::LateFutureImport => {
if self.settings.rules.enabled(Rule::LateFutureImport) {
self.report_diagnostic(LateFutureImport, error.range);
self.report_diagnostic(Diagnostic::new(LateFutureImport, error.range));
}
}
SemanticSyntaxErrorKind::LoadBeforeGlobalDeclaration { name, start } => {
@@ -626,28 +613,31 @@ impl SemanticSyntaxContext for Checker<'_> {
.rules
.enabled(Rule::LoadBeforeGlobalDeclaration)
{
self.report_diagnostic(
self.report_diagnostic(Diagnostic::new(
LoadBeforeGlobalDeclaration {
name,
row: self.compute_source_row(start),
},
error.range,
);
));
}
}
SemanticSyntaxErrorKind::YieldOutsideFunction(kind) => {
if self.settings.rules.enabled(Rule::YieldOutsideFunction) {
self.report_diagnostic(YieldOutsideFunction::new(kind), error.range);
self.report_diagnostic(Diagnostic::new(
YieldOutsideFunction::new(kind),
error.range,
));
}
}
SemanticSyntaxErrorKind::ReturnOutsideFunction => {
if self.settings.rules.enabled(Rule::ReturnOutsideFunction) {
self.report_diagnostic(ReturnOutsideFunction, error.range);
self.report_diagnostic(Diagnostic::new(ReturnOutsideFunction, error.range));
}
}
SemanticSyntaxErrorKind::AwaitOutsideAsyncFunction(_) => {
if self.settings.rules.enabled(Rule::AwaitOutsideAsync) {
self.report_diagnostic(AwaitOutsideAsync, error.range);
self.report_diagnostic(Diagnostic::new(AwaitOutsideAsync, error.range));
}
}
SemanticSyntaxErrorKind::ReboundComprehensionVariable
@@ -700,17 +690,6 @@ impl SemanticSyntaxContext for Checker<'_> {
false
}
fn in_yield_allowed_context(&self) -> bool {
for scope in self.semantic.current_scopes() {
match scope.kind {
ScopeKind::Class(_) | ScopeKind::Generator { .. } => return false,
ScopeKind::Function(_) | ScopeKind::Lambda(_) => return true,
ScopeKind::Module | ScopeKind::Type => {}
}
}
false
}
fn in_sync_comprehension(&self) -> bool {
for scope in self.semantic.current_scopes() {
if let ScopeKind::Generator {
@@ -1898,10 +1877,6 @@ impl<'a> Visitor<'a> for Checker<'a> {
self.semantic.flags |= SemanticModelFlags::F_STRING;
visitor::walk_expr(self, expr);
}
Expr::TString(_) => {
self.semantic.flags |= SemanticModelFlags::T_STRING;
visitor::walk_expr(self, expr);
}
Expr::Named(ast::ExprNamed {
target,
value,
@@ -1935,7 +1910,6 @@ impl<'a> Visitor<'a> for Checker<'a> {
}
Expr::BytesLiteral(bytes_literal) => analyze::string_like(bytes_literal.into(), self),
Expr::FString(f_string) => analyze::string_like(f_string.into(), self),
Expr::TString(t_string) => analyze::string_like(t_string.into(), self),
_ => {}
}
@@ -2125,15 +2099,12 @@ impl<'a> Visitor<'a> for Checker<'a> {
}
}
fn visit_interpolated_string_element(
&mut self,
interpolated_string_element: &'a InterpolatedStringElement,
) {
fn visit_f_string_element(&mut self, f_string_element: &'a FStringElement) {
let snapshot = self.semantic.flags;
if interpolated_string_element.is_interpolation() {
self.semantic.flags |= SemanticModelFlags::INTERPOLATED_STRING_REPLACEMENT_FIELD;
if f_string_element.is_expression() {
self.semantic.flags |= SemanticModelFlags::F_STRING_REPLACEMENT_FIELD;
}
visitor::walk_interpolated_string_element(self, interpolated_string_element);
visitor::walk_f_string_element(self, f_string_element);
self.semantic.flags = snapshot;
}
}
@@ -2744,12 +2715,12 @@ impl<'a> Checker<'a> {
self.semantic.restore(snapshot);
if self.enabled(Rule::ForwardAnnotationSyntaxError) {
self.report_type_diagnostic(
self.report_type_diagnostic(Diagnostic::new(
pyflakes::rules::ForwardAnnotationSyntaxError {
parse_error: parse_error.error.to_string(),
},
string_expr.range(),
);
));
}
}
}
@@ -2890,26 +2861,30 @@ impl<'a> Checker<'a> {
} else {
if self.semantic.global_scope().uses_star_imports() {
if self.enabled(Rule::UndefinedLocalWithImportStarUsage) {
self.report_diagnostic(
pyflakes::rules::UndefinedLocalWithImportStarUsage {
name: name.to_string(),
},
range,
)
.set_parent(definition.start());
self.diagnostics.get_mut().push(
Diagnostic::new(
pyflakes::rules::UndefinedLocalWithImportStarUsage {
name: name.to_string(),
},
range,
)
.with_parent(definition.start()),
);
}
} else {
if self.enabled(Rule::UndefinedExport) {
if is_undefined_export_in_dunder_init_enabled(self.settings)
|| !self.path.ends_with("__init__.py")
{
self.report_diagnostic(
pyflakes::rules::UndefinedExport {
name: name.to_string(),
},
range,
)
.set_parent(definition.start());
self.diagnostics.get_mut().push(
Diagnostic::new(
pyflakes::rules::UndefinedExport {
name: name.to_string(),
},
range,
)
.with_parent(definition.start()),
);
}
}
}
@@ -2970,8 +2945,7 @@ pub(crate) fn check_ast(
cell_offsets: Option<&CellOffsets>,
notebook_index: Option<&NotebookIndex>,
target_version: TargetVersion,
context: &LintContext,
) -> Vec<SemanticSyntaxError> {
) -> (Vec<Diagnostic>, Vec<SemanticSyntaxError>) {
let module_path = package
.map(PackageRoot::path)
.and_then(|package| to_module_path(package, path));
@@ -3011,7 +2985,6 @@ pub(crate) fn check_ast(
cell_offsets,
notebook_index,
target_version,
context,
);
checker.bind_builtins();
@@ -3038,137 +3011,10 @@ pub(crate) fn check_ast(
analyze::deferred_scopes(&checker);
let Checker {
semantic_errors, ..
diagnostics,
semantic_errors,
..
} = checker;
semantic_errors.into_inner()
}
/// A type for collecting diagnostics in a given file.
///
/// [`LintContext::report_diagnostic`] can be used to obtain a [`DiagnosticGuard`], which will push
/// a [`Violation`] to the contained [`OldDiagnostic`] collection on `Drop`.
pub(crate) struct LintContext<'a> {
diagnostics: RefCell<Vec<OldDiagnostic>>,
source_file: &'a SourceFile,
}
impl<'a> LintContext<'a> {
/// Create a new collector with the given `source_file` and an empty collection of
/// `OldDiagnostic`s.
pub(crate) fn new(source_file: &'a SourceFile) -> Self {
Self {
diagnostics: RefCell::default(),
source_file,
}
}
/// Return a [`DiagnosticGuard`] for reporting a diagnostic.
///
/// The guard derefs to an [`OldDiagnostic`], so it can be used to further modify the diagnostic
/// before it is added to the collection in the collector on `Drop`.
pub(crate) fn report_diagnostic<'chk, T: Violation>(
&'chk self,
kind: T,
range: TextRange,
) -> DiagnosticGuard<'chk, 'a> {
DiagnosticGuard {
context: self,
diagnostic: Some(OldDiagnostic::new(kind, range, self.source_file)),
}
}
/// Return a [`DiagnosticGuard`] for reporting a diagnostic if the corresponding rule is
/// enabled.
///
/// Prefer [`LintContext::report_diagnostic`] in general because the conversion from an
/// `OldDiagnostic` to a `Rule` is somewhat expensive.
pub(crate) fn report_diagnostic_if_enabled<'chk, T: Violation>(
&'chk self,
kind: T,
range: TextRange,
settings: &LinterSettings,
) -> Option<DiagnosticGuard<'chk, 'a>> {
let diagnostic = OldDiagnostic::new(kind, range, self.source_file);
if settings.rules.enabled(diagnostic.rule()) {
Some(DiagnosticGuard {
context: self,
diagnostic: Some(diagnostic),
})
} else {
None
}
}
pub(crate) fn into_diagnostics(self) -> Vec<OldDiagnostic> {
self.diagnostics.into_inner()
}
pub(crate) fn is_empty(&self) -> bool {
self.diagnostics.borrow().is_empty()
}
pub(crate) fn as_mut_vec(&mut self) -> &mut Vec<OldDiagnostic> {
self.diagnostics.get_mut()
}
pub(crate) fn iter(&mut self) -> impl Iterator<Item = &OldDiagnostic> {
self.diagnostics.get_mut().iter()
}
}
/// An abstraction for mutating a diagnostic.
///
/// Callers can build this guard by starting with `Checker::report_diagnostic`.
///
/// The primary function of this guard is to add the underlying diagnostic to the `Checker`'s list
/// of diagnostics on `Drop`, while dereferencing to the underlying diagnostic for mutations like
/// adding fixes or parent ranges.
pub(crate) struct DiagnosticGuard<'a, 'b> {
/// The parent checker that will receive the diagnostic on `Drop`.
context: &'a LintContext<'b>,
/// The diagnostic that we want to report.
///
/// This is always `Some` until the `Drop` (or `defuse`) call.
diagnostic: Option<OldDiagnostic>,
}
impl DiagnosticGuard<'_, '_> {
/// Consume the underlying `Diagnostic` without emitting it.
///
/// In general you should avoid constructing diagnostics that may not be emitted, but this
/// method can be used where this is unavoidable.
pub(crate) fn defuse(mut self) {
self.diagnostic = None;
}
}
impl std::ops::Deref for DiagnosticGuard<'_, '_> {
type Target = OldDiagnostic;
fn deref(&self) -> &OldDiagnostic {
// OK because `self.diagnostic` is only `None` within `Drop`.
self.diagnostic.as_ref().unwrap()
}
}
/// Return a mutable borrow of the diagnostic in this guard.
impl std::ops::DerefMut for DiagnosticGuard<'_, '_> {
fn deref_mut(&mut self) -> &mut OldDiagnostic {
// OK because `self.diagnostic` is only `None` within `Drop`.
self.diagnostic.as_mut().unwrap()
}
}
impl Drop for DiagnosticGuard<'_, '_> {
fn drop(&mut self) {
if std::thread::panicking() {
// Don't submit diagnostics when panicking because they might be incomplete.
return;
}
if let Some(diagnostic) = self.diagnostic.take() {
self.context.diagnostics.borrow_mut().push(diagnostic);
}
}
(diagnostics.into_inner(), semantic_errors.into_inner())
}
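The `DiagnosticGuard` deleted above adds its diagnostic to the shared collection when it goes out of scope, while dereferencing to the diagnostic for in-place mutation until then. A compact, self-contained sketch of that drop-based pattern, with simplified stand-in types (`Context`, `Diagnostic`) in place of the real ones:

use std::cell::RefCell;

struct Diagnostic {
    message: String,
    fixed: bool,
}

#[derive(Default)]
struct Context {
    diagnostics: RefCell<Vec<Diagnostic>>,
}

struct DiagnosticGuard<'a> {
    context: &'a Context,
    // Always `Some` until `Drop` takes it.
    diagnostic: Option<Diagnostic>,
}

impl Context {
    fn report(&self, message: impl Into<String>) -> DiagnosticGuard<'_> {
        DiagnosticGuard {
            context: self,
            diagnostic: Some(Diagnostic {
                message: message.into(),
                fixed: false,
            }),
        }
    }
}

impl std::ops::Deref for DiagnosticGuard<'_> {
    type Target = Diagnostic;
    fn deref(&self) -> &Diagnostic {
        self.diagnostic.as_ref().unwrap()
    }
}

impl std::ops::DerefMut for DiagnosticGuard<'_> {
    fn deref_mut(&mut self) -> &mut Diagnostic {
        self.diagnostic.as_mut().unwrap()
    }
}

impl Drop for DiagnosticGuard<'_> {
    fn drop(&mut self) {
        if let Some(diagnostic) = self.diagnostic.take() {
            self.context.diagnostics.borrow_mut().push(diagnostic);
        }
    }
}

fn main() {
    let context = Context::default();
    {
        // The guard derefs to the diagnostic, so it can be mutated in place...
        let mut guard = context.report("unused import");
        guard.fixed = true;
    } // ...and the diagnostic is pushed here, on `Drop`.
    assert!(context.diagnostics.borrow()[0].fixed);
    assert_eq!(context.diagnostics.borrow()[0].message, "unused import");
}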

View File

@@ -1,10 +1,10 @@
use std::path::Path;
use ruff_diagnostics::Diagnostic;
use ruff_python_ast::PythonVersion;
use ruff_python_trivia::CommentRanges;
use crate::Locator;
use crate::checkers::ast::LintContext;
use crate::package::PackageRoot;
use crate::preview::is_allow_nested_roots_enabled;
use crate::registry::Rule;
@@ -20,12 +20,13 @@ pub(crate) fn check_file_path(
comment_ranges: &CommentRanges,
settings: &LinterSettings,
target_version: PythonVersion,
context: &LintContext,
) {
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];
// flake8-no-pep420
if settings.rules.enabled(Rule::ImplicitNamespacePackage) {
let allow_nested_roots = is_allow_nested_roots_enabled(settings);
implicit_namespace_package(
if let Some(diagnostic) = implicit_namespace_package(
path,
package,
locator,
@@ -33,17 +34,26 @@ pub(crate) fn check_file_path(
&settings.project_root,
&settings.src,
allow_nested_roots,
context,
);
) {
diagnostics.push(diagnostic);
}
}
// pep8-naming
if settings.rules.enabled(Rule::InvalidModuleName) {
invalid_module_name(path, package, &settings.pep8_naming.ignore_names, context);
if let Some(diagnostic) =
invalid_module_name(path, package, &settings.pep8_naming.ignore_names)
{
diagnostics.push(diagnostic);
}
}
// flake8-builtins
if settings.rules.enabled(Rule::StdlibModuleShadowing) {
stdlib_module_shadowing(path, settings, target_version, context);
if let Some(diagnostic) = stdlib_module_shadowing(path, settings, target_version) {
diagnostics.push(diagnostic);
}
}
diagnostics
}

View File

@@ -1,5 +1,6 @@
//! Lint rules based on import analysis.
use ruff_diagnostics::Diagnostic;
use ruff_notebook::CellOffsets;
use ruff_python_ast::statement_visitor::StatementVisitor;
use ruff_python_ast::{ModModule, PySourceType, PythonVersion};
@@ -15,8 +16,6 @@ use crate::rules::isort;
use crate::rules::isort::block::{Block, BlockBuilder};
use crate::settings::LinterSettings;
use super::ast::LintContext;
#[expect(clippy::too_many_arguments)]
pub(crate) fn check_imports(
parsed: &Parsed<ModModule>,
@@ -29,8 +28,7 @@ pub(crate) fn check_imports(
source_type: PySourceType,
cell_offsets: Option<&CellOffsets>,
target_version: PythonVersion,
context: &LintContext,
) {
) -> Vec<Diagnostic> {
// Extract all import blocks from the AST.
let tracker = {
let mut tracker =
@@ -42,10 +40,11 @@ pub(crate) fn check_imports(
let blocks: Vec<&Block> = tracker.iter().collect();
// Enforce import rules.
let mut diagnostics = vec![];
if settings.rules.enabled(Rule::UnsortedImports) {
for block in &blocks {
if !block.imports.is_empty() {
isort::rules::organize_imports(
if let Some(diagnostic) = isort::rules::organize_imports(
block,
locator,
stylist,
@@ -55,19 +54,21 @@ pub(crate) fn check_imports(
source_type,
parsed.tokens(),
target_version,
context,
);
) {
diagnostics.push(diagnostic);
}
}
}
}
if settings.rules.enabled(Rule::MissingRequiredImport) {
isort::rules::add_required_imports(
diagnostics.extend(isort::rules::add_required_imports(
parsed,
locator,
stylist,
settings,
source_type,
context,
);
));
}
diagnostics
}

View File

@@ -1,11 +1,13 @@
use ruff_diagnostics::Diagnostic;
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_python_parser::{TokenKind, Tokens};
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextRange};
use crate::Locator;
use crate::line_width::IndentWidth;
use crate::registry::Rule;
use crate::registry::{AsRule, Rule};
use crate::rules::pycodestyle::rules::logical_lines::{
LogicalLines, TokenFlags, extraneous_whitespace, indentation, missing_whitespace,
missing_whitespace_after_keyword, missing_whitespace_around_operator, redundant_backslash,
@@ -14,9 +16,6 @@ use crate::rules::pycodestyle::rules::logical_lines::{
whitespace_before_parameters,
};
use crate::settings::LinterSettings;
use crate::{Locator, Violation};
use super::ast::{DiagnosticGuard, LintContext};
/// Return the amount of indentation, expanding tabs to the next multiple of the settings' tab size.
pub(crate) fn expand_indent(line: &str, indent_width: IndentWidth) -> usize {
@@ -41,9 +40,8 @@ pub(crate) fn check_logical_lines(
indexer: &Indexer,
stylist: &Stylist,
settings: &LinterSettings,
lint_context: &LintContext,
) {
let mut context = LogicalLinesContext::new(settings, lint_context);
) -> Vec<Diagnostic> {
let mut context = LogicalLinesContext::new(settings);
let mut prev_line = None;
let mut prev_indent_level = None;
@@ -172,7 +170,7 @@ pub(crate) fn check_logical_lines(
let indent_size = 4;
if enforce_indentation {
indentation(
for diagnostic in indentation(
&line,
prev_line.as_ref(),
indent_char,
@@ -180,9 +178,11 @@ pub(crate) fn check_logical_lines(
prev_indent_level,
indent_size,
range,
lint_context,
settings,
);
) {
if settings.rules.enabled(diagnostic.rule()) {
context.push_diagnostic(diagnostic);
}
}
}
if !line.is_comment_only() {
@@ -190,24 +190,26 @@ pub(crate) fn check_logical_lines(
prev_indent_level = Some(indent_level);
}
}
context.diagnostics
}
pub(crate) struct LogicalLinesContext<'a, 'b> {
#[derive(Debug, Clone)]
pub(crate) struct LogicalLinesContext<'a> {
settings: &'a LinterSettings,
context: &'a LintContext<'b>,
diagnostics: Vec<Diagnostic>,
}
impl<'a, 'b> LogicalLinesContext<'a, 'b> {
fn new(settings: &'a LinterSettings, context: &'a LintContext<'b>) -> Self {
Self { settings, context }
impl<'a> LogicalLinesContext<'a> {
fn new(settings: &'a LinterSettings) -> Self {
Self {
settings,
diagnostics: Vec::new(),
}
}
pub(crate) fn report_diagnostic<'chk, T: Violation>(
&'chk self,
kind: T,
range: TextRange,
) -> Option<DiagnosticGuard<'chk, 'a>> {
self.context
.report_diagnostic_if_enabled(kind, range, self.settings)
pub(crate) fn push_diagnostic(&mut self, diagnostic: Diagnostic) {
if self.settings.rules.enabled(diagnostic.rule()) {
self.diagnostics.push(diagnostic);
}
}
}
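The rewritten `LogicalLinesContext` buffers diagnostics itself and filters at the point of insertion, so callers can push unconditionally. A toy sketch of that shape, with a plain closure standing in for the `settings.rules.enabled(...)` lookup:

// Stand-in: in Ruff, `enabled` consults `settings.rules` for the
// diagnostic's `Rule`; here a simple closure plays that role.
struct Diagnostic {
    code: &'static str,
}

struct LogicalLinesContext<F: Fn(&str) -> bool> {
    enabled: F,
    diagnostics: Vec<Diagnostic>,
}

impl<F: Fn(&str) -> bool> LogicalLinesContext<F> {
    fn new(enabled: F) -> Self {
        Self {
            enabled,
            diagnostics: Vec::new(),
        }
    }

    // Drop diagnostics for disabled rules on push, so disabled rules
    // never accumulate in the buffer.
    fn push_diagnostic(&mut self, diagnostic: Diagnostic) {
        if (self.enabled)(diagnostic.code) {
            self.diagnostics.push(diagnostic);
        }
    }
}

fn main() {
    let mut context = LogicalLinesContext::new(|code| code == "E201");
    context.push_diagnostic(Diagnostic { code: "E201" });
    context.push_diagnostic(Diagnostic { code: "E999" }); // filtered out
    assert_eq!(context.diagnostics.len(), 1);
}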

View File

@@ -5,9 +5,11 @@ use std::path::Path;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_python_trivia::CommentRanges;
use ruff_text_size::Ranged;
use crate::Locator;
use crate::fix::edits::delete_comment;
use crate::noqa::{
Code, Directive, FileExemption, FileNoqaDirectives, NoqaDirectives, NoqaMapping,
@@ -19,13 +21,10 @@ use crate::rules::pygrep_hooks;
use crate::rules::ruff;
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA};
use crate::settings::LinterSettings;
use crate::{Edit, Fix, Locator};
use super::ast::LintContext;
#[expect(clippy::too_many_arguments)]
pub(crate) fn check_noqa(
context: &mut LintContext,
diagnostics: &mut Vec<Diagnostic>,
path: &Path,
locator: &Locator,
comment_ranges: &CommentRanges,
@@ -47,7 +46,7 @@ pub(crate) fn check_noqa(
let mut ignored_diagnostics = vec![];
// Remove any ignored diagnostics.
'outer: for (index, diagnostic) in context.iter().enumerate() {
'outer: for (index, diagnostic) in diagnostics.iter().enumerate() {
let rule = diagnostic.rule();
if matches!(rule, Rule::BlanketNOQA) {
@@ -136,9 +135,11 @@ pub(crate) fn check_noqa(
Directive::All(directive) => {
if matches.is_empty() {
let edit = delete_comment(directive.range(), locator);
let mut diagnostic = context
.report_diagnostic(UnusedNOQA { codes: None }, directive.range());
let mut diagnostic =
Diagnostic::new(UnusedNOQA { codes: None }, directive.range());
diagnostic.set_fix(Fix::safe_edit(edit));
diagnostics.push(diagnostic);
}
}
Directive::Codes(directive) => {
@@ -158,7 +159,9 @@ pub(crate) fn check_noqa(
if seen_codes.insert(original_code) {
let is_code_used = if is_file_level {
context.iter().any(|diag| diag.rule().noqa_code() == code)
diagnostics
.iter()
.any(|diag| diag.rule().noqa_code() == code)
} else {
matches.iter().any(|match_| *match_ == code)
} || settings
@@ -209,7 +212,7 @@ pub(crate) fn check_noqa(
directive.range(),
)
};
let mut diagnostic = context.report_diagnostic(
let mut diagnostic = Diagnostic::new(
UnusedNOQA {
codes: Some(UnusedCodes {
disabled: disabled_codes
@@ -233,6 +236,7 @@ pub(crate) fn check_noqa(
directive.range(),
);
diagnostic.set_fix(Fix::safe_edit(edit));
diagnostics.push(diagnostic);
}
}
}
@@ -243,8 +247,8 @@ pub(crate) fn check_noqa(
&& !per_file_ignores.contains(Rule::RedirectedNOQA)
&& !exemption.includes(Rule::RedirectedNOQA)
{
ruff::rules::redirected_noqa(context, &noqa_directives);
ruff::rules::redirected_file_noqa(context, &file_noqa_directives);
ruff::rules::redirected_noqa(diagnostics, &noqa_directives);
ruff::rules::redirected_file_noqa(diagnostics, &file_noqa_directives);
}
if settings.rules.enabled(Rule::BlanketNOQA)
@@ -252,7 +256,7 @@ pub(crate) fn check_noqa(
&& !exemption.enumerates(Rule::BlanketNOQA)
{
pygrep_hooks::rules::blanket_noqa(
context,
diagnostics,
&noqa_directives,
locator,
&file_noqa_directives,
@@ -263,7 +267,7 @@ pub(crate) fn check_noqa(
&& !per_file_ignores.contains(Rule::InvalidRuleCode)
&& !exemption.enumerates(Rule::InvalidRuleCode)
{
ruff::rules::invalid_noqa_code(context, &noqa_directives, locator, &settings.external);
ruff::rules::invalid_noqa_code(diagnostics, &noqa_directives, locator, &settings.external);
}
ignored_diagnostics.sort_unstable();
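`check_noqa` returns the indices of suppressed diagnostics, which the caller later deletes with `swap_remove` in reverse order (see the `check_path` hunk further below). Reverse order matters: `swap_remove` fills the hole with the current last element, so only indices after the removed slot are disturbed. A minimal illustration of the idiom:

fn main() {
    let mut diagnostics = vec!["F401", "E741", "F841", "E501"];

    // Indices of suppressed diagnostics, recorded in ascending order.
    let mut ignored: Vec<usize> = vec![1, 3];
    ignored.sort_unstable();

    // Iterate in reverse: removing index 3 first leaves index 1 valid,
    // whereas removing index 1 first would move the last element into it.
    for index in ignored.iter().rev() {
        diagnostics.swap_remove(*index);
    }

    assert_eq!(diagnostics, vec!["F401", "F841"]);
}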

View File

@@ -1,5 +1,6 @@
//! Lint rules based on checking physical lines.
use ruff_diagnostics::Diagnostic;
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_source_file::UniversalNewlines;
@@ -16,16 +17,15 @@ use crate::rules::pylint;
use crate::rules::ruff::rules::indented_form_feed;
use crate::settings::LinterSettings;
use super::ast::LintContext;
pub(crate) fn check_physical_lines(
locator: &Locator,
stylist: &Stylist,
indexer: &Indexer,
doc_lines: &[TextSize],
settings: &LinterSettings,
context: &LintContext,
) {
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];
let enforce_doc_line_too_long = settings.rules.enabled(Rule::DocLineTooLong);
let enforce_line_too_long = settings.rules.enabled(Rule::LineTooLong);
let enforce_no_newline_at_end_of_file = settings.rules.enabled(Rule::MissingNewlineAtEndOfFile);
@@ -45,38 +45,54 @@ pub(crate) fn check_physical_lines(
.is_some()
{
if enforce_doc_line_too_long {
doc_line_too_long(&line, comment_ranges, settings, context);
if let Some(diagnostic) = doc_line_too_long(&line, comment_ranges, settings) {
diagnostics.push(diagnostic);
}
}
}
if enforce_mixed_spaces_and_tabs {
mixed_spaces_and_tabs(&line, context);
if let Some(diagnostic) = mixed_spaces_and_tabs(&line) {
diagnostics.push(diagnostic);
}
}
if enforce_line_too_long {
line_too_long(&line, comment_ranges, settings, context);
if let Some(diagnostic) = line_too_long(&line, comment_ranges, settings) {
diagnostics.push(diagnostic);
}
}
if enforce_bidirectional_unicode {
pylint::rules::bidirectional_unicode(&line, context);
diagnostics.extend(pylint::rules::bidirectional_unicode(&line));
}
if enforce_trailing_whitespace || enforce_blank_line_contains_whitespace {
trailing_whitespace(&line, locator, indexer, settings, context);
if let Some(diagnostic) = trailing_whitespace(&line, locator, indexer, settings) {
diagnostics.push(diagnostic);
}
}
if settings.rules.enabled(Rule::IndentedFormFeed) {
indented_form_feed(&line, context);
if let Some(diagnostic) = indented_form_feed(&line) {
diagnostics.push(diagnostic);
}
}
}
if enforce_no_newline_at_end_of_file {
no_newline_at_end_of_file(locator, stylist, context);
if let Some(diagnostic) = no_newline_at_end_of_file(locator, stylist) {
diagnostics.push(diagnostic);
}
}
if enforce_copyright_notice {
missing_copyright_notice(locator, settings, context);
if let Some(diagnostic) = missing_copyright_notice(locator, settings) {
diagnostics.push(diagnostic);
}
}
diagnostics
}
#[cfg(test)]
@@ -84,10 +100,8 @@ mod tests {
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_python_parser::parse_module;
use ruff_source_file::SourceFileBuilder;
use crate::Locator;
use crate::checkers::ast::LintContext;
use crate::line_width::LineLength;
use crate::registry::Rule;
use crate::rules::pycodestyle;
@@ -104,8 +118,6 @@ mod tests {
let stylist = Stylist::from_tokens(parsed.tokens(), locator.contents());
let check_with_max_line_length = |line_length: LineLength| {
let source_file = SourceFileBuilder::new("<filename>", line).finish();
let diagnostics = LintContext::new(&source_file);
check_physical_lines(
&locator,
&stylist,
@@ -118,9 +130,7 @@ mod tests {
},
..LinterSettings::for_rule(Rule::LineTooLong)
},
&diagnostics,
);
diagnostics.into_diagnostics()
)
};
let line_length = LineLength::try_from(8).unwrap();
assert_eq!(check_with_max_line_length(line_length), vec![]);

View File

@@ -2,6 +2,7 @@
use std::path::Path;
use ruff_diagnostics::Diagnostic;
use ruff_notebook::CellOffsets;
use ruff_python_ast::PySourceType;
use ruff_python_codegen::Stylist;
@@ -18,8 +19,6 @@ use crate::rules::{
};
use crate::settings::LinterSettings;
use super::ast::LintContext;
#[expect(clippy::too_many_arguments)]
pub(crate) fn check_tokens(
tokens: &Tokens,
@@ -30,8 +29,8 @@ pub(crate) fn check_tokens(
settings: &LinterSettings,
source_type: PySourceType,
cell_offsets: Option<&CellOffsets>,
context: &mut LintContext,
) {
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];
let comment_ranges = indexer.comment_ranges();
if settings.rules.any_enabled(&[
@@ -42,23 +41,16 @@ pub(crate) fn check_tokens(
Rule::BlankLinesAfterFunctionOrClass,
Rule::BlankLinesBeforeNestedDefinition,
]) {
BlankLinesChecker::new(
locator,
stylist,
settings,
source_type,
cell_offsets,
context,
)
.check_lines(tokens);
BlankLinesChecker::new(locator, stylist, settings, source_type, cell_offsets)
.check_lines(tokens, &mut diagnostics);
}
if settings.rules.enabled(Rule::BlanketTypeIgnore) {
pygrep_hooks::rules::blanket_type_ignore(context, comment_ranges, locator);
pygrep_hooks::rules::blanket_type_ignore(&mut diagnostics, comment_ranges, locator);
}
if settings.rules.enabled(Rule::EmptyComment) {
pylint::rules::empty_comments(context, comment_ranges, locator);
pylint::rules::empty_comments(&mut diagnostics, comment_ranges, locator);
}
if settings
@@ -66,20 +58,25 @@ pub(crate) fn check_tokens(
.enabled(Rule::AmbiguousUnicodeCharacterComment)
{
for range in comment_ranges {
ruff::rules::ambiguous_unicode_character_comment(context, locator, range, settings);
ruff::rules::ambiguous_unicode_character_comment(
&mut diagnostics,
locator,
range,
settings,
);
}
}
if settings.rules.enabled(Rule::CommentedOutCode) {
eradicate::rules::commented_out_code(context, locator, comment_ranges, settings);
eradicate::rules::commented_out_code(&mut diagnostics, locator, comment_ranges, settings);
}
if settings.rules.enabled(Rule::UTF8EncodingDeclaration) {
pyupgrade::rules::unnecessary_coding_comment(context, locator, comment_ranges);
pyupgrade::rules::unnecessary_coding_comment(&mut diagnostics, locator, comment_ranges);
}
if settings.rules.enabled(Rule::TabIndentation) {
pycodestyle::rules::tab_indentation(context, locator, indexer);
pycodestyle::rules::tab_indentation(&mut diagnostics, locator, indexer);
}
if settings.rules.any_enabled(&[
@@ -90,7 +87,7 @@ pub(crate) fn check_tokens(
Rule::InvalidCharacterZeroWidthSpace,
]) {
for token in tokens {
pylint::rules::invalid_string_characters(context, token, locator);
pylint::rules::invalid_string_characters(&mut diagnostics, token, locator);
}
}
@@ -100,7 +97,7 @@ pub(crate) fn check_tokens(
Rule::UselessSemicolon,
]) {
pycodestyle::rules::compound_statements(
context,
&mut diagnostics,
tokens,
locator,
indexer,
@@ -113,7 +110,13 @@ pub(crate) fn check_tokens(
Rule::SingleLineImplicitStringConcatenation,
Rule::MultiLineImplicitStringConcatenation,
]) {
flake8_implicit_str_concat::rules::implicit(context, tokens, locator, indexer, settings);
flake8_implicit_str_concat::rules::implicit(
&mut diagnostics,
tokens,
locator,
indexer,
settings,
);
}
if settings.rules.any_enabled(&[
@@ -121,15 +124,15 @@ pub(crate) fn check_tokens(
Rule::TrailingCommaOnBareTuple,
Rule::ProhibitedTrailingComma,
]) {
flake8_commas::rules::trailing_commas(context, tokens, locator, indexer);
flake8_commas::rules::trailing_commas(&mut diagnostics, tokens, locator, indexer);
}
if settings.rules.enabled(Rule::ExtraneousParentheses) {
pyupgrade::rules::extraneous_parentheses(context, tokens, locator);
pyupgrade::rules::extraneous_parentheses(&mut diagnostics, tokens, locator);
}
if source_type.is_stub() && settings.rules.enabled(Rule::TypeCommentInStub) {
flake8_pyi::rules::type_comment_in_stub(context, locator, comment_ranges);
flake8_pyi::rules::type_comment_in_stub(&mut diagnostics, locator, comment_ranges);
}
if settings.rules.any_enabled(&[
@@ -139,7 +142,13 @@ pub(crate) fn check_tokens(
Rule::ShebangNotFirstLine,
Rule::ShebangMissingPython,
]) {
flake8_executable::rules::from_tokens(context, path, locator, comment_ranges, settings);
flake8_executable::rules::from_tokens(
&mut diagnostics,
path,
locator,
comment_ranges,
settings,
);
}
if settings.rules.any_enabled(&[
@@ -163,15 +172,19 @@ pub(crate) fn check_tokens(
TodoComment::from_comment(comment, *comment_range, i)
})
.collect();
flake8_todos::rules::todos(context, &todo_comments, locator, comment_ranges);
flake8_fixme::rules::todos(context, &todo_comments);
flake8_todos::rules::todos(&mut diagnostics, &todo_comments, locator, comment_ranges);
flake8_fixme::rules::todos(&mut diagnostics, &todo_comments);
}
if settings.rules.enabled(Rule::TooManyNewlinesAtEndOfFile) {
pycodestyle::rules::too_many_newlines_at_end_of_file(context, tokens, cell_offsets);
pycodestyle::rules::too_many_newlines_at_end_of_file(
&mut diagnostics,
tokens,
cell_offsets,
);
}
context
.as_mut_vec()
.retain(|diagnostic| settings.rules.enabled(diagnostic.rule()));
diagnostics.retain(|diagnostic| settings.rules.enabled(diagnostic.rule()));
diagnostics
}

View File

@@ -6,7 +6,7 @@ use std::fmt::Formatter;
use strum_macros::{AsRefStr, EnumIter};
use crate::registry::Linter;
use crate::registry::{AsRule, Linter};
use crate::rule_selector::is_single_rule_selector;
use crate::rules;
@@ -198,7 +198,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Pylint, "C0132") => (RuleGroup::Stable, rules::pylint::rules::TypeParamNameMismatch),
(Pylint, "C0205") => (RuleGroup::Stable, rules::pylint::rules::SingleStringSlots),
(Pylint, "C0206") => (RuleGroup::Stable, rules::pylint::rules::DictIndexMissingItems),
(Pylint, "C0207") => (RuleGroup::Preview, rules::pylint::rules::MissingMaxsplitArg),
(Pylint, "C0208") => (RuleGroup::Stable, rules::pylint::rules::IterationOverSet),
(Pylint, "C0414") => (RuleGroup::Stable, rules::pylint::rules::UselessImportAlias),
(Pylint, "C0415") => (RuleGroup::Preview, rules::pylint::rules::ImportOutsideTopLevel),
@@ -553,7 +552,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Pyupgrade, "046") => (RuleGroup::Preview, rules::pyupgrade::rules::NonPEP695GenericClass),
(Pyupgrade, "047") => (RuleGroup::Preview, rules::pyupgrade::rules::NonPEP695GenericFunction),
(Pyupgrade, "049") => (RuleGroup::Preview, rules::pyupgrade::rules::PrivateTypeParameter),
(Pyupgrade, "050") => (RuleGroup::Preview, rules::pyupgrade::rules::UselessClassMetaclassType),
// pydocstyle
(Pydocstyle, "100") => (RuleGroup::Stable, rules::pydocstyle::rules::UndocumentedPublicModule),
@@ -935,7 +933,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Flake8UsePathlib, "207") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::Glob),
(Flake8UsePathlib, "208") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsListdir),
(Flake8UsePathlib, "210") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::InvalidPathlibWithSuffix),
(Flake8UsePathlib, "211") => (RuleGroup::Preview, rules::flake8_use_pathlib::violations::OsSymlink),
// flake8-logging-format
(Flake8LoggingFormat, "001") => (RuleGroup::Stable, rules::flake8_logging_format::violations::LoggingStringFormat),
@@ -1025,7 +1022,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Ruff, "056") => (RuleGroup::Preview, rules::ruff::rules::FalsyDictGetFallback),
(Ruff, "057") => (RuleGroup::Preview, rules::ruff::rules::UnnecessaryRound),
(Ruff, "058") => (RuleGroup::Preview, rules::ruff::rules::StarmapZip),
(Ruff, "059") => (RuleGroup::Stable, rules::ruff::rules::UnusedUnpackedVariable),
(Ruff, "059") => (RuleGroup::Preview, rules::ruff::rules::UnusedUnpackedVariable),
(Ruff, "060") => (RuleGroup::Preview, rules::ruff::rules::InEmptyCollection),
(Ruff, "100") => (RuleGroup::Stable, rules::ruff::rules::UnusedNOQA),
(Ruff, "101") => (RuleGroup::Stable, rules::ruff::rules::RedirectedNOQA),
@@ -1157,9 +1154,3 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
_ => return None,
})
}
impl std::fmt::Display for Rule {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
f.write_str(self.into())
}
}
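The removed `Display` impl above leans on the `Rule -> &'static str` conversion that the strum derives (`AsRefStr`, imported at the top of this file) generate. A self-contained sketch of the same trick, hand-rolling the conversion that the derive would normally produce:

use std::fmt::{self, Formatter};

// Hand-rolled stand-in for what a strum derive generates on `Rule`.
enum Rule {
    UnusedImport,
    UnusedVariable,
}

impl From<&Rule> for &'static str {
    fn from(rule: &Rule) -> &'static str {
        match rule {
            Rule::UnusedImport => "UnusedImport",
            Rule::UnusedVariable => "UnusedVariable",
        }
    }
}

// `Display` then just delegates to that conversion, as in the deleted impl.
impl fmt::Display for Rule {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.write_str(self.into())
    }
}

fn main() {
    println!("{}", Rule::UnusedImport); // prints "UnusedImport"
    assert_eq!(Rule::UnusedVariable.to_string(), "UnusedVariable");
}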

View File

@@ -2,6 +2,7 @@
use anyhow::{Context, Result};
use ruff_diagnostics::Edit;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, ExprList, Parameters, Stmt};
use ruff_python_ast::{AnyNodeRef, ArgOrKeyword};
@@ -15,7 +16,6 @@ use ruff_python_trivia::{
use ruff_source_file::{LineRanges, NewlineWithTrailingNewline, UniversalNewlines};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use crate::Edit;
use crate::Locator;
use crate::cst::matchers::{match_function_def, match_indented_block, match_statement};
use crate::fix::codemods;
@@ -595,17 +595,18 @@ mod tests {
use ruff_source_file::SourceFileBuilder;
use test_case::test_case;
use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_python_ast::Stmt;
use ruff_python_codegen::Stylist;
use ruff_python_parser::{parse_expression, parse_module};
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::Locator;
use crate::fix::apply_fixes;
use crate::fix::edits::{
add_to_dunder_all, make_redundant_alias, next_stmt_break, trailing_semicolon,
};
use crate::message::Message;
use crate::{Edit, Fix, Locator, OldDiagnostic};
/// Parse the given source using [`Mode::Module`] and return the first statement.
fn parse_first_stmt(source: &str) -> Result<Stmt> {
@@ -736,16 +737,24 @@ x = 1 \
let diag = {
use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
let mut iter = edits.into_iter();
let diag = OldDiagnostic::new(
let diag = Diagnostic::new(
MissingNewlineAtEndOfFile, // The choice of rule here is arbitrary.
TextRange::default(),
&SourceFileBuilder::new("<filename>", "<code>").finish(),
)
.with_fix(Fix::safe_edits(
iter.next().ok_or(anyhow!("expected edits nonempty"))?,
iter,
));
Message::from_diagnostic(diag, None)
Message::diagnostic(
diag.name,
diag.body,
diag.suggestion,
diag.range,
diag.fix,
diag.parent,
SourceFileBuilder::new("<filename>", "<code>").finish(),
None,
)
};
assert_eq!(apply_fixes([diag].iter(), &locator).code, expect);
Ok(())

View File

@@ -3,7 +3,7 @@ use std::collections::BTreeSet;
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use ruff_diagnostics::{IsolationLevel, SourceMap};
use ruff_diagnostics::{Edit, Fix, IsolationLevel, SourceMap};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use crate::Locator;
@@ -11,7 +11,6 @@ use crate::linter::FixTable;
use crate::message::Message;
use crate::registry::Rule;
use crate::settings::types::UnsafeFixes;
use crate::{Edit, Fix};
pub(crate) mod codemods;
pub(crate) mod edits;
@@ -158,16 +157,14 @@ fn cmp_fix(rule1: Rule, rule2: Rule, fix1: &Fix, fix2: &Fix) -> std::cmp::Orderi
#[cfg(test)]
mod tests {
use ruff_diagnostics::SourceMarker;
use ruff_diagnostics::{Diagnostic, Edit, Fix, SourceMarker};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{Ranged, TextSize};
use crate::Locator;
use crate::diagnostic::OldDiagnostic;
use crate::fix::{FixResult, apply_fixes};
use crate::message::Message;
use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
use crate::{Edit, Fix};
fn create_diagnostics(
filename: &str,
@@ -177,12 +174,12 @@ mod tests {
edit.into_iter()
.map(|edit| {
// The choice of rule here is arbitrary.
let diagnostic = OldDiagnostic::new(
MissingNewlineAtEndOfFile,
edit.range(),
&SourceFileBuilder::new(filename, source).finish(),
);
Message::from_diagnostic(diagnostic.with_fix(Fix::safe_edit(edit)), None)
let diagnostic = Diagnostic::new(MissingNewlineAtEndOfFile, edit.range());
Message::from_diagnostic(
diagnostic.with_fix(Fix::safe_edit(edit)),
SourceFileBuilder::new(filename, source).finish(),
None,
)
})
.collect()
}

View File

@@ -1,6 +1,7 @@
//! Insert statements into Python code.
use std::ops::Add;
use ruff_diagnostics::Edit;
use ruff_python_ast::Stmt;
use ruff_python_ast::helpers::is_docstring_stmt;
use ruff_python_codegen::Stylist;
@@ -9,7 +10,6 @@ use ruff_python_trivia::{PythonWhitespace, textwrap::indent};
use ruff_source_file::{LineRanges, UniversalNewlineIterator};
use ruff_text_size::{Ranged, TextSize};
use crate::Edit;
use crate::Locator;
#[derive(Debug, Clone, PartialEq, Eq)]

View File

@@ -8,6 +8,7 @@ use std::error::Error;
use anyhow::Result;
use libcst_native::{ImportAlias, Name as cstName, NameOrAttribute};
use ruff_diagnostics::Edit;
use ruff_python_ast::{self as ast, Expr, ModModule, Stmt};
use ruff_python_codegen::Stylist;
use ruff_python_parser::{Parsed, Tokens};
@@ -17,7 +18,6 @@ use ruff_python_semantic::{
use ruff_python_trivia::textwrap::indent;
use ruff_text_size::{Ranged, TextSize};
use crate::Edit;
use crate::Locator;
use crate::cst::matchers::{match_aliases, match_import_from, match_statement};
use crate::fix;

View File

@@ -14,17 +14,12 @@ pub use rule_selector::RuleSelector;
pub use rule_selector::clap_completion::RuleSelectorParser;
pub use rules::pycodestyle::rules::IOError;
pub use diagnostic::OldDiagnostic;
pub(crate) use ruff_diagnostics::{Applicability, Edit, Fix};
pub use violation::{AlwaysFixableViolation, FixAvailability, Violation, ViolationMetadata};
pub const VERSION: &str = env!("CARGO_PKG_VERSION");
mod checkers;
pub mod codes;
mod comments;
mod cst;
mod diagnostic;
pub mod directives;
mod doc_lines;
mod docstrings;
@@ -50,7 +45,6 @@ pub mod settings;
pub mod source_kind;
mod text_helpers;
pub mod upstream_categories;
mod violation;
#[cfg(any(test, fuzzing))]
pub mod test;

View File

@@ -1,4 +1,6 @@
use std::borrow::Cow;
use std::cell::LazyCell;
use std::ops::Deref;
use std::path::Path;
use anyhow::{Result, anyhow};
@@ -7,16 +9,16 @@ use itertools::Itertools;
use ruff_python_parser::semantic_errors::SemanticSyntaxError;
use rustc_hash::FxHashMap;
use ruff_diagnostics::Diagnostic;
use ruff_notebook::Notebook;
use ruff_python_ast::{ModModule, PySourceType, PythonVersion};
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_python_parser::{ParseError, ParseOptions, Parsed, UnsupportedSyntaxError};
use ruff_source_file::{SourceFile, SourceFileBuilder};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::Ranged;
use crate::OldDiagnostic;
use crate::checkers::ast::{LintContext, check_ast};
use crate::checkers::ast::check_ast;
use crate::checkers::filesystem::check_file_path;
use crate::checkers::imports::check_imports;
use crate::checkers::noqa::check_noqa;
@@ -111,11 +113,8 @@ pub fn check_path(
parsed: &Parsed<ModModule>,
target_version: TargetVersion,
) -> Vec<Message> {
let source_file =
SourceFileBuilder::new(path.to_string_lossy().as_ref(), locator.contents()).finish();
// Aggregate all diagnostics.
let mut diagnostics = LintContext::new(&source_file);
let mut diagnostics = vec![];
// Aggregate all semantic syntax errors.
let mut semantic_syntax_errors = vec![];
@@ -137,7 +136,7 @@ pub fn check_path(
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_tokens())
{
check_tokens(
diagnostics.extend(check_tokens(
tokens,
path,
locator,
@@ -146,8 +145,7 @@ pub fn check_path(
settings,
source_type,
source_kind.as_ipy_notebook().map(Notebook::cell_offsets),
&mut diagnostics,
);
));
}
// Run the filesystem-based rules.
@@ -156,15 +154,14 @@ pub fn check_path(
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_filesystem())
{
check_file_path(
diagnostics.extend(check_file_path(
path,
package,
locator,
comment_ranges,
settings,
target_version.linter_version(),
&diagnostics,
);
));
}
// Run the logical line-based rules.
@@ -173,14 +170,9 @@ pub fn check_path(
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_logical_lines())
{
crate::checkers::logical_lines::check_logical_lines(
tokens,
locator,
indexer,
stylist,
settings,
&diagnostics,
);
diagnostics.extend(crate::checkers::logical_lines::check_logical_lines(
tokens, locator, indexer, stylist, settings,
));
}
// Run the AST-based rules only if there are no syntax errors.
@@ -188,7 +180,7 @@ pub fn check_path(
let cell_offsets = source_kind.as_ipy_notebook().map(Notebook::cell_offsets);
let notebook_index = source_kind.as_ipy_notebook().map(Notebook::index);
semantic_syntax_errors.extend(check_ast(
let (new_diagnostics, new_semantic_syntax_errors) = check_ast(
parsed,
locator,
stylist,
@@ -202,8 +194,9 @@ pub fn check_path(
cell_offsets,
notebook_index,
target_version,
&diagnostics,
));
);
diagnostics.extend(new_diagnostics);
semantic_syntax_errors.extend(new_semantic_syntax_errors);
let use_imports = !directives.isort.skip_file
&& settings
@@ -212,7 +205,7 @@ pub fn check_path(
.any(|rule_code| rule_code.lint_source().is_imports());
if use_imports || use_doc_lines {
if use_imports {
check_imports(
let import_diagnostics = check_imports(
parsed,
locator,
indexer,
@@ -223,8 +216,9 @@ pub fn check_path(
source_type,
cell_offsets,
target_version.linter_version(),
&diagnostics,
);
diagnostics.extend(import_diagnostics);
}
if use_doc_lines {
doc_lines.extend(doc_lines_from_ast(parsed.suite(), locator));
@@ -244,14 +238,9 @@ pub fn check_path(
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_physical_lines())
{
check_physical_lines(
locator,
stylist,
indexer,
&doc_lines,
settings,
&diagnostics,
);
diagnostics.extend(check_physical_lines(
locator, stylist, indexer, &doc_lines, settings,
));
}
// Raise violations for internal test rules
@@ -261,70 +250,47 @@ pub fn check_path(
if !settings.rules.enabled(*test_rule) {
continue;
}
match test_rule {
let diagnostic = match test_rule {
Rule::StableTestRule => {
test_rules::StableTestRule::diagnostic(locator, comment_ranges, &diagnostics);
test_rules::StableTestRule::diagnostic(locator, comment_ranges)
}
Rule::StableTestRuleSafeFix => {
test_rules::StableTestRuleSafeFix::diagnostic(locator, comment_ranges)
}
Rule::StableTestRuleUnsafeFix => {
test_rules::StableTestRuleUnsafeFix::diagnostic(locator, comment_ranges)
}
Rule::StableTestRuleSafeFix => test_rules::StableTestRuleSafeFix::diagnostic(
locator,
comment_ranges,
&diagnostics,
),
Rule::StableTestRuleUnsafeFix => test_rules::StableTestRuleUnsafeFix::diagnostic(
locator,
comment_ranges,
&diagnostics,
),
Rule::StableTestRuleDisplayOnlyFix => {
test_rules::StableTestRuleDisplayOnlyFix::diagnostic(
locator,
comment_ranges,
&diagnostics,
);
test_rules::StableTestRuleDisplayOnlyFix::diagnostic(locator, comment_ranges)
}
Rule::PreviewTestRule => {
test_rules::PreviewTestRule::diagnostic(locator, comment_ranges, &diagnostics);
test_rules::PreviewTestRule::diagnostic(locator, comment_ranges)
}
Rule::DeprecatedTestRule => {
test_rules::DeprecatedTestRule::diagnostic(
locator,
comment_ranges,
&diagnostics,
);
test_rules::DeprecatedTestRule::diagnostic(locator, comment_ranges)
}
Rule::AnotherDeprecatedTestRule => {
test_rules::AnotherDeprecatedTestRule::diagnostic(
locator,
comment_ranges,
&diagnostics,
);
test_rules::AnotherDeprecatedTestRule::diagnostic(locator, comment_ranges)
}
Rule::RemovedTestRule => {
test_rules::RemovedTestRule::diagnostic(locator, comment_ranges, &diagnostics);
test_rules::RemovedTestRule::diagnostic(locator, comment_ranges)
}
Rule::AnotherRemovedTestRule => {
test_rules::AnotherRemovedTestRule::diagnostic(locator, comment_ranges)
}
Rule::RedirectedToTestRule => {
test_rules::RedirectedToTestRule::diagnostic(locator, comment_ranges)
}
Rule::RedirectedFromTestRule => {
test_rules::RedirectedFromTestRule::diagnostic(locator, comment_ranges)
}
Rule::AnotherRemovedTestRule => test_rules::AnotherRemovedTestRule::diagnostic(
locator,
comment_ranges,
&diagnostics,
),
Rule::RedirectedToTestRule => test_rules::RedirectedToTestRule::diagnostic(
locator,
comment_ranges,
&diagnostics,
),
Rule::RedirectedFromTestRule => test_rules::RedirectedFromTestRule::diagnostic(
locator,
comment_ranges,
&diagnostics,
),
Rule::RedirectedFromPrefixTestRule => {
test_rules::RedirectedFromPrefixTestRule::diagnostic(
locator,
comment_ranges,
&diagnostics,
);
test_rules::RedirectedFromPrefixTestRule::diagnostic(locator, comment_ranges)
}
_ => unreachable!("All test rules must have an implementation"),
};
if let Some(diagnostic) = diagnostic {
diagnostics.push(diagnostic);
}
}
}
@@ -342,9 +308,7 @@ pub fn check_path(
RuleSet::empty()
};
if !per_file_ignores.is_empty() {
diagnostics
.as_mut_vec()
.retain(|diagnostic| !per_file_ignores.contains(diagnostic.rule()));
diagnostics.retain(|diagnostic| !per_file_ignores.contains(diagnostic.rule()));
}
// Enforce `noqa` directives.
@@ -366,13 +330,11 @@ pub fn check_path(
);
if noqa.is_enabled() {
for index in ignored.iter().rev() {
diagnostics.as_mut_vec().swap_remove(*index);
diagnostics.swap_remove(*index);
}
}
}
let mut diagnostics = diagnostics.into_diagnostics();
if parsed.has_valid_syntax() {
// Remove fixes for any rules marked as unfixable.
for diagnostic in &mut diagnostics {
@@ -410,9 +372,9 @@ pub fn check_path(
parsed.errors(),
syntax_errors,
&semantic_syntax_errors,
path,
locator,
directives,
&source_file,
)
}
@@ -476,7 +438,7 @@ pub fn add_noqa_to_path(
)
}
/// Generate a [`Message`] for each [`OldDiagnostic`] triggered by the given source
/// Generate a [`Message`] for each [`Diagnostic`] triggered by the given source
/// code.
pub fn lint_only(
path: &Path,
@@ -541,28 +503,39 @@ pub fn lint_only(
/// Convert from diagnostics to messages.
fn diagnostics_to_messages(
diagnostics: Vec<OldDiagnostic>,
diagnostics: Vec<Diagnostic>,
parse_errors: &[ParseError],
unsupported_syntax_errors: &[UnsupportedSyntaxError],
semantic_syntax_errors: &[SemanticSyntaxError],
path: &Path,
locator: &Locator,
directives: &Directives,
source_file: &SourceFile,
) -> Vec<Message> {
let file = LazyCell::new(|| {
let mut builder =
SourceFileBuilder::new(path.to_string_lossy().as_ref(), locator.contents());
if let Some(line_index) = locator.line_index() {
builder.set_line_index(line_index.clone());
}
builder.finish()
});
parse_errors
.iter()
.map(|parse_error| Message::from_parse_error(parse_error, locator, source_file.clone()))
.map(|parse_error| Message::from_parse_error(parse_error, locator, file.deref().clone()))
.chain(unsupported_syntax_errors.iter().map(|syntax_error| {
Message::from_unsupported_syntax_error(syntax_error, source_file.clone())
Message::from_unsupported_syntax_error(syntax_error, file.deref().clone())
}))
.chain(
semantic_syntax_errors
.iter()
.map(|error| Message::from_semantic_syntax_error(error, source_file.clone())),
.map(|error| Message::from_semantic_syntax_error(error, file.deref().clone())),
)
.chain(diagnostics.into_iter().map(|diagnostic| {
let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start());
Message::from_diagnostic(diagnostic, Some(noqa_offset))
Message::from_diagnostic(diagnostic, file.deref().clone(), Some(noqa_offset))
}))
.collect()
}
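The new `diagnostics_to_messages` wraps `SourceFile` construction in a `LazyCell`, so the file is built at most once, and only if some error or diagnostic actually needs it. A sketch of the pattern with a stand-in `SourceFile` (`LazyCell` is stable since Rust 1.80):

use std::cell::LazyCell;
use std::ops::Deref;

// Stand-in for the real SourceFile, which is cheap to clone but
// potentially costly to build.
#[derive(Clone, Debug)]
struct SourceFile {
    name: String,
}

fn main() {
    let errors = vec!["invalid-syntax", "unsupported-syntax"];

    // The closure runs at most once, on the first `deref()`; with no
    // errors or diagnostics, the SourceFile is never built at all.
    let file = LazyCell::new(|| {
        println!("building SourceFile"); // printed once, not per message
        SourceFile {
            name: "<filename>".to_string(),
        }
    });

    let messages: Vec<(String, SourceFile)> = errors
        .iter()
        .map(|error| (error.to_string(), file.deref().clone()))
        .collect();

    assert_eq!(messages.len(), 2);
    assert_eq!(messages[0].1.name, "<filename>"); // built once, cloned per message
}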

View File

@@ -6,11 +6,11 @@ use colored::{Color, ColoredString, Colorize, Styles};
use ruff_text_size::{Ranged, TextRange, TextSize};
use similar::{ChangeTag, TextDiff};
use ruff_diagnostics::{Applicability, Fix};
use ruff_source_file::{OneIndexed, SourceFile};
use crate::message::Message;
use crate::text_helpers::ShowNonprinting;
use crate::{Applicability, Fix};
/// Renders a diff that shows the code fixes.
///

View File

@@ -4,11 +4,11 @@ use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use serde_json::{Value, json};
use ruff_diagnostics::Edit;
use ruff_notebook::NotebookIndex;
use ruff_source_file::{LineColumn, OneIndexed, SourceCode};
use ruff_text_size::Ranged;
use crate::Edit;
use crate::message::{Emitter, EmitterContext, Message};
#[derive(Default)]

View File

@@ -16,6 +16,7 @@ pub use json_lines::JsonLinesEmitter;
pub use junit::JunitEmitter;
pub use pylint::PylintEmitter;
pub use rdjson::RdjsonEmitter;
use ruff_diagnostics::{Diagnostic, Fix};
use ruff_notebook::NotebookIndex;
use ruff_python_parser::{ParseError, UnsupportedSyntaxError};
use ruff_source_file::{LineColumn, SourceFile};
@@ -27,7 +28,6 @@ use crate::Locator;
use crate::codes::NoqaCode;
use crate::logging::DisplayParseErrorType;
use crate::registry::Rule;
use crate::{Fix, OldDiagnostic};
mod azure;
mod diff;
@@ -50,7 +50,7 @@ mod text;
/// `noqa` offsets.
///
/// For diagnostic messages, the [`db::Diagnostic`]'s primary message contains the
/// [`OldDiagnostic::body`], and the primary annotation optionally contains the suggestion accompanying
/// [`Diagnostic::body`], and the primary annotation optionally contains the suggestion accompanying
/// a fix. The `db::Diagnostic::id` field contains the kebab-case lint name derived from the `Rule`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Message {
@@ -60,7 +60,6 @@ pub struct Message {
pub fix: Option<Fix>,
pub parent: Option<TextSize>,
pub(crate) noqa_offset: Option<TextSize>,
noqa_code: Option<NoqaCode>,
}
impl Message {
@@ -77,12 +76,12 @@ impl Message {
fix: None,
parent: None,
noqa_offset: None,
noqa_code: None,
}
}
#[expect(clippy::too_many_arguments)]
pub fn diagnostic(
name: &'static str,
body: String,
suggestion: Option<String>,
range: TextRange,
@@ -90,10 +89,9 @@ impl Message {
parent: Option<TextSize>,
file: SourceFile,
noqa_offset: Option<TextSize>,
rule: Rule,
) -> Message {
let mut diagnostic = db::Diagnostic::new(
DiagnosticId::Lint(LintName::of(rule.into())),
DiagnosticId::Lint(LintName::of(name)),
Severity::Error,
body,
);
@@ -109,22 +107,25 @@ impl Message {
fix,
parent,
noqa_offset,
noqa_code: Some(rule.noqa_code()),
}
}
/// Create a [`Message`] from the given [`OldDiagnostic`] corresponding to a rule violation.
pub fn from_diagnostic(diagnostic: OldDiagnostic, noqa_offset: Option<TextSize>) -> Message {
let OldDiagnostic {
/// Create a [`Message`] from the given [`Diagnostic`] corresponding to a rule violation.
pub fn from_diagnostic(
diagnostic: Diagnostic,
file: SourceFile,
noqa_offset: Option<TextSize>,
) -> Message {
let Diagnostic {
name,
body,
suggestion,
range,
fix,
parent,
rule,
file,
} = diagnostic;
Self::diagnostic(
name,
body,
suggestion,
range,
@@ -132,7 +133,6 @@ impl Message {
parent,
file,
noqa_offset,
rule,
)
}
@@ -235,7 +235,7 @@ impl Message {
/// Returns the [`NoqaCode`] corresponding to the diagnostic message.
pub fn to_noqa_code(&self) -> Option<NoqaCode> {
self.noqa_code
self.to_rule().map(|rule| rule.noqa_code())
}
/// Returns the URL for the rule documentation, if it exists.
@@ -371,8 +371,7 @@ impl<'a> EmitterContext<'a> {
mod tests {
use rustc_hash::FxHashMap;
use crate::codes::Rule;
use crate::{Edit, Fix};
use ruff_diagnostics::{Edit, Fix};
use ruff_notebook::NotebookIndex;
use ruff_python_parser::{Mode, ParseOptions, parse_unchecked};
use ruff_source_file::{OneIndexed, SourceFileBuilder};
@@ -418,6 +417,7 @@ def fibonacci(n):
let unused_import_start = TextSize::from(7);
let unused_import = Message::diagnostic(
"unused-import",
"`os` imported but unused".to_string(),
Some("Remove unused import: `os`".to_string()),
TextRange::new(unused_import_start, TextSize::from(9)),
@@ -428,11 +428,11 @@ def fibonacci(n):
None,
fib_source.clone(),
Some(unused_import_start),
Rule::UnusedImport,
);
let unused_variable_start = TextSize::from(94);
let unused_variable = Message::diagnostic(
"unused-variable",
"Local variable `x` is assigned to but never used".to_string(),
Some("Remove assignment to unused variable `x`".to_string()),
TextRange::new(unused_variable_start, TextSize::from(95)),
@@ -443,13 +443,13 @@ def fibonacci(n):
None,
fib_source,
Some(unused_variable_start),
Rule::UnusedVariable,
);
let file_2 = r"if a == 1: pass";
let undefined_name_start = TextSize::from(3);
let undefined_name = Message::diagnostic(
"undefined-name",
"Undefined name `a`".to_string(),
None,
TextRange::new(undefined_name_start, TextSize::from(4)),
@@ -457,7 +457,6 @@ def fibonacci(n):
None,
SourceFileBuilder::new("undef.py", file_2).finish(),
Some(undefined_name_start),
Rule::UndefinedName,
);
vec![unused_import, unused_variable, undefined_name]
@@ -480,6 +479,7 @@ def foo():
let unused_import_os_start = TextSize::from(16);
let unused_import_os = Message::diagnostic(
"unused-import",
"`os` imported but unused".to_string(),
Some("Remove unused import: `os`".to_string()),
TextRange::new(unused_import_os_start, TextSize::from(18)),
@@ -490,11 +490,11 @@ def foo():
None,
notebook_source.clone(),
Some(unused_import_os_start),
Rule::UnusedImport,
);
let unused_import_math_start = TextSize::from(35);
let unused_import_math = Message::diagnostic(
"unused-import",
"`math` imported but unused".to_string(),
Some("Remove unused import: `math`".to_string()),
TextRange::new(unused_import_math_start, TextSize::from(39)),
@@ -505,11 +505,11 @@ def foo():
None,
notebook_source.clone(),
Some(unused_import_math_start),
Rule::UnusedImport,
);
let unused_variable_start = TextSize::from(98);
let unused_variable = Message::diagnostic(
"unused-variable",
"Local variable `x` is assigned to but never used".to_string(),
Some("Remove assignment to unused variable `x`".to_string()),
TextRange::new(unused_variable_start, TextSize::from(99)),
@@ -520,7 +520,6 @@ def foo():
None,
notebook_source,
Some(unused_variable_start),
Rule::UnusedVariable,
);
let mut notebook_indexes = FxHashMap::default();
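The `Message` changes in this file trade a stored `noqa_code` field for deriving the code on demand via `to_rule().map(|rule| rule.noqa_code())`. A stand-in sketch of the derive-on-demand side; `Rule`, `NoqaCode`, and the name mapping below are illustrative only, not Ruff's real definitions:

#[derive(Clone, Copy, PartialEq, Debug)]
struct NoqaCode(&'static str);

#[derive(Clone, Copy)]
enum Rule {
    UnusedImport,
}

impl Rule {
    fn noqa_code(self) -> NoqaCode {
        match self {
            Rule::UnusedImport => NoqaCode("F401"),
        }
    }
}

struct Message {
    name: &'static str,
}

impl Message {
    fn to_rule(&self) -> Option<Rule> {
        match self.name {
            "unused-import" => Some(Rule::UnusedImport),
            _ => None,
        }
    }

    // Recompute the code from the rule each time, instead of caching a
    // `noqa_code` field at construction.
    fn to_noqa_code(&self) -> Option<NoqaCode> {
        self.to_rule().map(|rule| rule.noqa_code())
    }
}

fn main() {
    let message = Message {
        name: "unused-import",
    };
    assert_eq!(message.to_noqa_code(), Some(NoqaCode("F401")));
}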

View File

@@ -4,10 +4,10 @@ use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use serde_json::{Value, json};
use ruff_diagnostics::Edit;
use ruff_source_file::SourceCode;
use ruff_text_size::Ranged;
use crate::Edit;
use crate::message::{Emitter, EmitterContext, LineColumn, Message};
#[derive(Default)]

View File

@@ -9,11 +9,11 @@ use anyhow::Result;
use itertools::Itertools;
use log::warn;
use ruff_diagnostics::Edit;
use ruff_python_trivia::{CommentRanges, Cursor, indentation_at_offset};
use ruff_source_file::{LineEnding, LineRanges};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use crate::Edit;
use crate::Locator;
use crate::codes::NoqaCode;
use crate::fs::relativize_path;
@@ -1221,6 +1221,7 @@ mod tests {
use insta::assert_debug_snapshot;
use ruff_diagnostics::{Diagnostic, Edit};
use ruff_python_trivia::CommentRanges;
use ruff_source_file::{LineEnding, SourceFileBuilder};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
@@ -1233,7 +1234,6 @@ mod tests {
use crate::rules::pycodestyle::rules::{AmbiguousVariableName, UselessSemicolon};
use crate::rules::pyflakes::rules::UnusedVariable;
use crate::rules::pyupgrade::rules::PrintfStringFormatting;
use crate::{Edit, OldDiagnostic};
use crate::{Locator, generate_noqa_edits};
fn assert_lexed_ranges_match_slices(
@@ -1252,9 +1252,14 @@ mod tests {
}
/// Create a [`Message`] with a placeholder filename and rule code from `diagnostic`.
fn message_from_diagnostic(diagnostic: OldDiagnostic) -> Message {
fn message_from_diagnostic(
diagnostic: Diagnostic,
path: impl AsRef<Path>,
source: &str,
) -> Message {
let noqa_offset = diagnostic.start();
Message::from_diagnostic(diagnostic, Some(noqa_offset))
let file = SourceFileBuilder::new(path.as_ref().to_string_lossy(), source).finish();
Message::from_diagnostic(diagnostic, file, Some(noqa_offset))
}
#[test]
@@ -2837,15 +2842,13 @@ mod tests {
assert_eq!(count, 0);
assert_eq!(output, format!("{contents}"));
- let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
- let messages = [OldDiagnostic::new(
+ let messages = [Diagnostic::new(
UnusedVariable {
name: "x".to_string(),
},
TextRange::new(TextSize::from(0), TextSize::from(0)),
&source_file,
)]
- .map(message_from_diagnostic);
+ .map(|d| message_from_diagnostic(d, path, contents));
let contents = "x = 1";
let noqa_line_for = NoqaMapping::default();
@@ -2861,22 +2864,19 @@ mod tests {
assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: F841\n");
- let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
let messages = [
- OldDiagnostic::new(
+ Diagnostic::new(
AmbiguousVariableName("x".to_string()),
TextRange::new(TextSize::from(0), TextSize::from(0)),
&source_file,
),
- OldDiagnostic::new(
+ Diagnostic::new(
UnusedVariable {
name: "x".to_string(),
},
TextRange::new(TextSize::from(0), TextSize::from(0)),
&source_file,
),
]
- .map(message_from_diagnostic);
+ .map(|d| message_from_diagnostic(d, path, contents));
let contents = "x = 1 # noqa: E741\n";
let noqa_line_for = NoqaMapping::default();
let comment_ranges =
@@ -2893,22 +2893,19 @@ mod tests {
assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: E741, F841\n");
- let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
let messages = [
- OldDiagnostic::new(
+ Diagnostic::new(
AmbiguousVariableName("x".to_string()),
TextRange::new(TextSize::from(0), TextSize::from(0)),
&source_file,
),
- OldDiagnostic::new(
+ Diagnostic::new(
UnusedVariable {
name: "x".to_string(),
},
TextRange::new(TextSize::from(0), TextSize::from(0)),
&source_file,
),
]
- .map(message_from_diagnostic);
+ .map(|d| message_from_diagnostic(d, path, contents));
let contents = "x = 1 # noqa";
let noqa_line_for = NoqaMapping::default();
let comment_ranges =
@@ -2939,13 +2936,11 @@ print(
)
"#;
let noqa_line_for = [TextRange::new(8.into(), 68.into())].into_iter().collect();
- let source_file = SourceFileBuilder::new(path.to_string_lossy(), source).finish();
- let messages = [OldDiagnostic::new(
+ let messages = [Diagnostic::new(
PrintfStringFormatting,
TextRange::new(12.into(), 79.into()),
&source_file,
)]
- .map(message_from_diagnostic);
+ .map(|d| message_from_diagnostic(d, path, source));
let comment_ranges = CommentRanges::default();
let edits = generate_noqa_edits(
path,
@@ -2973,13 +2968,11 @@ print(
foo;
bar =
";
- let source_file = SourceFileBuilder::new(path.to_string_lossy(), source).finish();
- let messages = [OldDiagnostic::new(
+ let messages = [Diagnostic::new(
UselessSemicolon,
TextRange::new(4.into(), 5.into()),
&source_file,
)]
- .map(message_from_diagnostic);
+ .map(|d| message_from_diagnostic(d, path, source));
let noqa_line_for = NoqaMapping::default();
let comment_ranges = CommentRanges::default();
let edits = generate_noqa_edits(

View File

@@ -127,10 +127,6 @@ pub(crate) const fn is_check_file_level_directives_enabled(settings: &LinterSett
}
// https://github.com/astral-sh/ruff/pull/17644
pub(crate) const fn is_readlines_in_for_fix_safe_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
pub(crate) const fn multiple_with_statements_fix_safe_enabled(settings: &LinterSettings) -> bool {
pub(crate) const fn is_readlines_in_for_fix_safe(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
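Nothing in this hunk shows the call site, but helpers of this shape are conventionally consulted when choosing a fix's applicability. A hedged sketch of such a caller (the `Applicability` usage and module paths are assumptions, not part of this diff):

```rust
use ruff_diagnostics::Applicability;

use crate::settings::LinterSettings;

// Assumed call-site shape: promote the fix to safe only under preview.
// `is_readlines_in_for_fix_safe` is the helper defined above.
fn readlines_fix_applicability(settings: &LinterSettings) -> Applicability {
    if is_readlines_in_for_fix_safe(settings) {
        Applicability::Safe
    } else {
        Applicability::Unsafe
    }
}
```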

View File

@@ -3,16 +3,16 @@ use log::warn;
use pyproject_toml::PyProjectToml;
use ruff_text_size::{TextRange, TextSize};
+ use ruff_diagnostics::Diagnostic;
use ruff_source_file::SourceFile;
use crate::IOError;
- use crate::OldDiagnostic;
use crate::message::Message;
use crate::registry::Rule;
use crate::rules::ruff::rules::InvalidPyprojectToml;
use crate::settings::LinterSettings;
pub fn lint_pyproject_toml(source_file: &SourceFile, settings: &LinterSettings) -> Vec<Message> {
pub fn lint_pyproject_toml(source_file: SourceFile, settings: &LinterSettings) -> Vec<Message> {
let Some(err) = toml::from_str::<PyProjectToml>(source_file.source_text()).err() else {
return Vec::default();
};
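Since the head side now takes the `SourceFile` by value, a caller hands over an owned file. A hypothetical invocation (the file name, contents, and use of default settings are assumptions):

```rust
use ruff_source_file::SourceFileBuilder;

use crate::settings::LinterSettings;

fn lint_example() -> usize {
    let contents = "[project]\nname = \"demo\"\n";
    let file = SourceFileBuilder::new("pyproject.toml", contents).finish();
    // `lint_pyproject_toml` is the function defined above.
    let messages = lint_pyproject_toml(file, &LinterSettings::default());
    messages.len()
}
```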
@@ -29,9 +29,8 @@ pub fn lint_pyproject_toml(source_file: &SourceFile, settings: &LinterSettings)
source_file.name(),
);
if settings.rules.enabled(Rule::IOError) {
- let diagnostic =
- OldDiagnostic::new(IOError { message }, TextRange::default(), source_file);
- messages.push(Message::from_diagnostic(diagnostic, None));
+ let diagnostic = Diagnostic::new(IOError { message }, TextRange::default());
+ messages.push(Message::from_diagnostic(diagnostic, source_file, None));
} else {
warn!(
"{}{}{} {message}",
@@ -52,12 +51,8 @@ pub fn lint_pyproject_toml(source_file: &SourceFile, settings: &LinterSettings)
if settings.rules.enabled(Rule::InvalidPyprojectToml) {
let toml_err = err.message().to_string();
- let diagnostic = OldDiagnostic::new(
- InvalidPyprojectToml { message: toml_err },
- range,
- source_file,
- );
- messages.push(Message::from_diagnostic(diagnostic, None));
+ let diagnostic = Diagnostic::new(InvalidPyprojectToml { message: toml_err }, range);
+ messages.push(Message::from_diagnostic(diagnostic, source_file, None));
}
messages
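Both hunks in this file perform the same mechanical rewrite: drop the `SourceFile` from the diagnostic constructor and attach it when converting to a `Message` instead. Factored out, the head-side shape looks roughly like this (the free-standing helper itself is hypothetical; the calls mirror the pushes above):

```rust
use ruff_diagnostics::Diagnostic;
use ruff_source_file::SourceFile;
use ruff_text_size::TextRange;

use crate::message::Message;
use crate::rules::ruff::rules::InvalidPyprojectToml;

// Hypothetical helper mirroring the two pushes above: build the kind + range,
// then attach the SourceFile at Message conversion time.
fn push_invalid_toml(
    messages: &mut Vec<Message>,
    source_file: SourceFile,
    toml_err: String,
    range: TextRange,
) {
    let diagnostic = Diagnostic::new(InvalidPyprojectToml { message: toml_err }, range);
    messages.push(Message::from_diagnostic(diagnostic, source_file, None));
}
```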

Some files were not shown because too many files have changed in this diff