Compare commits
218 commits: gankra/roo...brent/lamb
| Author | SHA1 | Date |
|---|---|---|
|  | 79d526cd91 |  |
|  | 376571eed1 |  |
|  | e4374f14ed |  |
|  | 03bd0619e9 |  |
|  | bd8812127d |  |
|  | 44b0c9ebac |  |
|  | 7b237d316f |  |
|  | 36cce347fd |  |
|  | 33b942c7ad |  |
|  | 9ce3230add |  |
|  | 2bc6c78e26 |  |
|  | 1fd852fb3f |  |
|  | 5f3e086ee4 |  |
|  | 039a69fa8c |  |
|  | 3656b44877 |  |
|  | 98869f0307 |  |
|  | d258302b08 |  |
|  | deeda56906 |  |
|  | f63a9f2334 |  |
|  | e4dc406a3d |  |
|  | 1d188476b6 |  |
|  | 4821c050ef |  |
|  | 835e31b3ff |  |
|  | 8d1efe964a |  |
|  | 04e7cecab3 |  |
|  | 84a810736d |  |
|  | f44598dc11 |  |
|  | ab46c8de0f |  |
|  | a6f2dee33b |  |
|  | 238f151371 |  |
|  | 3fa609929f |  |
|  | 73b1fce74a |  |
|  | 52bd22003b |  |
|  | 0a6d6b6194 |  |
|  | ca51feb319 |  |
|  | e0a3cbb048 |  |
|  | f4f259395c |  |
|  | 2a1d412f72 |  |
|  | dd751e8d07 |  |
|  | 020ff1723b |  |
|  | 09e6af16c8 |  |
|  | 76efc8061d |  |
|  | 16de4aa3cc |  |
|  | e06e108095 |  |
|  | b6add3ee6d |  |
|  | 39c21d7c6c |  |
|  | 1617292e9f |  |
|  | faae72b836 |  |
|  | 276f1d0d88 |  |
|  | ed18112cfa |  |
|  | 8ba1cfebed |  |
|  | 6185a2af9e |  |
|  | 6cc3393ccd |  |
|  | c7ff9826d6 |  |
|  | 35640dd853 |  |
|  | cb2e277482 |  |
|  | 132d10fb6f |  |
|  | f189aad6d2 |  |
|  | 5517c9943a |  |
|  | b5ff96595d |  |
|  | c6573b16ac |  |
|  | 76127e5fb5 |  |
|  | cddc0fedc2 |  |
|  | eda85f3c64 |  |
|  | cef6600cf3 |  |
|  | 5c69e00d1c |  |
|  | 7569b09bdd |  |
|  | 7009d60260 |  |
|  | f79044478c |  |
|  | 47e41ac6b6 |  |
|  | 2e7ab00d51 |  |
|  | d8106d38a0 |  |
|  | 4fd8d4b0ee |  |
|  | 63b1c1ea8b |  |
|  | 3c5e4e1477 |  |
|  | 3c8fb68765 |  |
|  | 42adfd40ea |  |
|  | 79a02711c1 |  |
|  | d2fe6347fb |  |
|  | 1fe958c694 |  |
|  | fe4ee81b97 |  |
|  | 0433526897 |  |
|  | 78ee7ae925 |  |
|  | 21ec8aa7d4 |  |
|  | 64a255df49 |  |
|  | b5305b5f32 |  |
|  | 39f105bc4a |  |
|  | e8c35b9704 |  |
|  | 1b2ed6a503 |  |
|  | de9df1b326 |  |
|  | e2e83acd2f |  |
|  | 6ddfb51d71 |  |
|  | e017b039df |  |
|  | 0dfd55babf |  |
|  | f947c23cd7 |  |
|  | 02879fa377 |  |
|  | 7dbfb56c3d |  |
|  | f97c38dd88 |  |
|  | 14fce1f788 |  |
|  | 31194d048d |  |
|  | fe95ff6b06 |  |
|  | 61c1007137 |  |
|  | 884c3b178e |  |
|  | ade727ce66 |  |
|  | 3493c9b67a |  |
|  | 666dd5fef1 |  |
|  | bb05527350 |  |
|  | dc373e639e |  |
|  | fc71c90de6 |  |
|  | c11b00bea0 |  |
|  | 770b4d12ab |  |
|  | c596a78c08 |  |
|  | cb98175a36 |  |
|  | 3bef60f69a |  |
|  | f477e11d26 |  |
|  | c0bd092fa9 |  |
|  | 73b9b8eb6b |  |
|  | 80eeb1d64f |  |
|  | 50b75cfcc6 |  |
|  | 1c4a9d6a06 |  |
|  | 222c6fd496 |  |
|  | b754abff1b |  |
|  | 41fe4d7f8c |  |
|  | f14631e1cc |  |
|  | 02f2dba28e |  |
|  | 0454a72674 |  |
|  | c32234cf0d |  |
|  | 566d1d6497 |  |
|  | 6c3d6124c8 |  |
|  | 73107a083c |  |
|  | de1a6fb8ad |  |
|  | bff32a41dc |  |
|  | 17c7b3cde1 |  |
|  | 921f409ee8 |  |
|  | a151f9746d |  |
|  | 521217bb90 |  |
|  | a32d5b8dc4 |  |
|  | 6337e22f0c |  |
|  | 827d8ae5d4 |  |
|  | 1734ddfb3e |  |
|  | ff3a6a8fbd |  |
|  | 9664474c51 |  |
|  | 69b4c29924 |  |
|  | bb40c34361 |  |
|  | b93d8f2b9f |  |
|  | 1d111c8780 |  |
|  | 9d7da914b9 |  |
|  | 0c2cf75869 |  |
|  | 1d6ae8596a |  |
|  | cf4e82d4b0 |  |
|  | 1baf98aab3 |  |
|  | 3179b05221 |  |
|  | 172e8d4ae0 |  |
|  | 735ec0c1f9 |  |
|  | 3585c96ea5 |  |
|  | 4b026c2a55 |  |
|  | 4b758b3746 |  |
|  | 8737a2d5f5 |  |
|  | 3be3a10a2f |  |
|  | 13375d0e42 |  |
|  | c0b04d4b7c |  |
|  | 1c7ea690a8 |  |
|  | 9bacd19c5a |  |
|  | f0fe6d62fb |  |
|  | 10bda3df00 |  |
|  | e55bc943e5 |  |
|  | 1b0ee4677e |  |
|  | 1ebedf6df5 |  |
|  | 980b4c55b2 |  |
|  | 5139f76d1f |  |
|  | aca8ba76a4 |  |
|  | 7045898ffa |  |
|  | d38a5292d2 |  |
|  | 83a00c0ac8 |  |
|  | 8b22fd1a5f |  |
|  | 765257bdce |  |
|  | 2d4e0edee4 |  |
|  | 9ce3fa3fe3 |  |
|  | 196a68e4c8 |  |
|  | 349061117c |  |
|  | d0aebaa253 |  |
|  | 17850eee4b |  |
|  | 4d2ee41e24 |  |
|  | 7b959ef44b |  |
|  | 4c4ddc8c29 |  |
|  | ae0343f848 |  |
|  | 29462ea1d4 |  |
|  | 7fee62b2de |  |
|  | 96b60c11d9 |  |
|  | fffbe5a879 |  |
|  | 116611bd39 |  |
|  | db0e921db1 |  |
|  | 3c7f56f582 |  |
|  | 8a73519b25 |  |
|  | fa12fd0184 |  |
|  | fdb8ea487c |  |
|  | d846a0319a |  |
|  | bca5d33385 |  |
|  | c83c4d52a4 |  |
|  | e692b7f1ee |  |
|  | 8e51db3ecd |  |
|  | 64ab79e572 |  |
|  | 1ade9a5943 |  |
|  | 304ac22e74 |  |
|  | c3de8847d5 |  |
|  | f17ddd62ad |  |
|  | adbf05802a |  |
|  | eb8c0ad87c |  |
|  | a2d0d39853 |  |
|  | f36fa7d6c1 |  |
|  | 6f0982d2d6 |  |
|  | 3e8685d2ec |  |
|  | 7576669297 |  |
|  | bf74c824eb |  |
|  | e196c2ab37 |  |
|  | 4522f35ea7 |  |
|  | be5a62f7e5 |  |
|  | 28aed61a22 |  |
.github/workflows/ci.yaml (298 changes)
@@ -231,6 +231,8 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: |
           rustup component add clippy
@@ -251,20 +253,23 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          shared-key: ruff-linux-debug
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
         uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
         with:
           tool: cargo-insta
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
         with:
           enable-cache: "true"
       - name: ty mdtests (GitHub annotations)
@@ -277,8 +282,8 @@ jobs:
         run: cargo test -p ty_python_semantic --test mdtest || true
       - name: "Run tests"
         run: cargo insta test --all-features --unreferenced reject --test-runner nextest
-      # Dogfood ty on py-fuzzer
-      - run: uv run --project=./python/py-fuzzer cargo run -p ty check --project=./python/py-fuzzer
+      - name: Dogfood ty on py-fuzzer
+        run: uv run --project=./python/py-fuzzer cargo run -p ty check --project=./python/py-fuzzer
       # Check for broken links in the documentation.
       - run: cargo doc --all --no-deps
         env:
@@ -291,14 +296,6 @@ jobs:
         env:
           # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
           RUSTDOCFLAGS: "-D warnings"
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: ruff
-          path: target/debug/ruff
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: ty
-          path: target/debug/ty

   cargo-test-linux-release:
     name: "cargo test (linux, release)"
@@ -315,20 +312,22 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
         uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
         with:
           tool: cargo-insta
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
         with:
           enable-cache: "true"
       - name: "Run tests"
@@ -350,14 +349,16 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
         with:
           tool: cargo-nextest
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
         with:
           enable-cache: "true"
       - name: "Run tests"
@@ -376,9 +377,11 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
-      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
           cache: "npm"
@@ -411,6 +414,8 @@ jobs:
           file: "Cargo.toml"
           field: "workspace.package.rust-version"
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         env:
           MSRV: ${{ steps.msrv.outputs.value }}
@@ -435,10 +440,13 @@ jobs:
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
         with:
           workspaces: "fuzz -> target"
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
+      - name: "Install mold"
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo-binstall"
-        uses: cargo-bins/cargo-binstall@a66119fbb1c952daba62640c2609111fe0803621 # v1.15.7
+        uses: cargo-bins/cargo-binstall@ae04fb5e853ae6cd3ad7de4a1d554a8b646d12aa # v1.15.11
       - name: "Install cargo-fuzz"
         # Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
         run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
@@ -447,9 +455,7 @@ jobs:
   fuzz-parser:
     name: "fuzz parser"
     runs-on: ubuntu-latest
-    needs:
-      - cargo-test-linux
-      - determine_changes
+    needs: determine_changes
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.parser == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }}
     timeout-minutes: 20
     env:
@@ -458,27 +464,24 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
-      - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        name: Download Ruff binary to test
-        id: download-cached-binary
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
         with:
-          name: ruff
-          path: ruff-to-test
+          shared-key: ruff-linux-debug
+          save-if: false
+      - name: "Install Rust toolchain"
+        run: rustup show
+      - name: Build Ruff binary
+        run: cargo build --bin ruff
       - name: Fuzz
-        env:
-          DOWNLOAD_PATH: ${{ steps.download-cached-binary.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x "${DOWNLOAD_PATH}/ruff"
-
           (
             uv run \
               --python="${PYTHON_VERSION}" \
               --project=./python/py-fuzzer \
               --locked \
               fuzz \
-              --test-executable="${DOWNLOAD_PATH}/ruff" \
+              --test-executable=target/debug/ruff \
               --bin=ruff \
               0-500
           )
@@ -494,7 +497,9 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
       - name: "Install Rust toolchain"
         run: rustup component add rustfmt
       # Run all code generation scripts, and verify that the current output is
@@ -518,9 +523,7 @@ jobs:
   ecosystem:
     name: "ecosystem"
     runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-latest-8' || 'ubuntu-latest' }}
-    needs:
-      - cargo-test-linux
-      - determine_changes
+    needs: determine_changes
     # Only runs on pull requests, since that is the only we way we can find the base version for comparison.
     # Ecosystem check needs linter and/or formatter changes.
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
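The jobs above drop their dependency on `cargo-test-linux` and keep only the cheap `determine_changes` gate, building their own binaries instead of waiting for test-job artifacts. A minimal sketch of the gating half of that pattern; the output name `parser` appears in this diff, but the detection step and the downstream job body are illustrative, not taken from the real workflow:

```yaml
jobs:
  determine_changes:
    runs-on: ubuntu-latest
    outputs:
      parser: ${{ steps.detect.outputs.parser }}
    steps:
      - uses: actions/checkout@v5
      - id: detect
        # Hypothetical detection step; the real workflow derives several
        # such flags from the changed paths.
        run: echo "parser=true" >> "$GITHUB_OUTPUT"

  fuzz-parser:
    runs-on: ubuntu-latest
    needs: determine_changes  # no longer waits for the full test job
    if: ${{ needs.determine_changes.outputs.parser == 'true' }}
    steps:
      - uses: actions/checkout@v5
      # Build locally instead of downloading a binary artifact.
      - run: cargo build --bin ruff
```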
@@ -528,27 +531,37 @@ jobs:
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
+          ref: ${{ github.event.pull_request.base.ref }}
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
         with:
-          # TODO: figure out why `ruff-ecosystem` crashes on Python 3.14
-          python-version: "3.13"
+          python-version: ${{ env.PYTHON_VERSION }}
           activate-environment: true

-      - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        name: Download comparison Ruff binary
-        id: ruff-target
-        with:
-          name: ruff
-          path: target/debug
+      - name: "Install Rust toolchain"
+        run: rustup show

-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download baseline Ruff binary
+      - name: "Install mold"
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
+
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
         with:
-          name: ruff
-          branch: ${{ github.event.pull_request.base.ref }}
-          workflow: "ci.yaml"
-          check_artifacts: true
+          shared-key: ruff-linux-debug
+          save-if: false
+
+      - name: Build baseline version
+        run: |
+          cargo build --bin ruff
+          mv target/debug/ruff target/debug/ruff-baseline
+
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        with:
+          persist-credentials: false
+          clean: false
+
+      - name: Build comparison version
+        run: cargo build --bin ruff

       - name: Install ruff-ecosystem
         run: |
@@ -556,16 +569,11 @@ jobs:

       - name: Run `ruff check` stable ecosystem check
         if: ${{ needs.determine_changes.outputs.linter == 'true' }}
-        env:
-          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
-
           # Set pipefail to avoid hiding errors with tee
           set -eo pipefail

-          ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
+          ruff-ecosystem check ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable

           cat ecosystem-result-check-stable > "$GITHUB_STEP_SUMMARY"
           echo "### Linter (stable)" > ecosystem-result
@@ -574,16 +582,11 @@ jobs:

       - name: Run `ruff check` preview ecosystem check
         if: ${{ needs.determine_changes.outputs.linter == 'true' }}
-        env:
-          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
-
           # Set pipefail to avoid hiding errors with tee
           set -eo pipefail

-          ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
+          ruff-ecosystem check ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview

           cat ecosystem-result-check-preview > "$GITHUB_STEP_SUMMARY"
           echo "### Linter (preview)" >> ecosystem-result
@@ -592,16 +595,11 @@ jobs:

       - name: Run `ruff format` stable ecosystem check
         if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
-        env:
-          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
-
           # Set pipefail to avoid hiding errors with tee
           set -eo pipefail

-          ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
+          ruff-ecosystem format ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable

           cat ecosystem-result-format-stable > "$GITHUB_STEP_SUMMARY"
           echo "### Formatter (stable)" >> ecosystem-result
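These steps keep the `set -eo pipefail` guard called out in the inline comment: in bash, a pipeline's exit status is normally that of its last command, so `ruff-ecosystem ... | tee file` would report `tee`'s status and silently swallow a failing check. A minimal sketch of the idiom, with a hypothetical command standing in for `ruff-ecosystem`:

```yaml
- name: Run a check without hiding its failures behind tee
  run: |
    # pipefail makes the pipeline fail if any stage fails, not just the last.
    set -eo pipefail
    some-ecosystem-check --output-format markdown | tee result.md  # hypothetical command
```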
@@ -610,32 +608,19 @@ jobs:

       - name: Run `ruff format` preview ecosystem check
         if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
-        env:
-          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
-
           # Set pipefail to avoid hiding errors with tee
           set -eo pipefail

-          ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
+          ruff-ecosystem format ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview

           cat ecosystem-result-format-preview > "$GITHUB_STEP_SUMMARY"
           echo "### Formatter (preview)" >> ecosystem-result
           cat ecosystem-result-format-preview >> ecosystem-result
           echo "" >> ecosystem-result

-      - name: Export pull request number
-        run: |
-          echo ${{ github.event.number }} > pr-number
-
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        name: Upload PR Number
-        with:
-          name: pr-number
-          path: pr-number
-
+      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
+      # Make sure to update the bot if you rename the artifact.
       - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         name: Upload Results
         with:
@@ -646,36 +631,38 @@ jobs:
     name: "Fuzz for new ty panics"
     runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }}
     needs:
-      - cargo-test-linux
       - determine_changes
     # Only runs on pull requests, since that is the only we way we can find the base version for comparison.
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && (needs.determine_changes.outputs.ty == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }}
-    timeout-minutes: ${{ github.repository == 'astral-sh/ruff' && 5 || 20 }}
+    timeout-minutes: ${{ github.repository == 'astral-sh/ruff' && 10 || 20 }}
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           fetch-depth: 0
           persist-credentials: false
-      - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        name: Download new ty binary
-        id: ty-new
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
         with:
-          name: ty
-          path: target/debug
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download baseline ty binary
-        with:
-          name: ty
-          branch: ${{ github.event.pull_request.base.ref }}
-          workflow: "ci.yaml"
-          check_artifacts: true
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - name: "Install Rust toolchain"
+        run: rustup show
+      - name: "Install mold"
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: Fuzz
         env:
           FORCE_COLOR: 1
-          NEW_TY: ${{ steps.ty-new.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x "${PWD}/ty" "${NEW_TY}/ty"
+          echo "new commit"
+          git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
+          cargo build --profile=profiling --bin=ty
+          mv target/profiling/ty ty-new
+
+          MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
+          git checkout -b old_commit "$MERGE_BASE"
+          echo "old commit (merge base)"
+          git rev-list --format=%s --max-count=1 old_commit
+          cargo build --profile=profiling --bin=ty
+          mv target/profiling/ty ty-old

           (
             uv run \
@@ -683,8 +670,8 @@ jobs:
               --project=./python/py-fuzzer \
               --locked \
               fuzz \
-              --test-executable="${NEW_TY}/ty" \
-              --baseline-executable="${PWD}/ty" \
+              --test-executable=ty-new \
+              --baseline-executable=ty-old \
               --only-new-bugs \
               --bin=ty \
               0-1000
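The reworked fuzz job builds both sides of the comparison from one clone instead of downloading artifacts. The binaries are moved out of `target/` because the second build overwrites the first. A condensed sketch of that sequence, assuming the checkout used `fetch-depth: 0` so the merge base is present locally:

```yaml
- name: Build PR head and merge-base binaries
  run: |
    # Build the PR head first and move the binary aside, since the
    # second build will overwrite target/profiling/ty.
    cargo build --profile=profiling --bin=ty
    mv target/profiling/ty ty-new

    # Check out the merge base against the PR's base branch and
    # build the baseline from the same clone.
    MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
    git checkout -b old_commit "$MERGE_BASE"
    cargo build --profile=profiling --bin=ty
    mv target/profiling/ty ty-old
```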
@@ -699,7 +686,7 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: cargo-bins/cargo-binstall@a66119fbb1c952daba62640c2609111fe0803621 # v1.15.7
+      - uses: cargo-bins/cargo-binstall@ae04fb5e853ae6cd3ad7de4a1d554a8b646d12aa # v1.15.11
       - run: cargo binstall --no-confirm cargo-shear
       - run: cargo shear

@@ -712,12 +699,16 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
+      - name: "Install mold"
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Run ty completion evaluation"
-        run: cargo run --release --package ty_completion_eval -- all --threshold 0.4 --tasks /tmp/completion-evaluation-tasks.csv
+        run: cargo run --profile profiling --package ty_completion_eval -- all --threshold 0.4 --tasks /tmp/completion-evaluation-tasks.csv
       - name: "Ensure there are no changes"
         run: diff ./crates/ty_completion_eval/completion-evaluation-tasks.csv /tmp/completion-evaluation-tasks.csv

@@ -735,6 +726,8 @@ jobs:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: x64
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build wheels"
@@ -757,9 +750,11 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
       - name: "Cache pre-commit"
@@ -789,6 +784,8 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Add SSH key"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
         uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
@@ -797,7 +794,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: Install uv
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
         with:
           python-version: 3.13
           activate-environment: true
@@ -831,6 +828,8 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Run checks"
@@ -844,9 +843,7 @@ jobs:
     name: "test ruff-lsp"
     runs-on: ubuntu-latest
     timeout-minutes: 5
-    needs:
-      - cargo-test-linux
-      - determine_changes
+    needs: determine_changes
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
     steps:
       - uses: extractions/setup-just@e33e0265a09d6d736e2ee1e0eb685ef1de4669ff # v3.0.0
@@ -854,37 +851,46 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-        name: "Download ruff-lsp source"
+        name: "Checkout ruff source"
         with:
           persist-credentials: false
+
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          shared-key: ruff-linux-debug
+          save-if: false
+
+      - name: "Install Rust toolchain"
+        run: rustup show
+
+      - name: Build Ruff binary
+        run: cargo build -p ruff --bin ruff
+
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        name: "Checkout ruff-lsp source"
+        with:
+          persist-credentials: false
           repository: "astral-sh/ruff-lsp"
+          path: ruff-lsp

       - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           # installation fails on 3.13 and newer
           python-version: "3.12"

-      - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        name: Download development ruff binary
-        id: ruff-target
-        with:
-          name: ruff
-          path: target/debug
-
       - name: Install ruff-lsp dependencies
         run: |
+          cd ruff-lsp
           just install

       - name: Run ruff-lsp tests
-        env:
-          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
           # Setup development binary
           pip uninstall --yes ruff
-          chmod +x "${DOWNLOAD_PATH}/ruff"
-          export PATH="${DOWNLOAD_PATH}:${PATH}"
+          export PATH="${PWD}/target/debug:${PATH}"
           ruff version

+          cd ruff-lsp
           just test

   check-playground:
@@ -901,7 +907,9 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
           cache: "npm"
@@ -939,21 +947,23 @@ jobs:
           persist-credentials: false

       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

       - name: "Install Rust toolchain"
         run: rustup show

       - name: "Install codspeed"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
         with:
           tool: cargo-codspeed

       - name: "Build benchmarks"
-        run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser
+        run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@bb005fe1c1eea036d3894f02c049cb6b154a1c27 # v4.3.3
         with:
           mode: instrumentation
           run: cargo codspeed run
@@ -977,21 +987,23 @@ jobs:
           persist-credentials: false

       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

       - name: "Install Rust toolchain"
         run: rustup show

       - name: "Install codspeed"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
         with:
           tool: cargo-codspeed

       - name: "Build benchmarks"
-        run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench ty
+        run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench ty

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@bb005fe1c1eea036d3894f02c049cb6b154a1c27 # v4.3.3
         with:
           mode: instrumentation
           run: cargo codspeed run
@@ -1015,21 +1027,23 @@ jobs:
           persist-credentials: false

       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

       - name: "Install Rust toolchain"
         run: rustup show

       - name: "Install codspeed"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
         with:
           tool: cargo-codspeed

       - name: "Build benchmarks"
-        run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark
+        run: cargo codspeed build --features "codspeed,walltime" --profile profiling --no-default-features -p ruff_benchmark

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@bb005fe1c1eea036d3894f02c049cb6b154a1c27 # v4.3.3
         env:
           # enabling walltime flamegraphs adds ~6 minutes to the CI time, and they don't
           # appear to provide much useful insight for our walltime benchmarks right now
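The recurring change in this file is one pattern: every job gets a `Swatinem/rust-cache` step, jobs on the hot path share a cache key, and only `main` is allowed to write the cache. A minimal sketch of the producer and consumer halves, assuming both jobs live in the same workflow (the `@v2` tag stands in for the SHA-pinned version used above):

```yaml
# Producer: warms the shared cache, persisting it only on main.
- uses: Swatinem/rust-cache@v2
  with:
    shared-key: ruff-linux-debug
    save-if: ${{ github.ref == 'refs/heads/main' }}

# Consumer: restores the same cache read-only, then builds from source
# instead of downloading a prebuilt binary artifact.
- uses: Swatinem/rust-cache@v2
  with:
    shared-key: ruff-linux-debug
    save-if: false
- run: cargo build --bin ruff
```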
.github/workflows/daily_fuzz.yaml (2 changes)
@@ -34,7 +34,7 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
.github/workflows/mypy_primer.yaml (13 changes)
@@ -43,7 +43,7 @@ jobs:
           persist-credentials: false

       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
         with:
@@ -59,20 +59,15 @@ jobs:
         run: |
           cd ruff
           scripts/mypy_primer.sh
-          echo ${{ github.event.number }} > ../pr-number

+      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
+      # Make sure to update the bot if you rename the artifact.
       - name: Upload diff
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: mypy_primer_diff
           path: mypy_primer.diff

-      - name: Upload pr-number
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: pr-number
-          path: pr-number
-
   memory_usage:
     name: Run memory statistics
     runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
@@ -85,7 +80,7 @@ jobs:
           persist-credentials: false

       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
         with:
.github/workflows/mypy_primer_comment.yaml (122 changes, file deleted)
@@ -1,122 +0,0 @@
-name: PR comment (mypy_primer)
-
-on: # zizmor: ignore[dangerous-triggers]
-  workflow_run:
-    workflows: [Run mypy_primer]
-    types: [completed]
-  workflow_dispatch:
-    inputs:
-      workflow_run_id:
-        description: The mypy_primer workflow that triggers the workflow run
-        required: true
-
-jobs:
-  comment:
-    runs-on: ubuntu-24.04
-    permissions:
-      pull-requests: write
-    steps:
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download PR number
-        with:
-          name: pr-number
-          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Parse pull request number
-        id: pr-number
-        run: |
-          if [[ -f pr-number ]]
-          then
-            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
-          fi
-
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: "Download mypy_primer results"
-        id: download-mypy_primer_diff
-        if: steps.pr-number.outputs.pr-number
-        with:
-          name: mypy_primer_diff
-          workflow: mypy_primer.yaml
-          pr: ${{ steps.pr-number.outputs.pr-number }}
-          path: pr/mypy_primer_diff
-          workflow_conclusion: completed
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: "Download mypy_primer memory results"
-        id: download-mypy_primer_memory_diff
-        if: steps.pr-number.outputs.pr-number
-        with:
-          name: mypy_primer_memory_diff
-          workflow: mypy_primer.yaml
-          pr: ${{ steps.pr-number.outputs.pr-number }}
-          path: pr/mypy_primer_memory_diff
-          workflow_conclusion: completed
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Generate comment content
-        id: generate-comment
-        if: ${{ steps.download-mypy_primer_diff.outputs.found_artifact == 'true' && steps.download-mypy_primer_memory_diff.outputs.found_artifact == 'true' }}
-        run: |
-          # Guard against malicious mypy_primer results that symlink to a secret
-          # file on this runner
-          if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]] || [[ -L pr/mypy_primer_memory_diff/mypy_primer_memory.diff ]]
-          then
-            echo "Error: mypy_primer.diff and mypy_primer_memory.diff cannot be a symlink"
-            exit 1
-          fi
-
-          # Note this identifier is used to find the comment to update on
-          # subsequent runs
-          echo '<!-- generated-comment mypy_primer -->' >> comment.txt
-
-          echo '## `mypy_primer` results' >> comment.txt
-          if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then
-            echo '<details>' >> comment.txt
-            echo '<summary>Changes were detected when running on open source projects</summary>' >> comment.txt
-            echo '' >> comment.txt
-            echo '```diff' >> comment.txt
-            cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt
-            echo '```' >> comment.txt
-            echo '</details>' >> comment.txt
-          else
-            echo 'No ecosystem changes detected ✅' >> comment.txt
-          fi
-
-          if [ -s "pr/mypy_primer_memory_diff/mypy_primer_memory.diff" ]; then
-            echo '<details>' >> comment.txt
-            echo '<summary>Memory usage changes were detected when running on open source projects</summary>' >> comment.txt
-            echo '' >> comment.txt
-            echo '```diff' >> comment.txt
-            cat pr/mypy_primer_memory_diff/mypy_primer_memory.diff >> comment.txt
-            echo '```' >> comment.txt
-            echo '</details>' >> comment.txt
-          else
-            echo 'No memory usage changes detected ✅' >> comment.txt
-          fi
-
-          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
-          cat comment.txt >> "$GITHUB_OUTPUT"
-          echo 'EOF' >> "$GITHUB_OUTPUT"
-
-      - name: Find existing comment
-        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
-        if: steps.generate-comment.outcome == 'success'
-        id: find-comment
-        with:
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          comment-author: "github-actions[bot]"
-          body-includes: "<!-- generated-comment mypy_primer -->"
-
-      - name: Create or update comment
-        if: steps.find-comment.outcome == 'success'
-        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
-        with:
-          comment-id: ${{ steps.find-comment.outputs.comment-id }}
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          body-path: comment.txt
-          edit-mode: replace
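This deleted workflow (its commenting duties apparently move to astral-sh-bot, per the NOTE comments added elsewhere in this diff) relied on the heredoc form of `$GITHUB_OUTPUT` for multiline values, since a plain `key=value` line cannot hold newlines. A minimal sketch of that idiom:

```yaml
- name: Produce a multiline step output
  id: generate-comment
  run: |
    printf 'first line\nsecond line\n' > comment.txt
    # Multiline outputs need the heredoc delimiter syntax;
    # a plain "comment=..." assignment stops at the first newline.
    echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
    cat comment.txt >> "$GITHUB_OUTPUT"
    echo 'EOF' >> "$GITHUB_OUTPUT"
```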
.github/workflows/pr-comment.yaml (88 changes, file deleted)
@@ -1,88 +0,0 @@
-name: Ecosystem check comment
-
-on:
-  workflow_run:
-    workflows: [CI]
-    types: [completed]
-  workflow_dispatch:
-    inputs:
-      workflow_run_id:
-        description: The ecosystem workflow that triggers the workflow run
-        required: true
-
-jobs:
-  comment:
-    runs-on: ubuntu-latest
-    permissions:
-      pull-requests: write
-    steps:
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download pull request number
-        with:
-          name: pr-number
-          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Parse pull request number
-        id: pr-number
-        run: |
-          if [[ -f pr-number ]]
-          then
-            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
-          fi
-
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: "Download ecosystem results"
-        id: download-ecosystem-result
-        if: steps.pr-number.outputs.pr-number
-        with:
-          name: ecosystem-result
-          workflow: ci.yaml
-          pr: ${{ steps.pr-number.outputs.pr-number }}
-          path: pr/ecosystem
-          workflow_conclusion: completed
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Generate comment content
-        id: generate-comment
-        if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
-        run: |
-          # Guard against malicious ecosystem results that symlink to a secret
-          # file on this runner
-          if [[ -L pr/ecosystem/ecosystem-result ]]
-          then
-            echo "Error: ecosystem-result cannot be a symlink"
-            exit 1
-          fi
-
-          # Note this identifier is used to find the comment to update on
-          # subsequent runs
-          echo '<!-- generated-comment ecosystem -->' >> comment.txt
-
-          echo '## `ruff-ecosystem` results' >> comment.txt
-          cat pr/ecosystem/ecosystem-result >> comment.txt
-          echo "" >> comment.txt
-
-          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
-          cat comment.txt >> "$GITHUB_OUTPUT"
-          echo 'EOF' >> "$GITHUB_OUTPUT"
-
-      - name: Find existing comment
-        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
-        if: steps.generate-comment.outcome == 'success'
-        id: find-comment
-        with:
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          comment-author: "github-actions[bot]"
-          body-includes: "<!-- generated-comment ecosystem -->"
-
-      - name: Create or update comment
-        if: steps.find-comment.outcome == 'success'
-        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
-        with:
-          comment-id: ${{ steps.find-comment.outputs.comment-id }}
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          body-path: comment.txt
-          edit-mode: replace
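Both deleted comment workflows carried the same hardening step: artifacts from forks are untrusted, and a symlink named like the expected file could trick a later `cat` into reading a secret off the runner. The guard, reduced to its core as a standalone sketch:

```yaml
- name: Refuse symlinked artifact contents
  run: |
    # An attacker-controlled artifact could ship a symlink pointing at a
    # secret on this runner; only accept a regular file.
    if [[ -L pr/ecosystem/ecosystem-result ]]; then
      echo "Error: ecosystem-result cannot be a symlink"
      exit 1
    fi
```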
.github/workflows/publish-playground.yml (2 changes)
@@ -31,7 +31,7 @@ jobs:
           persist-credentials: false
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
-      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
           package-manager-cache: false
.github/workflows/publish-pypi.yml (2 changes)
@@ -22,7 +22,7 @@ jobs:
       id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
       - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           pattern: wheels-*
.github/workflows/publish-ty-playground.yml (2 changes)
@@ -35,7 +35,7 @@ jobs:
           persist-credentials: false
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
-      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
           package-manager-cache: false
.github/workflows/publish-wasm.yml (2 changes)
@@ -45,7 +45,7 @@ jobs:
           jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
           mv /tmp/package.json crates/ruff_wasm/pkg
       - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
-      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
           registry-url: "https://registry.npmjs.org"
.github/workflows/release.yml (24 changes)
@@ -68,9 +68,9 @@ jobs:
        # we specify bash to get pipefail; it guards against the `curl` command
        # failing. otherwise `sh` won't catch that `curl` returned non-0
        shell: bash
-        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.0/cargo-dist-installer.sh | sh"
+        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.2/cargo-dist-installer.sh | sh"
      - name: Cache dist
-        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
        with:
          name: cargo-dist-cache
          path: ~/.cargo/bin/dist
@@ -86,7 +86,7 @@ jobs:
          cat plan-dist-manifest.json
          echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
      - name: "Upload dist-manifest.json"
-        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
        with:
          name: artifacts-plan-dist-manifest
          path: plan-dist-manifest.json
@@ -128,14 +128,14 @@ jobs:
          persist-credentials: false
          submodules: recursive
      - name: Install cached dist
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
+        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
        with:
          name: cargo-dist-cache
          path: ~/.cargo/bin/
      - run: chmod +x ~/.cargo/bin/dist
      # Get all the local artifacts for the global tasks to use (for e.g. checksums)
      - name: Fetch local artifacts
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
+        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
        with:
          pattern: artifacts-*
          path: target/distrib/
@@ -153,7 +153,7 @@ jobs:

          cp dist-manifest.json "$BUILD_MANIFEST_NAME"
      - name: "Upload artifacts"
-        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
        with:
          name: artifacts-build-global
          path: |
@@ -166,8 +166,8 @@ jobs:
      - custom-build-binaries
      - custom-build-docker
      - build-global-artifacts
-    # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
-    if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
+    # Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine)
+    if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    runs-on: "depot-ubuntu-latest-4"
@@ -179,14 +179,14 @@ jobs:
          persist-credentials: false
          submodules: recursive
      - name: Install cached dist
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
+        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
        with:
          name: cargo-dist-cache
          path: ~/.cargo/bin/
      - run: chmod +x ~/.cargo/bin/dist
      # Fetch artifacts from scratch-storage
      - name: Fetch artifacts
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
+        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
        with:
          pattern: artifacts-*
          path: target/distrib/
@@ -200,7 +200,7 @@ jobs:
          cat dist-manifest.json
          echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
      - name: "Upload dist-manifest.json"
-        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
        with:
          # Overwrite the previous copy
          name: artifacts-dist-manifest
@@ -256,7 +256,7 @@ jobs:
          submodules: recursive
      # Create a GitHub Release while uploading all files to it
      - name: "Download GitHub Artifacts"
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
+        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
        with:
          pattern: artifacts-*
          path: artifacts
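The one behavioral change in this file is the publish gate: with `always()`, a job runs even when a dependency failed, so every dependency's `result` must be checked explicitly, and the old condition never checked `plan` itself. The fix adds `needs.plan.result == 'success'`. A stripped-down sketch of the shape, with illustrative job names:

```yaml
publish:
  needs: [plan, build]
  # always() prevents the automatic skip, so each upstream result must be
  # checked by hand: plan must succeed; build may succeed or be skipped,
  # but a failed build still blocks publishing.
  if: ${{ always() && needs.plan.result == 'success' && (needs.build.result == 'success' || needs.build.result == 'skipped') }}
  runs-on: ubuntu-latest
  steps:
    - run: echo "publishing"
```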
.github/workflows/sync_typeshed.yaml (10 changes)
@@ -77,7 +77,7 @@ jobs:
        run: |
          git config --global user.name typeshedbot
          git config --global user.email '<>'
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
      - name: Sync typeshed stubs
        run: |
          rm -rf "ruff/${VENDORED_TYPESHED}"
@@ -131,7 +131,7 @@ jobs:
        with:
          persist-credentials: true
          ref: ${{ env.UPSTREAM_BRANCH}}
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
      - name: Setup git
        run: |
          git config --global user.name typeshedbot
@@ -170,7 +170,7 @@ jobs:
        with:
          persist-credentials: true
          ref: ${{ env.UPSTREAM_BRANCH}}
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
      - name: Setup git
        run: |
          git config --global user.name typeshedbot
@@ -207,12 +207,12 @@ jobs:
        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
      - name: "Install cargo nextest"
        if: ${{ success() }}
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
        with:
          tool: cargo-nextest
      - name: "Install cargo insta"
        if: ${{ success() }}
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
        with:
          tool: cargo-insta
      - name: Update snapshots
.github/workflows/ty-ecosystem-analyzer.yaml (12 changes)
@@ -33,7 +33,7 @@ jobs:
           persist-credentials: false

       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
         with:
           enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact

@@ -112,8 +112,6 @@ jobs:

           cat diff-statistics.md >> "$GITHUB_STEP_SUMMARY"

-          echo ${{ github.event.number }} > pr-number
-
       - name: "Deploy to Cloudflare Pages"
         if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
         id: deploy
@@ -131,18 +129,14 @@ jobs:
           echo >> comment.md
           echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)** ([timing results]($DEPLOYMENT_URL/timing))" >> comment.md

+      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
+      # Make sure to update the bot if you rename the artifact.
       - name: Upload comment
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: comment.md
           path: comment.md

-      - name: Upload pr-number
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: pr-number
-          path: pr-number
-
       - name: Upload diagnostics diff
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
@@ -1,85 +0,0 @@
-name: PR comment (ty ecosystem-analyzer)
-
-on: # zizmor: ignore[dangerous-triggers]
-  workflow_run:
-    workflows: [ty ecosystem-analyzer]
-    types: [completed]
-  workflow_dispatch:
-    inputs:
-      workflow_run_id:
-        description: The ty ecosystem-analyzer workflow that triggers the workflow run
-        required: true
-
-jobs:
-  comment:
-    runs-on: ubuntu-24.04
-    permissions:
-      pull-requests: write
-    steps:
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download PR number
-        with:
-          name: pr-number
-          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Parse pull request number
-        id: pr-number
-        run: |
-          if [[ -f pr-number ]]
-          then
-            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
-          fi
-
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: "Download comment.md"
-        id: download-comment
-        if: steps.pr-number.outputs.pr-number
-        with:
-          name: comment.md
-          workflow: ty-ecosystem-analyzer.yaml
-          pr: ${{ steps.pr-number.outputs.pr-number }}
-          path: pr/comment
-          workflow_conclusion: completed
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Generate comment content
-        id: generate-comment
-        if: ${{ steps.download-comment.outputs.found_artifact == 'true' }}
-        run: |
-          # Guard against malicious ty ecosystem-analyzer results that symlink to a secret
-          # file on this runner
-          if [[ -L pr/comment/comment.md ]]
-          then
-            echo "Error: comment.md cannot be a symlink"
-            exit 1
-          fi
-
-          # Note: this identifier is used to find the comment to update on subsequent runs
-          echo '<!-- generated-comment ty ecosystem-analyzer -->' > comment.md
-          echo >> comment.md
-          cat pr/comment/comment.md >> comment.md
-
-          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
-          cat comment.md >> "$GITHUB_OUTPUT"
-          echo 'EOF' >> "$GITHUB_OUTPUT"
-
-      - name: Find existing comment
-        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
-        if: steps.generate-comment.outcome == 'success'
-        id: find-comment
-        with:
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          comment-author: "github-actions[bot]"
-          body-includes: "<!-- generated-comment ty ecosystem-analyzer -->"
-
-      - name: Create or update comment
-        if: steps.find-comment.outcome == 'success'
-        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
-        with:
-          comment-id: ${{ steps.find-comment.outputs.comment-id }}
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          body-path: comment.md
-          edit-mode: replace
2 .github/workflows/ty-ecosystem-report.yaml vendored

@@ -29,7 +29,7 @@ jobs:
          persist-credentials: false

      - name: Install the latest version of uv
        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
        with:
          enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact
13 .github/workflows/typing_conformance.yaml vendored

@@ -24,7 +24,7 @@ env:
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10
  RUST_BACKTRACE: 1
  CONFORMANCE_SUITE_COMMIT: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc
  CONFORMANCE_SUITE_COMMIT: 9f6d8ced7cd1c8d92687a4e9c96d7716452e471e

jobs:
  typing_conformance:
@@ -94,21 +94,18 @@ jobs:
            touch typing_conformance_diagnostics.diff
          fi

          echo ${{ github.event.number }} > pr-number
          echo "${CONFORMANCE_SUITE_COMMIT}" > conformance-suite-commit

      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
      # Make sure to update the bot if you rename the artifact.
      - name: Upload diff
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: typing_conformance_diagnostics_diff
          path: typing_conformance_diagnostics.diff

      - name: Upload pr-number
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pr-number
          path: pr-number

      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
      # Make sure to update the bot if you rename the artifact.
      - name: Upload conformance suite commit
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
112 .github/workflows/typing_conformance_comment.yaml vendored

@@ -1,112 +0,0 @@
name: PR comment (typing_conformance)

on: # zizmor: ignore[dangerous-triggers]
  workflow_run:
    workflows: [Run typing conformance]
    types: [completed]
  workflow_dispatch:
    inputs:
      workflow_run_id:
        description: The typing_conformance workflow that triggers the workflow run
        required: true

jobs:
  comment:
    runs-on: ubuntu-24.04
    permissions:
      pull-requests: write
    steps:
      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: Download PR number
        with:
          name: pr-number
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Parse pull request number
        id: pr-number
        run: |
          if [[ -f pr-number ]]
          then
            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
          fi

      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: Download typing conformance suite commit
        with:
          name: conformance-suite-commit
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore
          allow_forks: true

      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: "Download typing_conformance results"
        id: download-typing_conformance_diff
        if: steps.pr-number.outputs.pr-number
        with:
          name: typing_conformance_diagnostics_diff
          workflow: typing_conformance.yaml
          pr: ${{ steps.pr-number.outputs.pr-number }}
          path: pr/typing_conformance_diagnostics_diff
          workflow_conclusion: completed
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Generate comment content
        id: generate-comment
        if: ${{ steps.download-typing_conformance_diff.outputs.found_artifact == 'true' }}
        run: |
          # Guard against malicious typing_conformance results that symlink to a secret
          # file on this runner
          if [[ -L pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff ]]
          then
            echo "Error: typing_conformance_diagnostics.diff cannot be a symlink"
            exit 1
          fi

          # Note this identifier is used to find the comment to update on
          # subsequent runs
          echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt

          if [[ -f conformance-suite-commit ]]
          then
            echo "## Diagnostic diff on [typing conformance tests](https://github.com/python/typing/tree/$(<conformance-suite-commit)/conformance)" >> comment.txt
          else
            echo "conformance-suite-commit file not found"
            echo "## Diagnostic diff on typing conformance tests" >> comment.txt
          fi

          if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then
            echo '<details>' >> comment.txt
            echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt
            echo '' >> comment.txt
            echo '```diff' >> comment.txt
            cat pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff >> comment.txt
            echo '```' >> comment.txt
            echo '</details>' >> comment.txt
          else
            echo 'No changes detected when running ty on typing conformance tests ✅' >> comment.txt
          fi

          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
          cat comment.txt >> "$GITHUB_OUTPUT"
          echo 'EOF' >> "$GITHUB_OUTPUT"

      - name: Find existing comment
        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
        if: steps.generate-comment.outcome == 'success'
        id: find-comment
        with:
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          comment-author: "github-actions[bot]"
          body-includes: "<!-- generated-comment typing_conformance_diagnostics_diff -->"

      - name: Create or update comment
        if: steps.find-comment.outcome == 'success'
        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
        with:
          comment-id: ${{ steps.find-comment.outputs.comment-id }}
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          body-path: comment.txt
          edit-mode: replace
5 .github/zizmor.yml vendored

@@ -1,11 +1,8 @@
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
# https://woodruffw.github.io/zizmor/configuration/
# https://docs.zizmor.sh/configuration/
#
# TODO: can we remove the ignores here so that our workflows are more secure?
rules:
  dangerous-triggers:
    ignore:
      - pr-comment.yaml
  cache-poisoning:
    ignore:
      - build-docker.yml
@@ -102,7 +102,7 @@ repos:
  # zizmor detects security vulnerabilities in GitHub Actions workflows.
  # Additional configuration for the tool is found in `.github/zizmor.yml`
  - repo: https://github.com/zizmorcore/zizmor-pre-commit
    rev: v1.15.2
    rev: v1.16.0
    hooks:
      - id: zizmor
100 CHANGELOG.md

@@ -1,5 +1,105 @@
# Changelog

## 0.14.4

Released on 2025-11-06.

### Preview features

- [formatter] Allow newlines after function headers without docstrings ([#21110](https://github.com/astral-sh/ruff/pull/21110))
- [formatter] Avoid extra parentheses for long `match` patterns with `as` captures ([#21176](https://github.com/astral-sh/ruff/pull/21176))
- \[`refurb`\] Expand fix safety for keyword arguments and `Decimal`s (`FURB164`) ([#21259](https://github.com/astral-sh/ruff/pull/21259))
- \[`refurb`\] Preserve argument ordering in autofix (`FURB103`) ([#20790](https://github.com/astral-sh/ruff/pull/20790))

### Bug fixes

- [server] Fix missing diagnostics for notebooks ([#21156](https://github.com/astral-sh/ruff/pull/21156))
- \[`flake8-bugbear`\] Ignore non-NFKC attribute names in `B009` and `B010` ([#21131](https://github.com/astral-sh/ruff/pull/21131))
- \[`refurb`\] Fix false negative for underscores before sign in `Decimal` constructor (`FURB157`) ([#21190](https://github.com/astral-sh/ruff/pull/21190))
- \[`ruff`\] Fix false positives on starred arguments (`RUF057`) ([#21256](https://github.com/astral-sh/ruff/pull/21256))

### Rule changes

- \[`airflow`\] extend deprecated argument `concurrency` in `airflow..DAG` (`AIR301`) ([#21220](https://github.com/astral-sh/ruff/pull/21220))

### Documentation

- Improve `extend` docs ([#21135](https://github.com/astral-sh/ruff/pull/21135))
- \[`flake8-comprehensions`\] Fix typo in `C416` documentation ([#21184](https://github.com/astral-sh/ruff/pull/21184))
- Revise Ruff setup instructions for Zed editor ([#20935](https://github.com/astral-sh/ruff/pull/20935))

### Other changes

- Make `ruff analyze graph` work with jupyter notebooks ([#21161](https://github.com/astral-sh/ruff/pull/21161))

### Contributors

- [@chirizxc](https://github.com/chirizxc)
- [@Lee-W](https://github.com/Lee-W)
- [@musicinmybrain](https://github.com/musicinmybrain)
- [@MichaReiser](https://github.com/MichaReiser)
- [@tjkuson](https://github.com/tjkuson)
- [@danparizher](https://github.com/danparizher)
- [@renovate](https://github.com/renovate)
- [@ntBre](https://github.com/ntBre)
- [@gauthsvenkat](https://github.com/gauthsvenkat)
- [@LoicRiegel](https://github.com/LoicRiegel)

## 0.14.3

Released on 2025-10-30.

### Preview features

- Respect `--output-format` with `--watch` ([#21097](https://github.com/astral-sh/ruff/pull/21097))
- \[`pydoclint`\] Fix false positive on explicit exception re-raising (`DOC501`, `DOC502`) ([#21011](https://github.com/astral-sh/ruff/pull/21011))
- \[`pyflakes`\] Revert to stable behavior if imports for module lie in alternate branches for `F401` ([#20878](https://github.com/astral-sh/ruff/pull/20878))
- \[`pylint`\] Implement `stop-iteration-return` (`PLR1708`) ([#20733](https://github.com/astral-sh/ruff/pull/20733))
- \[`ruff`\] Add support for additional eager conversion patterns (`RUF065`) ([#20657](https://github.com/astral-sh/ruff/pull/20657))

### Bug fixes

- Fix finding keyword range for clause header after statement ending with semicolon ([#21067](https://github.com/astral-sh/ruff/pull/21067))
- Fix syntax error false positive on nested alternative patterns ([#21104](https://github.com/astral-sh/ruff/pull/21104))
- \[`ISC001`\] Fix panic when string literals are unclosed ([#21034](https://github.com/astral-sh/ruff/pull/21034))
- \[`flake8-django`\] Apply `DJ001` to annotated fields ([#20907](https://github.com/astral-sh/ruff/pull/20907))
- \[`flake8-pyi`\] Fix `PYI034` to not trigger on metaclasses (`PYI034`) ([#20881](https://github.com/astral-sh/ruff/pull/20881))
- \[`flake8-type-checking`\] Fix `TC003` false positive with `future-annotations` ([#21125](https://github.com/astral-sh/ruff/pull/21125))
- \[`pyflakes`\] Fix false positive for `__class__` in lambda expressions within class definitions (`F821`) ([#20564](https://github.com/astral-sh/ruff/pull/20564))
- \[`pyupgrade`\] Fix false positive for `TypeVar` with default on Python \<3.13 (`UP046`,`UP047`) ([#21045](https://github.com/astral-sh/ruff/pull/21045))

### Rule changes

- Add missing docstring sections to the numpy list ([#20931](https://github.com/astral-sh/ruff/pull/20931))
- \[`airflow`\] Extend `airflow.models..Param` check (`AIR311`) ([#21043](https://github.com/astral-sh/ruff/pull/21043))
- \[`airflow`\] Warn that `airflow....DAG.create_dagrun` has been removed (`AIR301`) ([#21093](https://github.com/astral-sh/ruff/pull/21093))
- \[`refurb`\] Preserve digit separators in `Decimal` constructor (`FURB157`) ([#20588](https://github.com/astral-sh/ruff/pull/20588))

### Server

- Avoid sending an unnecessary "clear diagnostics" message for clients supporting pull diagnostics ([#21105](https://github.com/astral-sh/ruff/pull/21105))

### Documentation

- \[`flake8-bandit`\] Fix correct example for `S308` ([#21128](https://github.com/astral-sh/ruff/pull/21128))

### Other changes

- Clearer error message when `line-length` goes beyond threshold ([#21072](https://github.com/astral-sh/ruff/pull/21072))

### Contributors

- [@danparizher](https://github.com/danparizher)
- [@jvacek](https://github.com/jvacek)
- [@ntBre](https://github.com/ntBre)
- [@augustelalande](https://github.com/augustelalande)
- [@prakhar1144](https://github.com/prakhar1144)
- [@TaKO8Ki](https://github.com/TaKO8Ki)
- [@dylwil3](https://github.com/dylwil3)
- [@fatelei](https://github.com/fatelei)
- [@ShaharNaveh](https://github.com/ShaharNaveh)
- [@Lee-W](https://github.com/Lee-W)

## 0.14.2

Released on 2025-10-23.
@@ -280,6 +280,55 @@ Note that plugin-specific configuration options are defined in their own modules

Finally, regenerate the documentation and generated code with `cargo dev generate-all`.

### Opening a PR

After you finish your changes, the next step is to open a PR. By default, two
sections will be filled into the PR body: the summary and the test plan.

#### The summary

The summary is intended to give us as maintainers information about your PR.
This should typically include a link to the relevant issue(s) you're addressing
in your PR, as well as a summary of the issue and your approach to fixing it. If
you have any questions about your approach or design, or if you considered
alternative approaches, that can also be helpful to include.

AI can be helpful in generating both the code and summary of your PR, but a
successful contribution should still be carefully reviewed by you and the
summary editorialized before submitting a PR. A great summary is thorough but
also succinct and gives us the context we need to review your PR.

You can find examples of excellent issues and PRs by searching for the
[`great writeup`](https://github.com/astral-sh/ruff/issues?q=label%3A%22great%20writeup%22)
label.

#### The test plan

The test plan is likely to be shorter than the summary and can be as simple as
"Added new snapshot tests for `RUF123`," at least for rule bugs. For LSP or some
types of CLI changes, in particular, it can also be helpful to include
screenshots or recordings of your change in action.

#### Ecosystem report

After opening the PR, an ecosystem report will be run as part of CI. This shows
a diff of linter and formatter behavior before and after the changes in your PR.
Going through these changes and reporting your findings in the PR summary or an
additional comment helps us to review your PR more efficiently. It's also a great
way to find new test cases to incorporate into your PR if you identify any
issues.

#### PR status

To help us know when your PR is ready for review again, please either move your
PR back to a draft while working on it (marking it ready for review afterwards
will ping the previous reviewers) or explicitly re-request a review. This helps
us to avoid re-reviewing a PR while you're still working on it and also to
prioritize PRs that are definitely ready for review.

You can also thumbs-up or mark as resolved any comments we leave to let us know
you addressed them.

## MkDocs

> [!NOTE]
429 Cargo.lock generated
File diff suppressed because it is too large

10 Cargo.toml
@@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
# Please update rustfmt.toml when bumping the Rust edition
edition = "2024"
rust-version = "1.88"
rust-version = "1.89"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -84,7 +84,7 @@ dashmap = { version = "6.0.1" }
dir-test = { version = "0.4.0" }
dunce = { version = "1.0.5" }
drop_bomb = { version = "0.1.5" }
etcetera = { version = "0.10.0" }
etcetera = { version = "0.11.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
@@ -103,7 +103,7 @@ hashbrown = { version = "0.16.0", default-features = false, features = [
    "inline-more",
] }
heck = "0.5.0"
ignore = { version = "0.4.22" }
ignore = { version = "0.4.24" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.6.0" }
@@ -124,7 +124,7 @@ lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
    "proposed",
] }
matchit = { version = "0.8.1" }
matchit = { version = "0.9.0" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
@@ -146,7 +146,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "d38145c29574758de7ffbe8a13cd4584c3b09161", default-features = false, features = [
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "05a9af7f554b64b8aadc2eeb6f2caf73d0408d09", default-features = false, features = [
    "compact_str",
    "macros",
    "salsa_unstable",
@@ -147,8 +147,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.14.2/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.2/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.14.4/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.4/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -181,7 +181,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.14.2
  rev: v0.14.4
  hooks:
    # Run the linter.
    - id: ruff-check
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.14.2"
version = "0.14.4"
publish = true
authors = { workspace = true }
edition = { workspace = true }
@@ -7,6 +7,8 @@ use std::sync::Arc;

use crate::commands::completions::config::{OptionString, OptionStringParser};
use anyhow::bail;
use clap::builder::Styles;
use clap::builder::styling::{AnsiColor, Effects};
use clap::builder::{TypedValueParser, ValueParserFactory};
use clap::{Parser, Subcommand, command};
use colored::Colorize;
@@ -78,6 +80,13 @@ impl GlobalConfigArgs {
    }
}

// Configures Clap v3-style help menu colors
const STYLES: Styles = Styles::styled()
    .header(AnsiColor::Green.on_default().effects(Effects::BOLD))
    .usage(AnsiColor::Green.on_default().effects(Effects::BOLD))
    .literal(AnsiColor::Cyan.on_default().effects(Effects::BOLD))
    .placeholder(AnsiColor::Cyan.on_default());

#[derive(Debug, Parser)]
#[command(
    author,
@@ -86,6 +95,7 @@ impl GlobalConfigArgs {
    after_help = "For help with a specific command, see: `ruff help <command>`."
)]
#[command(version)]
#[command(styles = STYLES)]
pub struct Args {
    #[command(subcommand)]
    pub(crate) command: Command,
@@ -405,8 +415,13 @@ pub struct CheckCommand {
    )]
    pub statistics: bool,
    /// Enable automatic additions of `noqa` directives to failing lines.
    /// Optionally provide a reason to append after the codes.
    #[arg(
        long,
        value_name = "REASON",
        default_missing_value = "",
        num_args = 0..=1,
        require_equals = true,
        // conflicts_with = "add_noqa",
        conflicts_with = "show_files",
        conflicts_with = "show_settings",
@@ -418,7 +433,7 @@ pub struct CheckCommand {
        conflicts_with = "fix",
        conflicts_with = "diff",
    )]
    pub add_noqa: bool,
    pub add_noqa: Option<String>,
    /// See the files Ruff will be run against with the current settings.
    #[arg(
        long,
@@ -1047,7 +1062,7 @@ Possible choices:
/// etc.).
#[expect(clippy::struct_excessive_bools)]
pub struct CheckArguments {
    pub add_noqa: bool,
    pub add_noqa: Option<String>,
    pub diff: bool,
    pub exit_non_zero_on_fix: bool,
    pub exit_zero: bool,
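A quick way to see how those clap attributes combine: with `num_args = 0..=1` and `default_missing_value = ""`, the bare flag still parses as `Some("")`, while `require_equals` forces the `--add-noqa=REASON` spelling for the reason form. A minimal sketch against clap v4 with the `derive` feature (an assumed illustration, not ruff's actual CLI struct):

```rust
use clap::Parser;

#[derive(Parser)]
struct Cli {
    // Same attribute combination as the diff above.
    #[arg(
        long,
        value_name = "REASON",
        default_missing_value = "",
        num_args = 0..=1,
        require_equals = true
    )]
    add_noqa: Option<String>,
}

fn main() {
    // Bare flag: present, with the empty-string default_missing_value.
    let cli = Cli::parse_from(["test", "--add-noqa"]);
    assert_eq!(cli.add_noqa.as_deref(), Some(""));

    // With a reason; require_equals means it must be spelled `--add-noqa=...`.
    let cli = Cli::parse_from(["test", "--add-noqa=TODO: fix"]);
    assert_eq!(cli.add_noqa.as_deref(), Some("TODO: fix"));

    // Absent entirely.
    let cli = Cli::parse_from(["test"]);
    assert_eq!(cli.add_noqa, None);
}
```

Downstream, `(!reason.is_empty()).then_some(reason.as_str())` (see the `check` command change further below) collapses the empty-string case back to "no reason", so the bare flag behaves exactly like the old boolean.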
@@ -21,6 +21,7 @@ pub(crate) fn add_noqa(
    files: &[PathBuf],
    pyproject_config: &PyprojectConfig,
    config_arguments: &ConfigArguments,
    reason: Option<&str>,
) -> Result<usize> {
    // Collect all the files to check.
    let start = Instant::now();
@@ -76,7 +77,14 @@ pub(crate) fn add_noqa(
            return None;
        }
    };
    match add_noqa_to_path(path, package, &source_kind, source_type, &settings.linter) {
    match add_noqa_to_path(
        path,
        package,
        &source_kind,
        source_type,
        &settings.linter,
        reason,
    ) {
        Ok(count) => Some(count),
        Err(e) => {
            error!("Failed to add noqa to {}: {e}", path.display());
@@ -7,6 +7,7 @@ use path_absolutize::CWD;
use ruff_db::system::{SystemPath, SystemPathBuf};
use ruff_graph::{Direction, ImportMap, ModuleDb, ModuleImports};
use ruff_linter::package::PackageRoot;
use ruff_linter::source_kind::SourceKind;
use ruff_linter::{warn_user, warn_user_once};
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{ResolvedFile, match_exclusion, python_files_in_path};
@@ -127,10 +128,6 @@ pub(crate) fn analyze_graph(
                },
                Some(language) => PySourceType::from(language),
            };
            if matches!(source_type, PySourceType::Ipynb) {
                debug!("Ignoring Jupyter notebook: {}", path.display());
                continue;
            }

            // Convert to system paths.
            let Ok(package) = package.map(SystemPathBuf::from_path_buf).transpose() else {
@@ -147,13 +144,34 @@ pub(crate) fn analyze_graph(
            let root = root.clone();
            let result = inner_result.clone();
            scope.spawn(move |_| {
                // Extract source code (handles both .py and .ipynb files)
                let source_kind = match SourceKind::from_path(path.as_std_path(), source_type) {
                    Ok(Some(source_kind)) => source_kind,
                    Ok(None) => {
                        debug!("Skipping non-Python notebook: {path}");
                        return;
                    }
                    Err(err) => {
                        warn!("Failed to read source for {path}: {err}");
                        return;
                    }
                };

                let source_code = source_kind.source_code();

                // Identify any imports via static analysis.
                let mut imports =
                    ModuleImports::detect(&db, &path, package.as_deref(), string_imports)
                        .unwrap_or_else(|err| {
                            warn!("Failed to generate import map for {path}: {err}");
                            ModuleImports::default()
                        });
                let mut imports = ModuleImports::detect(
                    &db,
                    source_code,
                    source_type,
                    &path,
                    package.as_deref(),
                    string_imports,
                )
                .unwrap_or_else(|err| {
                    warn!("Failed to generate import map for {path}: {err}");
                    ModuleImports::default()
                });

                debug!("Discovered {} imports for {}", imports.len(), path);
@@ -370,7 +370,7 @@ pub(crate) fn format_source(
    let line_index = LineIndex::from_source_text(unformatted);
    let byte_range = range.to_text_range(unformatted, &line_index);
    format_range(unformatted, byte_range, options).map(|formatted_range| {
        let mut formatted = unformatted.to_string();
        let mut formatted = unformatted.clone();
        formatted.replace_range(
            std::ops::Range::<usize>::from(formatted_range.source_range()),
            formatted_range.as_code(),
@@ -16,6 +16,8 @@ struct LinterInfo {
    prefix: &'static str,
    name: &'static str,
    #[serde(skip_serializing_if = "Option::is_none")]
    url: Option<&'static str>,
    #[serde(skip_serializing_if = "Option::is_none")]
    categories: Option<Vec<LinterCategoryInfo>>,
}

@@ -50,6 +52,7 @@ pub(crate) fn linter(format: HelpFormat) -> Result<()> {
        .map(|linter_info| LinterInfo {
            prefix: linter_info.common_prefix(),
            name: linter_info.name(),
            url: linter_info.url(),
            categories: linter_info.upstream_categories().map(|cats| {
                cats.iter()
                    .map(|c| LinterCategoryInfo {
@@ -319,12 +319,20 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
        warn_user!("Detected debug build without --no-cache.");
    }

    if cli.add_noqa {
    if let Some(reason) = &cli.add_noqa {
        if !fix_mode.is_generate() {
            warn_user!("--fix is incompatible with --add-noqa.");
        }
        if reason.contains(['\n', '\r']) {
            return Err(anyhow::anyhow!(
                "--add-noqa <reason> cannot contain newline characters"
            ));
        }

        let reason_opt = (!reason.is_empty()).then_some(reason.as_str());

        let modifications =
            commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?;
            commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments, reason_opt)?;
        if modifications > 0 && config_arguments.log_level >= LogLevel::Default {
            let s = if modifications == 1 { "" } else { "s" };
            #[expect(clippy::print_stderr)]
@@ -9,9 +9,7 @@ use itertools::{Itertools, iterate};
use ruff_linter::linter::FixTable;
use serde::Serialize;

use ruff_db::diagnostic::{
    Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, SecondaryCode,
};
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, SecondaryCode};
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{EmitterContext, render_diagnostics};
@@ -390,21 +388,18 @@ impl Printer {

        let context = EmitterContext::new(&diagnostics.notebook_indexes);
        let format = if preview {
            DiagnosticFormat::Full
            self.format
        } else {
            DiagnosticFormat::Concise
            OutputFormat::Concise
        };
        let config = DisplayDiagnosticConfig::default()
            .preview(preview)
            .hide_severity(true)
            .color(!cfg!(test) && colored::control::SHOULD_COLORIZE.should_colorize())
            .with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
            .format(format)
            .with_fix_applicability(self.unsafe_fixes.required_applicability());
        write!(
            writer,
            "{}",
            DisplayDiagnostics::new(&context, &config, &diagnostics.inner)
        )?;
            .with_fix_applicability(self.unsafe_fixes.required_applicability())
            .show_fix_diff(preview);
        render_diagnostics(writer, format, config, &context, &diagnostics.inner)?;
    }
    writer.flush()?;
@@ -653,3 +653,133 @@ fn venv() -> Result<()> {

    Ok(())
}

#[test]
fn notebook_basic() -> Result<()> {
    let tempdir = TempDir::new()?;
    let root = ChildPath::new(tempdir.path());

    root.child("ruff").child("__init__.py").write_str("")?;
    root.child("ruff")
        .child("a.py")
        .write_str(indoc::indoc! {r#"
            def helper():
                pass
        "#})?;

    // Create a basic notebook with a simple import
    root.child("notebook.ipynb").write_str(indoc::indoc! {r#"
        {
          "cells": [
            {
              "cell_type": "code",
              "execution_count": null,
              "metadata": {},
              "outputs": [],
              "source": [
                "from ruff.a import helper"
              ]
            }
          ],
          "metadata": {
            "language_info": {
              "name": "python",
              "version": "3.12.0"
            }
          },
          "nbformat": 4,
          "nbformat_minor": 5
        }
    "#})?;

    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(command().current_dir(&root), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "notebook.ipynb": [
            "ruff/a.py"
          ],
          "ruff/__init__.py": [],
          "ruff/a.py": []
        }

        ----- stderr -----
        "###);
    });

    Ok(())
}

#[test]
fn notebook_with_magic() -> Result<()> {
    let tempdir = TempDir::new()?;
    let root = ChildPath::new(tempdir.path());

    root.child("ruff").child("__init__.py").write_str("")?;
    root.child("ruff")
        .child("a.py")
        .write_str(indoc::indoc! {r#"
            def helper():
                pass
        "#})?;

    // Create a notebook with IPython magic commands and imports
    root.child("notebook.ipynb").write_str(indoc::indoc! {r#"
        {
          "cells": [
            {
              "cell_type": "code",
              "execution_count": null,
              "metadata": {},
              "outputs": [],
              "source": [
                "%load_ext autoreload\n",
                "%autoreload 2"
              ]
            },
            {
              "cell_type": "code",
              "execution_count": null,
              "metadata": {},
              "outputs": [],
              "source": [
                "from ruff.a import helper"
              ]
            }
          ],
          "metadata": {
            "language_info": {
              "name": "python",
              "version": "3.12.0"
            }
          },
          "nbformat": 4,
          "nbformat_minor": 5
        }
    "#})?;

    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(command().current_dir(&root), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "notebook.ipynb": [
            "ruff/a.py"
          ],
          "ruff/__init__.py": [],
          "ruff/a.py": []
        }

        ----- stderr -----
        "###);
    });

    Ok(())
}
@@ -1760,6 +1760,64 @@ from foo import ( # noqa: F401
    Ok(())
}

#[test]
fn add_noqa_with_reason() -> Result<()> {
    let fixture = CliTest::new()?;
    fixture.write_file(
        "test.py",
        r#"import os

def foo():
    x = 1
"#,
    )?;

    assert_cmd_snapshot!(fixture
        .check_command()
        .arg("--add-noqa=TODO: fix")
        .arg("--select=F401,F841")
        .arg("test.py"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Added 2 noqa directives.
    ");

    let content = fs::read_to_string(fixture.root().join("test.py"))?;
    insta::assert_snapshot!(content, @r"
    import os  # noqa: F401 TODO: fix

    def foo():
        x = 1  # noqa: F841 TODO: fix
    ");

    Ok(())
}

#[test]
fn add_noqa_with_newline_in_reason() -> Result<()> {
    let fixture = CliTest::new()?;
    fixture.write_file("test.py", "import os\n")?;

    assert_cmd_snapshot!(fixture
        .check_command()
        .arg("--add-noqa=line1\nline2")
        .arg("--select=F401")
        .arg("test.py"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    ruff failed
      Cause: --add-noqa <reason> cannot contain newline characters
    "###);

    Ok(())
}
/// Infer `3.11` from `requires-python` in `pyproject.toml`.
#[test]
fn requires_python() -> Result<()> {

@@ -146,7 +146,7 @@ static FREQTRADE: Benchmark = Benchmark::new(
        max_dep_date: "2025-06-17",
        python_version: PythonVersion::PY312,
    },
    400,
    525,
);

static PANDAS: Benchmark = Benchmark::new(
@@ -226,7 +226,7 @@ static STATIC_FRAME: Benchmark = Benchmark::new(
        max_dep_date: "2025-08-09",
        python_version: PythonVersion::PY311,
    },
    750,
    800,
);

#[track_caller]
@@ -112,16 +112,16 @@ impl std::fmt::Display for Diff<'_> {
        // `None`, indicating a regular script file, all the lines will be in one "cell" under the
        // `None` key.
        let cells = if let Some(notebook_index) = &self.notebook_index {
            let mut last_cell = OneIndexed::MIN;
            let mut last_cell_index = OneIndexed::MIN;
            let mut cells: Vec<(Option<OneIndexed>, TextSize)> = Vec::new();
            for (row, cell) in notebook_index.iter() {
                if cell != last_cell {
                    let offset = source_code.line_start(row);
                    cells.push((Some(last_cell), offset));
                    last_cell = cell;
            for cell in notebook_index.iter() {
                if cell.cell_index() != last_cell_index {
                    let offset = source_code.line_start(cell.start_row());
                    cells.push((Some(last_cell_index), offset));
                    last_cell_index = cell.cell_index();
                }
            }
            cells.push((Some(last_cell), source_text.text_len()));
            cells.push((Some(last_cell_index), source_text.text_len()));
            cells
        } else {
            vec![(None, source_text.text_len())]
@@ -470,6 +470,17 @@ impl File {
        self.source_type(db).is_stub()
    }

    /// Returns `true` if the file is an `__init__.pyi`
    pub fn is_package_stub(self, db: &dyn Db) -> bool {
        self.path(db).as_str().ends_with("__init__.pyi")
    }

    /// Returns `true` if the file is an `__init__.py` or `__init__.pyi`
    pub fn is_package(self, db: &dyn Db) -> bool {
        let path = self.path(db).as_str();
        path.ends_with("__init__.pyi") || path.ends_with("__init__.py")
    }

    pub fn source_type(self, db: &dyn Db) -> PySourceType {
        match self.path(db) {
            FilePath::System(path) => path
@@ -200,7 +200,12 @@ impl System for OsSystem {
    /// The walker ignores files according to [`ignore::WalkBuilder::standard_filters`]
    /// when setting [`WalkDirectoryBuilder::standard_filters`] to true.
    fn walk_directory(&self, path: &SystemPath) -> WalkDirectoryBuilder {
        WalkDirectoryBuilder::new(path, OsDirectoryWalker {})
        WalkDirectoryBuilder::new(
            path,
            OsDirectoryWalker {
                cwd: self.current_directory().to_path_buf(),
            },
        )
    }

    fn glob(
@@ -454,7 +459,9 @@ struct ListedDirectory {
}

#[derive(Debug)]
struct OsDirectoryWalker;
struct OsDirectoryWalker {
    cwd: SystemPathBuf,
}

impl DirectoryWalker for OsDirectoryWalker {
    fn walk(
@@ -473,6 +480,7 @@ impl DirectoryWalker for OsDirectoryWalker {
        };

        let mut builder = ignore::WalkBuilder::new(first.as_std_path());
        builder.current_dir(self.cwd.as_std_path());

        builder.standard_filters(standard_filters);
        builder.hidden(hidden);
@@ -723,10 +723,11 @@ impl ruff_cache::CacheKey for SystemPathBuf {

/// A slice of a virtual path on [`System`](super::System) (akin to [`str`]).
#[repr(transparent)]
#[derive(Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct SystemVirtualPath(str);

impl SystemVirtualPath {
    pub fn new(path: &str) -> &SystemVirtualPath {
    pub const fn new(path: &str) -> &SystemVirtualPath {
        // SAFETY: SystemVirtualPath is marked as #[repr(transparent)] so the conversion from a
        // *const str to a *const SystemVirtualPath is valid.
        unsafe { &*(path as *const str as *const SystemVirtualPath) }
@@ -767,8 +768,8 @@ pub struct SystemVirtualPathBuf(String);

impl SystemVirtualPathBuf {
    #[inline]
    pub fn as_path(&self) -> &SystemVirtualPath {
        SystemVirtualPath::new(&self.0)
    pub const fn as_path(&self) -> &SystemVirtualPath {
        SystemVirtualPath::new(self.0.as_str())
    }
}

@@ -852,6 +853,12 @@ impl ruff_cache::CacheKey for SystemVirtualPathBuf {
    }
}

impl Borrow<SystemVirtualPath> for SystemVirtualPathBuf {
    fn borrow(&self) -> &SystemVirtualPath {
        self.as_path()
    }
}
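The `Borrow` impl added above is what lets hash maps keyed by the owned path type be queried with the borrowed slice type, with no per-lookup allocation. A self-contained sketch of the same owned/borrowed newtype pattern (`VPath`/`VPathBuf` are stand-in names for illustration, not ruff's definitions):

```rust
use std::borrow::Borrow;
use std::collections::HashMap;

// Borrowed slice type, layout-compatible with `str`.
#[derive(Hash, PartialEq, Eq)]
#[repr(transparent)]
struct VPath(str);

// Owning counterpart, like `SystemVirtualPathBuf`.
#[derive(Hash, PartialEq, Eq)]
struct VPathBuf(String);

impl VPath {
    fn new(path: &str) -> &VPath {
        // SAFETY: #[repr(transparent)] guarantees identical layout,
        // mirroring the SAFETY comment in the diff above.
        unsafe { &*(path as *const str as *const VPath) }
    }
}

impl Borrow<VPath> for VPathBuf {
    fn borrow(&self) -> &VPath {
        VPath::new(&self.0)
    }
}

fn main() {
    let mut map: HashMap<VPathBuf, u32> = HashMap::new();
    map.insert(VPathBuf("untitled:1".to_string()), 42);
    // `HashMap::get` accepts any `Q` where the key type is `Borrow<Q>`,
    // so the lookup key never needs to be turned into an owned `VPathBuf`:
    assert_eq!(map.get(VPath::new("untitled:1")), Some(&42));
}
```

This works because `String` and `str` hash and compare identically, so the `Borrow` contract (equal keys hash equally through the borrow) holds.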
/// Deduplicates identical paths and removes nested paths.
///
/// # Examples
@@ -62,7 +62,7 @@ fn generate_set(output: &mut String, set: Set, parents: &mut Vec<Set>) {
        generate_set(
            output,
            Set::Named {
                name: set_name.to_string(),
                name: set_name.clone(),
                set: *sub_set,
            },
            parents,
@@ -104,7 +104,7 @@ fn generate_set(output: &mut String, set: Set, parents: &mut Vec<Set>) {
        generate_set(
            output,
            Set::Named {
                name: set_name.to_string(),
                name: set_name.clone(),
                set: *sub_set,
            },
            parents,
@@ -1006,7 +1006,7 @@ impl<Context> std::fmt::Debug for Align<'_, Context> {
/// Block indents indent a block of code, such as in a function body, and therefore insert a line
/// break before and after the content.
///
/// Doesn't create an indentation if the passed in content is [`FormatElement.is_empty`].
/// Doesn't create an indentation if the passed in content is empty.
///
/// # Examples
///

@@ -487,7 +487,7 @@ pub trait FormatElements {
/// Represents the width by adding 1 to the actual width so that the width can be represented by a [`NonZeroU32`],
/// allowing [`TextWidth`] or [`Option<Width>`] to fit in 4 bytes rather than 8.
///
/// This means that 2^32 can not be precisely represented and instead has the same value as 2^32-1.
/// This means that 2^32 cannot be precisely represented and instead has the same value as 2^32-1.
/// This imprecision shouldn't matter in practice because any such text is longer than any configured line width
/// and thus, the text should break.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
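A compact way to see why the `+ 1` trick works: `NonZeroU32` has a niche that `Option` can exploit, so storing `width + 1` keeps a width of zero representable while `Option<Width>` stays at 4 bytes. A hedged sketch of the encoding (names are illustrative assumptions, not ruff's API):

```rust
use std::num::NonZeroU32;

/// Stores `width + 1` so that a width of zero is still a valid `NonZeroU32`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
struct Width(NonZeroU32);

impl Width {
    fn new(width: u32) -> Width {
        // Saturates at u32::MAX, so the two largest representable widths
        // share a stored value -- the imprecision the doc comment mentions.
        Width(NonZeroU32::new(width.saturating_add(1)).unwrap())
    }

    fn value(self) -> u32 {
        self.0.get() - 1
    }
}

fn main() {
    // The `NonZeroU32` niche makes `Option<Width>` as small as `Width` itself.
    assert_eq!(std::mem::size_of::<Option<Width>>(), 4);
    assert_eq!(Width::new(0).value(), 0);
    assert_eq!(Width::new(u32::MAX), Width::new(u32::MAX - 1));
}
```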
@@ -3,8 +3,9 @@ use std::collections::{BTreeMap, BTreeSet};
use anyhow::Result;

use ruff_db::system::{SystemPath, SystemPathBuf};
use ruff_python_ast::PySourceType;
use ruff_python_ast::helpers::to_module_path;
use ruff_python_parser::{Mode, ParseOptions, parse};
use ruff_python_parser::{ParseOptions, parse};

use crate::collector::Collector;
pub use crate::db::ModuleDb;
@@ -24,13 +25,14 @@ impl ModuleImports {
    /// Detect the [`ModuleImports`] for a given Python file.
    pub fn detect(
        db: &ModuleDb,
        source: &str,
        source_type: PySourceType,
        path: &SystemPath,
        package: Option<&SystemPath>,
        string_imports: StringImports,
    ) -> Result<Self> {
        // Read and parse the source code.
        let source = std::fs::read_to_string(path)?;
        let parsed = parse(&source, ParseOptions::from(Mode::Module))?;
        // Parse the source code.
        let parsed = parse(source, ParseOptions::from(source_type))?;

        let module_path =
            package.and_then(|package| to_module_path(package.as_std_path(), path.as_std_path()));
@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.14.2"
version = "0.14.4"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -22,6 +22,7 @@ DAG(dag_id="class_schedule_interval", schedule_interval="@hourly")

DAG(dag_id="class_timetable", timetable=NullTimetable())

DAG(dag_id="class_concurrency", concurrency=12)

DAG(dag_id="class_fail_stop", fail_stop=True)
@@ -10,6 +10,7 @@ from airflow.datasets import (
)
from airflow.datasets.manager import DatasetManager
from airflow.lineage.hook import DatasetLineageInfo, HookLineageCollector
from airflow.models.dag import DAG
from airflow.providers.amazon.aws.auth_manager.aws_auth_manager import AwsAuthManager
from airflow.providers.apache.beam.hooks import BeamHook, NotAir302HookError
from airflow.providers.google.cloud.secrets.secret_manager import (
@@ -20,6 +21,7 @@ from airflow.providers_manager import ProvidersManager
from airflow.secrets.base_secrets import BaseSecretsBackend
from airflow.secrets.local_filesystem import LocalFilesystemBackend


# airflow.Dataset
dataset_from_root = DatasetFromRoot()
dataset_from_root.iter_datasets()
@@ -56,6 +58,10 @@ hlc.add_input_dataset()
hlc.add_output_dataset()
hlc.collected_datasets()

# airflow.models.dag.DAG
test_dag = DAG(dag_id="test_dag")
test_dag.create_dagrun()

# airflow.providers.amazon.auth_manager.aws_auth_manager
aam = AwsAuthManager()
aam.is_authorized_dataset()
@@ -96,3 +102,15 @@ base_secret_backend.get_connections()
# airflow.secrets.local_filesystem
lfb = LocalFilesystemBackend()
lfb.get_connections()

from airflow.models import DAG

# airflow.DAG
test_dag = DAG(dag_id="test_dag")
test_dag.create_dagrun()

from airflow import DAG

# airflow.DAG
test_dag = DAG(dag_id="test_dag")
test_dag.create_dagrun()
@@ -70,3 +70,12 @@ builtins.getattr(foo, "bar")

# Regression test for: https://github.com/astral-sh/ruff/issues/18353
setattr(foo, "__debug__", 0)

# Regression test for: https://github.com/astral-sh/ruff/issues/21126
# Non-NFKC attribute names should be marked as unsafe. Python normalizes identifiers in
# attribute access (obj.attr) using NFKC, but does not normalize string
# arguments passed to getattr/setattr. Rewriting `getattr(ns, "ſ")` to
# `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior.
# Example: the long s character "ſ" normalizes to "s" under NFKC.
getattr(foo, "ſ")
setattr(foo, "ſ", 1)
@@ -46,3 +46,9 @@ class CorrectModel(models.Model):
        max_length=255, null=True, blank=True, unique=True
    )
    urlfieldu = models.URLField(max_length=255, null=True, blank=True, unique=True)


class IncorrectModelWithSimpleAnnotations(models.Model):
    charfield: models.CharField = models.CharField(max_length=255, null=True)
    textfield: models.TextField = models.TextField(max_length=255, null=True)
    slugfield: models.SlugField = models.SlugField(max_length=255, null=True)
7 crates/ruff_linter/resources/test/fixtures/flake8_implicit_str_concat/ISC_syntax_error_2.py vendored Normal file

@@ -0,0 +1,7 @@
# Regression test for https://github.com/astral-sh/ruff/issues/21023
'' '
"" ""
'' '' '
"" "" "
f"" f"
f"" f"" f"
@@ -359,3 +359,29 @@ class Generic5(list[PotentialTypeVar]):
    def __new__(cls: type[Generic5]) -> Generic5: ...
    def __enter__(self: Generic5) -> Generic5: ...


# Test cases based on issue #20781 - metaclasses that trigger IsMetaclass::Maybe
class MetaclassInWhichSelfCannotBeUsed5(type(Protocol)):
    def __new__(
        cls, name: str, bases: tuple[type[Any], ...], attrs: dict[str, Any], **kwargs: Any
    ) -> MetaclassInWhichSelfCannotBeUsed5:
        new_class = super().__new__(cls, name, bases, attrs, **kwargs)
        return new_class


import django.db.models.base


class MetaclassInWhichSelfCannotBeUsed6(django.db.models.base.ModelBase):
    def __new__(cls, name: str, bases: tuple[Any, ...], attrs: dict[str, Any], **kwargs: Any) -> MetaclassInWhichSelfCannotBeUsed6:
        ...


class MetaclassInWhichSelfCannotBeUsed7(django.db.models.base.ModelBase):
    def __new__(cls, /, name: str, bases: tuple[object, ...], attrs: dict[str, object], **kwds: object) -> MetaclassInWhichSelfCannotBeUsed7:
        ...


class MetaclassInWhichSelfCannotBeUsed8(django.db.models.base.ModelBase):
    def __new__(cls, name: builtins.str, bases: tuple, attributes: dict, /, **kw) -> MetaclassInWhichSelfCannotBeUsed8:
        ...
@@ -252,3 +252,28 @@ from some_module import PotentialTypeVar
class Generic5(list[PotentialTypeVar]):
    def __new__(cls: type[Generic5]) -> Generic5: ...
    def __enter__(self: Generic5) -> Generic5: ...


# Test case based on issue #20781 - metaclass that triggers IsMetaclass::Maybe
class MetaclassInWhichSelfCannotBeUsed5(type(Protocol)):
    def __new__(
        cls, name: str, bases: tuple[type[Any], ...], attrs: dict[str, Any], **kwargs: Any
    ) -> MetaclassInWhichSelfCannotBeUsed5: ...


import django.db.models.base


class MetaclassInWhichSelfCannotBeUsed6(django.db.models.base.ModelBase):
    def __new__(cls, name: str, bases: tuple[Any, ...], attrs: dict[str, Any], **kwargs: Any) -> MetaclassInWhichSelfCannotBeUsed6:
        ...


class MetaclassInWhichSelfCannotBeUsed7(django.db.models.base.ModelBase):
    def __new__(cls, /, name: str, bases: tuple[object, ...], attrs: dict[str, object], **kwds: object) -> MetaclassInWhichSelfCannotBeUsed7:
        ...


class MetaclassInWhichSelfCannotBeUsed8(django.db.models.base.ModelBase):
    def __new__(cls, name: builtins.str, bases: tuple, attributes: dict, /, **kw) -> MetaclassInWhichSelfCannotBeUsed8:
        ...
@@ -204,3 +204,15 @@ x = 1
print(f"{x=}" or "bar")  # SIM222
(lambda: 1) or True  # SIM222
(i for i in range(1)) or "bar"  # SIM222

# https://github.com/astral-sh/ruff/issues/21136
def get_items():
    return tuple(item for item in Item.objects.all()) or None  # OK


def get_items_list():
    return tuple([item for item in items]) or None  # OK


def get_items_set():
    return tuple({item for item in items}) or None  # OK
@@ -14,3 +14,14 @@ def f():
    import os

    print(os)


# regression test for https://github.com/astral-sh/ruff/issues/21121
from dataclasses import KW_ONLY, dataclass


@dataclass
class DataClass:
    a: int
    _: KW_ONLY  # should be an exception to TC003, even with future-annotations
    b: int
@@ -370,3 +370,22 @@ class Foo:
            The flag converter instance with all flags parsed.
        """
        return

# OK
def baz(x: int) -> int:
    """
    Show a `Warnings` DOC102 false positive.

    Parameters
    ----------
    x : int

    Warnings
    --------
    This function demonstrates a DOC102 false positive

    Returns
    -------
    int
    """
    return x
@@ -81,3 +81,55 @@ def calculate_speed(distance: float, time: float) -> float:
    except TypeError:
        print("Not a number? Shame on you!")
        raise


# This should NOT trigger DOC502 because OSError is explicitly re-raised
def f():
    """Do nothing.

    Raises:
        OSError: If the OS errors.
    """
    try:
        pass
    except OSError as e:
        raise e


# This should NOT trigger DOC502 because OSError is explicitly re-raised with from None
def g():
    """Do nothing.

    Raises:
        OSError: If the OS errors.
    """
    try:
        pass
    except OSError as e:
        raise e from None


# This should NOT trigger DOC502 because ValueError is explicitly re-raised from tuple exception
def h():
    """Do nothing.

    Raises:
        ValueError: If something goes wrong.
    """
    try:
        pass
    except (ValueError, TypeError) as e:
        raise e


# This should NOT trigger DOC502 because TypeError is explicitly re-raised from tuple exception
def i():
    """Do nothing.

    Raises:
        TypeError: If something goes wrong.
    """
    try:
        pass
    except (ValueError, TypeError) as e:
        raise e
21 crates/ruff_linter/resources/test/fixtures/pyflakes/F821_33.py vendored Normal file

@@ -0,0 +1,21 @@
class C:
    f = lambda self: __class__


print(C().f().__name__)

# Test: nested lambda
class D:
    g = lambda self: (lambda: __class__)


print(D().g()().__name__)

# Test: lambda outside class (should still fail)
h = lambda: __class__

# Test: lambda referencing module-level variable (should not be flagged as F821)
import uuid

class E:
    uuid = lambda: str(uuid.uuid4())
131 crates/ruff_linter/resources/test/fixtures/pylint/stop_iteration_return.py vendored Normal file

@@ -0,0 +1,131 @@
"""Test cases for PLR1708 stop-iteration-return."""


# Valid cases - should not trigger the rule
def normal_function():
    raise StopIteration  # Not a generator, should not trigger


def normal_function_with_value():
    raise StopIteration("value")  # Not a generator, should not trigger


def generator_with_return():
    yield 1
    yield 2
    return "finished"  # This is the correct way


def generator_with_yield_from():
    yield from [1, 2, 3]


def generator_without_stop_iteration():
    yield 1
    yield 2
    # No explicit termination


def generator_with_other_exception():
    yield 1
    raise ValueError("something else")  # Different exception


# Invalid cases - should trigger the rule
def generator_with_stop_iteration():
    yield 1
    yield 2
    raise StopIteration  # Should trigger


def generator_with_stop_iteration_value():
    yield 1
    yield 2
    raise StopIteration("finished")  # Should trigger


def generator_with_stop_iteration_expr():
    yield 1
    yield 2
    raise StopIteration(1 + 2)  # Should trigger


def async_generator_with_stop_iteration():
    yield 1
    yield 2
    raise StopIteration("async")  # Should trigger


def nested_generator():
    def inner_gen():
        yield 1
        raise StopIteration("inner")  # Should trigger

    yield from inner_gen()


def generator_in_class():
    class MyClass:
        def generator_method(self):
            yield 1
            raise StopIteration("method")  # Should trigger

    return MyClass


# Complex cases
def complex_generator():
    try:
        yield 1
        yield 2
        raise StopIteration("complex")  # Should trigger
    except ValueError:
        yield 3
    finally:
        pass


def generator_with_conditional_stop_iteration(condition):
    yield 1
    if condition:
        raise StopIteration("conditional")  # Should trigger
    yield 2


# Edge cases
def generator_with_bare_stop_iteration():
    yield 1
    raise StopIteration  # Should trigger (no arguments)


def generator_with_stop_iteration_in_loop():
    for i in range(5):
        yield i
        if i == 3:
            raise StopIteration("loop")  # Should trigger


# Should not trigger - different exceptions
def generator_with_runtime_error():
    yield 1
    raise RuntimeError("not StopIteration")  # Should not trigger


def generator_with_custom_exception():
    yield 1
    raise CustomException("custom")  # Should not trigger


class CustomException(Exception):
    pass


# Generator comprehensions should not be affected
list_comp = [x for x in range(10)]  # Should not trigger


# Lambda in generator context
def generator_with_lambda():
    yield 1
    func = lambda x: x  # Just a regular lambda
    yield 2
5 crates/ruff_linter/resources/test/fixtures/pyupgrade/UP029_2.py vendored Normal file

@@ -0,0 +1,5 @@
from .builtins import next
from ..builtins import str
from ...builtins import int
from .builtins import next as _next
17 crates/ruff_linter/resources/test/fixtures/pyupgrade/UP037_3.py vendored Normal file

@@ -0,0 +1,17 @@
"""
Regression test for an ecosystem hit on
https://github.com/astral-sh/ruff/pull/21125.

We should mark all of the components of special dataclass annotations as
runtime-required, not just the first layer.
"""

from dataclasses import dataclass
from typing import ClassVar, Optional


@dataclass(frozen=True)
class EmptyCell:
    _singleton: ClassVar[Optional["EmptyCell"]] = None
    # the behavior of _singleton above should match a non-ClassVar
    _doubleton: "EmptyCell"
13 crates/ruff_linter/resources/test/fixtures/pyupgrade/UP046_2.py vendored Normal file
@@ -0,0 +1,13 @@
"""This is placed in a separate fixture as `TypeVar` needs to be imported
from `typing_extensions` to support default arguments in Python version < 3.13.
We verify that UP046 doesn't apply in this case.
"""

from typing import Generic
from typing_extensions import TypeVar

T = TypeVar("T", default=str)


class DefaultTypeVar(Generic[T]):
    var: T
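For reference, the rewrite UP046 would normally offer targets PEP 695 generic syntax, and expressing `default=str` there requires PEP 696 type-parameter defaults, which only landed in Python 3.13 — hence the rule correctly bails on older targets. A sketch of the 3.13+ equivalent (illustrative, not part of the fixture):

# Python 3.13+ only: PEP 695 syntax with a PEP 696 type-parameter default
class DefaultTypeVar[T = str]:
    var: T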
12 crates/ruff_linter/resources/test/fixtures/pyupgrade/UP047_1.py vendored Normal file
@@ -0,0 +1,12 @@
"""This is placed in a separate fixture as `TypeVar` needs to be imported
from `typing_extensions` to support default arguments in Python version < 3.13.
We verify that UP047 doesn't apply in this case.
"""

from typing_extensions import TypeVar

T = TypeVar("T", default=int)


def default_var(var: T) -> T:
    return var
@@ -125,3 +125,18 @@ with open(*filename, mode="r") as f:
 # `buffering`.
 with open(*filename, file="file.txt", mode="r") as f:
     x = f.read()
+
+# FURB101
+with open("file.txt", encoding="utf-8") as f:
+    contents: str = f.read()
+
+# FURB101 but no fix because it would remove the assignment to `x`
+with open("file.txt", encoding="utf-8") as f:
+    contents, x = f.read(), 2
+
+# FURB101 but no fix because it would remove the `process_contents` call
+with open("file.txt", encoding="utf-8") as f:
+    contents = process_contents(f.read())
+
+with open("file.txt", encoding="utf-8") as f:
+    contents: str = process_contents(f.read())
@@ -145,3 +145,11 @@ with open("file.txt", "w") as f:
 with open("file.txt", "w") as f:
     for line in text:
         f.write(line)
+
+# See: https://github.com/astral-sh/ruff/issues/20785
+import json
+
+data = {"price": 100}
+
+with open("test.json", "wb") as f:
+    f.write(json.dumps(data, indent=4).encode("utf-8"))
@@ -19,6 +19,9 @@ print("", *args, sep="")
 print("", **kwargs)
 print(sep="\t")
 print(sep=print(1))
+print(f"")
+print(f"", sep=",")
+print(f"", end="bar")
 
 # OK.
@@ -33,3 +36,4 @@ print("foo", "", sep=",")
 print("foo", "", "bar", "", sep=",")
 print("", "", **kwargs)
 print(*args, sep=",")
+print(f"foo")
@@ -69,3 +69,25 @@ Decimal(float("\N{space}\N{hyPHen-MINus}nan"))
 Decimal(float("\x20\N{character tabulation}\N{hyphen-minus}nan"))
 Decimal(float(" -" "nan"))
 Decimal(float("-nAn"))
+
+# Test cases for digit separators (safe fixes)
+# https://github.com/astral-sh/ruff/issues/20572
+Decimal("15_000_000")  # Safe fix: normalizes separators, becomes Decimal(15_000_000)
+Decimal("1_234_567")  # Safe fix: normalizes separators, becomes Decimal(1_234_567)
+Decimal("-5_000")  # Safe fix: normalizes separators, becomes Decimal(-5_000)
+Decimal("+9_999")  # Safe fix: normalizes separators, becomes Decimal(+9_999)
+
+# Test cases for non-thousands separators
+Decimal("12_34_56_78")  # Safe fix: preserves non-thousands separators
+Decimal("1234_5678")  # Safe fix: preserves non-thousands separators
+
+# Separators _and_ leading zeros
+Decimal("0001_2345")
+Decimal("000_1_2345")
+Decimal("000_000")
+
+# Test cases for underscores before sign
+# https://github.com/astral-sh/ruff/issues/21186
+Decimal("_-1")  # Should flag as verbose
+Decimal("_+1")  # Should flag as verbose
+Decimal("_-1_000")  # Should flag as verbose
@@ -64,3 +64,8 @@ _ = Decimal.from_float(True)
 _ = Decimal.from_float(float("-nan"))
 _ = Decimal.from_float(float("\x2dnan"))
 _ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan"))
+
+# See: https://github.com/astral-sh/ruff/issues/21257
+# fixes must be safe
+_ = Fraction.from_float(f=4.2)
+_ = Fraction.from_decimal(dec=4)
@@ -81,3 +81,7 @@ round(# a comment
 round(
     17  # a comment
 )
+
+# See: https://github.com/astral-sh/ruff/issues/21209
+print(round(125, **{"ndigits": -2}))
+print(round(125, *[-2]))
@@ -43,3 +43,29 @@ logging.warning("Value: %r", repr(42))
 logging.error("Error: %r", repr([1, 2, 3]))
 logging.info("Debug info: %s", repr("test\nstring"))
 logging.warning("Value: %s", repr(42))
+
+# %s + ascii()
+logging.info("ASCII: %s", ascii("Hello\nWorld"))
+logging.warning("ASCII: %s", ascii("test"))
+
+# %s + oct()
+logging.info("Octal: %s", oct(42))
+logging.warning("Octal: %s", oct(255))
+
+# %s + hex()
+logging.info("Hex: %s", hex(42))
+logging.warning("Hex: %s", hex(255))
+
+
+# Test with imported functions
+from logging import info, log
+
+info("ASCII: %s", ascii("Hello\nWorld"))
+log(logging.INFO, "ASCII: %s", ascii("test"))
+
+info("Octal: %s", oct(42))
+log(logging.INFO, "Octal: %s", oct(255))
+
+info("Hex: %s", hex(42))
+log(logging.INFO, "Hex: %s", hex(255))
18 crates/ruff_linter/resources/test/fixtures/ruff/RUF065_1.py vendored Normal file
@@ -0,0 +1,18 @@
import logging

# Test cases for str() that should NOT be flagged (issue #21315)
# str() with no arguments - should not be flagged
logging.warning("%s", str())

# str() with multiple arguments - should not be flagged
logging.warning("%s", str(b"\xe2\x9a\xa0", "utf-8"))

# str() with starred arguments - should not be flagged
logging.warning("%s", str(*(b"\xf0\x9f\x9a\xa7", "utf-8")))

# str() with keyword unpacking - should not be flagged
logging.warning("%s", str(**{"object": b"\xf0\x9f\x9a\xa8", "encoding": "utf-8"}))

# str() with single keyword argument - should be flagged (equivalent to str("!"))
logging.warning("%s", str(object="!"))
12 crates/ruff_linter/resources/test/fixtures/semantic_errors/async_comprehension_outside_async_function.py vendored Normal file
@@ -0,0 +1,12 @@
async def f(): return [[x async for x in foo(n)] for n in range(3)]

async def test(): return [[x async for x in elements(n)] async for n in range(3)]

async def f(): [x for x in foo()] and [x async for x in foo()]

async def f():
    def g(): ...
    [x async for x in foo()]

[x async for x in y]
3 crates/ruff_linter/resources/test/fixtures/semantic_errors/duplicate_match_class_attribute.py vendored Normal file
@@ -0,0 +1,3 @@
match x:
    case Point(x=1, x=2):
        pass
3 crates/ruff_linter/resources/test/fixtures/semantic_errors/duplicate_match_key.py vendored Normal file
@@ -0,0 +1,3 @@
match x:
    case {'key': 1, 'key': 2}:
        pass
1 crates/ruff_linter/resources/test/fixtures/semantic_errors/duplicate_type_parameter.py vendored Normal file
@@ -0,0 +1 @@
class C[T, T]: pass
29 crates/ruff_linter/resources/test/fixtures/semantic_errors/global_parameter.py vendored Normal file
@@ -0,0 +1,29 @@
def f(a):
    global a

def g(a):
    if True:
        global a

def h(a):
    def inner():
        global a

def i(a):
    try:
        global a
    except Exception:
        pass

def f(a):
    a = 1
    global a

def f(a):
    a = 1
    a = 2
    global a

def f(a):
    class Inner:
        global a  # ok
8 crates/ruff_linter/resources/test/fixtures/semantic_errors/invalid_expression.py vendored Normal file
@@ -0,0 +1,8 @@
type X[T: (yield 1)] = int

type Y = (yield 1)

def f[T](x: int) -> (y := 3): return x

class C[T]((yield from [object])):
    pass
8 crates/ruff_linter/resources/test/fixtures/semantic_errors/invalid_star_expression.py vendored Normal file
@@ -0,0 +1,8 @@
def func():
    return *x

for *x in range(10):
    pass

def func():
    yield *x
11 crates/ruff_linter/resources/test/fixtures/semantic_errors/irrefutable_case_pattern.py vendored Normal file
@@ -0,0 +1,11 @@
match value:
    case _:
        pass
    case 1:
        pass

match value:
    case irrefutable:
        pass
    case 1:
        pass
5 crates/ruff_linter/resources/test/fixtures/semantic_errors/multiple_case_assignment.py vendored Normal file
@@ -0,0 +1,5 @@
match x:
    case [a, a]:
        pass
    case _:
        pass
1 crates/ruff_linter/resources/test/fixtures/semantic_errors/rebound_comprehension.py vendored Normal file
@@ -0,0 +1 @@
[x:= 2 for x in range(2)]
1 crates/ruff_linter/resources/test/fixtures/semantic_errors/single_starred_assignment.py vendored Normal file
@@ -0,0 +1 @@
*a = [1, 2, 3, 4]
7 crates/ruff_linter/resources/test/fixtures/semantic_errors/write_to_debug.py vendored Normal file
@@ -0,0 +1,7 @@
__debug__ = False

def process(__debug__):
    pass

class Generic[__debug__]:
    pass
@@ -43,9 +43,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
             pycodestyle::rules::ambiguous_variable_name(checker, name, name.range());
         }
     }
-    if checker.is_rule_enabled(Rule::NonlocalWithoutBinding) {
-        pylint::rules::nonlocal_without_binding(checker, nonlocal);
-    }
     if checker.is_rule_enabled(Rule::NonlocalAndGlobal) {
         pylint::rules::nonlocal_and_global(checker, nonlocal);
     }
@@ -720,7 +717,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
         }
         if checker.is_rule_enabled(Rule::UnnecessaryBuiltinImport) {
             if let Some(module) = module {
-                pyupgrade::rules::unnecessary_builtin_import(checker, stmt, module, names);
+                pyupgrade::rules::unnecessary_builtin_import(
+                    checker, stmt, module, names, level,
+                );
             }
         }
         if checker.any_rule_enabled(&[
@@ -951,6 +950,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
             if checker.is_rule_enabled(Rule::MisplacedBareRaise) {
                 pylint::rules::misplaced_bare_raise(checker, raise);
             }
+            if checker.is_rule_enabled(Rule::StopIterationReturn) {
+                pylint::rules::stop_iteration_return(checker, raise);
+            }
         }
         Stmt::AugAssign(aug_assign @ ast::StmtAugAssign { target, .. }) => {
             if checker.is_rule_enabled(Rule::GlobalStatement) {
@@ -73,7 +73,8 @@ use crate::rules::pyflakes::rules::{
     UndefinedLocalWithNestedImportStarUsage, YieldOutsideFunction,
 };
 use crate::rules::pylint::rules::{
-    AwaitOutsideAsync, LoadBeforeGlobalDeclaration, YieldFromInAsyncFunction,
+    AwaitOutsideAsync, LoadBeforeGlobalDeclaration, NonlocalWithoutBinding,
+    YieldFromInAsyncFunction,
 };
 use crate::rules::{flake8_pyi, flake8_type_checking, pyflakes, pyupgrade};
 use crate::settings::rule_table::RuleTable;
@@ -641,6 +642,10 @@ impl SemanticSyntaxContext for Checker<'_> {
         self.semantic.global(name)
     }
 
+    fn has_nonlocal_binding(&self, name: &str) -> bool {
+        self.semantic.nonlocal(name).is_some()
+    }
+
     fn report_semantic_error(&self, error: SemanticSyntaxError) {
         match error.kind {
             SemanticSyntaxErrorKind::LateFutureImport => {
@@ -717,6 +722,12 @@ impl SemanticSyntaxContext for Checker<'_> {
                     self.report_diagnostic(pyflakes::rules::ContinueOutsideLoop, error.range);
                 }
             }
+            SemanticSyntaxErrorKind::NonlocalWithoutBinding(name) => {
+                // PLE0117
+                if self.is_rule_enabled(Rule::NonlocalWithoutBinding) {
+                    self.report_diagnostic(NonlocalWithoutBinding { name }, error.range);
+                }
+            }
             SemanticSyntaxErrorKind::ReboundComprehensionVariable
             | SemanticSyntaxErrorKind::DuplicateTypeParameter
             | SemanticSyntaxErrorKind::MultipleCaseAssignment(_)
@@ -1400,6 +1411,14 @@ impl<'a> Visitor<'a> for Checker<'a> {
             AnnotationContext::RuntimeRequired => {
                 self.visit_runtime_required_annotation(annotation);
             }
+            AnnotationContext::RuntimeEvaluated
+                if flake8_type_checking::helpers::is_dataclass_meta_annotation(
+                    annotation,
+                    self.semantic(),
+                ) =>
+            {
+                self.visit_runtime_required_annotation(annotation);
+            }
             AnnotationContext::RuntimeEvaluated => {
                 self.visit_runtime_evaluated_annotation(annotation);
             }
@@ -2116,7 +2135,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
             | Expr::DictComp(_)
             | Expr::SetComp(_) => {
                 self.analyze.scopes.push(self.semantic.scope_id);
-                self.semantic.pop_scope();
+                self.semantic.pop_scope(); // Lambda/Generator/Comprehension scope
             }
             _ => {}
         }
@@ -3041,7 +3060,35 @@ impl<'a> Checker<'a> {
                 if let Some(parameters) = parameters {
                     self.visit_parameters(parameters);
                 }
+
+                // Here we add the implicit scope surrounding a lambda which allows code in the
+                // lambda to access `__class__` at runtime when the lambda is defined within a class.
+                // See the `ScopeKind::DunderClassCell` docs for more information.
+                let added_dunder_class_scope = if self
+                    .semantic
+                    .current_scopes()
+                    .any(|scope| scope.kind.is_class())
+                {
+                    self.semantic.push_scope(ScopeKind::DunderClassCell);
+                    let binding_id = self.semantic.push_binding(
+                        TextRange::default(),
+                        BindingKind::DunderClassCell,
+                        BindingFlags::empty(),
+                    );
+                    self.semantic
+                        .current_scope_mut()
+                        .add("__class__", binding_id);
+                    true
+                } else {
+                    false
+                };
+
                 self.visit_expr(body);
+
+                // Pop the DunderClassCell scope if it was added
+                if added_dunder_class_scope {
+                    self.semantic.pop_scope();
+                }
             }
         }
         self.semantic.restore(snapshot);
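The `__class__` cell this new scope models is observable from plain Python: any function textually nested in a class body, lambdas included, can resolve `__class__` through the implicit closure cell — the same machinery that powers zero-argument `super()`. A rough illustration of the behavior being modeled (assumed per CPython's data model, not code from this diff):

class A:
    # The lambda closes over the enclosing class's implicit __class__ cell.
    get_cls = lambda self: __class__

print(A().get_cls() is A)  # True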
@@ -286,6 +286,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
         (Pylint, "R1702") => rules::pylint::rules::TooManyNestedBlocks,
         (Pylint, "R1704") => rules::pylint::rules::RedefinedArgumentFromLocal,
         (Pylint, "R1706") => rules::pylint::rules::AndOrTernary,
+        (Pylint, "R1708") => rules::pylint::rules::StopIterationReturn,
         (Pylint, "R1711") => rules::pylint::rules::UselessReturn,
         (Pylint, "R1714") => rules::pylint::rules::RepeatedEqualityComparison,
         (Pylint, "R1722") => rules::pylint::rules::SysExitAlias,
@@ -11,6 +11,8 @@ pub(crate) static GOOGLE_SECTIONS: &[SectionKind] = &[
     SectionKind::References,
     SectionKind::Returns,
     SectionKind::SeeAlso,
+    SectionKind::Warnings,
+    SectionKind::Warns,
     SectionKind::Yields,
     // Google-only
     SectionKind::Args,
@@ -32,7 +34,5 @@ pub(crate) static GOOGLE_SECTIONS: &[SectionKind] = &[
     SectionKind::Tip,
     SectionKind::Todo,
     SectionKind::Warning,
-    SectionKind::Warnings,
-    SectionKind::Warns,
     SectionKind::Yield,
 ];
@@ -11,11 +11,14 @@ pub(crate) static NUMPY_SECTIONS: &[SectionKind] = &[
     SectionKind::References,
     SectionKind::Returns,
     SectionKind::SeeAlso,
+    SectionKind::Warnings,
+    SectionKind::Warns,
     SectionKind::Yields,
     // NumPy-only
     SectionKind::ExtendedSummary,
     SectionKind::OtherParams,
     SectionKind::OtherParameters,
     SectionKind::Parameters,
+    SectionKind::Receives,
     SectionKind::ShortSummary,
 ];
@@ -36,6 +36,7 @@ pub(crate) enum SectionKind {
     OtherParameters,
     Parameters,
     Raises,
+    Receives,
     References,
     Return,
     Returns,
@@ -76,6 +77,7 @@ impl SectionKind {
             "other parameters" => Some(Self::OtherParameters),
             "parameters" => Some(Self::Parameters),
             "raises" => Some(Self::Raises),
+            "receives" => Some(Self::Receives),
             "references" => Some(Self::References),
             "return" => Some(Self::Return),
             "returns" => Some(Self::Returns),
@@ -117,6 +119,7 @@ impl SectionKind {
             Self::OtherParameters => "Other Parameters",
             Self::Parameters => "Parameters",
             Self::Raises => "Raises",
+            Self::Receives => "Receives",
             Self::References => "References",
             Self::Return => "Return",
             Self::Returns => "Returns",
@@ -14,7 +14,7 @@ use ruff_text_size::TextSize;
 /// The length of a line of text that is considered too long.
 ///
 /// The allowed range of values is 1..=320
-#[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
+#[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize)]
 #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
 pub struct LineLength(
     #[cfg_attr(feature = "schemars", schemars(range(min = 1, max = 320)))] NonZeroU16,
@@ -46,6 +46,25 @@ impl fmt::Display for LineLength {
     }
 }
 
+impl<'de> serde::Deserialize<'de> for LineLength {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        let value = i64::deserialize(deserializer)?;
+
+        u16::try_from(value)
+            .ok()
+            .and_then(|u16_value| Self::try_from(u16_value).ok())
+            .ok_or_else(|| {
+                serde::de::Error::custom(format!(
+                    "line-length must be between 1 and {} (got {value})",
+                    Self::MAX,
+                ))
+            })
+    }
+}
+
 impl CacheKey for LineLength {
     fn cache_key(&self, state: &mut CacheKeyHasher) {
         state.write_u16(self.0.get());
@@ -377,6 +377,7 @@ pub fn add_noqa_to_path(
     source_kind: &SourceKind,
     source_type: PySourceType,
     settings: &LinterSettings,
+    reason: Option<&str>,
 ) -> Result<usize> {
     // Parse once.
     let target_version = settings.resolve_target_version(path);
@@ -425,6 +426,7 @@
         &settings.external,
         &directives.noqa_line_for,
         stylist.line_ending(),
+        reason,
     )
 }
@@ -919,17 +921,6 @@ mod tests {
         Ok(())
     }
 
-    /// Wrapper around `test_contents_syntax_errors` for testing a snippet of code instead of a
-    /// file.
-    fn test_snippet_syntax_errors(contents: &str, settings: &LinterSettings) -> Vec<Diagnostic> {
-        let contents = dedent(contents);
-        test_contents_syntax_errors(
-            &SourceKind::Python(contents.to_string()),
-            Path::new("<filename>"),
-            settings,
-        )
-    }
-
     /// A custom test runner that prints syntax errors in addition to other diagnostics. Adapted
     /// from `flakes` in pyflakes/mod.rs.
     fn test_contents_syntax_errors(
@@ -972,245 +963,38 @@ mod tests {
     }
 
     #[test_case(
-        "async_in_sync_error_on_310",
-        "async def f(): return [[x async for x in foo(n)] for n in range(3)]",
-        PythonVersion::PY310,
-        "AsyncComprehensionOutsideAsyncFunction"
+        Path::new("async_comprehension_outside_async_function.py"),
+        PythonVersion::PY311
     )]
     #[test_case(
-        "async_in_sync_okay_on_311",
-        "async def f(): return [[x async for x in foo(n)] for n in range(3)]",
-        PythonVersion::PY311,
-        "AsyncComprehensionOutsideAsyncFunction"
+        Path::new("async_comprehension_outside_async_function.py"),
+        PythonVersion::PY310
     )]
-    #[test_case(
-        "async_in_sync_okay_on_310",
-        "async def test(): return [[x async for x in elements(n)] async for n in range(3)]",
-        PythonVersion::PY310,
-        "AsyncComprehensionOutsideAsyncFunction"
-    )]
-    #[test_case(
-        "deferred_function_body",
-        "
-async def f(): [x for x in foo()] and [x async for x in foo()]
-async def f():
-    def g(): ...
-    [x async for x in foo()]
-",
-        PythonVersion::PY310,
-        "AsyncComprehensionOutsideAsyncFunction"
-    )]
-    #[test_case(
-        "async_in_sync_false_positive",
-        "[x async for x in y]",
-        PythonVersion::PY310,
-        "AsyncComprehensionOutsideAsyncFunction"
-    )]
-    #[test_case(
-        "rebound_comprehension",
-        "[x:= 2 for x in range(2)]",
-        PythonVersion::PY310,
-        "ReboundComprehensionVariable"
-    )]
-    #[test_case(
-        "duplicate_type_param",
-        "class C[T, T]: pass",
-        PythonVersion::PY312,
-        "DuplicateTypeParameter"
-    )]
-    #[test_case(
-        "multiple_case_assignment",
-        "
-match x:
-    case [a, a]:
-        pass
-    case _:
-        pass
-",
-        PythonVersion::PY310,
-        "MultipleCaseAssignment"
-    )]
-    #[test_case(
-        "duplicate_match_key",
-        "
-match x:
-    case {'key': 1, 'key': 2}:
-        pass
-",
-        PythonVersion::PY310,
-        "DuplicateMatchKey"
-    )]
-    #[test_case(
-        "global_parameter",
-        "
-def f(a):
-    global a
-
-def g(a):
-    if True:
-        global a
-
-def h(a):
-    def inner():
-        global a
-
-def i(a):
-    try:
-        global a
-    except Exception:
-        pass
-
-def f(a):
-    a = 1
-    global a
-
-def f(a):
-    a = 1
-    a = 2
-    global a
-
-def f(a):
-    class Inner:
-        global a  # ok
-",
-        PythonVersion::PY310,
-        "GlobalParameter"
-    )]
-    #[test_case(
-        "duplicate_match_class_attribute",
-        "
-match x:
-    case Point(x=1, x=2):
-        pass
-",
-        PythonVersion::PY310,
-        "DuplicateMatchClassAttribute"
-    )]
-    #[test_case(
-        "invalid_star_expression",
-        "
-def func():
-    return *x
-",
-        PythonVersion::PY310,
-        "InvalidStarExpression"
-    )]
-    #[test_case(
-        "invalid_star_expression_for",
-        "
-for *x in range(10):
-    pass
-",
-        PythonVersion::PY310,
-        "InvalidStarExpression"
-    )]
-    #[test_case(
-        "invalid_star_expression_yield",
-        "
-def func():
-    yield *x
-",
-        PythonVersion::PY310,
-        "InvalidStarExpression"
-    )]
-    #[test_case(
-        "irrefutable_case_pattern_wildcard",
-        "
-match value:
-    case _:
-        pass
-    case 1:
-        pass
-",
-        PythonVersion::PY310,
-        "IrrefutableCasePattern"
-    )]
-    #[test_case(
-        "irrefutable_case_pattern_capture",
-        "
-match value:
-    case irrefutable:
-        pass
-    case 1:
-        pass
-",
-        PythonVersion::PY310,
-        "IrrefutableCasePattern"
-    )]
-    #[test_case(
-        "single_starred_assignment",
-        "*a = [1, 2, 3, 4]",
-        PythonVersion::PY310,
-        "SingleStarredAssignment"
-    )]
-    #[test_case(
-        "write_to_debug",
-        "
-__debug__ = False
-",
-        PythonVersion::PY310,
-        "WriteToDebug"
-    )]
-    #[test_case(
-        "write_to_debug_in_function_param",
-        "
-def process(__debug__):
-    pass
-",
-        PythonVersion::PY310,
-        "WriteToDebug"
-    )]
-    #[test_case(
-        "write_to_debug_class_type_param",
-        "
-class Generic[__debug__]:
-    pass
-",
-        PythonVersion::PY312,
-        "WriteToDebug"
-    )]
-    #[test_case(
-        "invalid_expression_yield_in_type_param",
-        "
-type X[T: (yield 1)] = int
-",
-        PythonVersion::PY312,
-        "InvalidExpression"
-    )]
-    #[test_case(
-        "invalid_expression_yield_in_type_alias",
-        "
-type Y = (yield 1)
-",
-        PythonVersion::PY312,
-        "InvalidExpression"
-    )]
-    #[test_case(
-        "invalid_expression_walrus_in_return_annotation",
-        "
-def f[T](x: int) -> (y := 3): return x
-",
-        PythonVersion::PY312,
-        "InvalidExpression"
-    )]
-    #[test_case(
-        "invalid_expression_yield_from_in_base_class",
-        "
-class C[T]((yield from [object])):
-    pass
-",
-        PythonVersion::PY312,
-        "InvalidExpression"
-    )]
-    fn test_semantic_errors(
-        name: &str,
-        contents: &str,
-        python_version: PythonVersion,
-        error_type: &str,
-    ) {
-        let snapshot = format!("semantic_syntax_error_{error_type}_{name}_{python_version}");
-        let diagnostics = test_snippet_syntax_errors(
-            contents,
+    #[test_case(Path::new("rebound_comprehension.py"), PythonVersion::PY310)]
+    #[test_case(Path::new("duplicate_type_parameter.py"), PythonVersion::PY312)]
+    #[test_case(Path::new("multiple_case_assignment.py"), PythonVersion::PY310)]
+    #[test_case(Path::new("duplicate_match_key.py"), PythonVersion::PY310)]
+    #[test_case(Path::new("duplicate_match_class_attribute.py"), PythonVersion::PY310)]
+    #[test_case(Path::new("invalid_star_expression.py"), PythonVersion::PY310)]
+    #[test_case(Path::new("irrefutable_case_pattern.py"), PythonVersion::PY310)]
+    #[test_case(Path::new("single_starred_assignment.py"), PythonVersion::PY310)]
+    #[test_case(Path::new("write_to_debug.py"), PythonVersion::PY312)]
+    #[test_case(Path::new("write_to_debug.py"), PythonVersion::PY310)]
+    #[test_case(Path::new("invalid_expression.py"), PythonVersion::PY312)]
+    #[test_case(Path::new("global_parameter.py"), PythonVersion::PY310)]
+    fn test_semantic_errors(path: &Path, python_version: PythonVersion) -> Result<()> {
+        let snapshot = format!(
+            "semantic_syntax_error_{}_{}",
+            path.to_string_lossy(),
+            python_version
+        );
+        let path = Path::new("resources/test/fixtures/semantic_errors").join(path);
+        let contents = std::fs::read_to_string(&path)?;
+        let source_kind = SourceKind::Python(contents);
+
+        let diagnostics = test_contents_syntax_errors(
+            &source_kind,
+            &path,
             &LinterSettings {
                 rules: settings::rule_table::RuleTable::empty(),
                 unresolved_target_version: python_version.into(),
@@ -1218,7 +1002,11 @@ mod tests {
                 ..Default::default()
             },
         );
-        assert_diagnostics!(snapshot, diagnostics);
+        insta::with_settings!({filters => vec![(r"\\", "/")]}, {
+            assert_diagnostics!(format!("{snapshot}"), diagnostics);
+        });
+
+        Ok(())
     }
 
     #[test_case(PythonVersion::PY310)]
@@ -4,4 +4,4 @@ expression: content
 ---
 syntax_errors.py:
   1:15 invalid-syntax: Expected one or more symbol names after import
-  3:12 invalid-syntax: Expected ')', found newline
+  3:12 invalid-syntax: Expected `)`, found newline
@@ -39,7 +39,7 @@ pub fn generate_noqa_edits(
     let exemption = FileExemption::from(&file_directives);
     let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
     let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
-    build_noqa_edits_by_diagnostic(comments, locator, line_ending)
+    build_noqa_edits_by_diagnostic(comments, locator, line_ending, None)
 }
 
 /// A directive to ignore a set of rules either for a given line of Python source code or an entire file (e.g.,
@@ -715,6 +715,7 @@ impl Display for LexicalError {
 impl Error for LexicalError {}
 
 /// Adds noqa comments to suppress all messages of a file.
+#[expect(clippy::too_many_arguments)]
 pub(crate) fn add_noqa(
     path: &Path,
     diagnostics: &[Diagnostic],
@@ -723,6 +724,7 @@ pub(crate) fn add_noqa(
     external: &[String],
     noqa_line_for: &NoqaMapping,
     line_ending: LineEnding,
+    reason: Option<&str>,
 ) -> Result<usize> {
     let (count, output) = add_noqa_inner(
         path,
@@ -732,12 +734,14 @@ pub(crate) fn add_noqa(
         external,
         noqa_line_for,
         line_ending,
+        reason,
     );
 
     fs::write(path, output)?;
     Ok(count)
 }
 
+#[expect(clippy::too_many_arguments)]
 fn add_noqa_inner(
     path: &Path,
     diagnostics: &[Diagnostic],
@@ -746,6 +750,7 @@ fn add_noqa_inner(
     external: &[String],
     noqa_line_for: &NoqaMapping,
     line_ending: LineEnding,
+    reason: Option<&str>,
 ) -> (usize, String) {
     let mut count = 0;
@@ -757,7 +762,7 @@ fn add_noqa_inner(
 
     let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
 
-    let edits = build_noqa_edits_by_line(comments, locator, line_ending);
+    let edits = build_noqa_edits_by_line(comments, locator, line_ending, reason);
 
     let contents = locator.contents();
@@ -783,6 +788,7 @@ fn build_noqa_edits_by_diagnostic(
     comments: Vec<Option<NoqaComment>>,
     locator: &Locator,
     line_ending: LineEnding,
+    reason: Option<&str>,
 ) -> Vec<Option<Edit>> {
     let mut edits = Vec::default();
     for comment in comments {
@@ -794,6 +800,7 @@ fn build_noqa_edits_by_diagnostic(
                 FxHashSet::from_iter([comment.code]),
                 locator,
                 line_ending,
+                reason,
             ) {
                 edits.push(Some(noqa_edit.into_edit()));
             }
@@ -808,6 +815,7 @@ fn build_noqa_edits_by_line<'a>(
     comments: Vec<Option<NoqaComment<'a>>>,
     locator: &Locator,
     line_ending: LineEnding,
+    reason: Option<&'a str>,
 ) -> BTreeMap<TextSize, NoqaEdit<'a>> {
     let mut comments_by_line = BTreeMap::default();
     for comment in comments.into_iter().flatten() {
@@ -831,6 +839,7 @@ fn build_noqa_edits_by_line<'a>(
                 .collect(),
             locator,
             line_ending,
+            reason,
         ) {
             edits.insert(offset, edit);
         }
@@ -927,6 +936,7 @@ struct NoqaEdit<'a> {
     noqa_codes: FxHashSet<&'a SecondaryCode>,
     codes: Option<&'a Codes<'a>>,
     line_ending: LineEnding,
+    reason: Option<&'a str>,
 }
 
 impl NoqaEdit<'_> {
@@ -954,6 +964,9 @@ impl NoqaEdit<'_> {
                 push_codes(writer, self.noqa_codes.iter().sorted_unstable());
             }
         }
+        if let Some(reason) = self.reason {
+            write!(writer, " {reason}").unwrap();
+        }
         write!(writer, "{}", self.line_ending.as_str()).unwrap();
     }
 }
@@ -970,6 +983,7 @@ fn generate_noqa_edit<'a>(
     noqa_codes: FxHashSet<&'a SecondaryCode>,
     locator: &Locator,
     line_ending: LineEnding,
+    reason: Option<&'a str>,
 ) -> Option<NoqaEdit<'a>> {
     let line_range = locator.full_line_range(offset);
@@ -999,6 +1013,7 @@ fn generate_noqa_edit<'a>(
         noqa_codes,
         codes,
         line_ending,
+        reason,
     })
 }
@@ -2832,6 +2847,7 @@ mod tests {
         &[],
         &noqa_line_for,
         LineEnding::Lf,
+        None,
     );
     assert_eq!(count, 0);
     assert_eq!(output, format!("{contents}"));
@@ -2855,6 +2871,7 @@ mod tests {
         &[],
         &noqa_line_for,
         LineEnding::Lf,
+        None,
     );
     assert_eq!(count, 1);
     assert_eq!(output, "x = 1  # noqa: F841\n");
@@ -2885,6 +2902,7 @@ mod tests {
         &[],
         &noqa_line_for,
         LineEnding::Lf,
+        None,
     );
     assert_eq!(count, 1);
     assert_eq!(output, "x = 1  # noqa: E741, F841\n");
@@ -2915,6 +2933,7 @@ mod tests {
         &[],
         &noqa_line_for,
         LineEnding::Lf,
+        None,
     );
     assert_eq!(count, 0);
     assert_eq!(output, "x = 1  # noqa");
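Net effect of the `reason` plumbing above: callers that thread a reason string through `add_noqa_to_path` get it appended after the codes when the suppression comment is generated (it is `None` throughout these tests, so existing output is unchanged). Assuming a hypothetical reason string of "https://example.com/issue/1", the generated line would look like:

x = 1  # noqa: F841 https://example.com/issue/1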
@@ -261,16 +261,6 @@ pub(crate) const fn is_b006_unsafe_fix_preserve_assignment_expr_enabled(
     settings.preview.is_enabled()
 }
 
-// https://github.com/astral-sh/ruff/pull/20520
-pub(crate) const fn is_fix_read_whole_file_enabled(settings: &LinterSettings) -> bool {
-    settings.preview.is_enabled()
-}
-
-// https://github.com/astral-sh/ruff/pull/20520
-pub(crate) const fn is_fix_write_whole_file_enabled(settings: &LinterSettings) -> bool {
-    settings.preview.is_enabled()
-}
-
 pub(crate) const fn is_typing_extensions_str_alias_enabled(settings: &LinterSettings) -> bool {
     settings.preview.is_enabled()
 }
@@ -196,6 +196,7 @@ fn check_call_arguments(checker: &Checker, qualified_name: &QualifiedName, argum
     match qualified_name.segments() {
         ["airflow", .., "DAG" | "dag"] => {
             // with replacement
+            diagnostic_for_argument(checker, arguments, "concurrency", Some("max_active_tasks"));
             diagnostic_for_argument(checker, arguments, "fail_stop", Some("fail_fast"));
             diagnostic_for_argument(checker, arguments, "schedule_interval", Some("schedule"));
             diagnostic_for_argument(checker, arguments, "timetable", Some("schedule"));
@@ -492,6 +493,12 @@ fn check_method(checker: &Checker, call_expr: &ExprCall) {
             "collected_datasets" => Replacement::AttrName("collected_assets"),
             _ => return,
         },
+        ["airflow", "models", "dag", "DAG"] | ["airflow", "models", "DAG"] | ["airflow", "DAG"] => {
+            match attr.as_str() {
+                "create_dagrun" => Replacement::None,
+                _ => return,
+            }
+        }
         ["airflow", "providers_manager", "ProvidersManager"] => match attr.as_str() {
             "initialize_providers_dataset_uri_resources" => {
                 Replacement::AttrName("initialize_providers_asset_uri_resources")
@@ -288,10 +288,12 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
         },
 
-        // airflow.model..DAG
-        ["airflow", "models", .., "DAG"] => Replacement::SourceModuleMoved {
-            module: "airflow.sdk",
-            name: "DAG".to_string(),
-        },
+        ["airflow", "models", "dag", "DAG"] | ["airflow", "models", "DAG"] | ["airflow", "DAG"] => {
+            Replacement::SourceModuleMoved {
+                module: "airflow.sdk",
+                name: "DAG".to_string(),
+            }
+        }
 
         // airflow.sensors.base
         [
@@ -28,6 +28,8 @@ AIR301 [*] `timetable` is removed in Airflow 3.0
22 |
23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
| ^^^^^^^^^
24 |
25 | DAG(dag_id="class_concurrency", concurrency=12)
|
help: Use `schedule` instead
20 |
@@ -36,249 +38,271 @@ help: Use `schedule` instead
- DAG(dag_id="class_timetable", timetable=NullTimetable())
23 + DAG(dag_id="class_timetable", schedule=NullTimetable())
24 |
25 |
26 | DAG(dag_id="class_fail_stop", fail_stop=True)
25 | DAG(dag_id="class_concurrency", concurrency=12)
26 |

AIR301 [*] `fail_stop` is removed in Airflow 3.0
--> AIR301_args.py:26:31
AIR301 [*] `concurrency` is removed in Airflow 3.0
--> AIR301_args.py:25:33
|
26 | DAG(dag_id="class_fail_stop", fail_stop=True)
| ^^^^^^^^^
27 |
28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
24 |
25 | DAG(dag_id="class_concurrency", concurrency=12)
| ^^^^^^^^^^^
26 |
27 | DAG(dag_id="class_fail_stop", fail_stop=True)
|
help: Use `fail_fast` instead
help: Use `max_active_tasks` instead
22 |
23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
24 |
25 |
- DAG(dag_id="class_concurrency", concurrency=12)
25 + DAG(dag_id="class_concurrency", max_active_tasks=12)
26 |
27 | DAG(dag_id="class_fail_stop", fail_stop=True)
28 |

AIR301 [*] `fail_stop` is removed in Airflow 3.0
--> AIR301_args.py:27:31
|
25 | DAG(dag_id="class_concurrency", concurrency=12)
26 |
27 | DAG(dag_id="class_fail_stop", fail_stop=True)
| ^^^^^^^^^
28 |
29 | DAG(dag_id="class_default_view", default_view="dag_default_view")
|
help: Use `fail_fast` instead
24 |
25 | DAG(dag_id="class_concurrency", concurrency=12)
26 |
- DAG(dag_id="class_fail_stop", fail_stop=True)
26 + DAG(dag_id="class_fail_stop", fail_fast=True)
27 |
28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
29 |
27 + DAG(dag_id="class_fail_stop", fail_fast=True)
28 |
29 | DAG(dag_id="class_default_view", default_view="dag_default_view")
30 |

AIR301 `default_view` is removed in Airflow 3.0
--> AIR301_args.py:28:34
--> AIR301_args.py:29:34
|
26 | DAG(dag_id="class_fail_stop", fail_stop=True)
27 |
28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
27 | DAG(dag_id="class_fail_stop", fail_stop=True)
28 |
29 | DAG(dag_id="class_default_view", default_view="dag_default_view")
| ^^^^^^^^^^^^
29 |
30 | DAG(dag_id="class_orientation", orientation="BT")
30 |
31 | DAG(dag_id="class_orientation", orientation="BT")
|

AIR301 `orientation` is removed in Airflow 3.0
--> AIR301_args.py:30:33
--> AIR301_args.py:31:33
|
28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
29 |
30 | DAG(dag_id="class_orientation", orientation="BT")
29 | DAG(dag_id="class_default_view", default_view="dag_default_view")
30 |
31 | DAG(dag_id="class_orientation", orientation="BT")
| ^^^^^^^^^^^
31 |
32 | allow_future_exec_dates_dag = DAG(dag_id="class_allow_future_exec_dates")
32 |
33 | allow_future_exec_dates_dag = DAG(dag_id="class_allow_future_exec_dates")
|

AIR301 [*] `schedule_interval` is removed in Airflow 3.0
--> AIR301_args.py:41:6
--> AIR301_args.py:42:6
|
41 | @dag(schedule_interval="0 * * * *")
42 | @dag(schedule_interval="0 * * * *")
| ^^^^^^^^^^^^^^^^^
42 | def decorator_schedule_interval():
43 | pass
43 | def decorator_schedule_interval():
44 | pass
|
help: Use `schedule` instead
38 | pass
39 |
39 | pass
40 |
41 |
- @dag(schedule_interval="0 * * * *")
41 + @dag(schedule="0 * * * *")
42 | def decorator_schedule_interval():
43 | pass
44 |
42 + @dag(schedule="0 * * * *")
43 | def decorator_schedule_interval():
44 | pass
45 |

AIR301 [*] `timetable` is removed in Airflow 3.0
--> AIR301_args.py:46:6
--> AIR301_args.py:47:6
|
46 | @dag(timetable=NullTimetable())
47 | @dag(timetable=NullTimetable())
| ^^^^^^^^^
47 | def decorator_timetable():
48 | pass
48 | def decorator_timetable():
49 | pass
|
help: Use `schedule` instead
43 | pass
44 |
44 | pass
45 |
46 |
- @dag(timetable=NullTimetable())
46 + @dag(schedule=NullTimetable())
47 | def decorator_timetable():
48 | pass
49 |
47 + @dag(schedule=NullTimetable())
48 | def decorator_timetable():
49 | pass
50 |

AIR301 [*] `execution_date` is removed in Airflow 3.0
--> AIR301_args.py:54:62
--> AIR301_args.py:55:62
|
52 | def decorator_deprecated_operator_args():
53 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
54 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
53 | def decorator_deprecated_operator_args():
54 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
55 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
| ^^^^^^^^^^^^^^
55 | )
56 | trigger_dagrun_op2 = TriggerDagRunOperator(
56 | )
57 | trigger_dagrun_op2 = TriggerDagRunOperator(
|
help: Use `logical_date` instead
51 | @dag()
52 | def decorator_deprecated_operator_args():
53 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
52 | @dag()
53 | def decorator_deprecated_operator_args():
54 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
- task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
54 + task_id="trigger_dagrun_op1", trigger_dag_id="test", logical_date="2024-12-04"
55 | )
56 | trigger_dagrun_op2 = TriggerDagRunOperator(
57 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
55 + task_id="trigger_dagrun_op1", trigger_dag_id="test", logical_date="2024-12-04"
56 | )
57 | trigger_dagrun_op2 = TriggerDagRunOperator(
58 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"

AIR301 [*] `execution_date` is removed in Airflow 3.0
--> AIR301_args.py:57:62
--> AIR301_args.py:58:62
|
55 | )
56 | trigger_dagrun_op2 = TriggerDagRunOperator(
57 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
56 | )
57 | trigger_dagrun_op2 = TriggerDagRunOperator(
58 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
| ^^^^^^^^^^^^^^
58 | )
59 | )
|
help: Use `logical_date` instead
54 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
55 | )
56 | trigger_dagrun_op2 = TriggerDagRunOperator(
55 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
56 | )
57 | trigger_dagrun_op2 = TriggerDagRunOperator(
- task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
57 + task_id="trigger_dagrun_op2", trigger_dag_id="test", logical_date="2024-12-04"
58 | )
59 |
60 | branch_dt_op = datetime.BranchDateTimeOperator(
58 + task_id="trigger_dagrun_op2", trigger_dag_id="test", logical_date="2024-12-04"
59 | )
60 |
61 | branch_dt_op = datetime.BranchDateTimeOperator(

AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
--> AIR301_args.py:61:33
--> AIR301_args.py:62:33
|
60 | branch_dt_op = datetime.BranchDateTimeOperator(
61 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
61 | branch_dt_op = datetime.BranchDateTimeOperator(
62 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
| ^^^^^^^^^^^^^^^^^^^^^^
62 | )
63 | branch_dt_op2 = BranchDateTimeOperator(
63 | )
64 | branch_dt_op2 = BranchDateTimeOperator(
|
help: Use `use_task_logical_date` instead
58 | )
59 |
60 | branch_dt_op = datetime.BranchDateTimeOperator(
59 | )
60 |
61 | branch_dt_op = datetime.BranchDateTimeOperator(
- task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
61 + task_id="branch_dt_op", use_task_logical_date=True, task_concurrency=5
62 | )
63 | branch_dt_op2 = BranchDateTimeOperator(
64 | task_id="branch_dt_op2",
62 + task_id="branch_dt_op", use_task_logical_date=True, task_concurrency=5
63 | )
64 | branch_dt_op2 = BranchDateTimeOperator(
65 | task_id="branch_dt_op2",

AIR301 [*] `task_concurrency` is removed in Airflow 3.0
--> AIR301_args.py:61:62
--> AIR301_args.py:62:62
|
60 | branch_dt_op = datetime.BranchDateTimeOperator(
61 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
61 | branch_dt_op = datetime.BranchDateTimeOperator(
62 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
| ^^^^^^^^^^^^^^^^
62 | )
63 | branch_dt_op2 = BranchDateTimeOperator(
63 | )
64 | branch_dt_op2 = BranchDateTimeOperator(
|
help: Use `max_active_tis_per_dag` instead
58 | )
59 |
60 | branch_dt_op = datetime.BranchDateTimeOperator(
59 | )
60 |
61 | branch_dt_op = datetime.BranchDateTimeOperator(
- task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
61 + task_id="branch_dt_op", use_task_execution_day=True, max_active_tis_per_dag=5
62 | )
63 | branch_dt_op2 = BranchDateTimeOperator(
64 | task_id="branch_dt_op2",
62 + task_id="branch_dt_op", use_task_execution_day=True, max_active_tis_per_dag=5
63 | )
64 | branch_dt_op2 = BranchDateTimeOperator(
65 | task_id="branch_dt_op2",

AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
--> AIR301_args.py:65:9
--> AIR301_args.py:66:9
|
63 | branch_dt_op2 = BranchDateTimeOperator(
64 | task_id="branch_dt_op2",
65 | use_task_execution_day=True,
64 | branch_dt_op2 = BranchDateTimeOperator(
65 | task_id="branch_dt_op2",
66 | use_task_execution_day=True,
| ^^^^^^^^^^^^^^^^^^^^^^
66 | sla=timedelta(seconds=10),
67 | )
67 | sla=timedelta(seconds=10),
68 | )
|
help: Use `use_task_logical_date` instead
62 | )
63 | branch_dt_op2 = BranchDateTimeOperator(
64 | task_id="branch_dt_op2",
63 | )
64 | branch_dt_op2 = BranchDateTimeOperator(
65 | task_id="branch_dt_op2",
- use_task_execution_day=True,
65 + use_task_logical_date=True,
66 | sla=timedelta(seconds=10),
67 | )
68 |
66 + use_task_logical_date=True,
67 | sla=timedelta(seconds=10),
68 | )
69 |

AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
--> AIR301_args.py:92:9
--> AIR301_args.py:93:9
|
90 | follow_task_ids_if_true=None,
91 | week_day=1,
92 | use_task_execution_day=True,
91 | follow_task_ids_if_true=None,
92 | week_day=1,
93 | use_task_execution_day=True,
| ^^^^^^^^^^^^^^^^^^^^^^
93 | )
94 | )
|
help: Use `use_task_logical_date` instead
89 | follow_task_ids_if_false=None,
90 | follow_task_ids_if_true=None,
91 | week_day=1,
90 | follow_task_ids_if_false=None,
91 | follow_task_ids_if_true=None,
92 | week_day=1,
- use_task_execution_day=True,
92 + use_task_logical_date=True,
93 | )
94 |
95 | trigger_dagrun_op >> trigger_dagrun_op2
93 + use_task_logical_date=True,
94 | )
95 |
96 | trigger_dagrun_op >> trigger_dagrun_op2

AIR301 `filename_template` is removed in Airflow 3.0
--> AIR301_args.py:102:15
--> AIR301_args.py:103:15
|
101 | # deprecated filename_template argument in FileTaskHandler
102 | S3TaskHandler(filename_template="/tmp/test")
102 | # deprecated filename_template argument in FileTaskHandler
103 | S3TaskHandler(filename_template="/tmp/test")
| ^^^^^^^^^^^^^^^^^
103 | HdfsTaskHandler(filename_template="/tmp/test")
104 | ElasticsearchTaskHandler(filename_template="/tmp/test")
104 | HdfsTaskHandler(filename_template="/tmp/test")
105 | ElasticsearchTaskHandler(filename_template="/tmp/test")
|

AIR301 `filename_template` is removed in Airflow 3.0
--> AIR301_args.py:103:17
--> AIR301_args.py:104:17
|
101 | # deprecated filename_template argument in FileTaskHandler
102 | S3TaskHandler(filename_template="/tmp/test")
103 | HdfsTaskHandler(filename_template="/tmp/test")
102 | # deprecated filename_template argument in FileTaskHandler
103 | S3TaskHandler(filename_template="/tmp/test")
104 | HdfsTaskHandler(filename_template="/tmp/test")
| ^^^^^^^^^^^^^^^^^
104 | ElasticsearchTaskHandler(filename_template="/tmp/test")
105 | GCSTaskHandler(filename_template="/tmp/test")
105 | ElasticsearchTaskHandler(filename_template="/tmp/test")
106 | GCSTaskHandler(filename_template="/tmp/test")
|

AIR301 `filename_template` is removed in Airflow 3.0
--> AIR301_args.py:104:26
--> AIR301_args.py:105:26
|
102 | S3TaskHandler(filename_template="/tmp/test")
103 | HdfsTaskHandler(filename_template="/tmp/test")
104 | ElasticsearchTaskHandler(filename_template="/tmp/test")
103 | S3TaskHandler(filename_template="/tmp/test")
104 | HdfsTaskHandler(filename_template="/tmp/test")
105 | ElasticsearchTaskHandler(filename_template="/tmp/test")
| ^^^^^^^^^^^^^^^^^
105 | GCSTaskHandler(filename_template="/tmp/test")
106 | GCSTaskHandler(filename_template="/tmp/test")
|

AIR301 `filename_template` is removed in Airflow 3.0
--> AIR301_args.py:105:16
--> AIR301_args.py:106:16
|
103 | HdfsTaskHandler(filename_template="/tmp/test")
104 | ElasticsearchTaskHandler(filename_template="/tmp/test")
105 | GCSTaskHandler(filename_template="/tmp/test")
104 | HdfsTaskHandler(filename_template="/tmp/test")
105 | ElasticsearchTaskHandler(filename_template="/tmp/test")
106 | GCSTaskHandler(filename_template="/tmp/test")
| ^^^^^^^^^^^^^^^^^
106 |
107 | FabAuthManager(None)
107 |
108 | FabAuthManager(None)
|

AIR301 `appbuilder` is removed in Airflow 3.0
--> AIR301_args.py:107:15
--> AIR301_args.py:108:15
|
105 | GCSTaskHandler(filename_template="/tmp/test")
106 |
107 | FabAuthManager(None)
106 | GCSTaskHandler(filename_template="/tmp/test")
107 |
108 | FabAuthManager(None)
| ^^^^^^
|
help: The constructor takes no parameter now
File diff suppressed because it is too large
@@ -1,6 +1,25 @@
---
source: crates/ruff_linter/src/rules/airflow/mod.rs
---
AIR311 [*] `airflow.DAG` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
--> AIR311_args.py:13:1
|
13 | DAG(dag_id="class_sla_callback", sla_miss_callback=sla_callback)
| ^^^
|
help: Use `DAG` from `airflow.sdk` instead.
2 |
3 | from datetime import timedelta
4 |
- from airflow import DAG, dag
5 + from airflow import dag
6 | from airflow.operators.datetime import BranchDateTimeOperator
7 + from airflow.sdk import DAG
8 |
9 |
10 | def sla_callback(*arg, **kwargs):
note: This is an unsafe fix and may change runtime behavior

AIR311 `sla_miss_callback` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
--> AIR311_args.py:13:34
|
@@ -513,6 +513,9 @@ impl Violation for MissingReturnTypeClassMethod {
 /// def foo(x: MyAny): ...
 /// ```
 ///
+/// ## Options
+/// - `lint.flake8-annotations.allow-star-arg-any`
+///
 /// ## References
 /// - [Typing spec: `Any`](https://typing.python.org/en/latest/spec/special-types.html#any)
 /// - [Python documentation: `typing.Any`](https://docs.python.org/3/library/typing.html#typing.Any)
@@ -375,7 +375,7 @@ impl Violation for SuspiciousEvalUsage {
 ///
 ///
 /// def render_username(username):
-///     return django.utils.html.format_html("<i>{}</i>", username)  # username is escaped.
+///     return format_html("<i>{}</i>", username)  # username is escaped.
 /// ```
 ///
 /// ## References
Some files were not shown because too many files have changed in this diff