Compare commits
240 Commits
alex/into_...dcreager/t
| Author | SHA1 | Date |
|---|---|---|
| | e4be76e812 | |
| | b1e354bd99 | |
| | e4a32ba644 | |
| | ac2d07e83c | |
| | 8156b45173 | |
| | d063c71177 | |
| | c16ef709f6 | |
| | 04a3ec3689 | |
| | 1a86e13472 | |
| | 901e9cdf49 | |
| | 58fa1d71b6 | |
| | d9fc0f08b4 | |
| | 09deeabda5 | |
| | 1436e688cc | |
| | d6c34b98a5 | |
| | 1b50e032a4 | |
| | 687ed292f6 | |
| | 0554b1ca8a | |
| | bbe42bc775 | |
| | 0d2cd84df4 | |
| | 665f68036c | |
| | f5fb5c388a | |
| | dbd72480a9 | |
| | 75c1a0ae55 | |
| | 7a546809c4 | |
| | 3065f8dbbc | |
| | fb5b8c3653 | |
| | efa2b5167f | |
| | 29acc1e860 | |
| | 698231a47a | |
| | d63b4b0383 | |
| | c5d654bce8 | |
| | 3e7e91724c | |
| | 2a2b719f00 | |
| | ffb7bdd595 | |
| | 0a55327d64 | |
| | 008e9d06e1 | |
| | 8529d79a70 | |
| | 8599c7e5b3 | |
| | 5f501374c4 | |
| | e9a5337136 | |
| | 05cf53aae8 | |
| | 6a26f86778 | |
| | d0314131fb | |
| | 696d7a5d68 | |
| | 66e9d57797 | |
| | 87dafb8787 | |
| | 9e80e5a3a6 | |
| | f9cc26aa12 | |
| | d49c326309 | |
| | e70fccbf25 | |
| | 90b32f3b3b | |
| | 99694b6e4a | |
| | 67e54fffe1 | |
| | a01b0d7780 | |
| | 04ab9170d6 | |
| | 12e74ae894 | |
| | d64b2f747c | |
| | cd183c5e1f | |
| | eb1957cd17 | |
| | 7e3dd0764a | |
| | a6abd65c2c | |
| | 3d4b0559f1 | |
| | 2f6f3e1042 | |
| | 9dd666d677 | |
| | a1d9cb5830 | |
| | 8a85a2961e | |
| | 43427abb61 | |
| | 84c3cecad6 | |
| | e8e8180888 | |
| | f5cf672ed4 | |
| | 6322f37015 | |
| | d272a623d3 | |
| | 19c7994e90 | |
| | 725ae69773 | |
| | d2c3996f4e | |
| | 988c38c013 | |
| | 164c2a6cc6 | |
| | 1bbe4f0d5e | |
| | cd7354a5c6 | |
| | ec48a47a88 | |
| | 43297d3455 | |
| | 4373974dd9 | |
| | e4374f14ed | |
| | 03bd0619e9 | |
| | bd8812127d | |
| | 44b0c9ebac | |
| | 7b237d316f | |
| | 36cce347fd | |
| | 33b942c7ad | |
| | 9ce3230add | |
| | 2bc6c78e26 | |
| | 1fd852fb3f | |
| | 5f3e086ee4 | |
| | 039a69fa8c | |
| | 3656b44877 | |
| | 98869f0307 | |
| | d258302b08 | |
| | deeda56906 | |
| | f63a9f2334 | |
| | e4dc406a3d | |
| | 1d188476b6 | |
| | 4821c050ef | |
| | 835e31b3ff | |
| | 8d1efe964a | |
| | 04e7cecab3 | |
| | 84a810736d | |
| | f44598dc11 | |
| | ab46c8de0f | |
| | a6f2dee33b | |
| | 238f151371 | |
| | 3fa609929f | |
| | 73b1fce74a | |
| | 52bd22003b | |
| | 0a6d6b6194 | |
| | ca51feb319 | |
| | e0a3cbb048 | |
| | f4f259395c | |
| | 2a1d412f72 | |
| | dd751e8d07 | |
| | 020ff1723b | |
| | 09e6af16c8 | |
| | 76efc8061d | |
| | 16de4aa3cc | |
| | e06e108095 | |
| | b6add3ee6d | |
| | 39c21d7c6c | |
| | 1617292e9f | |
| | faae72b836 | |
| | 276f1d0d88 | |
| | ed18112cfa | |
| | 8ba1cfebed | |
| | 6185a2af9e | |
| | 6cc3393ccd | |
| | c7ff9826d6 | |
| | 35640dd853 | |
| | cb2e277482 | |
| | 132d10fb6f | |
| | f189aad6d2 | |
| | 5517c9943a | |
| | b5ff96595d | |
| | c6573b16ac | |
| | 76127e5fb5 | |
| | cddc0fedc2 | |
| | eda85f3c64 | |
| | cef6600cf3 | |
| | 5c69e00d1c | |
| | 7569b09bdd | |
| | 7009d60260 | |
| | f79044478c | |
| | 47e41ac6b6 | |
| | 2e7ab00d51 | |
| | d8106d38a0 | |
| | 4fd8d4b0ee | |
| | 63b1c1ea8b | |
| | 3c5e4e1477 | |
| | 3c8fb68765 | |
| | 42adfd40ea | |
| | 79a02711c1 | |
| | d2fe6347fb | |
| | 1fe958c694 | |
| | fe4ee81b97 | |
| | 0433526897 | |
| | 78ee7ae925 | |
| | 21ec8aa7d4 | |
| | 64a255df49 | |
| | b5305b5f32 | |
| | 39f105bc4a | |
| | e8c35b9704 | |
| | 1b2ed6a503 | |
| | de9df1b326 | |
| | e2e83acd2f | |
| | 6ddfb51d71 | |
| | e017b039df | |
| | 0dfd55babf | |
| | f947c23cd7 | |
| | 02879fa377 | |
| | 7dbfb56c3d | |
| | f97c38dd88 | |
| | 14fce1f788 | |
| | 31194d048d | |
| | fe95ff6b06 | |
| | 61c1007137 | |
| | 884c3b178e | |
| | ade727ce66 | |
| | 3493c9b67a | |
| | 666dd5fef1 | |
| | bb05527350 | |
| | dc373e639e | |
| | fc71c90de6 | |
| | c11b00bea0 | |
| | 770b4d12ab | |
| | c596a78c08 | |
| | cb98175a36 | |
| | 3bef60f69a | |
| | f477e11d26 | |
| | c0bd092fa9 | |
| | 73b9b8eb6b | |
| | 80eeb1d64f | |
| | 50b75cfcc6 | |
| | 1c4a9d6a06 | |
| | 222c6fd496 | |
| | b754abff1b | |
| | 41fe4d7f8c | |
| | f14631e1cc | |
| | 02f2dba28e | |
| | 0454a72674 | |
| | c32234cf0d | |
| | 566d1d6497 | |
| | 6c3d6124c8 | |
| | 73107a083c | |
| | de1a6fb8ad | |
| | bff32a41dc | |
| | 17c7b3cde1 | |
| | 921f409ee8 | |
| | a151f9746d | |
| | 521217bb90 | |
| | a32d5b8dc4 | |
| | 6337e22f0c | |
| | 827d8ae5d4 | |
| | 1734ddfb3e | |
| | ff3a6a8fbd | |
| | 9664474c51 | |
| | 69b4c29924 | |
| | bb40c34361 | |
| | b93d8f2b9f | |
| | 1d111c8780 | |
| | 9d7da914b9 | |
| | 0c2cf75869 | |
| | 1d6ae8596a | |
| | cf4e82d4b0 | |
| | 1baf98aab3 | |
| | 3179b05221 | |
| | 172e8d4ae0 | |
| | 735ec0c1f9 | |
| | 3585c96ea5 | |
| | 4b026c2a55 | |
| | 4b758b3746 | |
| | 8737a2d5f5 | |
| | 3be3a10a2f | |
```diff
@@ -7,6 +7,10 @@ serial = { max-threads = 1 }
 filter = 'binary(file_watching)'
 test-group = 'serial'
 
+[[profile.default.overrides]]
+filter = 'binary(e2e)'
+test-group = 'serial'
+
 [profile.ci]
 # Print out output for failing tests as soon as they fail, and also at the end
 # of the run (for easy scrollability).
```
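For context, this hunk uses nextest's test-group mechanism: a named group caps concurrency, and per-profile overrides route matching test binaries into it. A minimal sketch of how the pieces fit together (assuming, as the `serial = { max-threads = 1 }` context suggests, that the hunk lives in nextest's config file, conventionally `.config/nextest.toml`):

```toml
# Hypothetical standalone example; names mirror the hunk above.
[test-groups]
# Run at most one test at a time across all binaries assigned to this group.
serial = { max-threads = 1 }

[[profile.default.overrides]]
filter = 'binary(file_watching)'
test-group = 'serial'

[[profile.default.overrides]]
filter = 'binary(e2e)'
test-group = 'serial'
```

The effect of the added override is that the `e2e` binary now shares the single-threaded `serial` group with `file_watching`, so the two kinds of environment-sensitive tests never run concurrently.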
.github/workflows/ci.yaml (301 changes)
```diff
@@ -231,6 +231,8 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: |
           rustup component add clippy
@@ -251,20 +253,23 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          shared-key: ruff-linux-debug
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
         uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@537c30d2b45cc3aa3fb35e2bbcfb61ef93fd6f02 # v2.62.52
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@537c30d2b45cc3aa3fb35e2bbcfb61ef93fd6f02 # v2.62.52
         with:
           tool: cargo-insta
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
         with:
           enable-cache: "true"
       - name: ty mdtests (GitHub annotations)
@@ -277,8 +282,8 @@ jobs:
         run: cargo test -p ty_python_semantic --test mdtest || true
       - name: "Run tests"
         run: cargo insta test --all-features --unreferenced reject --test-runner nextest
-      # Dogfood ty on py-fuzzer
-      - run: uv run --project=./python/py-fuzzer cargo run -p ty check --project=./python/py-fuzzer
+      - name: Dogfood ty on py-fuzzer
+        run: uv run --project=./python/py-fuzzer cargo run -p ty check --project=./python/py-fuzzer
       # Check for broken links in the documentation.
       - run: cargo doc --all --no-deps
         env:
@@ -291,14 +296,6 @@ jobs:
         env:
           # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
           RUSTDOCFLAGS: "-D warnings"
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: ruff
-          path: target/debug/ruff
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: ty
-          path: target/debug/ty
 
   cargo-test-linux-release:
     name: "cargo test (linux, release)"
```
```diff
@@ -315,24 +312,24 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
         uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@537c30d2b45cc3aa3fb35e2bbcfb61ef93fd6f02 # v2.62.52
         with:
           tool: cargo-nextest
-      - name: "Install cargo insta"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
-        with:
-          tool: cargo-insta
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
         with:
           enable-cache: "true"
       - name: "Run tests"
-        run: cargo insta test --release --all-features --unreferenced reject --test-runner nextest
+        run: cargo nextest run --cargo-profile profiling --all-features
+      - name: "Run doctests"
+        run: cargo test --doc --profile profiling --all-features
 
   cargo-test-other:
     strategy:
```
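The release-test job now builds with a custom `profiling` Cargo profile instead of `--release`, the same profile the benchmark, fuzz, and completion-eval jobs below switch to. The profile's definition is not part of this diff; a plausible sketch of what such a profile typically looks like:

```toml
# Hypothetical Cargo.toml entry; the actual definition is not shown in this diff.
[profile.profiling]
inherits = "release"  # release-grade optimization...
debug = true          # ...with debug info retained for profilers and benchmark tooling
```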
```diff
@@ -350,14 +347,16 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@537c30d2b45cc3aa3fb35e2bbcfb61ef93fd6f02 # v2.62.52
         with:
           tool: cargo-nextest
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
         with:
           enable-cache: "true"
       - name: "Run tests"
@@ -376,9 +375,11 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
-      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
           cache: "npm"
@@ -411,6 +412,8 @@ jobs:
           file: "Cargo.toml"
           field: "workspace.package.rust-version"
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         env:
           MSRV: ${{ steps.msrv.outputs.value }}
@@ -435,10 +438,13 @@ jobs:
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
         with:
           workspaces: "fuzz -> target"
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
+      - name: "Install mold"
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo-binstall"
-        uses: cargo-bins/cargo-binstall@afcf9780305558bcc9e4bc94b7589ab2bb8b6106 # v1.15.9
+        uses: cargo-bins/cargo-binstall@ae04fb5e853ae6cd3ad7de4a1d554a8b646d12aa # v1.15.11
       - name: "Install cargo-fuzz"
         # Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
         run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
@@ -447,9 +453,7 @@ jobs:
   fuzz-parser:
     name: "fuzz parser"
     runs-on: ubuntu-latest
-    needs:
-      - cargo-test-linux
-      - determine_changes
+    needs: determine_changes
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.parser == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }}
     timeout-minutes: 20
     env:
```
```diff
@@ -458,27 +462,24 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
-      - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        name: Download Ruff binary to test
-        id: download-cached-binary
-        with:
-          name: ruff
-          path: ruff-to-test
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          shared-key: ruff-linux-debug
+          save-if: false
+      - name: "Install Rust toolchain"
+        run: rustup show
+      - name: Build Ruff binary
+        run: cargo build --bin ruff
       - name: Fuzz
-        env:
-          DOWNLOAD_PATH: ${{ steps.download-cached-binary.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x "${DOWNLOAD_PATH}/ruff"
-
           (
             uv run \
               --python="${PYTHON_VERSION}" \
               --project=./python/py-fuzzer \
               --locked \
               fuzz \
-              --test-executable="${DOWNLOAD_PATH}/ruff" \
+              --test-executable=target/debug/ruff \
               --bin=ruff \
               0-500
           )
```
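Several jobs in this PR replace artifact downloads with the pattern used above: the main test job warms a cache under `shared-key: ruff-linux-debug`, and downstream jobs rebuild the binary from that shared cache rather than downloading an artifact. Sketched in isolation (job names and action version tags here are illustrative, not taken from the diff):

```yaml
# Hypothetical minimal pairing of a cache producer and consumer with Swatinem/rust-cache.
jobs:
  producer:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: Swatinem/rust-cache@v2
        with:
          shared-key: ruff-linux-debug                     # both jobs address the same cache entry
          save-if: ${{ github.ref == 'refs/heads/main' }}  # only main updates it
      - run: cargo build --bin ruff

  consumer:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: Swatinem/rust-cache@v2
        with:
          shared-key: ruff-linux-debug
          save-if: false                                   # consumers never write the cache back
      - run: cargo build --bin ruff                        # mostly a cache hit, so this is cheap
```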
```diff
@@ -494,7 +495,9 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
       - name: "Install Rust toolchain"
         run: rustup component add rustfmt
       # Run all code generation scripts, and verify that the current output is
@@ -518,9 +521,7 @@ jobs:
   ecosystem:
     name: "ecosystem"
     runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-latest-8' || 'ubuntu-latest' }}
-    needs:
-      - cargo-test-linux
-      - determine_changes
+    needs: determine_changes
     # Only runs on pull requests, since that is the only we way we can find the base version for comparison.
     # Ecosystem check needs linter and/or formatter changes.
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
```
```diff
@@ -528,26 +529,37 @@ jobs:
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
+          ref: ${{ github.event.pull_request.base.ref }}
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           activate-environment: true
-      - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        name: Download comparison Ruff binary
-        id: ruff-target
-        with:
-          name: ruff
-          path: target/debug
+
       - name: "Install Rust toolchain"
         run: rustup show
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download baseline Ruff binary
-        with:
-          name: ruff
-          branch: ${{ github.event.pull_request.base.ref }}
-          workflow: "ci.yaml"
-          check_artifacts: true
+
+      - name: "Install mold"
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
+
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          shared-key: ruff-linux-debug
+          save-if: false
+
+      - name: Build baseline version
+        run: |
+          cargo build --bin ruff
+          mv target/debug/ruff target/debug/ruff-baseline
+
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        with:
+          persist-credentials: false
+          clean: false
+
+      - name: Build comparison version
+        run: cargo build --bin ruff
 
       - name: Install ruff-ecosystem
         run: |
```
```diff
@@ -555,16 +567,11 @@ jobs:
 
       - name: Run `ruff check` stable ecosystem check
         if: ${{ needs.determine_changes.outputs.linter == 'true' }}
-        env:
-          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
-
           # Set pipefail to avoid hiding errors with tee
           set -eo pipefail
 
-          ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
+          ruff-ecosystem check ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
 
           cat ecosystem-result-check-stable > "$GITHUB_STEP_SUMMARY"
           echo "### Linter (stable)" > ecosystem-result
@@ -573,16 +580,11 @@ jobs:
 
       - name: Run `ruff check` preview ecosystem check
         if: ${{ needs.determine_changes.outputs.linter == 'true' }}
-        env:
-          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
-
           # Set pipefail to avoid hiding errors with tee
           set -eo pipefail
 
-          ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
+          ruff-ecosystem check ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
 
           cat ecosystem-result-check-preview > "$GITHUB_STEP_SUMMARY"
           echo "### Linter (preview)" >> ecosystem-result
@@ -591,16 +593,11 @@ jobs:
 
       - name: Run `ruff format` stable ecosystem check
         if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
-        env:
-          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
-
           # Set pipefail to avoid hiding errors with tee
           set -eo pipefail
 
-          ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
+          ruff-ecosystem format ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
 
           cat ecosystem-result-format-stable > "$GITHUB_STEP_SUMMARY"
           echo "### Formatter (stable)" >> ecosystem-result
@@ -609,32 +606,19 @@ jobs:
 
       - name: Run `ruff format` preview ecosystem check
         if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
-        env:
-          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
-
           # Set pipefail to avoid hiding errors with tee
           set -eo pipefail
 
-          ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
+          ruff-ecosystem format ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
 
           cat ecosystem-result-format-preview > "$GITHUB_STEP_SUMMARY"
           echo "### Formatter (preview)" >> ecosystem-result
           cat ecosystem-result-format-preview >> ecosystem-result
           echo "" >> ecosystem-result
 
-      - name: Export pull request number
-        run: |
-          echo ${{ github.event.number }} > pr-number
-
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        name: Upload PR Number
-        with:
-          name: pr-number
-          path: pr-number
-
+      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
+      # Make sure to update the bot if you rename the artifact.
       - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         name: Upload Results
         with:
```
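The new ecosystem flow builds both sides of the comparison inside one job: check out the PR's base ref, build and rename the binary, then check out the head (with `clean: false` so `target/` survives) and build again. Reduced to plain shell, the pattern is roughly this sketch, where `BASE_REF` is a placeholder for the pull request's base branch:

```bash
# Illustrative sketch of the baseline-vs-comparison build pattern used above.
git checkout "$BASE_REF"                        # base revision first
cargo build --bin ruff
mv target/debug/ruff target/debug/ruff-baseline

git checkout -                                  # back to the PR head; target/ is kept
cargo build --bin ruff                          # incremental rebuild of the head version

ruff-ecosystem check ./target/debug/ruff-baseline ./target/debug/ruff
```

Because the two builds share an incremental `target/` directory, the second build reuses most of the first one's work.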
```diff
@@ -645,36 +629,38 @@ jobs:
     name: "Fuzz for new ty panics"
     runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }}
     needs:
-      - cargo-test-linux
       - determine_changes
     # Only runs on pull requests, since that is the only we way we can find the base version for comparison.
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && (needs.determine_changes.outputs.ty == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }}
-    timeout-minutes: ${{ github.repository == 'astral-sh/ruff' && 5 || 20 }}
+    timeout-minutes: ${{ github.repository == 'astral-sh/ruff' && 10 || 20 }}
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
+          fetch-depth: 0
           persist-credentials: false
-      - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        name: Download new ty binary
-        id: ty-new
-        with:
-          name: ty
-          path: target/debug
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download baseline ty binary
-        with:
-          name: ty
-          branch: ${{ github.event.pull_request.base.ref }}
-          workflow: "ci.yaml"
-          check_artifacts: true
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
+      - name: "Install mold"
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: Fuzz
         env:
           FORCE_COLOR: 1
-          NEW_TY: ${{ steps.ty-new.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x "${PWD}/ty" "${NEW_TY}/ty"
           echo "new commit"
           git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
+          cargo build --profile=profiling --bin=ty
+          mv target/profiling/ty ty-new
+
+          MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
+          git checkout -b old_commit "$MERGE_BASE"
+          echo "old commit (merge base)"
+          git rev-list --format=%s --max-count=1 old_commit
+          cargo build --profile=profiling --bin=ty
+          mv target/profiling/ty ty-old
 
           (
             uv run \
@@ -682,8 +668,8 @@ jobs:
               --project=./python/py-fuzzer \
               --locked \
               fuzz \
-              --test-executable="${NEW_TY}/ty" \
-              --baseline-executable="${PWD}/ty" \
+              --test-executable=ty-new \
+              --baseline-executable=ty-old \
               --only-new-bugs \
               --bin=ty \
               0-1000
@@ -698,7 +684,7 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: cargo-bins/cargo-binstall@afcf9780305558bcc9e4bc94b7589ab2bb8b6106 # v1.15.9
+      - uses: cargo-bins/cargo-binstall@ae04fb5e853ae6cd3ad7de4a1d554a8b646d12aa # v1.15.11
       - run: cargo binstall --no-confirm cargo-shear
       - run: cargo shear
```
```diff
@@ -711,12 +697,16 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
+      - name: "Install mold"
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Run ty completion evaluation"
-        run: cargo run --release --package ty_completion_eval -- all --threshold 0.4 --tasks /tmp/completion-evaluation-tasks.csv
+        run: cargo run --profile profiling --package ty_completion_eval -- all --threshold 0.4 --tasks /tmp/completion-evaluation-tasks.csv
       - name: "Ensure there are no changes"
         run: diff ./crates/ty_completion_eval/completion-evaluation-tasks.csv /tmp/completion-evaluation-tasks.csv
@@ -734,6 +724,8 @@ jobs:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: x64
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build wheels"
@@ -756,9 +748,11 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
       - name: "Cache pre-commit"
@@ -788,6 +782,8 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Add SSH key"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
         uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
@@ -796,7 +792,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: Install uv
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
         with:
           python-version: 3.13
           activate-environment: true
@@ -830,6 +826,8 @@ jobs:
         with:
           persist-credentials: false
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Run checks"
@@ -843,9 +841,7 @@ jobs:
     name: "test ruff-lsp"
     runs-on: ubuntu-latest
     timeout-minutes: 5
-    needs:
-      - cargo-test-linux
-      - determine_changes
+    needs: determine_changes
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
     steps:
       - uses: extractions/setup-just@e33e0265a09d6d736e2ee1e0eb685ef1de4669ff # v3.0.0
@@ -853,37 +849,46 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-        name: "Download ruff-lsp source"
+        name: "Checkout ruff source"
         with:
           persist-credentials: false
 
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        with:
+          shared-key: ruff-linux-debug
+          save-if: false
+
+      - name: "Install Rust toolchain"
+        run: rustup show
+
+      - name: Build Ruff binary
+        run: cargo build -p ruff --bin ruff
+
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        name: "Checkout ruff-lsp source"
+        with:
+          persist-credentials: false
+          repository: "astral-sh/ruff-lsp"
+          path: ruff-lsp
+
       - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           # installation fails on 3.13 and newer
           python-version: "3.12"
 
-      - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        name: Download development ruff binary
-        id: ruff-target
-        with:
-          name: ruff
-          path: target/debug
-
       - name: Install ruff-lsp dependencies
         run: |
+          cd ruff-lsp
           just install
 
       - name: Run ruff-lsp tests
-        env:
-          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
           # Setup development binary
           pip uninstall --yes ruff
-          chmod +x "${DOWNLOAD_PATH}/ruff"
-          export PATH="${DOWNLOAD_PATH}:${PATH}"
+          export PATH="${PWD}/target/debug:${PATH}"
           ruff version
 
+          cd ruff-lsp
           just test
 
   check-playground:
```
```diff
@@ -900,7 +905,9 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
           cache: "npm"
@@ -938,21 +945,23 @@ jobs:
           persist-credentials: false
 
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
 
       - name: "Install Rust toolchain"
         run: rustup show
 
       - name: "Install codspeed"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@537c30d2b45cc3aa3fb35e2bbcfb61ef93fd6f02 # v2.62.52
         with:
           tool: cargo-codspeed
 
       - name: "Build benchmarks"
-        run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser
+        run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser
 
       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@6a8e2b874c338bf81cc5e8be715ada75908d3871 # v4.3.4
         with:
           mode: instrumentation
           run: cargo codspeed run
@@ -976,21 +985,23 @@ jobs:
           persist-credentials: false
 
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
 
       - name: "Install Rust toolchain"
         run: rustup show
 
       - name: "Install codspeed"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@537c30d2b45cc3aa3fb35e2bbcfb61ef93fd6f02 # v2.62.52
         with:
           tool: cargo-codspeed
 
       - name: "Build benchmarks"
-        run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench ty
+        run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench ty
 
       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@6a8e2b874c338bf81cc5e8be715ada75908d3871 # v4.3.4
         with:
           mode: instrumentation
           run: cargo codspeed run
@@ -1014,21 +1025,23 @@ jobs:
           persist-credentials: false
 
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
 
       - name: "Install Rust toolchain"
         run: rustup show
 
       - name: "Install codspeed"
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@537c30d2b45cc3aa3fb35e2bbcfb61ef93fd6f02 # v2.62.52
         with:
           tool: cargo-codspeed
 
       - name: "Build benchmarks"
-        run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark
+        run: cargo codspeed build --features "codspeed,walltime" --profile profiling --no-default-features -p ruff_benchmark
 
       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@6a8e2b874c338bf81cc5e8be715ada75908d3871 # v4.3.4
         env:
           # enabling walltime flamegraphs adds ~6 minutes to the CI time, and they don't
           # appear to provide much useful insight for our walltime benchmarks right now
```
.github/workflows/daily_fuzz.yaml (2 changes)

```diff
@@ -34,7 +34,7 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
```
.github/workflows/mypy_primer.yaml (13 changes)

```diff
@@ -43,7 +43,7 @@ jobs:
           persist-credentials: false
 
       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
 
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
         with:
@@ -59,20 +59,15 @@ jobs:
         run: |
           cd ruff
           scripts/mypy_primer.sh
-          echo ${{ github.event.number }} > ../pr-number
 
+      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
+      # Make sure to update the bot if you rename the artifact.
       - name: Upload diff
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: mypy_primer_diff
           path: mypy_primer.diff
 
-      - name: Upload pr-number
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: pr-number
-          path: pr-number
-
   memory_usage:
     name: Run memory statistics
     runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
@@ -85,7 +80,7 @@ jobs:
           persist-credentials: false
 
       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
 
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
         with:
```
.github/workflows/mypy_primer_comment.yaml (deleted; 122 lines)

```yaml
name: PR comment (mypy_primer)

on: # zizmor: ignore[dangerous-triggers]
  workflow_run:
    workflows: [Run mypy_primer]
    types: [completed]
  workflow_dispatch:
    inputs:
      workflow_run_id:
        description: The mypy_primer workflow that triggers the workflow run
        required: true

jobs:
  comment:
    runs-on: ubuntu-24.04
    permissions:
      pull-requests: write
    steps:
      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: Download PR number
        with:
          name: pr-number
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Parse pull request number
        id: pr-number
        run: |
          if [[ -f pr-number ]]
          then
            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
          fi

      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: "Download mypy_primer results"
        id: download-mypy_primer_diff
        if: steps.pr-number.outputs.pr-number
        with:
          name: mypy_primer_diff
          workflow: mypy_primer.yaml
          pr: ${{ steps.pr-number.outputs.pr-number }}
          path: pr/mypy_primer_diff
          workflow_conclusion: completed
          if_no_artifact_found: ignore
          allow_forks: true

      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: "Download mypy_primer memory results"
        id: download-mypy_primer_memory_diff
        if: steps.pr-number.outputs.pr-number
        with:
          name: mypy_primer_memory_diff
          workflow: mypy_primer.yaml
          pr: ${{ steps.pr-number.outputs.pr-number }}
          path: pr/mypy_primer_memory_diff
          workflow_conclusion: completed
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Generate comment content
        id: generate-comment
        if: ${{ steps.download-mypy_primer_diff.outputs.found_artifact == 'true' && steps.download-mypy_primer_memory_diff.outputs.found_artifact == 'true' }}
        run: |
          # Guard against malicious mypy_primer results that symlink to a secret
          # file on this runner
          if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]] || [[ -L pr/mypy_primer_memory_diff/mypy_primer_memory.diff ]]
          then
            echo "Error: mypy_primer.diff and mypy_primer_memory.diff cannot be a symlink"
            exit 1
          fi

          # Note this identifier is used to find the comment to update on
          # subsequent runs
          echo '<!-- generated-comment mypy_primer -->' >> comment.txt

          echo '## `mypy_primer` results' >> comment.txt
          if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then
            echo '<details>' >> comment.txt
            echo '<summary>Changes were detected when running on open source projects</summary>' >> comment.txt
            echo '' >> comment.txt
            echo '```diff' >> comment.txt
            cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt
            echo '```' >> comment.txt
            echo '</details>' >> comment.txt
          else
            echo 'No ecosystem changes detected ✅' >> comment.txt
          fi

          if [ -s "pr/mypy_primer_memory_diff/mypy_primer_memory.diff" ]; then
            echo '<details>' >> comment.txt
            echo '<summary>Memory usage changes were detected when running on open source projects</summary>' >> comment.txt
            echo '' >> comment.txt
            echo '```diff' >> comment.txt
            cat pr/mypy_primer_memory_diff/mypy_primer_memory.diff >> comment.txt
            echo '```' >> comment.txt
            echo '</details>' >> comment.txt
          else
            echo 'No memory usage changes detected ✅' >> comment.txt
          fi

          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
          cat comment.txt >> "$GITHUB_OUTPUT"
          echo 'EOF' >> "$GITHUB_OUTPUT"

      - name: Find existing comment
        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
        if: steps.generate-comment.outcome == 'success'
        id: find-comment
        with:
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          comment-author: "github-actions[bot]"
          body-includes: "<!-- generated-comment mypy_primer -->"

      - name: Create or update comment
        if: steps.find-comment.outcome == 'success'
        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
        with:
          comment-id: ${{ steps.find-comment.outputs.comment-id }}
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          body-path: comment.txt
          edit-mode: replace
```
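The deleted workflow's `comment<<EOF` lines use GitHub Actions' standard multiline-output syntax: a step appends a heredoc-style block to `$GITHUB_OUTPUT`, and later steps read it back as `steps.<id>.outputs.<name>`. A minimal standalone sketch of the mechanism (step ids and values here are illustrative):

```yaml
# Hypothetical job demonstrating a multiline step output.
jobs:
  demo:
    runs-on: ubuntu-latest
    steps:
      - id: gen
        run: |
          {
            echo 'comment<<EOF'   # open the delimiter for a multiline value
            printf 'line one\nline two\n'
            echo 'EOF'            # close it; everything in between becomes the output
          } >> "$GITHUB_OUTPUT"
      - run: echo "${COMMENT}"
        env:
          COMMENT: ${{ steps.gen.outputs.comment }}
```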
.github/workflows/pr-comment.yaml (deleted; 88 lines)

```yaml
name: Ecosystem check comment

on:
  workflow_run:
    workflows: [CI]
    types: [completed]
  workflow_dispatch:
    inputs:
      workflow_run_id:
        description: The ecosystem workflow that triggers the workflow run
        required: true

jobs:
  comment:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: Download pull request number
        with:
          name: pr-number
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Parse pull request number
        id: pr-number
        run: |
          if [[ -f pr-number ]]
          then
            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
          fi

      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: "Download ecosystem results"
        id: download-ecosystem-result
        if: steps.pr-number.outputs.pr-number
        with:
          name: ecosystem-result
          workflow: ci.yaml
          pr: ${{ steps.pr-number.outputs.pr-number }}
          path: pr/ecosystem
          workflow_conclusion: completed
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Generate comment content
        id: generate-comment
        if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
        run: |
          # Guard against malicious ecosystem results that symlink to a secret
          # file on this runner
          if [[ -L pr/ecosystem/ecosystem-result ]]
          then
            echo "Error: ecosystem-result cannot be a symlink"
            exit 1
          fi

          # Note this identifier is used to find the comment to update on
          # subsequent runs
          echo '<!-- generated-comment ecosystem -->' >> comment.txt

          echo '## `ruff-ecosystem` results' >> comment.txt
          cat pr/ecosystem/ecosystem-result >> comment.txt
          echo "" >> comment.txt

          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
          cat comment.txt >> "$GITHUB_OUTPUT"
          echo 'EOF' >> "$GITHUB_OUTPUT"

      - name: Find existing comment
        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
        if: steps.generate-comment.outcome == 'success'
        id: find-comment
        with:
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          comment-author: "github-actions[bot]"
          body-includes: "<!-- generated-comment ecosystem -->"

      - name: Create or update comment
        if: steps.find-comment.outcome == 'success'
        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
        with:
          comment-id: ${{ steps.find-comment.outputs.comment-id }}
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          body-path: comment.txt
          edit-mode: replace
```
.github/workflows/publish-playground.yml (2 changes)

```diff
@@ -31,7 +31,7 @@ jobs:
           persist-credentials: false
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
-      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
           package-manager-cache: false
```
.github/workflows/publish-pypi.yml (2 changes)

```diff
@@ -22,7 +22,7 @@ jobs:
       id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
       - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           pattern: wheels-*
```
.github/workflows/publish-ty-playground.yml (2 changes)

```diff
@@ -35,7 +35,7 @@ jobs:
           persist-credentials: false
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
-      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
           package-manager-cache: false
```
.github/workflows/publish-wasm.yml (2 changes)

```diff
@@ -45,7 +45,7 @@ jobs:
           jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
           mv /tmp/package.json crates/ruff_wasm/pkg
       - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
-      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
           registry-url: "https://registry.npmjs.org"
```
.github/workflows/release.yml (6 changes)

```diff
@@ -68,7 +68,7 @@ jobs:
         # we specify bash to get pipefail; it guards against the `curl` command
         # failing. otherwise `sh` won't catch that `curl` returned non-0
         shell: bash
-        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.0/cargo-dist-installer.sh | sh"
+        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.2/cargo-dist-installer.sh | sh"
       - name: Cache dist
         uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
         with:
@@ -166,8 +166,8 @@ jobs:
       - custom-build-binaries
       - custom-build-docker
       - build-global-artifacts
-    # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
-    if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
+    # Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine)
+    if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     runs-on: "depot-ubuntu-latest-4"
```
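The reason for adding `needs.plan.result == 'success'` above: with `always()`, a job runs even when an upstream dependency failed, so every dependency whose success matters must be checked explicitly. Condensed to its essentials, the guard pattern looks like this (job names are placeholders):

```yaml
# Illustrative pattern only.
publish:
  needs: [plan, build]
  if: ${{ always() && needs.plan.result == 'success' && (needs.build.result == 'success' || needs.build.result == 'skipped') }}
```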
.github/workflows/sync_typeshed.yaml (15 changes)

```diff
@@ -77,7 +77,7 @@ jobs:
         run: |
           git config --global user.name typeshedbot
           git config --global user.email '<>'
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
       - name: Sync typeshed stubs
         run: |
           rm -rf "ruff/${VENDORED_TYPESHED}"
@@ -131,7 +131,7 @@ jobs:
         with:
           persist-credentials: true
           ref: ${{ env.UPSTREAM_BRANCH}}
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
       - name: Setup git
         run: |
           git config --global user.name typeshedbot
@@ -170,7 +170,7 @@ jobs:
         with:
           persist-credentials: true
           ref: ${{ env.UPSTREAM_BRANCH}}
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
       - name: Setup git
         run: |
           git config --global user.name typeshedbot
@@ -207,17 +207,22 @@ jobs:
         uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo nextest"
         if: ${{ success() }}
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@537c30d2b45cc3aa3fb35e2bbcfb61ef93fd6f02 # v2.62.52
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
         if: ${{ success() }}
-        uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
+        uses: taiki-e/install-action@537c30d2b45cc3aa3fb35e2bbcfb61ef93fd6f02 # v2.62.52
         with:
           tool: cargo-insta
       - name: Update snapshots
         if: ${{ success() }}
         run: |
+          cargo r \
+            --profile=profiling \
+            -p ty_completion_eval \
+            -- all --tasks ./crates/ty_completion_eval/completion-evaluation-tasks.csv
 
           # The `cargo insta` docs indicate that `--unreferenced=delete` might be a good option,
           # but from local testing it appears to just revert all changes made by `cargo insta test --accept`.
           #
```
.github/workflows/ty-ecosystem-analyzer.yaml (14 changes)

```diff
@@ -33,7 +33,7 @@ jobs:
           persist-credentials: false
 
       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
         with:
           enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact
 
@@ -67,7 +67,7 @@ jobs:
 
           cd ..
 
-          uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@908758da02a73ef3f3308e1dbb2248510029bbe4"
+          uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@11aa5472cf9d6b9e019c401505a093112942d7bf"
 
           ecosystem-analyzer \
             --repository ruff \
@@ -112,8 +112,6 @@ jobs:
 
           cat diff-statistics.md >> "$GITHUB_STEP_SUMMARY"
 
-          echo ${{ github.event.number }} > pr-number
-
       - name: "Deploy to Cloudflare Pages"
         if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
         id: deploy
@@ -131,18 +129,14 @@ jobs:
           echo >> comment.md
           echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)** ([timing results]($DEPLOYMENT_URL/timing))" >> comment.md
 
+      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
+      # Make sure to update the bot if you rename the artifact.
       - name: Upload comment
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: comment.md
           path: comment.md
 
-      - name: Upload pr-number
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: pr-number
-          path: pr-number
-
       - name: Upload diagnostics diff
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
```
Deleted workflow "PR comment (ty ecosystem-analyzer)" (85 lines)

```yaml
name: PR comment (ty ecosystem-analyzer)

on: # zizmor: ignore[dangerous-triggers]
  workflow_run:
    workflows: [ty ecosystem-analyzer]
    types: [completed]
  workflow_dispatch:
    inputs:
      workflow_run_id:
        description: The ty ecosystem-analyzer workflow that triggers the workflow run
        required: true

jobs:
  comment:
    runs-on: ubuntu-24.04
    permissions:
      pull-requests: write
    steps:
      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: Download PR number
        with:
          name: pr-number
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Parse pull request number
        id: pr-number
        run: |
          if [[ -f pr-number ]]
          then
            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
          fi

      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: "Download comment.md"
        id: download-comment
        if: steps.pr-number.outputs.pr-number
        with:
          name: comment.md
          workflow: ty-ecosystem-analyzer.yaml
          pr: ${{ steps.pr-number.outputs.pr-number }}
          path: pr/comment
          workflow_conclusion: completed
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Generate comment content
        id: generate-comment
        if: ${{ steps.download-comment.outputs.found_artifact == 'true' }}
        run: |
          # Guard against malicious ty ecosystem-analyzer results that symlink to a secret
          # file on this runner
          if [[ -L pr/comment/comment.md ]]
          then
            echo "Error: comment.md cannot be a symlink"
            exit 1
          fi

          # Note: this identifier is used to find the comment to update on subsequent runs
          echo '<!-- generated-comment ty ecosystem-analyzer -->' > comment.md
          echo >> comment.md
          cat pr/comment/comment.md >> comment.md

          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
          cat comment.md >> "$GITHUB_OUTPUT"
          echo 'EOF' >> "$GITHUB_OUTPUT"

      - name: Find existing comment
        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
        if: steps.generate-comment.outcome == 'success'
        id: find-comment
        with:
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          comment-author: "github-actions[bot]"
          body-includes: "<!-- generated-comment ty ecosystem-analyzer -->"

      - name: Create or update comment
        if: steps.find-comment.outcome == 'success'
        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
        with:
          comment-id: ${{ steps.find-comment.outputs.comment-id }}
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          body-path: comment.md
          edit-mode: replace
```
.github/workflows/ty-ecosystem-report.yaml (4 changes)

```diff
@@ -29,7 +29,7 @@ jobs:
           persist-credentials: false
 
       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
         with:
           enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact
 
@@ -52,7 +52,7 @@ jobs:
 
           cd ..
 
-          uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@908758da02a73ef3f3308e1dbb2248510029bbe4"
+          uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@11aa5472cf9d6b9e019c401505a093112942d7bf"
 
           ecosystem-analyzer \
             --verbose \
```
13 .github/workflows/typing_conformance.yaml vendored

@@ -24,7 +24,7 @@ env:
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10
  RUST_BACKTRACE: 1
-  CONFORMANCE_SUITE_COMMIT: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc
+  CONFORMANCE_SUITE_COMMIT: 9f6d8ced7cd1c8d92687a4e9c96d7716452e471e

jobs:
  typing_conformance:

@@ -94,21 +94,18 @@ jobs:
            touch typing_conformance_diagnostics.diff
          fi

          echo ${{ github.event.number }} > pr-number
          echo "${CONFORMANCE_SUITE_COMMIT}" > conformance-suite-commit

      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
      # Make sure to update the bot if you rename the artifact.
      - name: Upload diff
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: typing_conformance_diagnostics_diff
          path: typing_conformance_diagnostics.diff

      - name: Upload pr-number
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pr-number
          path: pr-number

      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
      # Make sure to update the bot if you rename the artifact.
      - name: Upload conformance suite commit
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:

112 .github/workflows/typing_conformance_comment.yaml vendored

@@ -1,112 +0,0 @@
name: PR comment (typing_conformance)

on: # zizmor: ignore[dangerous-triggers]
  workflow_run:
    workflows: [Run typing conformance]
    types: [completed]
  workflow_dispatch:
    inputs:
      workflow_run_id:
        description: The typing_conformance workflow that triggers the workflow run
        required: true

jobs:
  comment:
    runs-on: ubuntu-24.04
    permissions:
      pull-requests: write
    steps:
      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: Download PR number
        with:
          name: pr-number
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Parse pull request number
        id: pr-number
        run: |
          if [[ -f pr-number ]]
          then
            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
          fi

      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: Download typing conformance suite commit
        with:
          name: conformance-suite-commit
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore
          allow_forks: true

      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: "Download typing_conformance results"
        id: download-typing_conformance_diff
        if: steps.pr-number.outputs.pr-number
        with:
          name: typing_conformance_diagnostics_diff
          workflow: typing_conformance.yaml
          pr: ${{ steps.pr-number.outputs.pr-number }}
          path: pr/typing_conformance_diagnostics_diff
          workflow_conclusion: completed
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Generate comment content
        id: generate-comment
        if: ${{ steps.download-typing_conformance_diff.outputs.found_artifact == 'true' }}
        run: |
          # Guard against malicious typing_conformance results that symlink to a secret
          # file on this runner
          if [[ -L pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff ]]
          then
            echo "Error: typing_conformance_diagnostics.diff cannot be a symlink"
            exit 1
          fi

          # Note this identifier is used to find the comment to update on
          # subsequent runs
          echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt

          if [[ -f conformance-suite-commit ]]
          then
            echo "## Diagnostic diff on [typing conformance tests](https://github.com/python/typing/tree/$(<conformance-suite-commit)/conformance)" >> comment.txt
          else
            echo "conformance-suite-commit file not found"
            echo "## Diagnostic diff on typing conformance tests" >> comment.txt
          fi

          if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then
            echo '<details>' >> comment.txt
            echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt
            echo '' >> comment.txt
            echo '```diff' >> comment.txt
            cat pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff >> comment.txt
            echo '```' >> comment.txt
            echo '</details>' >> comment.txt
          else
            echo 'No changes detected when running ty on typing conformance tests ✅' >> comment.txt
          fi

          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
          cat comment.txt >> "$GITHUB_OUTPUT"
          echo 'EOF' >> "$GITHUB_OUTPUT"

      - name: Find existing comment
        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
        if: steps.generate-comment.outcome == 'success'
        id: find-comment
        with:
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          comment-author: "github-actions[bot]"
          body-includes: "<!-- generated-comment typing_conformance_diagnostics_diff -->"

      - name: Create or update comment
        if: steps.find-comment.outcome == 'success'
        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
        with:
          comment-id: ${{ steps.find-comment.outputs.comment-id }}
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          body-path: comment.txt
          edit-mode: replace

3 .github/zizmor.yml vendored

@@ -3,9 +3,6 @@
#
# TODO: can we remove the ignores here so that our workflows are more secure?
rules:
  dangerous-triggers:
    ignore:
      - pr-comment.yaml
  cache-poisoning:
    ignore:
      - build-docker.yml

153 CHANGELOG.md

@@ -1,5 +1,158 @@
# Changelog

## 0.14.5

Released on 2025-11-13.

### Preview features

- \[`flake8-simplify`\] Apply `SIM113` when index variable is of type `int` ([#21395](https://github.com/astral-sh/ruff/pull/21395))
- \[`pydoclint`\] Fix false positive when Sphinx directives follow a "Raises" section (`DOC502`) ([#20535](https://github.com/astral-sh/ruff/pull/20535))
- \[`pydoclint`\] Support NumPy-style comma-separated parameters (`DOC102`) ([#20972](https://github.com/astral-sh/ruff/pull/20972))
- \[`refurb`\] Auto-fix annotated assignments (`FURB101`) ([#21278](https://github.com/astral-sh/ruff/pull/21278))
- \[`ruff`\] Ignore `str()` when not used for simple conversion (`RUF065`) ([#21330](https://github.com/astral-sh/ruff/pull/21330))

### Bug fixes

- Fix syntax error false positive on alternative `match` patterns ([#21362](https://github.com/astral-sh/ruff/pull/21362))
- \[`flake8-simplify`\] Fix false positive for iterable initializers with generator arguments (`SIM222`) ([#21187](https://github.com/astral-sh/ruff/pull/21187))
- \[`pyupgrade`\] Fix false positive on relative imports from local `.builtins` module (`UP029`) ([#21309](https://github.com/astral-sh/ruff/pull/21309))
- \[`pyupgrade`\] Consistently set the deprecated tag (`UP035`) ([#21396](https://github.com/astral-sh/ruff/pull/21396))

### Rule changes

- \[`refurb`\] Detect empty f-strings (`FURB105`) ([#21348](https://github.com/astral-sh/ruff/pull/21348))

### CLI

- Add option to provide a reason to `--add-noqa` ([#21294](https://github.com/astral-sh/ruff/pull/21294))
- Add upstream linter URL to `ruff linter --output-format=json` ([#21316](https://github.com/astral-sh/ruff/pull/21316))
- Add color to `--help` ([#21337](https://github.com/astral-sh/ruff/pull/21337))

### Documentation

- Add a new "Opening a PR" section to the contribution guide ([#21298](https://github.com/astral-sh/ruff/pull/21298))
- Added the PyScripter IDE to the list of "Who is using Ruff?" ([#21402](https://github.com/astral-sh/ruff/pull/21402))
- Update PyCharm setup instructions ([#21409](https://github.com/astral-sh/ruff/pull/21409))
- \[`flake8-annotations`\] Add link to `allow-star-arg-any` option (`ANN401`) ([#21326](https://github.com/astral-sh/ruff/pull/21326))

### Other changes

- \[`configuration`\] Improve error message when `line-length` exceeds `u16::MAX` ([#21329](https://github.com/astral-sh/ruff/pull/21329))

### Contributors

- [@njhearp](https://github.com/njhearp)
- [@11happy](https://github.com/11happy)
- [@hugovk](https://github.com/hugovk)
- [@Gankra](https://github.com/Gankra)
- [@ntBre](https://github.com/ntBre)
- [@pyscripter](https://github.com/pyscripter)
- [@danparizher](https://github.com/danparizher)
- [@MichaReiser](https://github.com/MichaReiser)
- [@henryiii](https://github.com/henryiii)
- [@charliecloudberry](https://github.com/charliecloudberry)

## 0.14.4

Released on 2025-11-06.

### Preview features

- [formatter] Allow newlines after function headers without docstrings ([#21110](https://github.com/astral-sh/ruff/pull/21110))
- [formatter] Avoid extra parentheses for long `match` patterns with `as` captures ([#21176](https://github.com/astral-sh/ruff/pull/21176))
- \[`refurb`\] Expand fix safety for keyword arguments and `Decimal`s (`FURB164`) ([#21259](https://github.com/astral-sh/ruff/pull/21259))
- \[`refurb`\] Preserve argument ordering in autofix (`FURB103`) ([#20790](https://github.com/astral-sh/ruff/pull/20790))

### Bug fixes

- [server] Fix missing diagnostics for notebooks ([#21156](https://github.com/astral-sh/ruff/pull/21156))
- \[`flake8-bugbear`\] Ignore non-NFKC attribute names in `B009` and `B010` ([#21131](https://github.com/astral-sh/ruff/pull/21131))
- \[`refurb`\] Fix false negative for underscores before sign in `Decimal` constructor (`FURB157`) ([#21190](https://github.com/astral-sh/ruff/pull/21190))
- \[`ruff`\] Fix false positives on starred arguments (`RUF057`) ([#21256](https://github.com/astral-sh/ruff/pull/21256))

### Rule changes

- \[`airflow`\] extend deprecated argument `concurrency` in `airflow..DAG` (`AIR301`) ([#21220](https://github.com/astral-sh/ruff/pull/21220))

### Documentation

- Improve `extend` docs ([#21135](https://github.com/astral-sh/ruff/pull/21135))
- \[`flake8-comprehensions`\] Fix typo in `C416` documentation ([#21184](https://github.com/astral-sh/ruff/pull/21184))
- Revise Ruff setup instructions for Zed editor ([#20935](https://github.com/astral-sh/ruff/pull/20935))

### Other changes

- Make `ruff analyze graph` work with jupyter notebooks ([#21161](https://github.com/astral-sh/ruff/pull/21161))

### Contributors

- [@chirizxc](https://github.com/chirizxc)
- [@Lee-W](https://github.com/Lee-W)
- [@musicinmybrain](https://github.com/musicinmybrain)
- [@MichaReiser](https://github.com/MichaReiser)
- [@tjkuson](https://github.com/tjkuson)
- [@danparizher](https://github.com/danparizher)
- [@renovate](https://github.com/renovate)
- [@ntBre](https://github.com/ntBre)
- [@gauthsvenkat](https://github.com/gauthsvenkat)
- [@LoicRiegel](https://github.com/LoicRiegel)

## 0.14.3

Released on 2025-10-30.

### Preview features

- Respect `--output-format` with `--watch` ([#21097](https://github.com/astral-sh/ruff/pull/21097))
- \[`pydoclint`\] Fix false positive on explicit exception re-raising (`DOC501`, `DOC502`) ([#21011](https://github.com/astral-sh/ruff/pull/21011))
- \[`pyflakes`\] Revert to stable behavior if imports for module lie in alternate branches for `F401` ([#20878](https://github.com/astral-sh/ruff/pull/20878))
- \[`pylint`\] Implement `stop-iteration-return` (`PLR1708`) ([#20733](https://github.com/astral-sh/ruff/pull/20733))
- \[`ruff`\] Add support for additional eager conversion patterns (`RUF065`) ([#20657](https://github.com/astral-sh/ruff/pull/20657))

### Bug fixes

- Fix finding keyword range for clause header after statement ending with semicolon ([#21067](https://github.com/astral-sh/ruff/pull/21067))
- Fix syntax error false positive on nested alternative patterns ([#21104](https://github.com/astral-sh/ruff/pull/21104))
- \[`ISC001`\] Fix panic when string literals are unclosed ([#21034](https://github.com/astral-sh/ruff/pull/21034))
- \[`flake8-django`\] Apply `DJ001` to annotated fields ([#20907](https://github.com/astral-sh/ruff/pull/20907))
- \[`flake8-pyi`\] Fix `PYI034` to not trigger on metaclasses (`PYI034`) ([#20881](https://github.com/astral-sh/ruff/pull/20881))
- \[`flake8-type-checking`\] Fix `TC003` false positive with `future-annotations` ([#21125](https://github.com/astral-sh/ruff/pull/21125))
- \[`pyflakes`\] Fix false positive for `__class__` in lambda expressions within class definitions (`F821`) ([#20564](https://github.com/astral-sh/ruff/pull/20564))
- \[`pyupgrade`\] Fix false positive for `TypeVar` with default on Python \<3.13 (`UP046`,`UP047`) ([#21045](https://github.com/astral-sh/ruff/pull/21045))

### Rule changes

- Add missing docstring sections to the numpy list ([#20931](https://github.com/astral-sh/ruff/pull/20931))
- \[`airflow`\] Extend `airflow.models..Param` check (`AIR311`) ([#21043](https://github.com/astral-sh/ruff/pull/21043))
- \[`airflow`\] Warn that `airflow....DAG.create_dagrun` has been removed (`AIR301`) ([#21093](https://github.com/astral-sh/ruff/pull/21093))
- \[`refurb`\] Preserve digit separators in `Decimal` constructor (`FURB157`) ([#20588](https://github.com/astral-sh/ruff/pull/20588))

### Server

- Avoid sending an unnecessary "clear diagnostics" message for clients supporting pull diagnostics ([#21105](https://github.com/astral-sh/ruff/pull/21105))

### Documentation

- \[`flake8-bandit`\] Fix correct example for `S308` ([#21128](https://github.com/astral-sh/ruff/pull/21128))

### Other changes

- Clearer error message when `line-length` goes beyond threshold ([#21072](https://github.com/astral-sh/ruff/pull/21072))

### Contributors

- [@danparizher](https://github.com/danparizher)
- [@jvacek](https://github.com/jvacek)
- [@ntBre](https://github.com/ntBre)
- [@augustelalande](https://github.com/augustelalande)
- [@prakhar1144](https://github.com/prakhar1144)
- [@TaKO8Ki](https://github.com/TaKO8Ki)
- [@dylwil3](https://github.com/dylwil3)
- [@fatelei](https://github.com/fatelei)
- [@ShaharNaveh](https://github.com/ShaharNaveh)
- [@Lee-W](https://github.com/Lee-W)

## 0.14.2

Released on 2025-10-23.

@@ -280,6 +280,55 @@ Note that plugin-specific configuration options are defined in their own modules

Finally, regenerate the documentation and generated code with `cargo dev generate-all`.

### Opening a PR

After you finish your changes, the next step is to open a PR. By default, two
sections will be filled into the PR body: the summary and the test plan.

#### The summary

The summary is intended to give us as maintainers information about your PR.
This should typically include a link to the relevant issue(s) you're addressing
in your PR, as well as a summary of the issue and your approach to fixing it. If
you have any questions about your approach or design, or if you considered
alternative approaches, that can also be helpful to include.

AI can be helpful in generating both the code and summary of your PR, but a
successful contribution should still be carefully reviewed by you and the
summary editorialized before submitting a PR. A great summary is thorough but
also succinct and gives us the context we need to review your PR.

You can find examples of excellent issues and PRs by searching for the
[`great writeup`](https://github.com/astral-sh/ruff/issues?q=label%3A%22great%20writeup%22)
label.

#### The test plan

The test plan is likely to be shorter than the summary and can be as simple as
"Added new snapshot tests for `RUF123`," at least for rule bugs. For LSP or some
types of CLI changes, in particular, it can also be helpful to include
screenshots or recordings of your change in action.

#### Ecosystem report

After opening the PR, an ecosystem report will be run as part of CI. This shows
a diff of linter and formatter behavior before and after the changes in your PR.
Going through these changes and reporting your findings in the PR summary or an
additional comment helps us to review your PR more efficiently. It's also a great
way to find new test cases to incorporate into your PR if you identify any
issues.

#### PR status

To help us know when your PR is ready for review again, please either move your
PR back to a draft while working on it (marking it ready for review afterwards
will ping the previous reviewers) or explicitly re-request a review. This helps
us to avoid re-reviewing a PR while you're still working on it and also to
prioritize PRs that are definitely ready for review.

You can also thumbs-up or mark as resolved any comments we leave to let us know
you addressed them.

## MkDocs

> [!NOTE]

431 Cargo.lock generated

File diff suppressed because it is too large.

10 Cargo.toml

@@ -5,7 +5,7 @@ resolver = "2"

[workspace.package]
# Please update rustfmt.toml when bumping the Rust edition
edition = "2024"
-rust-version = "1.88"
+rust-version = "1.89"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"

@@ -84,7 +84,7 @@ dashmap = { version = "6.0.1" }
dir-test = { version = "0.4.0" }
dunce = { version = "1.0.5" }
drop_bomb = { version = "0.1.5" }
-etcetera = { version = "0.10.0" }
+etcetera = { version = "0.11.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }

@@ -103,7 +103,7 @@ hashbrown = { version = "0.16.0", default-features = false, features = [
  "inline-more",
] }
heck = "0.5.0"
-ignore = { version = "0.4.22" }
+ignore = { version = "0.4.24" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.6.0" }

@@ -124,7 +124,7 @@ lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
  "proposed",
] }
-matchit = { version = "0.8.1" }
+matchit = { version = "0.9.0" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }

@@ -146,7 +146,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
-salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "cdd0b85516a52c18b8a6d17a2279a96ed6c3e198", default-features = false, features = [
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "a885bb4c4c192741b8a17418fef81a71e33d111e", default-features = false, features = [
  "compact_str",
  "macros",
  "salsa_unstable",

@@ -147,8 +147,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
-curl -LsSf https://astral.sh/ruff/0.14.2/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.14.2/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.14.5/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.14.5/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),

@@ -181,7 +181,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
-  rev: v0.14.2
+  rev: v0.14.5
  hooks:
    # Run the linter.
    - id: ruff-check

@@ -491,6 +491,7 @@ Ruff is used by a number of major open-source projects and companies, including:
- [PyTorch](https://github.com/pytorch/pytorch)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
+- [PyScripter](https://github.com/pyscripter/pyscripter)
- [PyVista](https://github.com/pyvista/pyvista)
- [Reflex](https://github.com/reflex-dev/reflex)
- [River](https://github.com/online-ml/river)

@@ -1,6 +1,6 @@
[package]
name = "ruff"
-version = "0.14.2"
+version = "0.14.5"
publish = true
authors = { workspace = true }
edition = { workspace = true }

@@ -7,6 +7,8 @@ use std::sync::Arc;

use crate::commands::completions::config::{OptionString, OptionStringParser};
use anyhow::bail;
+use clap::builder::Styles;
+use clap::builder::styling::{AnsiColor, Effects};
use clap::builder::{TypedValueParser, ValueParserFactory};
use clap::{Parser, Subcommand, command};
use colored::Colorize;

@@ -78,6 +80,13 @@ impl GlobalConfigArgs {
    }
}

+// Configures Clap v3-style help menu colors
+const STYLES: Styles = Styles::styled()
+    .header(AnsiColor::Green.on_default().effects(Effects::BOLD))
+    .usage(AnsiColor::Green.on_default().effects(Effects::BOLD))
+    .literal(AnsiColor::Cyan.on_default().effects(Effects::BOLD))
+    .placeholder(AnsiColor::Cyan.on_default());
+
#[derive(Debug, Parser)]
#[command(
    author,

@@ -86,6 +95,7 @@ impl GlobalConfigArgs {
    after_help = "For help with a specific command, see: `ruff help <command>`."
)]
#[command(version)]
+#[command(styles = STYLES)]
pub struct Args {
    #[command(subcommand)]
    pub(crate) command: Command,

@@ -157,6 +167,7 @@ pub enum AnalyzeCommand {
}

#[derive(Clone, Debug, clap::Parser)]
+#[expect(clippy::struct_excessive_bools)]
pub struct AnalyzeGraphCommand {
    /// List of files or directories to include.
    #[clap(help = "List of files or directories to include [default: .]")]

@@ -183,6 +194,12 @@ pub struct AnalyzeGraphCommand {
    /// Path to a virtual environment to use for resolving additional dependencies
    #[arg(long)]
    python: Option<PathBuf>,
+    /// Include imports that are only used for type checking (i.e., imports within `if TYPE_CHECKING:` blocks).
+    /// Use `--no-type-checking-imports` to exclude imports that are only used for type checking.
+    #[arg(long, overrides_with("no_type_checking_imports"))]
+    type_checking_imports: bool,
+    #[arg(long, overrides_with("type_checking_imports"), hide = true)]
+    no_type_checking_imports: bool,
}

// The `Parser` derive is for ruff_dev, for ruff `Args` would be sufficient
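
For illustration, a usage sketch of the two new flags (mirroring the CLI tests added later in this diff, which also pass `--preview`):

```console
# Type-checking-only imports are included in the import graph by default:
ruff analyze graph --preview

# Exclude imports that appear only inside `if TYPE_CHECKING:` blocks:
ruff analyze graph --preview --no-type-checking-imports
```
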
@@ -405,8 +422,13 @@ pub struct CheckCommand {
    )]
    pub statistics: bool,
    /// Enable automatic additions of `noqa` directives to failing lines.
+    /// Optionally provide a reason to append after the codes.
    #[arg(
        long,
+        value_name = "REASON",
+        default_missing_value = "",
+        num_args = 0..=1,
+        require_equals = true,
        // conflicts_with = "add_noqa",
        conflicts_with = "show_files",
        conflicts_with = "show_settings",

@@ -418,7 +440,7 @@ pub struct CheckCommand {
        conflicts_with = "fix",
        conflicts_with = "diff",
    )]
-    pub add_noqa: bool,
+    pub add_noqa: Option<String>,
    /// See the files Ruff will be run against with the current settings.
    #[arg(
        long,

@@ -824,6 +846,10 @@ impl AnalyzeGraphCommand {
            string_imports_min_dots: self.min_dots,
            preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
            target_version: self.target_version.map(ast::PythonVersion::from),
+            type_checking_imports: resolve_bool_arg(
+                self.type_checking_imports,
+                self.no_type_checking_imports,
+            ),
            ..ExplicitConfigOverrides::default()
        };

@@ -1047,7 +1073,7 @@ Possible choices:
/// etc.).
#[expect(clippy::struct_excessive_bools)]
pub struct CheckArguments {
-    pub add_noqa: bool,
+    pub add_noqa: Option<String>,
    pub diff: bool,
    pub exit_non_zero_on_fix: bool,
    pub exit_zero: bool,

@@ -1320,6 +1346,7 @@ struct ExplicitConfigOverrides {
    extension: Option<Vec<ExtensionPair>>,
    detect_string_imports: Option<bool>,
    string_imports_min_dots: Option<usize>,
+    type_checking_imports: Option<bool>,
}

impl ConfigurationTransformer for ExplicitConfigOverrides {

@@ -1410,6 +1437,9 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
        if let Some(string_imports_min_dots) = &self.string_imports_min_dots {
            config.analyze.string_imports_min_dots = Some(*string_imports_min_dots);
        }
+        if let Some(type_checking_imports) = &self.type_checking_imports {
+            config.analyze.type_checking_imports = Some(*type_checking_imports);
+        }

        config
    }

@@ -21,6 +21,7 @@ pub(crate) fn add_noqa(
    files: &[PathBuf],
    pyproject_config: &PyprojectConfig,
    config_arguments: &ConfigArguments,
+    reason: Option<&str>,
) -> Result<usize> {
    // Collect all the files to check.
    let start = Instant::now();

@@ -76,7 +77,14 @@ pub(crate) fn add_noqa(
                return None;
            }
        };
-        match add_noqa_to_path(path, package, &source_kind, source_type, &settings.linter) {
+        match add_noqa_to_path(
+            path,
+            package,
+            &source_kind,
+            source_type,
+            &settings.linter,
+            reason,
+        ) {
            Ok(count) => Some(count),
            Err(e) => {
                error!("Failed to add noqa to {}: {e}", path.display());

@@ -7,6 +7,7 @@ use path_absolutize::CWD;

use ruff_db::system::{SystemPath, SystemPathBuf};
use ruff_graph::{Direction, ImportMap, ModuleDb, ModuleImports};
use ruff_linter::package::PackageRoot;
+use ruff_linter::source_kind::SourceKind;
use ruff_linter::{warn_user, warn_user_once};
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{ResolvedFile, match_exclusion, python_files_in_path};

@@ -104,6 +105,7 @@ pub(crate) fn analyze_graph(
        let settings = resolver.resolve(path);
        let string_imports = settings.analyze.string_imports;
        let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();
+        let type_checking_imports = settings.analyze.type_checking_imports;

        // Skip excluded files.
        if (settings.file_resolver.force_exclude || !resolved_file.is_root())

@@ -127,10 +129,6 @@ pub(crate) fn analyze_graph(
            },
            Some(language) => PySourceType::from(language),
        };
-        if matches!(source_type, PySourceType::Ipynb) {
-            debug!("Ignoring Jupyter notebook: {}", path.display());
-            continue;
-        }

        // Convert to system paths.
        let Ok(package) = package.map(SystemPathBuf::from_path_buf).transpose() else {

@@ -147,13 +145,35 @@ pub(crate) fn analyze_graph(
        let root = root.clone();
        let result = inner_result.clone();
        scope.spawn(move |_| {
+            // Extract source code (handles both .py and .ipynb files)
+            let source_kind = match SourceKind::from_path(path.as_std_path(), source_type) {
+                Ok(Some(source_kind)) => source_kind,
+                Ok(None) => {
+                    debug!("Skipping non-Python notebook: {path}");
+                    return;
+                }
+                Err(err) => {
+                    warn!("Failed to read source for {path}: {err}");
+                    return;
+                }
+            };
+
+            let source_code = source_kind.source_code();
+
            // Identify any imports via static analysis.
-            let mut imports =
-                ModuleImports::detect(&db, &path, package.as_deref(), string_imports)
-                    .unwrap_or_else(|err| {
-                        warn!("Failed to generate import map for {path}: {err}");
-                        ModuleImports::default()
-                    });
+            let mut imports = ModuleImports::detect(
+                &db,
+                source_code,
+                source_type,
+                &path,
+                package.as_deref(),
+                string_imports,
+                type_checking_imports,
+            )
+            .unwrap_or_else(|err| {
+                warn!("Failed to generate import map for {path}: {err}");
+                ModuleImports::default()
+            });

            debug!("Discovered {} imports for {}", imports.len(), path);

@@ -370,7 +370,7 @@ pub(crate) fn format_source(
            let line_index = LineIndex::from_source_text(unformatted);
            let byte_range = range.to_text_range(unformatted, &line_index);
            format_range(unformatted, byte_range, options).map(|formatted_range| {
-                let mut formatted = unformatted.to_string();
+                let mut formatted = unformatted.clone();
                formatted.replace_range(
                    std::ops::Range::<usize>::from(formatted_range.source_range()),
                    formatted_range.as_code(),

@@ -16,6 +16,8 @@ struct LinterInfo {
    prefix: &'static str,
    name: &'static str,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    url: Option<&'static str>,
    #[serde(skip_serializing_if = "Option::is_none")]
    categories: Option<Vec<LinterCategoryInfo>>,
}

@@ -50,6 +52,7 @@ pub(crate) fn linter(format: HelpFormat) -> Result<()> {
        .map(|linter_info| LinterInfo {
            prefix: linter_info.common_prefix(),
            name: linter_info.name(),
+            url: linter_info.url(),
            categories: linter_info.upstream_categories().map(|cats| {
                cats.iter()
                    .map(|c| LinterCategoryInfo {

@@ -319,12 +319,20 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
        warn_user!("Detected debug build without --no-cache.");
    }

-    if cli.add_noqa {
+    if let Some(reason) = &cli.add_noqa {
        if !fix_mode.is_generate() {
            warn_user!("--fix is incompatible with --add-noqa.");
        }
+        if reason.contains(['\n', '\r']) {
+            return Err(anyhow::anyhow!(
+                "--add-noqa <reason> cannot contain newline characters"
+            ));
+        }
+
+        let reason_opt = (!reason.is_empty()).then_some(reason.as_str());
+
        let modifications =
-            commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?;
+            commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments, reason_opt)?;
        if modifications > 0 && config_arguments.log_level >= LogLevel::Default {
            let s = if modifications == 1 { "" } else { "s" };
            #[expect(clippy::print_stderr)]
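
As a usage sketch grounded in the tests below: because the flag is declared with `require_equals = true`, the optional reason is attached with `=`, and an empty reason falls back to the old bare behavior:

```console
# Add bare `# noqa: <codes>` directives to failing lines (unchanged behavior):
ruff check --add-noqa

# Append a reason after the codes, producing e.g. `# noqa: F401 TODO: fix`:
ruff check --add-noqa="TODO: fix"
```
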
@@ -653,3 +653,133 @@ fn venv() -> Result<()> {

    Ok(())
}

#[test]
fn notebook_basic() -> Result<()> {
    let tempdir = TempDir::new()?;
    let root = ChildPath::new(tempdir.path());

    root.child("ruff").child("__init__.py").write_str("")?;
    root.child("ruff")
        .child("a.py")
        .write_str(indoc::indoc! {r#"
            def helper():
                pass
        "#})?;

    // Create a basic notebook with a simple import
    root.child("notebook.ipynb").write_str(indoc::indoc! {r#"
        {
            "cells": [
                {
                    "cell_type": "code",
                    "execution_count": null,
                    "metadata": {},
                    "outputs": [],
                    "source": [
                        "from ruff.a import helper"
                    ]
                }
            ],
            "metadata": {
                "language_info": {
                    "name": "python",
                    "version": "3.12.0"
                }
            },
            "nbformat": 4,
            "nbformat_minor": 5
        }
    "#})?;

    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(command().current_dir(&root), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "notebook.ipynb": [
            "ruff/a.py"
          ],
          "ruff/__init__.py": [],
          "ruff/a.py": []
        }

        ----- stderr -----
        "###);
    });

    Ok(())
}

#[test]
fn notebook_with_magic() -> Result<()> {
    let tempdir = TempDir::new()?;
    let root = ChildPath::new(tempdir.path());

    root.child("ruff").child("__init__.py").write_str("")?;
    root.child("ruff")
        .child("a.py")
        .write_str(indoc::indoc! {r#"
            def helper():
                pass
        "#})?;

    // Create a notebook with IPython magic commands and imports
    root.child("notebook.ipynb").write_str(indoc::indoc! {r#"
        {
            "cells": [
                {
                    "cell_type": "code",
                    "execution_count": null,
                    "metadata": {},
                    "outputs": [],
                    "source": [
                        "%load_ext autoreload\n",
                        "%autoreload 2"
                    ]
                },
                {
                    "cell_type": "code",
                    "execution_count": null,
                    "metadata": {},
                    "outputs": [],
                    "source": [
                        "from ruff.a import helper"
                    ]
                }
            ],
            "metadata": {
                "language_info": {
                    "name": "python",
                    "version": "3.12.0"
                }
            },
            "nbformat": 4,
            "nbformat_minor": 5
        }
    "#})?;

    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(command().current_dir(&root), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "notebook.ipynb": [
            "ruff/a.py"
          ],
          "ruff/__init__.py": [],
          "ruff/a.py": []
        }

        ----- stderr -----
        "###);
    });

    Ok(())
}

193 crates/ruff/tests/cli/analyze_graph.rs Normal file

@@ -0,0 +1,193 @@
use std::process::Command;

use insta_cmd::assert_cmd_snapshot;

use crate::CliTest;

#[test]
fn type_checking_imports() -> anyhow::Result<()> {
    let test = AnalyzeTest::with_files([
        ("ruff/__init__.py", ""),
        (
            "ruff/a.py",
            r#"
from typing import TYPE_CHECKING

import ruff.b

if TYPE_CHECKING:
    import ruff.c
"#,
        ),
        (
            "ruff/b.py",
            r#"
if TYPE_CHECKING:
    from ruff import c
"#,
        ),
        ("ruff/c.py", ""),
    ])?;

    assert_cmd_snapshot!(test.command(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    {
      "ruff/__init__.py": [],
      "ruff/a.py": [
        "ruff/b.py",
        "ruff/c.py"
      ],
      "ruff/b.py": [
        "ruff/c.py"
      ],
      "ruff/c.py": []
    }

    ----- stderr -----
    "###);

    assert_cmd_snapshot!(
        test.command()
            .arg("--no-type-checking-imports"),
        @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    {
      "ruff/__init__.py": [],
      "ruff/a.py": [
        "ruff/b.py"
      ],
      "ruff/b.py": [],
      "ruff/c.py": []
    }

    ----- stderr -----
    "###
    );

    Ok(())
}

#[test]
fn type_checking_imports_from_config() -> anyhow::Result<()> {
    let test = AnalyzeTest::with_files([
        ("ruff/__init__.py", ""),
        (
            "ruff/a.py",
            r#"
from typing import TYPE_CHECKING

import ruff.b

if TYPE_CHECKING:
    import ruff.c
"#,
        ),
        (
            "ruff/b.py",
            r#"
if TYPE_CHECKING:
    from ruff import c
"#,
        ),
        ("ruff/c.py", ""),
        (
            "ruff.toml",
            r#"
[analyze]
type-checking-imports = false
"#,
        ),
    ])?;

    assert_cmd_snapshot!(test.command(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    {
      "ruff/__init__.py": [],
      "ruff/a.py": [
        "ruff/b.py"
      ],
      "ruff/b.py": [],
      "ruff/c.py": []
    }

    ----- stderr -----
    "###);

    test.write_file(
        "ruff.toml",
        r#"
[analyze]
type-checking-imports = true
"#,
    )?;

    assert_cmd_snapshot!(test.command(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    {
      "ruff/__init__.py": [],
      "ruff/a.py": [
        "ruff/b.py",
        "ruff/c.py"
      ],
      "ruff/b.py": [
        "ruff/c.py"
      ],
      "ruff/c.py": []
    }

    ----- stderr -----
    "###
    );

    Ok(())
}

struct AnalyzeTest {
    cli_test: CliTest,
}

impl AnalyzeTest {
    pub(crate) fn new() -> anyhow::Result<Self> {
        Ok(Self {
            cli_test: CliTest::with_settings(|_, mut settings| {
                settings.add_filter(r#"\\\\"#, "/");
                settings
            })?,
        })
    }

    fn with_files<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<Self> {
        let case = Self::new()?;
        case.write_files(files)?;
        Ok(case)
    }

    #[expect(unused)]
    fn with_file(path: impl AsRef<std::path::Path>, content: &str) -> anyhow::Result<Self> {
        let fixture = Self::new()?;
        fixture.write_file(path, content)?;
        Ok(fixture)
    }

    fn command(&self) -> Command {
        let mut command = self.cli_test.command();
        command.arg("analyze").arg("graph").arg("--preview");
        command
    }
}

impl std::ops::Deref for AnalyzeTest {
    type Target = CliTest;

    fn deref(&self) -> &Self::Target {
        &self.cli_test
    }
}

@@ -1760,6 +1760,64 @@ from foo import ( # noqa: F401
    Ok(())
}

#[test]
fn add_noqa_with_reason() -> Result<()> {
    let fixture = CliTest::new()?;
    fixture.write_file(
        "test.py",
        r#"import os

def foo():
    x = 1
"#,
    )?;

    assert_cmd_snapshot!(fixture
        .check_command()
        .arg("--add-noqa=TODO: fix")
        .arg("--select=F401,F841")
        .arg("test.py"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Added 2 noqa directives.
    ");

    let content = fs::read_to_string(fixture.root().join("test.py"))?;
    insta::assert_snapshot!(content, @r"
    import os # noqa: F401 TODO: fix

    def foo():
        x = 1 # noqa: F841 TODO: fix
    ");

    Ok(())
}

#[test]
fn add_noqa_with_newline_in_reason() -> Result<()> {
    let fixture = CliTest::new()?;
    fixture.write_file("test.py", "import os\n")?;

    assert_cmd_snapshot!(fixture
        .check_command()
        .arg("--add-noqa=line1\nline2")
        .arg("--select=F401")
        .arg("test.py"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    ruff failed
    Cause: --add-noqa <reason> cannot contain newline characters
    "###);

    Ok(())
}

/// Infer `3.11` from `requires-python` in `pyproject.toml`.
#[test]
fn requires_python() -> Result<()> {

@@ -15,6 +15,7 @@ use std::{
};
use tempfile::TempDir;

+mod analyze_graph;
mod format;
mod lint;

@@ -62,9 +63,7 @@ impl CliTest {
        files: impl IntoIterator<Item = (&'a str, &'a str)>,
    ) -> anyhow::Result<Self> {
        let case = Self::new()?;
-        for file in files {
-            case.write_file(file.0, file.1)?;
-        }
+        case.write_files(files)?;
        Ok(case)
    }

@@ -153,6 +152,16 @@ impl CliTest {
        Ok(())
    }

+    pub(crate) fn write_files<'a>(
+        &self,
+        files: impl IntoIterator<Item = (&'a str, &'a str)>,
+    ) -> Result<()> {
+        for file in files {
+            self.write_file(file.0, file.1)?;
+        }
+        Ok(())
+    }
+
    /// Returns the path to the test directory root.
    pub(crate) fn root(&self) -> &Path {
        &self.project_dir

@@ -9,7 +9,6 @@ info:
  - concise
  - "--show-settings"
  - test.py
-snapshot_kind: text
---
success: true
exit_code: 0

@@ -284,5 +283,6 @@ analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
+analyze.type_checking_imports = true

----- stderr -----

@@ -12,7 +12,6 @@ info:
  - UP007
  - test.py
  - "-"
-snapshot_kind: text
---
success: true
exit_code: 0

@@ -286,5 +285,6 @@ analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
+analyze.type_checking_imports = true

----- stderr -----

@@ -13,7 +13,6 @@ info:
  - UP007
  - test.py
  - "-"
-snapshot_kind: text
---
success: true
exit_code: 0

@@ -288,5 +287,6 @@ analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
+analyze.type_checking_imports = true

----- stderr -----

@@ -14,7 +14,6 @@ info:
  - py310
  - test.py
  - "-"
-snapshot_kind: text
---
success: true
exit_code: 0

@@ -288,5 +287,6 @@ analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
+analyze.type_checking_imports = true

----- stderr -----

@@ -11,7 +11,6 @@ info:
  - "--select"
  - UP007
  - foo/test.py
-snapshot_kind: text
---
success: true
exit_code: 0

@@ -285,5 +284,6 @@ analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
+analyze.type_checking_imports = true

----- stderr -----

@@ -11,7 +11,6 @@ info:
  - "--select"
  - UP007
  - foo/test.py
-snapshot_kind: text
---
success: true
exit_code: 0

@@ -285,5 +284,6 @@ analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
+analyze.type_checking_imports = true

----- stderr -----

@@ -283,5 +283,6 @@ analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
+analyze.type_checking_imports = true

----- stderr -----

@@ -283,5 +283,6 @@ analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
+analyze.type_checking_imports = true

----- stderr -----

@@ -9,7 +9,6 @@ info:
  - concise
  - test.py
  - "--show-settings"
-snapshot_kind: text
---
success: true
exit_code: 0

@@ -284,5 +283,6 @@ analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
+analyze.type_checking_imports = true

----- stderr -----

@@ -396,5 +396,6 @@ analyze.target_version = 3.7
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
+analyze.type_checking_imports = true

----- stderr -----

@@ -71,16 +71,13 @@ impl Display for Benchmark<'_> {
    }
}

-fn check_project(db: &ProjectDatabase, max_diagnostics: usize) {
+fn check_project(db: &ProjectDatabase, project_name: &str, max_diagnostics: usize) {
    let result = db.check();
    let diagnostics = result.len();

    assert!(
        diagnostics > 1 && diagnostics <= max_diagnostics,
-        "Expected between {} and {} diagnostics but got {}",
-        1,
-        max_diagnostics,
-        diagnostics
+        "Expected between 1 and {max_diagnostics} diagnostics on project '{project_name}' but got {diagnostics}",
    );
}

@@ -146,7 +143,7 @@ static FREQTRADE: Benchmark = Benchmark::new(
        max_dep_date: "2025-06-17",
        python_version: PythonVersion::PY312,
    },
-    400,
+    525,
);

static PANDAS: Benchmark = Benchmark::new(

@@ -184,7 +181,7 @@ static PYDANTIC: Benchmark = Benchmark::new(
        max_dep_date: "2025-06-17",
        python_version: PythonVersion::PY39,
    },
-    1000,
+    5000,
);

static SYMPY: Benchmark = Benchmark::new(

@@ -226,7 +223,7 @@ static STATIC_FRAME: Benchmark = Benchmark::new(
        max_dep_date: "2025-08-09",
        python_version: PythonVersion::PY311,
    },
-    800,
+    900,
);

#[track_caller]

@@ -234,11 +231,11 @@ fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) {
    bencher
        .with_inputs(|| benchmark.setup_iteration())
        .bench_local_refs(|db| {
-            check_project(db, benchmark.max_diagnostics);
+            check_project(db, benchmark.project.name, benchmark.max_diagnostics);
        });
}

-#[bench(args=[&ALTAIR, &FREQTRADE, &PYDANTIC, &TANJUN], sample_size=2, sample_count=3)]
+#[bench(args=[&ALTAIR, &FREQTRADE, &TANJUN], sample_size=2, sample_count=3)]
fn small(bencher: Bencher, benchmark: &Benchmark) {
    run_single_threaded(bencher, benchmark);
}

@@ -248,12 +245,12 @@ fn medium(bencher: Bencher, benchmark: &Benchmark) {
    run_single_threaded(bencher, benchmark);
}

-#[bench(args=[&SYMPY], sample_size=1, sample_count=2)]
+#[bench(args=[&SYMPY, &PYDANTIC], sample_size=1, sample_count=2)]
fn large(bencher: Bencher, benchmark: &Benchmark) {
    run_single_threaded(bencher, benchmark);
}

-#[bench(args=[&PYDANTIC], sample_size=3, sample_count=8)]
+#[bench(args=[&ALTAIR], sample_size=3, sample_count=8)]
fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
    let thread_pool = ThreadPoolBuilder::new().build().unwrap();

@@ -261,7 +258,7 @@ fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
        .with_inputs(|| benchmark.setup_iteration())
        .bench_local_values(|db| {
            thread_pool.install(|| {
-                check_project(&db, benchmark.max_diagnostics);
+                check_project(&db, benchmark.project.name, benchmark.max_diagnostics);
                db
            })
        });

@@ -285,7 +282,7 @@ fn main() {
    // branch when looking up the ingredient index.
    {
        let db = TANJUN.setup_iteration();
-        check_project(&db, TANJUN.max_diagnostics);
+        check_project(&db, TANJUN.project.name, TANJUN.max_diagnostics);
    }

    divan::main();

@@ -112,16 +112,16 @@ impl std::fmt::Display for Diff<'_> {
        // `None`, indicating a regular script file, all the lines will be in one "cell" under the
        // `None` key.
        let cells = if let Some(notebook_index) = &self.notebook_index {
-            let mut last_cell = OneIndexed::MIN;
+            let mut last_cell_index = OneIndexed::MIN;
            let mut cells: Vec<(Option<OneIndexed>, TextSize)> = Vec::new();
-            for (row, cell) in notebook_index.iter() {
-                if cell != last_cell {
-                    let offset = source_code.line_start(row);
-                    cells.push((Some(last_cell), offset));
-                    last_cell = cell;
+            for cell in notebook_index.iter() {
+                if cell.cell_index() != last_cell_index {
+                    let offset = source_code.line_start(cell.start_row());
+                    cells.push((Some(last_cell_index), offset));
+                    last_cell_index = cell.cell_index();
                }
            }
-            cells.push((Some(last_cell), source_text.text_len()));
+            cells.push((Some(last_cell_index), source_text.text_len()));
            cells
        } else {
            vec![(None, source_text.text_len())]

@@ -470,6 +470,17 @@ impl File {
        self.source_type(db).is_stub()
    }

+    /// Returns `true` if the file is an `__init__.pyi`
+    pub fn is_package_stub(self, db: &dyn Db) -> bool {
+        self.path(db).as_str().ends_with("__init__.pyi")
+    }
+
+    /// Returns `true` if the file is an `__init__.py` or `__init__.pyi`
+    pub fn is_package(self, db: &dyn Db) -> bool {
+        let path = self.path(db).as_str();
+        path.ends_with("__init__.pyi") || path.ends_with("__init__.py")
+    }
+
    pub fn source_type(self, db: &dyn Db) -> PySourceType {
        match self.path(db) {
            FilePath::System(path) => path

@@ -7,6 +7,7 @@ use ruff_source_file::LineIndex;

use crate::Db;
use crate::files::{File, FilePath};
+use crate::system::System;

/// Reads the source text of a python text file (must be valid UTF8) or notebook.
#[salsa::tracked(heap_size=ruff_memory_usage::heap_size)]

@@ -15,7 +16,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    let _span = tracing::trace_span!("source_text", file = %path).entered();
    let mut read_error = None;

-    let kind = if is_notebook(file.path(db)) {
+    let kind = if is_notebook(db.system(), path) {
        file.read_to_notebook(db)
            .unwrap_or_else(|error| {
                tracing::debug!("Failed to read notebook '{path}': {error}");

@@ -40,18 +41,17 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    }
}

-fn is_notebook(path: &FilePath) -> bool {
-    match path {
-        FilePath::System(system) => system.extension().is_some_and(|extension| {
-            PySourceType::try_from_extension(extension) == Some(PySourceType::Ipynb)
-        }),
-        FilePath::SystemVirtual(system_virtual) => {
-            system_virtual.extension().is_some_and(|extension| {
-                PySourceType::try_from_extension(extension) == Some(PySourceType::Ipynb)
-            })
-        }
-        FilePath::Vendored(_) => false,
-    }
-}
+fn is_notebook(system: &dyn System, path: &FilePath) -> bool {
+    let source_type = match path {
+        FilePath::System(path) => system.source_type(path),
+        FilePath::SystemVirtual(system_virtual) => system.virtual_path_source_type(system_virtual),
+        FilePath::Vendored(_) => return false,
+    };
+
+    let with_extension_fallback =
+        source_type.or_else(|| PySourceType::try_from_extension(path.extension()?));
+
+    with_extension_fallback == Some(PySourceType::Ipynb)
+}

/// The source text of a file containing python code.

@@ -9,6 +9,7 @@ pub use os::OsSystem;

use filetime::FileTime;
use ruff_notebook::{Notebook, NotebookError};
+use ruff_python_ast::PySourceType;
use std::error::Error;
use std::fmt::{Debug, Formatter};
use std::path::{Path, PathBuf};

@@ -16,12 +17,11 @@ use std::{fmt, io};
pub use test::{DbWithTestSystem, DbWithWritableSystem, InMemorySystem, TestSystem};
use walk_directory::WalkDirectoryBuilder;

-use crate::file_revision::FileRevision;
-
pub use self::path::{
    DeduplicatedNestedPathsIter, SystemPath, SystemPathBuf, SystemVirtualPath,
    SystemVirtualPathBuf, deduplicate_nested_paths,
};
+use crate::file_revision::FileRevision;

mod memory_fs;
#[cfg(feature = "os")]

@@ -66,6 +66,35 @@ pub trait System: Debug + Sync + Send {
    /// See [dunce::canonicalize] for more information.
    fn canonicalize_path(&self, path: &SystemPath) -> Result<SystemPathBuf>;

+    /// Returns the source type for `path` if known or `None`.
+    ///
+    /// The default is to always return `None`, assuming the system
+    /// has no additional information and that the caller should
+    /// rely on the file extension instead.
+    ///
+    /// This is primarily used for the LSP integration to respect
+    /// the chosen language (or the fact that it is a notebook) in
+    /// the editor.
+    fn source_type(&self, path: &SystemPath) -> Option<PySourceType> {
+        let _ = path;
+        None
+    }
+
+    /// Returns the source type for `path` if known or `None`.
+    ///
+    /// The default is to always return `None`, assuming the system
+    /// has no additional information and that the caller should
+    /// rely on the file extension instead.
+    ///
+    /// This is primarily used for the LSP integration to respect
+    /// the chosen language (or the fact that it is a notebook) in
+    /// the editor.
+    fn virtual_path_source_type(&self, path: &SystemVirtualPath) -> Option<PySourceType> {
+        let _ = path;
+
+        None
+    }
+
    /// Reads the content of the file at `path` into a [`String`].
    fn read_to_string(&self, path: &SystemPath) -> Result<String>;

@@ -723,10 +723,11 @@ impl ruff_cache::CacheKey for SystemPathBuf {

/// A slice of a virtual path on [`System`](super::System) (akin to [`str`]).
#[repr(transparent)]
#[derive(Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct SystemVirtualPath(str);

impl SystemVirtualPath {
-    pub fn new(path: &str) -> &SystemVirtualPath {
+    pub const fn new(path: &str) -> &SystemVirtualPath {
        // SAFETY: SystemVirtualPath is marked as #[repr(transparent)] so the conversion from a
        // *const str to a *const SystemVirtualPath is valid.
        unsafe { &*(path as *const str as *const SystemVirtualPath) }

@@ -767,8 +768,8 @@ pub struct SystemVirtualPathBuf(String);

impl SystemVirtualPathBuf {
    #[inline]
-    pub fn as_path(&self) -> &SystemVirtualPath {
-        SystemVirtualPath::new(&self.0)
+    pub const fn as_path(&self) -> &SystemVirtualPath {
+        SystemVirtualPath::new(self.0.as_str())
    }
}

@@ -852,6 +853,12 @@ impl ruff_cache::CacheKey for SystemVirtualPathBuf {
    }
}

+impl Borrow<SystemVirtualPath> for SystemVirtualPathBuf {
+    fn borrow(&self) -> &SystemVirtualPath {
+        self.as_path()
+    }
+}
+
/// Deduplicates identical paths and removes nested paths.
///
/// # Examples

@@ -62,7 +62,7 @@ fn generate_set(output: &mut String, set: Set, parents: &mut Vec<Set>) {
        generate_set(
            output,
            Set::Named {
                name: set_name.to_string(),
                name: set_name.clone(),
                set: *sub_set,
            },
            parents,

@@ -104,7 +104,7 @@ fn generate_set(output: &mut String, set: Set, parents: &mut Vec<Set>) {
        generate_set(
            output,
            Set::Named {
                name: set_name.to_string(),
                name: set_name.clone(),
                set: *sub_set,
            },
            parents,
@@ -1006,7 +1006,7 @@ impl<Context> std::fmt::Debug for Align<'_, Context> {
/// Block indents indent a block of code, such as in a function body, and therefore insert a line
/// break before and after the content.
///
/// Doesn't create an indentation if the passed in content is [`FormatElement.is_empty`].
/// Doesn't create an indentation if the passed in content is empty.
///
/// # Examples
///
@@ -487,7 +487,7 @@ pub trait FormatElements {
/// Represents the width by adding 1 to the actual width so that the width can be represented by a [`NonZeroU32`],
/// allowing [`TextWidth`] or [`Option<Width>`] to fit in 4 bytes rather than 8.
///
/// This means that 2^32 can not be precisely represented and instead has the same value as 2^32-1.
/// This means that 2^32 cannot be precisely represented and instead has the same value as 2^32-1.
/// This imprecision shouldn't matter in practice, because such text is longer than any configured line width
/// and should therefore break.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
@@ -14,14 +14,21 @@ pub(crate) struct Collector<'a> {
    string_imports: StringImports,
    /// The collected imports from the Python AST.
    imports: Vec<CollectedImport>,
    /// Whether to detect type checking imports
    type_checking_imports: bool,
}

impl<'a> Collector<'a> {
    pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: StringImports) -> Self {
    pub(crate) fn new(
        module_path: Option<&'a [String]>,
        string_imports: StringImports,
        type_checking_imports: bool,
    ) -> Self {
        Self {
            module_path,
            string_imports,
            imports: Vec::new(),
            type_checking_imports,
        }
    }
@@ -91,10 +98,25 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
                }
            }
        }
        Stmt::If(ast::StmtIf {
            test,
            body,
            elif_else_clauses,
            range: _,
            node_index: _,
        }) => {
            // Skip TYPE_CHECKING blocks if not requested
            if self.type_checking_imports || !is_type_checking_condition(test) {
                self.visit_body(body);
            }

            for clause in elif_else_clauses {
                self.visit_elif_else_clause(clause);
            }
        }
        Stmt::FunctionDef(_)
        | Stmt::ClassDef(_)
        | Stmt::While(_)
        | Stmt::If(_)
        | Stmt::With(_)
        | Stmt::Match(_)
        | Stmt::Try(_)
@@ -152,6 +174,30 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
        }
    }
/// Check if an expression is a `TYPE_CHECKING` condition.
///
/// Returns `true` for:
/// - `TYPE_CHECKING`
/// - `typing.TYPE_CHECKING`
///
/// NOTE: Aliased `TYPE_CHECKING`, i.e. `from typing import TYPE_CHECKING as TC; if TC: ...`,
/// will not be detected!
fn is_type_checking_condition(expr: &Expr) -> bool {
    match expr {
        // `if TYPE_CHECKING:`
        Expr::Name(ast::ExprName { id, .. }) => id.as_str() == "TYPE_CHECKING",
        // `if typing.TYPE_CHECKING:`
        Expr::Attribute(ast::ExprAttribute { value, attr, .. }) => {
            attr.as_str() == "TYPE_CHECKING"
                && matches!(
                    value.as_ref(),
                    Expr::Name(ast::ExprName { id, .. }) if id.as_str() == "typing"
                )
        }
        _ => false,
    }
}

#[derive(Debug)]
pub(crate) enum CollectedImport {
    /// The import was part of an `import` statement.
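The `is_type_checking_condition` helper above recognizes exactly two spellings of the guard; anything else, including an aliased import, still has its imports collected. Illustrated on the Python side (modules `a`, `b`, `c` are hypothetical):

```python
from typing import TYPE_CHECKING
import typing

if TYPE_CHECKING:          # detected: bare `TYPE_CHECKING` name
    import a

if typing.TYPE_CHECKING:   # detected: `typing.TYPE_CHECKING` attribute
    import b

from typing import TYPE_CHECKING as TC

if TC:                     # NOT detected: the alias hides the guard
    import c
```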

@@ -3,8 +3,9 @@ use std::collections::{BTreeMap, BTreeSet};
use anyhow::Result;

use ruff_db::system::{SystemPath, SystemPathBuf};
use ruff_python_ast::PySourceType;
use ruff_python_ast::helpers::to_module_path;
use ruff_python_parser::{Mode, ParseOptions, parse};
use ruff_python_parser::{ParseOptions, parse};

use crate::collector::Collector;
pub use crate::db::ModuleDb;
@@ -24,20 +25,26 @@ impl ModuleImports {
    /// Detect the [`ModuleImports`] for a given Python file.
    pub fn detect(
        db: &ModuleDb,
        source: &str,
        source_type: PySourceType,
        path: &SystemPath,
        package: Option<&SystemPath>,
        string_imports: StringImports,
        type_checking_imports: bool,
    ) -> Result<Self> {
        // Read and parse the source code.
        let source = std::fs::read_to_string(path)?;
        let parsed = parse(&source, ParseOptions::from(Mode::Module))?;
        // Parse the source code.
        let parsed = parse(source, ParseOptions::from(source_type))?;

        let module_path =
            package.and_then(|package| to_module_path(package.as_std_path(), path.as_std_path()));

        // Collect the imports.
        let imports =
            Collector::new(module_path.as_deref(), string_imports).collect(parsed.syntax());
        let imports = Collector::new(
            module_path.as_deref(),
            string_imports,
            type_checking_imports,
        )
        .collect(parsed.syntax());

        // Resolve the imports.
        let mut resolved_imports = ModuleImports::default();
@@ -6,7 +6,7 @@ use std::collections::BTreeMap;
use std::fmt;
use std::path::PathBuf;

#[derive(Debug, Default, Clone, CacheKey)]
#[derive(Debug, Clone, CacheKey)]
pub struct AnalyzeSettings {
    pub exclude: FilePatternSet,
    pub preview: PreviewMode,
@@ -14,6 +14,21 @@ pub struct AnalyzeSettings {
    pub string_imports: StringImports,
    pub include_dependencies: BTreeMap<PathBuf, (PathBuf, Vec<String>)>,
    pub extension: ExtensionMapping,
    pub type_checking_imports: bool,
}

impl Default for AnalyzeSettings {
    fn default() -> Self {
        Self {
            exclude: FilePatternSet::default(),
            preview: PreviewMode::default(),
            target_version: PythonVersion::default(),
            string_imports: StringImports::default(),
            include_dependencies: BTreeMap::default(),
            extension: ExtensionMapping::default(),
            type_checking_imports: true,
        }
    }
}

impl fmt::Display for AnalyzeSettings {
@@ -29,6 +44,7 @@ impl fmt::Display for AnalyzeSettings {
            self.string_imports,
            self.extension | debug,
            self.include_dependencies | debug,
            self.type_checking_imports,
        ]
    }
    Ok(())
@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.14.2"
version = "0.14.5"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -22,6 +22,7 @@ DAG(dag_id="class_schedule_interval", schedule_interval="@hourly")

DAG(dag_id="class_timetable", timetable=NullTimetable())

DAG(dag_id="class_concurrency", concurrency=12)

DAG(dag_id="class_fail_stop", fail_stop=True)
@@ -70,3 +70,12 @@ builtins.getattr(foo, "bar")

# Regression test for: https://github.com/astral-sh/ruff/issues/18353
setattr(foo, "__debug__", 0)

# Regression test for: https://github.com/astral-sh/ruff/issues/21126
# Non-NFKC attribute names should be marked as unsafe. Python normalizes identifiers in
# attribute access (obj.attr) using NFKC, but does not normalize string
# arguments passed to getattr/setattr. Rewriting `getattr(ns, "ſ")` to
# `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior.
# Example: the long s character "ſ" normalizes to "s" under NFKC.
getattr(foo, "ſ")
setattr(foo, "ſ", 1)
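The normalization claim behind these fixtures is easy to verify with the standard library; NFKC is exactly what CPython applies to identifiers:

```python
import unicodedata

# U+017F LATIN SMALL LETTER LONG S folds to a plain "s" under NFKC.
assert unicodedata.normalize("NFKC", "\u017f") == "s"
```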

@@ -46,7 +46,8 @@ def func():


def func():
    # OK (index doesn't start at 0)
    # SIM113
    # https://github.com/astral-sh/ruff/pull/21395
    idx = 10
    for x in range(5):
        g(x, idx)
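Under preview (see the gate added in `preview.rs` later in this diff), SIM113 now also fires for counters starting at any integer, not only zero; `enumerate`'s `start` argument carries the offset. Assuming the fixture's loop goes on to increment `idx`:

```python
# Before (flagged in preview): a manually maintained counter starting at 10.
idx = 10
for x in range(5):
    g(x, idx)
    idx += 1

# After: the same pairing, with the offset expressed via `start`.
for idx, x in enumerate(range(5), start=10):
    g(x, idx)
```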

@@ -204,3 +204,15 @@ x = 1
print(f"{x=}" or "bar")  # SIM222
(lambda: 1) or True  # SIM222
(i for i in range(1)) or "bar"  # SIM222

# https://github.com/astral-sh/ruff/issues/21136
def get_items():
    return tuple(item for item in Item.objects.all()) or None  # OK


def get_items_list():
    return tuple([item for item in items]) or None  # OK


def get_items_set():
    return tuple({item for item in items}) or None  # OK
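These cases are `# OK` because `tuple(...)` can produce an empty, falsy tuple, so the `or None` branch is reachable and the expression is not redundant:

```python
assert (tuple(x for x in []) or None) is None
assert (tuple(x for x in [1]) or None) == (1,)
```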

@@ -371,6 +371,61 @@ class Foo:
    """
    return

# DOC102 - Test case from issue #20959: comma-separated parameters
def leq(x: object, y: object) -> bool:
    """Compare two objects for loose equality.

    Parameters
    ----------
    x1, x2 : object
        Objects.

    Returns
    -------
    bool
        Whether the objects are identical or equal.
    """
    return x is y or x == y


# OK - comma-separated parameters that match function signature
def compare_values(x1: int, x2: int) -> bool:
    """Compare two integer values.

    Parameters
    ----------
    x1, x2 : int
        Values to compare.

    Returns
    -------
    bool
        True if values are equal.
    """
    return x1 == x2


# DOC102 - mixed comma-separated and regular parameters
def process_data(data, x1: str, x2: str) -> str:
    """Process data with multiple string parameters.

    Parameters
    ----------
    data : list
        Input data to process.
    x1, x2 : str
        String parameters for processing.
    extra_param : str
        Extra parameter not in signature.

    Returns
    -------
    str
        Processed result.
    """
    return f"{x1}{x2}{len(data)}"


# OK
def baz(x: int) -> int:
    """
@@ -389,3 +444,21 @@ def baz(x: int) -> int:
    int
    """
    return x


# OK - comma-separated parameters without type annotations
def add_numbers(a, b):
    """
    Adds two numbers and returns the result.

    Parameters
    ----------
    a, b
        The numbers to add.

    Returns
    -------
    int
        The sum of the two numbers.
    """
    return a + b
@@ -83,6 +83,37 @@ def calculate_speed(distance: float, time: float) -> float:
        raise


# DOC502 regression for Sphinx directive after Raises (issue #18959)
def foo():
    """First line.

    Raises:
        ValueError:
            some text

    .. versionadded:: 0.7.0
        The ``init_kwargs`` argument.
    """
    raise ValueError


# DOC502 regression for following section with colons
def example_with_following_section():
    """Summary.

    Returns:
        str: The resulting expression.

    Raises:
        ValueError: If the unit is not valid.

    Relation to `time_range_lookup`:
    - Handles the "start of" modifier.
    - Example: "start of month" → `DATETRUNC()`.
    """
    raise ValueError


# This should NOT trigger DOC502 because OSError is explicitly re-raised
def f():
    """Do nothing.

@@ -117,3 +117,33 @@ def calculate_speed(distance: float, time: float) -> float:
    except TypeError:
        print("Not a number? Shame on you!")
        raise


# DOC502 regression for Sphinx directive after Raises (issue #18959)
def foo():
    """First line.

    Raises
    ------
    ValueError
        some text

    .. versionadded:: 0.7.0
        The ``init_kwargs`` argument.
    """
    raise ValueError

# Make sure we don't bail out on a Sphinx directive in the description of one
# of the exceptions
def foo():
    """First line.

    Raises
    ------
    ValueError
        some text
        .. math:: e^{xception}
    ZeroDivisionError
        Will not be raised, DOC502
    """
    raise ValueError
crates/ruff_linter/resources/test/fixtures/pyupgrade/UP029_2.py (new file, 5 lines, vendored)
@@ -0,0 +1,5 @@
from .builtins import next
from ..builtins import str
from ...builtins import int
from .builtins import next as _next
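These relative imports target a project-local `builtins` module rather than the standard library, which is presumably why `unnecessary_builtin_import` now receives the import's `level` (see the checker change later in this diff). A hypothetical layout in which rewriting them away would break the code:

```python
# pkg/builtins.py — a project-local module that happens to share the stdlib name;
# its `next` need not behave like the builtin at all.
def next(iterator, default=None):
    """A project-specific variant; removing the import would change behavior."""
    ...

# pkg/app.py
# from .builtins import next   # must NOT be reported by UP029
```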

@@ -125,3 +125,18 @@ with open(*filename, mode="r") as f:
# `buffering`.
with open(*filename, file="file.txt", mode="r") as f:
    x = f.read()

# FURB101
with open("file.txt", encoding="utf-8") as f:
    contents: str = f.read()

# FURB101 but no fix because it would remove the assignment to `x`
with open("file.txt", encoding="utf-8") as f:
    contents, x = f.read(), 2

# FURB101 but no fix because it would remove the `process_contents` call
with open("file.txt", encoding="utf-8") as f:
    contents = process_contents(f.read())

with open("file.txt", encoding="utf-8") as f:
    contents: str = process_contents(f.read())
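For the plain read-into-a-variable case, the rule's suggested replacement collapses the `with` block into a single `pathlib` call:

```python
from pathlib import Path

contents: str = Path("file.txt").read_text(encoding="utf-8")
```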

@@ -145,3 +145,20 @@ with open("file.txt", "w") as f:
with open("file.txt", "w") as f:
    for line in text:
        f.write(line)

# See: https://github.com/astral-sh/ruff/issues/20785
import json

data = {"price": 100}

with open("test.json", "wb") as f:
    f.write(json.dumps(data, indent=4).encode("utf-8"))

# See: https://github.com/astral-sh/ruff/issues/21381
with open("tmp_path/pyproject.toml", "w") as f:
    f.write(dedent(
        """
        [project]
        other = 1.234
        """,
    ))
@@ -19,6 +19,9 @@ print("", *args, sep="")
print("", **kwargs)
print(sep="\t")
print(sep=print(1))
print(f"")
print(f"", sep=",")
print(f"", end="bar")

# OK.

@@ -33,3 +36,4 @@ print("foo", "", sep=",")
print("foo", "", "bar", "", sep=",")
print("", "", **kwargs)
print(*args, sep=",")
print(f"foo")

@@ -85,3 +85,9 @@ Decimal("1234_5678")  # Safe fix: preserves non-thousands separators
Decimal("0001_2345")
Decimal("000_1_2345")
Decimal("000_000")

# Test cases for underscores before sign
# https://github.com/astral-sh/ruff/issues/21186
Decimal("_-1")  # Should flag as verbose
Decimal("_+1")  # Should flag as verbose
Decimal("_-1_000")  # Should flag as verbose
@@ -64,3 +64,8 @@ _ = Decimal.from_float(True)
_ = Decimal.from_float(float("-nan"))
_ = Decimal.from_float(float("\x2dnan"))
_ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan"))

# See: https://github.com/astral-sh/ruff/issues/21257
# fixes must be safe
_ = Fraction.from_float(f=4.2)
_ = Fraction.from_decimal(dec=4)
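The constructor forms this rule rewrites toward are value-equivalent, which a quick check confirms; the safety concern in #21257 is about dropping the keyword name, not about the value:

```python
from decimal import Decimal
from fractions import Fraction

assert Fraction.from_float(4.2) == Fraction(4.2)
assert Fraction.from_decimal(Decimal(4)) == Fraction(4)
```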

@@ -132,3 +132,9 @@ class AWithQuotes:
    final_variable: 'Final[list[int]]' = []
    class_variable_without_subscript: 'ClassVar' = []
    final_variable_without_subscript: 'Final' = []


# Reassignment of a ClassVar should not trigger RUF012
class P:
    class_variable: ClassVar[list] = [10, 20, 30, 40, 50]
    class_variable = [*class_variable[0::1], *class_variable[2::3]]
@@ -81,3 +81,7 @@ round(# a comment
round(
    17  # a comment
)

# See: https://github.com/astral-sh/ruff/issues/21209
print(round(125, **{"ndigits": -2}))
print(round(125, *[-2]))
crates/ruff_linter/resources/test/fixtures/ruff/RUF065_1.py (new file, 18 lines, vendored)
@@ -0,0 +1,18 @@
import logging

# Test cases for str() that should NOT be flagged (issue #21315)
# str() with no arguments - should not be flagged
logging.warning("%s", str())

# str() with multiple arguments - should not be flagged
logging.warning("%s", str(b"\xe2\x9a\xa0", "utf-8"))

# str() with starred arguments - should not be flagged
logging.warning("%s", str(*(b"\xf0\x9f\x9a\xa7", "utf-8")))

# str() with keyword unpacking - should not be flagged
logging.warning("%s", str(**{"object": b"\xf0\x9f\x9a\xa8", "encoding": "utf-8"}))

# str() with single keyword argument - should be flagged (equivalent to str("!"))
logging.warning("%s", str(object="!"))

@@ -43,9 +43,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
            pycodestyle::rules::ambiguous_variable_name(checker, name, name.range());
        }
    }
    if checker.is_rule_enabled(Rule::NonlocalWithoutBinding) {
        pylint::rules::nonlocal_without_binding(checker, nonlocal);
    }
    if checker.is_rule_enabled(Rule::NonlocalAndGlobal) {
        pylint::rules::nonlocal_and_global(checker, nonlocal);
    }
@@ -720,7 +717,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
    }
    if checker.is_rule_enabled(Rule::UnnecessaryBuiltinImport) {
        if let Some(module) = module {
            pyupgrade::rules::unnecessary_builtin_import(checker, stmt, module, names);
            pyupgrade::rules::unnecessary_builtin_import(
                checker, stmt, module, names, level,
            );
        }
    }
    if checker.any_rule_enabled(&[
@@ -73,7 +73,8 @@ use crate::rules::pyflakes::rules::{
    UndefinedLocalWithNestedImportStarUsage, YieldOutsideFunction,
};
use crate::rules::pylint::rules::{
    AwaitOutsideAsync, LoadBeforeGlobalDeclaration, YieldFromInAsyncFunction,
    AwaitOutsideAsync, LoadBeforeGlobalDeclaration, NonlocalWithoutBinding,
    YieldFromInAsyncFunction,
};
use crate::rules::{flake8_pyi, flake8_type_checking, pyflakes, pyupgrade};
use crate::settings::rule_table::RuleTable;
@@ -641,6 +642,10 @@ impl SemanticSyntaxContext for Checker<'_> {
        self.semantic.global(name)
    }

    fn has_nonlocal_binding(&self, name: &str) -> bool {
        self.semantic.nonlocal(name).is_some()
    }

    fn report_semantic_error(&self, error: SemanticSyntaxError) {
        match error.kind {
            SemanticSyntaxErrorKind::LateFutureImport => {
@@ -717,6 +722,12 @@ impl SemanticSyntaxContext for Checker<'_> {
                self.report_diagnostic(pyflakes::rules::ContinueOutsideLoop, error.range);
            }
        }
        SemanticSyntaxErrorKind::NonlocalWithoutBinding(name) => {
            // PLE0117
            if self.is_rule_enabled(Rule::NonlocalWithoutBinding) {
                self.report_diagnostic(NonlocalWithoutBinding { name }, error.range);
            }
        }
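PLE0117 now surfaces through the semantic-syntax-error path instead of the statement checker. The pattern it reports is a `nonlocal` declaration with no binding in any enclosing function scope, which CPython itself rejects at compile time:

```python
def outer():
    def inner():
        nonlocal x  # PLE0117: no binding for `x` exists in an enclosing scope
    inner()
```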
        SemanticSyntaxErrorKind::ReboundComprehensionVariable
        | SemanticSyntaxErrorKind::DuplicateTypeParameter
        | SemanticSyntaxErrorKind::MultipleCaseAssignment(_)
@@ -849,23 +860,17 @@ impl SemanticSyntaxContext for Checker<'_> {
    }

    fn is_bound_parameter(&self, name: &str) -> bool {
        for scope in self.semantic.current_scopes() {
            match scope.kind {
                ScopeKind::Class(_) => return false,
                ScopeKind::Function(ast::StmtFunctionDef { parameters, .. })
                | ScopeKind::Lambda(ast::ExprLambda {
                    parameters: Some(parameters),
                    ..
                }) => return parameters.includes(name),
                ScopeKind::Lambda(_)
                | ScopeKind::Generator { .. }
                | ScopeKind::Module
                | ScopeKind::Type
                | ScopeKind::DunderClassCell => {}
        match self.semantic.current_scope().kind {
            ScopeKind::Function(ast::StmtFunctionDef { parameters, .. }) => {
                parameters.includes(name)
            }
            ScopeKind::Class(_)
            | ScopeKind::Lambda(_)
            | ScopeKind::Generator { .. }
            | ScopeKind::Module
            | ScopeKind::Type
            | ScopeKind::DunderClassCell => false,
        }

        false
    }
}

@@ -83,7 +83,7 @@ impl<'a> Importer<'a> {
            .into_edit(&required_import)
        } else {
            // Insert at the start of the file.
            Insertion::start_of_file(self.python_ast, self.source, self.stylist)
            Insertion::start_of_file(self.python_ast, self.source, self.stylist, None)
                .into_edit(&required_import)
        }
    }
@@ -113,7 +113,7 @@ impl<'a> Importer<'a> {
            Insertion::end_of_statement(stmt, self.source, self.stylist)
        } else {
            // Insert at the start of the file.
            Insertion::start_of_file(self.python_ast, self.source, self.stylist)
            Insertion::start_of_file(self.python_ast, self.source, self.stylist, None)
        };
        let add_import_edit = insertion.into_edit(&content);

@@ -498,7 +498,7 @@ impl<'a> Importer<'a> {
            Insertion::end_of_statement(stmt, self.source, self.stylist)
        } else {
            // Insert at the start of the file.
            Insertion::start_of_file(self.python_ast, self.source, self.stylist)
            Insertion::start_of_file(self.python_ast, self.source, self.stylist, None)
        };
        if insertion.is_inline() {
            Err(anyhow::anyhow!(
@@ -51,13 +51,17 @@ impl<'de> serde::Deserialize<'de> for LineLength {
    where
        D: serde::Deserializer<'de>,
    {
        let value = u16::deserialize(deserializer)?;
        Self::try_from(value).map_err(|_| {
            serde::de::Error::custom(format!(
                "line-length must be between 1 and {} (got {value})",
                Self::MAX,
            ))
        })
        let value = i64::deserialize(deserializer)?;

        u16::try_from(value)
            .ok()
            .and_then(|u16_value| Self::try_from(u16_value).ok())
            .ok_or_else(|| {
                serde::de::Error::custom(format!(
                    "line-length must be between 1 and {} (got {value})",
                    Self::MAX,
                ))
            })
    }
}

@@ -377,6 +377,7 @@ pub fn add_noqa_to_path(
    source_kind: &SourceKind,
    source_type: PySourceType,
    settings: &LinterSettings,
    reason: Option<&str>,
) -> Result<usize> {
    // Parse once.
    let target_version = settings.resolve_target_version(path);
@@ -425,6 +426,7 @@ pub fn add_noqa_to_path(
        &settings.external,
        &directives.noqa_line_for,
        stylist.line_ending(),
        reason,
    )
}

@@ -4,4 +4,4 @@
expression: content
---
syntax_errors.py:
  1:15 invalid-syntax: Expected one or more symbol names after import
  3:12 invalid-syntax: Expected ')', found newline
  3:12 invalid-syntax: Expected `)`, found newline
@@ -39,7 +39,7 @@ pub fn generate_noqa_edits(
    let exemption = FileExemption::from(&file_directives);
    let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
    let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
    build_noqa_edits_by_diagnostic(comments, locator, line_ending)
    build_noqa_edits_by_diagnostic(comments, locator, line_ending, None)
}

/// A directive to ignore a set of rules either for a given line of Python source code or an entire file (e.g.,
@@ -715,6 +715,7 @@ impl Display for LexicalError {
impl Error for LexicalError {}

/// Adds noqa comments to suppress all messages of a file.
#[expect(clippy::too_many_arguments)]
pub(crate) fn add_noqa(
    path: &Path,
    diagnostics: &[Diagnostic],
@@ -723,6 +724,7 @@ pub(crate) fn add_noqa(
    external: &[String],
    noqa_line_for: &NoqaMapping,
    line_ending: LineEnding,
    reason: Option<&str>,
) -> Result<usize> {
    let (count, output) = add_noqa_inner(
        path,
@@ -732,12 +734,14 @@ pub(crate) fn add_noqa(
        external,
        noqa_line_for,
        line_ending,
        reason,
    );

    fs::write(path, output)?;
    Ok(count)
}

#[expect(clippy::too_many_arguments)]
fn add_noqa_inner(
    path: &Path,
    diagnostics: &[Diagnostic],
@@ -746,6 +750,7 @@ fn add_noqa_inner(
    external: &[String],
    noqa_line_for: &NoqaMapping,
    line_ending: LineEnding,
    reason: Option<&str>,
) -> (usize, String) {
    let mut count = 0;

@@ -757,7 +762,7 @@ fn add_noqa_inner(

    let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);

    let edits = build_noqa_edits_by_line(comments, locator, line_ending);
    let edits = build_noqa_edits_by_line(comments, locator, line_ending, reason);

    let contents = locator.contents();

@@ -783,6 +788,7 @@ fn build_noqa_edits_by_diagnostic(
    comments: Vec<Option<NoqaComment>>,
    locator: &Locator,
    line_ending: LineEnding,
    reason: Option<&str>,
) -> Vec<Option<Edit>> {
    let mut edits = Vec::default();
    for comment in comments {
@@ -794,6 +800,7 @@ fn build_noqa_edits_by_diagnostic(
            FxHashSet::from_iter([comment.code]),
            locator,
            line_ending,
            reason,
        ) {
            edits.push(Some(noqa_edit.into_edit()));
        }
@@ -808,6 +815,7 @@ fn build_noqa_edits_by_line<'a>(
    comments: Vec<Option<NoqaComment<'a>>>,
    locator: &Locator,
    line_ending: LineEnding,
    reason: Option<&'a str>,
) -> BTreeMap<TextSize, NoqaEdit<'a>> {
    let mut comments_by_line = BTreeMap::default();
    for comment in comments.into_iter().flatten() {
@@ -831,6 +839,7 @@ fn build_noqa_edits_by_line<'a>(
            .collect(),
            locator,
            line_ending,
            reason,
        ) {
            edits.insert(offset, edit);
        }
@@ -927,6 +936,7 @@ struct NoqaEdit<'a> {
    noqa_codes: FxHashSet<&'a SecondaryCode>,
    codes: Option<&'a Codes<'a>>,
    line_ending: LineEnding,
    reason: Option<&'a str>,
}

impl NoqaEdit<'_> {
@@ -954,6 +964,9 @@ impl NoqaEdit<'_> {
                push_codes(writer, self.noqa_codes.iter().sorted_unstable());
            }
        }
        if let Some(reason) = self.reason {
            write!(writer, " {reason}").unwrap();
        }
        write!(writer, "{}", self.line_ending.as_str()).unwrap();
    }
}
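With a `reason` supplied, the extra text is written after the codes on the generated suppression line. Assuming `reason` is the literal string passed by the caller, the output would look like:

```python
x = 1  # noqa: F841 TODO: remove this variable
```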

@@ -970,6 +983,7 @@ fn generate_noqa_edit<'a>(
    noqa_codes: FxHashSet<&'a SecondaryCode>,
    locator: &Locator,
    line_ending: LineEnding,
    reason: Option<&'a str>,
) -> Option<NoqaEdit<'a>> {
    let line_range = locator.full_line_range(offset);

@@ -999,6 +1013,7 @@ fn generate_noqa_edit<'a>(
        noqa_codes,
        codes,
        line_ending,
        reason,
    })
}

@@ -2832,6 +2847,7 @@ mod tests {
        &[],
        &noqa_line_for,
        LineEnding::Lf,
        None,
    );
    assert_eq!(count, 0);
    assert_eq!(output, format!("{contents}"));
@@ -2855,6 +2871,7 @@ mod tests {
        &[],
        &noqa_line_for,
        LineEnding::Lf,
        None,
    );
    assert_eq!(count, 1);
    assert_eq!(output, "x = 1 # noqa: F841\n");
@@ -2885,6 +2902,7 @@ mod tests {
        &[],
        &noqa_line_for,
        LineEnding::Lf,
        None,
    );
    assert_eq!(count, 1);
    assert_eq!(output, "x = 1 # noqa: E741, F841\n");
@@ -2915,6 +2933,7 @@ mod tests {
        &[],
        &noqa_line_for,
        LineEnding::Lf,
        None,
    );
    assert_eq!(count, 0);
    assert_eq!(output, "x = 1 # noqa");
@@ -261,16 +261,6 @@ pub(crate) const fn is_b006_unsafe_fix_preserve_assignment_expr_enabled(
    settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/20520
pub(crate) const fn is_fix_read_whole_file_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/20520
pub(crate) const fn is_fix_write_whole_file_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}

pub(crate) const fn is_typing_extensions_str_alias_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}
@@ -279,3 +269,8 @@ pub(crate) const fn is_typing_extensions_str_alias_enabled(settings: &LinterSett
pub(crate) const fn is_extended_i18n_function_matching_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/21395
pub(crate) const fn is_enumerate_for_loop_int_index_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}
@@ -196,6 +196,7 @@ fn check_call_arguments(checker: &Checker, qualified_name: &QualifiedName, argum
    match qualified_name.segments() {
        ["airflow", .., "DAG" | "dag"] => {
            // with replacement
            diagnostic_for_argument(checker, arguments, "concurrency", Some("max_active_tasks"));
            diagnostic_for_argument(checker, arguments, "fail_stop", Some("fail_fast"));
            diagnostic_for_argument(checker, arguments, "schedule_interval", Some("schedule"));
            diagnostic_for_argument(checker, arguments, "timetable", Some("schedule"));
@@ -28,6 +28,8 @@ AIR301 [*] `timetable` is removed in Airflow 3.0
22 |
23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
| ^^^^^^^^^
24 |
25 | DAG(dag_id="class_concurrency", concurrency=12)
|
help: Use `schedule` instead
20 |
@@ -36,249 +38,271 @@ help: Use `schedule` instead
- DAG(dag_id="class_timetable", timetable=NullTimetable())
23 + DAG(dag_id="class_timetable", schedule=NullTimetable())
24 |
25 |
26 | DAG(dag_id="class_fail_stop", fail_stop=True)
25 | DAG(dag_id="class_concurrency", concurrency=12)
26 |

AIR301 [*] `fail_stop` is removed in Airflow 3.0
--> AIR301_args.py:26:31
AIR301 [*] `concurrency` is removed in Airflow 3.0
--> AIR301_args.py:25:33
|
26 | DAG(dag_id="class_fail_stop", fail_stop=True)
| ^^^^^^^^^
27 |
28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
24 |
25 | DAG(dag_id="class_concurrency", concurrency=12)
| ^^^^^^^^^^^
26 |
27 | DAG(dag_id="class_fail_stop", fail_stop=True)
|
help: Use `fail_fast` instead
help: Use `max_active_tasks` instead
22 |
23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
24 |
25 |
- DAG(dag_id="class_concurrency", concurrency=12)
25 + DAG(dag_id="class_concurrency", max_active_tasks=12)
26 |
27 | DAG(dag_id="class_fail_stop", fail_stop=True)
28 |

AIR301 [*] `fail_stop` is removed in Airflow 3.0
--> AIR301_args.py:27:31
|
25 | DAG(dag_id="class_concurrency", concurrency=12)
26 |
27 | DAG(dag_id="class_fail_stop", fail_stop=True)
| ^^^^^^^^^
28 |
29 | DAG(dag_id="class_default_view", default_view="dag_default_view")
|
help: Use `fail_fast` instead
24 |
25 | DAG(dag_id="class_concurrency", concurrency=12)
26 |
- DAG(dag_id="class_fail_stop", fail_stop=True)
26 + DAG(dag_id="class_fail_stop", fail_fast=True)
27 |
28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
29 |
27 + DAG(dag_id="class_fail_stop", fail_fast=True)
28 |
29 | DAG(dag_id="class_default_view", default_view="dag_default_view")
30 |

AIR301 `default_view` is removed in Airflow 3.0
--> AIR301_args.py:28:34
--> AIR301_args.py:29:34
|
26 | DAG(dag_id="class_fail_stop", fail_stop=True)
27 |
28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
27 | DAG(dag_id="class_fail_stop", fail_stop=True)
28 |
29 | DAG(dag_id="class_default_view", default_view="dag_default_view")
| ^^^^^^^^^^^^
29 |
30 | DAG(dag_id="class_orientation", orientation="BT")
30 |
31 | DAG(dag_id="class_orientation", orientation="BT")
|

AIR301 `orientation` is removed in Airflow 3.0
--> AIR301_args.py:30:33
--> AIR301_args.py:31:33
|
28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
29 |
30 | DAG(dag_id="class_orientation", orientation="BT")
29 | DAG(dag_id="class_default_view", default_view="dag_default_view")
30 |
31 | DAG(dag_id="class_orientation", orientation="BT")
| ^^^^^^^^^^^
31 |
32 | allow_future_exec_dates_dag = DAG(dag_id="class_allow_future_exec_dates")
32 |
33 | allow_future_exec_dates_dag = DAG(dag_id="class_allow_future_exec_dates")
|

AIR301 [*] `schedule_interval` is removed in Airflow 3.0
--> AIR301_args.py:41:6
--> AIR301_args.py:42:6
|
41 | @dag(schedule_interval="0 * * * *")
42 | @dag(schedule_interval="0 * * * *")
| ^^^^^^^^^^^^^^^^^
42 | def decorator_schedule_interval():
43 | pass
43 | def decorator_schedule_interval():
44 | pass
|
help: Use `schedule` instead
38 | pass
39 |
39 | pass
40 |
41 |
- @dag(schedule_interval="0 * * * *")
41 + @dag(schedule="0 * * * *")
42 | def decorator_schedule_interval():
43 | pass
44 |
42 + @dag(schedule="0 * * * *")
43 | def decorator_schedule_interval():
44 | pass
45 |

AIR301 [*] `timetable` is removed in Airflow 3.0
--> AIR301_args.py:46:6
--> AIR301_args.py:47:6
|
46 | @dag(timetable=NullTimetable())
47 | @dag(timetable=NullTimetable())
| ^^^^^^^^^
47 | def decorator_timetable():
48 | pass
48 | def decorator_timetable():
49 | pass
|
help: Use `schedule` instead
43 | pass
44 |
44 | pass
45 |
46 |
- @dag(timetable=NullTimetable())
46 + @dag(schedule=NullTimetable())
47 | def decorator_timetable():
48 | pass
49 |
47 + @dag(schedule=NullTimetable())
48 | def decorator_timetable():
49 | pass
50 |

AIR301 [*] `execution_date` is removed in Airflow 3.0
--> AIR301_args.py:54:62
--> AIR301_args.py:55:62
|
52 | def decorator_deprecated_operator_args():
53 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
54 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
53 | def decorator_deprecated_operator_args():
54 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
55 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
| ^^^^^^^^^^^^^^
55 | )
56 | trigger_dagrun_op2 = TriggerDagRunOperator(
56 | )
57 | trigger_dagrun_op2 = TriggerDagRunOperator(
|
help: Use `logical_date` instead
51 | @dag()
52 | def decorator_deprecated_operator_args():
53 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
52 | @dag()
53 | def decorator_deprecated_operator_args():
54 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
- task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
54 + task_id="trigger_dagrun_op1", trigger_dag_id="test", logical_date="2024-12-04"
55 | )
56 | trigger_dagrun_op2 = TriggerDagRunOperator(
57 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
55 + task_id="trigger_dagrun_op1", trigger_dag_id="test", logical_date="2024-12-04"
56 | )
57 | trigger_dagrun_op2 = TriggerDagRunOperator(
58 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"

AIR301 [*] `execution_date` is removed in Airflow 3.0
--> AIR301_args.py:57:62
--> AIR301_args.py:58:62
|
55 | )
56 | trigger_dagrun_op2 = TriggerDagRunOperator(
57 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
56 | )
57 | trigger_dagrun_op2 = TriggerDagRunOperator(
58 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
| ^^^^^^^^^^^^^^
58 | )
59 | )
|
help: Use `logical_date` instead
54 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
55 | )
56 | trigger_dagrun_op2 = TriggerDagRunOperator(
55 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
56 | )
57 | trigger_dagrun_op2 = TriggerDagRunOperator(
- task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
57 + task_id="trigger_dagrun_op2", trigger_dag_id="test", logical_date="2024-12-04"
58 | )
59 |
60 | branch_dt_op = datetime.BranchDateTimeOperator(
58 + task_id="trigger_dagrun_op2", trigger_dag_id="test", logical_date="2024-12-04"
59 | )
60 |
61 | branch_dt_op = datetime.BranchDateTimeOperator(

AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
--> AIR301_args.py:61:33
--> AIR301_args.py:62:33
|
60 | branch_dt_op = datetime.BranchDateTimeOperator(
61 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
61 | branch_dt_op = datetime.BranchDateTimeOperator(
62 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
| ^^^^^^^^^^^^^^^^^^^^^^
62 | )
63 | branch_dt_op2 = BranchDateTimeOperator(
63 | )
64 | branch_dt_op2 = BranchDateTimeOperator(
|
help: Use `use_task_logical_date` instead
58 | )
59 |
60 | branch_dt_op = datetime.BranchDateTimeOperator(
59 | )
60 |
61 | branch_dt_op = datetime.BranchDateTimeOperator(
- task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
61 + task_id="branch_dt_op", use_task_logical_date=True, task_concurrency=5
62 | )
63 | branch_dt_op2 = BranchDateTimeOperator(
64 | task_id="branch_dt_op2",
62 + task_id="branch_dt_op", use_task_logical_date=True, task_concurrency=5
63 | )
64 | branch_dt_op2 = BranchDateTimeOperator(
65 | task_id="branch_dt_op2",

AIR301 [*] `task_concurrency` is removed in Airflow 3.0
--> AIR301_args.py:61:62
--> AIR301_args.py:62:62
|
60 | branch_dt_op = datetime.BranchDateTimeOperator(
61 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
61 | branch_dt_op = datetime.BranchDateTimeOperator(
62 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
| ^^^^^^^^^^^^^^^^
62 | )
63 | branch_dt_op2 = BranchDateTimeOperator(
63 | )
64 | branch_dt_op2 = BranchDateTimeOperator(
|
help: Use `max_active_tis_per_dag` instead
58 | )
59 |
60 | branch_dt_op = datetime.BranchDateTimeOperator(
59 | )
60 |
61 | branch_dt_op = datetime.BranchDateTimeOperator(
- task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
61 + task_id="branch_dt_op", use_task_execution_day=True, max_active_tis_per_dag=5
62 | )
63 | branch_dt_op2 = BranchDateTimeOperator(
64 | task_id="branch_dt_op2",
62 + task_id="branch_dt_op", use_task_execution_day=True, max_active_tis_per_dag=5
63 | )
64 | branch_dt_op2 = BranchDateTimeOperator(
65 | task_id="branch_dt_op2",

AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
--> AIR301_args.py:65:9
--> AIR301_args.py:66:9
|
63 | branch_dt_op2 = BranchDateTimeOperator(
64 | task_id="branch_dt_op2",
65 | use_task_execution_day=True,
64 | branch_dt_op2 = BranchDateTimeOperator(
65 | task_id="branch_dt_op2",
66 | use_task_execution_day=True,
| ^^^^^^^^^^^^^^^^^^^^^^
66 | sla=timedelta(seconds=10),
67 | )
67 | sla=timedelta(seconds=10),
68 | )
|
help: Use `use_task_logical_date` instead
62 | )
63 | branch_dt_op2 = BranchDateTimeOperator(
64 | task_id="branch_dt_op2",
63 | )
64 | branch_dt_op2 = BranchDateTimeOperator(
65 | task_id="branch_dt_op2",
- use_task_execution_day=True,
65 + use_task_logical_date=True,
66 | sla=timedelta(seconds=10),
67 | )
68 |
66 + use_task_logical_date=True,
67 | sla=timedelta(seconds=10),
68 | )
69 |

AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
--> AIR301_args.py:92:9
--> AIR301_args.py:93:9
|
90 | follow_task_ids_if_true=None,
91 | week_day=1,
92 | use_task_execution_day=True,
91 | follow_task_ids_if_true=None,
92 | week_day=1,
93 | use_task_execution_day=True,
| ^^^^^^^^^^^^^^^^^^^^^^
93 | )
94 | )
|
help: Use `use_task_logical_date` instead
89 | follow_task_ids_if_false=None,
90 | follow_task_ids_if_true=None,
91 | week_day=1,
90 | follow_task_ids_if_false=None,
91 | follow_task_ids_if_true=None,
92 | week_day=1,
- use_task_execution_day=True,
92 + use_task_logical_date=True,
93 | )
94 |
95 | trigger_dagrun_op >> trigger_dagrun_op2
93 + use_task_logical_date=True,
94 | )
95 |
96 | trigger_dagrun_op >> trigger_dagrun_op2

AIR301 `filename_template` is removed in Airflow 3.0
--> AIR301_args.py:102:15
--> AIR301_args.py:103:15
|
101 | # deprecated filename_template argument in FileTaskHandler
102 | S3TaskHandler(filename_template="/tmp/test")
102 | # deprecated filename_template argument in FileTaskHandler
103 | S3TaskHandler(filename_template="/tmp/test")
| ^^^^^^^^^^^^^^^^^
103 | HdfsTaskHandler(filename_template="/tmp/test")
104 | ElasticsearchTaskHandler(filename_template="/tmp/test")
104 | HdfsTaskHandler(filename_template="/tmp/test")
105 | ElasticsearchTaskHandler(filename_template="/tmp/test")
|

AIR301 `filename_template` is removed in Airflow 3.0
--> AIR301_args.py:103:17
--> AIR301_args.py:104:17
|
101 | # deprecated filename_template argument in FileTaskHandler
102 | S3TaskHandler(filename_template="/tmp/test")
103 | HdfsTaskHandler(filename_template="/tmp/test")
102 | # deprecated filename_template argument in FileTaskHandler
103 | S3TaskHandler(filename_template="/tmp/test")
104 | HdfsTaskHandler(filename_template="/tmp/test")
| ^^^^^^^^^^^^^^^^^
104 | ElasticsearchTaskHandler(filename_template="/tmp/test")
105 | GCSTaskHandler(filename_template="/tmp/test")
105 | ElasticsearchTaskHandler(filename_template="/tmp/test")
106 | GCSTaskHandler(filename_template="/tmp/test")
|

AIR301 `filename_template` is removed in Airflow 3.0
--> AIR301_args.py:104:26
--> AIR301_args.py:105:26
|
102 | S3TaskHandler(filename_template="/tmp/test")
103 | HdfsTaskHandler(filename_template="/tmp/test")
104 | ElasticsearchTaskHandler(filename_template="/tmp/test")
103 | S3TaskHandler(filename_template="/tmp/test")
104 | HdfsTaskHandler(filename_template="/tmp/test")
105 | ElasticsearchTaskHandler(filename_template="/tmp/test")
| ^^^^^^^^^^^^^^^^^
105 | GCSTaskHandler(filename_template="/tmp/test")
106 | GCSTaskHandler(filename_template="/tmp/test")
|

AIR301 `filename_template` is removed in Airflow 3.0
--> AIR301_args.py:105:16
--> AIR301_args.py:106:16
|
103 | HdfsTaskHandler(filename_template="/tmp/test")
104 | ElasticsearchTaskHandler(filename_template="/tmp/test")
105 | GCSTaskHandler(filename_template="/tmp/test")
104 | HdfsTaskHandler(filename_template="/tmp/test")
105 | ElasticsearchTaskHandler(filename_template="/tmp/test")
106 | GCSTaskHandler(filename_template="/tmp/test")
| ^^^^^^^^^^^^^^^^^
106 |
107 | FabAuthManager(None)
107 |
108 | FabAuthManager(None)
|

AIR301 `appbuilder` is removed in Airflow 3.0
--> AIR301_args.py:107:15
--> AIR301_args.py:108:15
|
105 | GCSTaskHandler(filename_template="/tmp/test")
106 |
107 | FabAuthManager(None)
106 | GCSTaskHandler(filename_template="/tmp/test")
107 |
108 | FabAuthManager(None)
| ^^^^^^
|
help: The constructor takes no parameter now
@@ -513,6 +513,9 @@ impl Violation for MissingReturnTypeClassMethod {
/// def foo(x: MyAny): ...
/// ```
///
/// ## Options
/// - `lint.flake8-annotations.allow-star-arg-any`
///
/// ## References
/// - [Typing spec: `Any`](https://typing.python.org/en/latest/spec/special-types.html#any)
/// - [Python documentation: `typing.Any`](https://docs.python.org/3/library/typing.html#typing.Any)
@@ -3,6 +3,7 @@ use ruff_python_ast::{self as ast, Expr};
use ruff_python_stdlib::identifiers::{is_identifier, is_mangled_private};
use ruff_source_file::LineRanges;
use ruff_text_size::Ranged;
use unicode_normalization::UnicodeNormalization;

use crate::checkers::ast::Checker;
use crate::fix::edits::pad;
@@ -29,6 +30,21 @@ use crate::{AlwaysFixableViolation, Edit, Fix};
/// obj.foo
/// ```
///
/// ## Fix safety
/// The fix is marked as unsafe for attribute names that are not in NFKC (Normalization Form KC)
/// normalization. Python normalizes identifiers using NFKC when using attribute access syntax
/// (e.g., `obj.attr`), but does not normalize string arguments passed to `getattr`. Rewriting
/// `getattr(obj, "ſ")` to `obj.ſ` would be interpreted as `obj.s` at runtime, changing behavior.
///
/// For example, the long s character `"ſ"` normalizes to `"s"` under NFKC, so:
/// ```python
/// # This accesses an attribute with the exact name "ſ" (if it exists)
/// value = getattr(obj, "ſ")
///
/// # But this would normalize to "s" and access a different attribute
/// obj.ſ  # This is interpreted as obj.s, not obj.ſ
/// ```
///
/// ## References
/// - [Python documentation: `getattr`](https://docs.python.org/3/library/functions.html#getattr)
#[derive(ViolationMetadata)]
@@ -69,8 +85,14 @@ pub(crate) fn getattr_with_constant(checker: &Checker, expr: &Expr, func: &Expr,
        return;
    }

    // Mark fixes as unsafe for non-NFKC attribute names. Python normalizes identifiers using NFKC, so using
    // attribute syntax (e.g., `obj.attr`) would normalize the name and potentially change
    // program behavior.
    let attr_name = value.to_str();
    let is_unsafe = attr_name.nfkc().collect::<String>() != attr_name;

    let mut diagnostic = checker.report_diagnostic(GetAttrWithConstant, expr.range());
    diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
    let edit = Edit::range_replacement(
        pad(
            if matches!(
                obj,
@@ -88,5 +110,11 @@ pub(crate) fn getattr_with_constant(checker: &Checker, expr: &Expr, func: &Expr,
            checker.locator(),
        ),
        expr.range(),
    )));
    );
    let fix = if is_unsafe {
        Fix::unsafe_edit(edit)
    } else {
        Fix::safe_edit(edit)
    };
    diagnostic.set_fix(fix);
}
@@ -4,6 +4,7 @@ use ruff_text_size::{Ranged, TextRange};

use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_codegen::Generator;
use ruff_python_stdlib::identifiers::{is_identifier, is_mangled_private};
use unicode_normalization::UnicodeNormalization;

use crate::checkers::ast::Checker;
use crate::{AlwaysFixableViolation, Edit, Fix};
@@ -28,6 +29,23 @@ use crate::{AlwaysFixableViolation, Edit, Fix};
/// obj.foo = 42
/// ```
///
/// ## Fix safety
/// The fix is marked as unsafe for attribute names that are not in NFKC (Normalization Form KC)
/// normalization. Python normalizes identifiers using NFKC when using attribute access syntax
/// (e.g., `obj.attr = value`), but does not normalize string arguments passed to `setattr`.
/// Rewriting `setattr(obj, "ſ", 1)` to `obj.ſ = 1` would be interpreted as `obj.s = 1` at
/// runtime, changing behavior.
///
/// For example, the long s character `"ſ"` normalizes to `"s"` under NFKC, so:
/// ```python
/// # This creates an attribute with the exact name "ſ"
/// setattr(obj, "ſ", 1)
/// getattr(obj, "ſ")  # Returns 1
///
/// # But this would normalize to "s" and set a different attribute
/// obj.ſ = 1  # This is interpreted as obj.s = 1, not obj.ſ = 1
/// ```
///
/// ## References
/// - [Python documentation: `setattr`](https://docs.python.org/3/library/functions.html#setattr)
#[derive(ViolationMetadata)]
@@ -89,6 +107,12 @@ pub(crate) fn setattr_with_constant(checker: &Checker, expr: &Expr, func: &Expr,
        return;
    }

    // Mark fixes as unsafe for non-NFKC attribute names. Python normalizes identifiers using NFKC, so using
    // attribute syntax (e.g., `obj.attr = value`) would normalize the name and potentially change
    // program behavior.
    let attr_name = name.to_str();
    let is_unsafe = attr_name.nfkc().collect::<String>() != attr_name;

    // We can only replace a `setattr` call (which is an `Expr`) with an assignment
    // (which is a `Stmt`) if the `Expr` is already being used as a `Stmt`
    // (i.e., it's directly within an `Stmt::Expr`).
@@ -100,10 +124,16 @@ pub(crate) fn setattr_with_constant(checker: &Checker, expr: &Expr, func: &Expr,
    {
        if expr == child.as_ref() {
            let mut diagnostic = checker.report_diagnostic(SetAttrWithConstant, expr.range());
            diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
            let edit = Edit::range_replacement(
                assignment(obj, name.to_str(), value, checker.generator()),
                expr.range(),
            )));
            );
            let fix = if is_unsafe {
                Fix::unsafe_edit(edit)
            } else {
                Fix::safe_edit(edit)
            };
            diagnostic.set_fix(fix);
        }
    }
}
@@ -360,3 +360,21 @@ help: Replace `getattr` with attribute access
70 |
71 | # Regression test for: https://github.com/astral-sh/ruff/issues/18353
72 | setattr(foo, "__debug__", 0)

B009 [*] Do not call `getattr` with a constant attribute value. It is not any safer than normal property access.
--> B009_B010.py:80:1
|
78 | # `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior.
79 | # Example: the long s character "ſ" normalizes to "s" under NFKC.
80 | getattr(foo, "ſ")
| ^^^^^^^^^^^^^^^^^
81 | setattr(foo, "ſ", 1)
|
help: Replace `getattr` with attribute access
77 | # arguments passed to getattr/setattr. Rewriting `getattr(ns, "ſ")` to
78 | # `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior.
79 | # Example: the long s character "ſ" normalizes to "s" under NFKC.
- getattr(foo, "ſ")
80 + foo.ſ
81 | setattr(foo, "ſ", 1)
note: This is an unsafe fix and may change runtime behavior

@@ -118,3 +118,19 @@ help: Replace `setattr` with assignment
56 |
57 | # Regression test for: https://github.com/astral-sh/ruff/issues/7455#issuecomment-1722458885
58 | assert getattr(func, '_rpc')is True

B010 [*] Do not call `setattr` with a constant attribute value. It is not any safer than normal property access.
--> B009_B010.py:81:1
|
79 | # Example: the long s character "ſ" normalizes to "s" under NFKC.
80 | getattr(foo, "ſ")
81 | setattr(foo, "ſ", 1)
| ^^^^^^^^^^^^^^^^^^^^
|
help: Replace `setattr` with assignment
78 | # `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior.
79 | # Example: the long s character "ſ" normalizes to "s" under NFKC.
80 | getattr(foo, "ſ")
- setattr(foo, "ſ", 1)
81 + foo.ſ = 1
note: This is an unsafe fix and may change runtime behavior
@@ -43,7 +43,7 @@ use crate::rules::flake8_comprehensions::fixes;
/// >>> {x: y for x, y in d1}  # Iterates over the keys of a mapping
/// {1: 2, 4: 5}
/// >>> dict(d1)  # Ruff's incorrect suggested fix
/// (1, 2): 3, (4, 5): 6}
/// {(1, 2): 3, (4, 5): 6}
/// >>> dict(d1.keys())  # Correct fix
/// {1: 2, 4: 5}
/// ```
@@ -78,7 +78,7 @@ pub(crate) fn unconventional_import_alias(
    let mut diagnostic = checker.report_diagnostic(
        UnconventionalImportAlias {
            name: qualified_name,
            asname: expected_alias.to_string(),
            asname: expected_alias.clone(),
        },
        binding.range(),
    );
@@ -6,21 +6,17 @@ use ruff_macros::CacheKey;
|
||||
|
||||
#[derive(Clone, Copy, Debug, CacheKey, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
#[derive(Default)]
|
||||
pub enum ParametrizeNameType {
|
||||
#[serde(rename = "csv")]
|
||||
Csv,
|
||||
#[serde(rename = "tuple")]
|
||||
#[default]
|
||||
Tuple,
|
||||
#[serde(rename = "list")]
|
||||
List,
|
||||
}
|
||||
|
||||
impl Default for ParametrizeNameType {
|
||||
fn default() -> Self {
|
||||
Self::Tuple
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for ParametrizeNameType {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
@@ -33,19 +29,15 @@ impl Display for ParametrizeNameType {
 
 #[derive(Clone, Copy, Debug, CacheKey, PartialEq, Eq, Serialize, Deserialize)]
 #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+#[derive(Default)]
 pub enum ParametrizeValuesType {
     #[serde(rename = "tuple")]
     Tuple,
     #[serde(rename = "list")]
+    #[default]
     List,
 }
 
-impl Default for ParametrizeValuesType {
-    fn default() -> Self {
-        Self::List
-    }
-}
-
 impl Display for ParametrizeValuesType {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         match self {
@@ -57,19 +49,15 @@ impl Display for ParametrizeValuesType {
 
 #[derive(Clone, Copy, Debug, CacheKey, PartialEq, Eq, Serialize, Deserialize)]
 #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+#[derive(Default)]
 pub enum ParametrizeValuesRowType {
     #[serde(rename = "tuple")]
+    #[default]
     Tuple,
     #[serde(rename = "list")]
     List,
 }
 
-impl Default for ParametrizeValuesRowType {
-    fn default() -> Self {
-        Self::Tuple
-    }
-}
-
 impl Display for ParametrizeValuesRowType {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         match self {
@@ -9,19 +9,15 @@ use ruff_macros::CacheKey;
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey)]
 #[serde(deny_unknown_fields, rename_all = "kebab-case")]
 #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+#[derive(Default)]
 pub enum Quote {
     /// Use double quotes.
+    #[default]
     Double,
     /// Use single quotes.
     Single,
 }
 
-impl Default for Quote {
-    fn default() -> Self {
-        Self::Double
-    }
-}
-
 impl From<ruff_python_ast::str::Quote> for Quote {
     fn from(value: ruff_python_ast::str::Quote) -> Self {
         match value {
@@ -61,6 +61,7 @@ mod tests {
 
     #[test_case(Rule::SplitStaticString, Path::new("SIM905.py"))]
     #[test_case(Rule::DictGetWithNoneDefault, Path::new("SIM910.py"))]
+    #[test_case(Rule::EnumerateForLoop, Path::new("SIM113.py"))]
     fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
         let snapshot = format!(
             "preview__{}_{}",
@@ -1,6 +1,8 @@
+use crate::preview::is_enumerate_for_loop_int_index_enabled;
 use ruff_macros::{ViolationMetadata, derive_message_formats};
 use ruff_python_ast::statement_visitor::{StatementVisitor, walk_stmt};
 use ruff_python_ast::{self as ast, Expr, Int, Number, Operator, Stmt};
+use ruff_python_semantic::analyze::type_inference::{NumberLike, PythonType, ResolvedPythonType};
 use ruff_python_semantic::analyze::typing;
 use ruff_text_size::Ranged;
 
@@ -11,6 +13,9 @@ use crate::checkers::ast::Checker;
 /// Checks for `for` loops with explicit loop-index variables that can be replaced
 /// with `enumerate()`.
 ///
+/// In [preview], this rule checks for index variables initialized with any integer rather than only
+/// a literal zero.
+///
 /// ## Why is this bad?
 /// When iterating over a sequence, it's often desirable to keep track of the
 /// index of each element alongside the element itself. Prefer the `enumerate`
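Note: a minimal sketch of the Python pattern SIM113 targets (illustrative names, not the actual fixture):

    items = ["a", "b", "c"]

    # Flagged: a manually maintained index initialized to zero.
    idx = 0
    for item in items:
        print(idx, item)
        idx += 1

    # Preferred: let enumerate() maintain the index.
    for idx, item in enumerate(items):
        print(idx, item)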
@@ -35,6 +40,8 @@ use crate::checkers::ast::Checker;
 ///
 /// ## References
 /// - [Python documentation: `enumerate`](https://docs.python.org/3/library/functions.html#enumerate)
+///
+/// [preview]: https://docs.astral.sh/ruff/preview/
 #[derive(ViolationMetadata)]
 #[violation_metadata(stable_since = "v0.2.0")]
 pub(crate) struct EnumerateForLoop {
@@ -82,17 +89,21 @@ pub(crate) fn enumerate_for_loop(checker: &Checker, for_stmt: &ast::StmtFor) {
             continue;
         }
 
-        // Ensure that the index variable was initialized to 0.
+        // Ensure that the index variable was initialized to 0 (or instance of `int` if preview is enabled).
         let Some(value) = typing::find_binding_value(binding, checker.semantic()) else {
             continue;
         };
-        if !matches!(
+        if !(matches!(
             value,
             Expr::NumberLiteral(ast::ExprNumberLiteral {
                 value: Number::Int(Int::ZERO),
                 ..
             })
-        ) {
+        ) || matches!(
+            ResolvedPythonType::from(value),
+            ResolvedPythonType::Atom(PythonType::Number(NumberLike::Integer))
+        ) && is_enumerate_for_loop_int_index_enabled(checker.settings()))
+        {
             continue;
         }
 
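Note: under preview, the initializer no longer has to be the literal `0`; any value that resolves to an integer type passes the new gate. A sketch of the case this enables (illustrative code, not the fixture):

    items = ["a", "b", "c"]

    # Now flagged in preview mode: a non-zero integer initializer.
    idx = 5
    for item in items:
        print(idx, item)
        idx += 1

    # Equivalent rewrite, starting the count at the same value:
    for idx, item in enumerate(items, start=5):
        print(idx, item)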
@@ -116,7 +116,7 @@ pub(crate) fn convert_for_loop_to_any_all(checker: &Checker, stmt: &Stmt) {
 
         let mut diagnostic = checker.report_diagnostic(
             ReimplementedBuiltin {
-                replacement: contents.to_string(),
+                replacement: contents.clone(),
             },
             TextRange::new(stmt.start(), terminal.stmt.end()),
         );
@@ -212,7 +212,7 @@ pub(crate) fn convert_for_loop_to_any_all(checker: &Checker, stmt: &Stmt) {
 
         let mut diagnostic = checker.report_diagnostic(
             ReimplementedBuiltin {
-                replacement: contents.to_string(),
+                replacement: contents.clone(),
             },
             TextRange::new(stmt.start(), terminal.stmt.end()),
         );
 
@@ -1101,6 +1101,7 @@ help: Replace with `f"{x=}"`
 204 + print(f"{x=}") # SIM222
 205 | (lambda: 1) or True # SIM222
 206 | (i for i in range(1)) or "bar" # SIM222
+207 |
 note: This is an unsafe fix and may change runtime behavior
 
 SIM222 [*] Use `lambda: 1` instead of `lambda: 1 or ...`
@@ -1119,6 +1120,8 @@ help: Replace with `lambda: 1`
     - (lambda: 1) or True # SIM222
 205 + lambda: 1 # SIM222
 206 | (i for i in range(1)) or "bar" # SIM222
+207 |
+208 | # https://github.com/astral-sh/ruff/issues/21136
 note: This is an unsafe fix and may change runtime behavior
 
 SIM222 [*] Use `(i for i in range(1))` instead of `(i for i in range(1)) or ...`
@@ -1128,6 +1131,8 @@ SIM222 [*] Use `(i for i in range(1))` instead of `(i for i in range(1)) or ...`
 205 | (lambda: 1) or True # SIM222
 206 | (i for i in range(1)) or "bar" # SIM222
     | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+207 |
+208 | # https://github.com/astral-sh/ruff/issues/21136
     |
 help: Replace with `(i for i in range(1))`
 203 | x = 1
@@ -1135,4 +1140,7 @@ help: Replace with `(i for i in range(1))`
 205 | (lambda: 1) or True # SIM222
     - (i for i in range(1)) or "bar" # SIM222
 206 + (i for i in range(1)) # SIM222
+207 |
+208 | # https://github.com/astral-sh/ruff/issues/21136
+209 | def get_items():
 note: This is an unsafe fix and may change runtime behavior
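Note: both new fixture cases rely on function objects and generator objects being unconditionally truthy, so `x or y` short-circuits to `x` and the right operand is dead code. A quick demonstration:

    f = (lambda: 1) or True
    print(f())      # 1 -- f is the lambda; True is never the result

    g = (i for i in range(1)) or "bar"
    print(list(g))  # [0] -- g is the generator, never "bar"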
@@ -0,0 +1,60 @@
+---
+source: crates/ruff_linter/src/rules/flake8_simplify/mod.rs
+---
+SIM113 Use `enumerate()` for index variable `idx` in `for` loop
+ --> SIM113.py:6:9
+  |
+4 |     for x in range(5):
+5 |         g(x, idx)
+6 |         idx += 1
+  |         ^^^^^^^^
+7 |         h(x)
+  |
+
+SIM113 Use `enumerate()` for index variable `idx` in `for` loop
+  --> SIM113.py:17:9
+   |
+15 |         if g(x):
+16 |             break
+17 |         idx += 1
+   |         ^^^^^^^^
+18 |         sum += h(x, idx)
+   |
+
+SIM113 Use `enumerate()` for index variable `idx` in `for` loop
+  --> SIM113.py:27:9
+   |
+25 |         g(x)
+26 |         h(x, y)
+27 |         idx += 1
+   |         ^^^^^^^^
+   |
+
+SIM113 Use `enumerate()` for index variable `idx` in `for` loop
+  --> SIM113.py:36:9
+   |
+34 |     for x in range(5):
+35 |         sum += h(x, idx)
+36 |         idx += 1
+   |         ^^^^^^^^
+   |
+
+SIM113 Use `enumerate()` for index variable `idx` in `for` loop
+  --> SIM113.py:44:9
+   |
+42 |     for x in range(5):
+43 |         g(x, idx)
+44 |         idx += 1
+   |         ^^^^^^^^
+45 |         h(x)
+   |
+
+SIM113 Use `enumerate()` for index variable `idx` in `for` loop
+  --> SIM113.py:54:9
+   |
+52 |     for x in range(5):
+53 |         g(x, idx)
+54 |         idx += 1
+   |         ^^^^^^^^
+55 |         h(x)
+   |
@@ -47,7 +47,7 @@ pub(crate) fn banned_api<T: Ranged>(checker: &Checker, policy: &NameMatchPolicy,
     checker.report_diagnostic(
         BannedApi {
             name: banned_module,
-            message: reason.msg.to_string(),
+            message: reason.msg.clone(),
         },
         node.range(),
     );
@@ -74,8 +74,8 @@ pub(crate) fn banned_attribute_access(checker: &Checker, expr: &Expr) {
     {
         checker.report_diagnostic(
             BannedApi {
-                name: banned_path.to_string(),
-                message: ban.msg.to_string(),
+                name: banned_path.clone(),
+                message: ban.msg.clone(),
             },
            expr.range(),
        );