Compare commits
20 Commits
main...dcreager/i

| SHA1 |
|---|
| f33ca3a622 |
| a79ba2036b |
| d6f28b7428 |
| 9df9adae1e |
| eec4e2ed11 |
| a44fbd6658 |
| 22075d5ed7 |
| 5d451979c4 |
| 73773b4ea4 |
| 7cfdc4a550 |
| 2f0e7d6af7 |
| 8d44f8b7b5 |
| c0faa2dc3d |
| f4fff7fb24 |
| a6bd68886f |
| 5c2c3f00ff |
| 5affc120b3 |
| 1e284933ec |
| c529ee4f80 |
| f88ff62da5 |
@@ -8,3 +8,7 @@ benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
# See: https://github.com/astral-sh/ruff/issues/11503
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]

[target.'wasm32-unknown-unknown']
# See https://docs.rs/getrandom/latest/getrandom/#webassembly-support
rustflags = ["--cfg", 'getrandom_backend="wasm_js"']
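For context (not part of this compare view): the `getrandom_backend="wasm_js"` cfg above only takes effect if the dependency side opts in as well. A minimal sketch of the assumed companion `Cargo.toml` entry, for getrandom 0.3:

```toml
# Assumed companion configuration (not shown in this diff):
# getrandom 0.3 needs both the RUSTFLAGS cfg above and the `wasm_js` feature.
[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]
getrandom = { version = "0.3", features = ["wasm_js"] }
```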
@@ -5,4 +5,4 @@ rustup component add clippy rustfmt
cargo install cargo-insta
cargo fetch

pip install maturin prek
pip install maturin pre-commit
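The contributor setup above swaps `pre-commit` for `prek`, a drop-in replacement. Assuming it keeps the familiar pre-commit CLI, local usage would look roughly like this (a sketch, not taken from this diff):

```sh
prek install          # register the git hook once
prek run --all-files  # run every configured hook against the whole repo
```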
1 .gitattributes
@@ -22,7 +22,6 @@ crates/ruff_linter/resources/test/fixtures/pyupgrade/UP018_CR.py text eol=cr
crates/ruff_linter/resources/test/fixtures/pyupgrade/UP018_LF.py text eol=lf

crates/ruff_python_parser/resources/inline linguist-generated=true
crates/ty_python_semantic/resources/mdtest/external/*.lock linguist-generated=true

ruff.schema.json -diff linguist-generated=true text=auto eol=lf
ty.schema.json -diff linguist-generated=true text=auto eol=lf
10 .github/CODEOWNERS
@@ -20,11 +20,9 @@
# ty
/crates/ty* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ruff_db/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_project/ @carljm @MichaReiser @sharkdp @dcreager @Gankra
/crates/ty_ide/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager @Gankra
/crates/ty_server/ @carljm @MichaReiser @sharkdp @dcreager @Gankra
/crates/ty_project/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_server/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_wasm/ @carljm @MichaReiser @sharkdp @dcreager @Gankra
/crates/ty_wasm/ @carljm @MichaReiser @sharkdp @dcreager
/scripts/ty_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ty_python_semantic/ @carljm @AlexWaygood @sharkdp @dcreager
/crates/ty_module_resolver/ @carljm @MichaReiser @AlexWaygood @Gankra
/crates/ty_python_semantic @carljm @AlexWaygood @sharkdp @dcreager
9 .github/actionlint.yaml
@@ -1,20 +1,13 @@
# Configuration for the actionlint tool, which we run via prek
# Configuration for the actionlint tool, which we run via pre-commit
# to verify the correctness of the syntax in our GitHub Actions workflows.

self-hosted-runner:
# Various runners we use that aren't recognized out-of-the-box by actionlint:
labels:
- depot-ubuntu-24.04-4
- depot-ubuntu-latest-8
- depot-ubuntu-22.04-16
- depot-ubuntu-22.04-32
- depot-windows-2022-16
- depot-ubuntu-22.04-arm-4
- github-windows-2025-x86_64-8
- github-windows-2025-x86_64-16
- codspeed-macro

paths:
".github/workflows/mypy_primer.yaml":
ignore:
- 'constant expression "false" in condition. remove the if: section'
3 .github/mypy-primer-ty.toml
@@ -4,6 +4,5 @@
# Enable off-by-default rules.
[rules]
possibly-unresolved-reference = "warn"
possibly-missing-import = "warn"
unused-ignore-comment = "warn"
division-by-zero = "warn"
unsupported-dynamic-base = "warn"
19 .github/renovate.json5
@@ -2,11 +2,12 @@
$schema: "https://docs.renovatebot.com/renovate-schema.json",
dependencyDashboard: true,
suppressNotifications: ["prEditedNotification"],
extends: ["github>astral-sh/renovate-config"],
extends: ["config:recommended"],
labels: ["internal"],
schedule: ["before 4am on Monday"],
semanticCommits: "disabled",
separateMajorMinor: false,
prHourlyLimit: 10,
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
cargo: {
// See https://docs.renovatebot.com/configuration-options/#rangestrategy
@@ -15,7 +16,7 @@
pep621: {
// The default for this package manager is to only search for `pyproject.toml` files
// found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
managerFilePatterns: ["^(python|scripts)/.*pyproject\\.toml$"],
fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
},
pip_requirements: {
// The default for this package manager is to run on all requirements.txt files:
@@ -33,7 +34,7 @@
npm: {
// The default for this package manager is to only search for `package.json` files
// found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
managerFilePatterns: ["^playground/.*package\\.json$"],
fileMatch: ["^playground/.*package\\.json$"],
},
"pre-commit": {
enabled: true,
@@ -76,9 +77,17 @@
enabled: false,
},
{
groupName: "prek dependencies",
// `mkdocs-material` requires a manual update to keep the version in sync
// with `mkdocs-material-insider`.
// See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
matchManagers: ["pip_requirements"],
matchPackageNames: ["mkdocs-material"],
enabled: false,
},
{
groupName: "pre-commit dependencies",
matchManagers: ["pre-commit"],
description: "Weekly update of prek dependencies",
description: "Weekly update of pre-commit dependencies",
},
{
groupName: "NPM Development dependencies",
70 .github/workflows/build-binaries.yml
@@ -39,11 +39,11 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -51,7 +51,6 @@ jobs:
- name: "Build sdist"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
command: sdist
args: --out dist
- name: "Test sdist"
@@ -60,7 +59,7 @@ jobs:
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
- name: "Upload sdist"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: wheels-sdist
path: dist
@@ -69,11 +68,11 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -82,11 +81,10 @@ jobs:
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: x86_64
args: --release --locked --out dist
- name: "Upload wheels"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: wheels-macos-x86_64
path: dist
@@ -101,7 +99,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: artifacts-macos-x86_64
path: |
@@ -112,11 +110,11 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: arm64
@@ -125,7 +123,6 @@ jobs:
- name: "Build wheels - aarch64"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: aarch64
args: --release --locked --out dist
- name: "Test wheel - aarch64"
@@ -134,7 +131,7 @@ jobs:
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: wheels-aarch64-apple-darwin
path: dist
@@ -149,7 +146,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: artifacts-aarch64-apple-darwin
path: |
@@ -169,11 +166,11 @@ jobs:
- target: aarch64-pc-windows-msvc
arch: x64
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.platform.arch }}
@@ -182,7 +179,6 @@ jobs:
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: ${{ matrix.platform.target }}
args: --release --locked --out dist
env:
@@ -196,7 +192,7 @@ jobs:
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
- name: "Upload wheels"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: wheels-${{ matrix.platform.target }}
path: dist
@@ -207,7 +203,7 @@ jobs:
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: artifacts-${{ matrix.platform.target }}
path: |
@@ -223,11 +219,11 @@ jobs:
- x86_64-unknown-linux-gnu
- i686-unknown-linux-gnu
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -236,7 +232,6 @@ jobs:
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: ${{ matrix.target }}
manylinux: auto
args: --release --locked --out dist
@@ -247,7 +242,7 @@ jobs:
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
- name: "Upload wheels"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: wheels-${{ matrix.target }}
path: dist
@@ -265,7 +260,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: artifacts-${{ matrix.target }}
path: |
@@ -301,11 +296,11 @@ jobs:
arch: riscv64

steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -313,7 +308,6 @@ jobs:
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: ${{ matrix.platform.target }}
manylinux: auto
docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -333,7 +327,7 @@ jobs:
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: wheels-${{ matrix.platform.target }}
path: dist
@@ -351,7 +345,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: artifacts-${{ matrix.platform.target }}
path: |
@@ -367,11 +361,11 @@ jobs:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -380,7 +374,6 @@ jobs:
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
@@ -396,7 +389,7 @@ jobs:
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: wheels-${{ matrix.target }}
path: dist
@@ -414,7 +407,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: artifacts-${{ matrix.target }}
path: |
@@ -434,11 +427,11 @@ jobs:
arch: armv7

steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -446,7 +439,6 @@ jobs:
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
with:
maturin-version: v1.9.6
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
@@ -464,7 +456,7 @@ jobs:
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: wheels-${{ matrix.platform.target }}
path: dist
@@ -482,7 +474,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: artifacts-${{ matrix.platform.target }}
path: |
30 .github/workflows/build-docker.yml
@@ -20,12 +20,6 @@ on:
env:
RUFF_BASE_IMG: ghcr.io/${{ github.repository_owner }}/ruff

permissions:
contents: read
# TODO(zanieb): Ideally, this would be `read` on dry-run but that will require
# significant changes to the workflow.
packages: write # zizmor: ignore[excessive-permissions]

jobs:
docker-build:
name: Build Docker image (ghcr.io/astral-sh/ruff) for ${{ matrix.platform }}
@@ -39,12 +33,12 @@ jobs:
- linux/amd64
- linux/arm64
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
submodules: recursive
persist-credentials: false

- uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
@@ -69,7 +63,7 @@ jobs:

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
with:
images: ${{ env.RUFF_BASE_IMG }}
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
@@ -102,7 +96,7 @@ jobs:
touch "/tmp/digests/${digest#sha256:}"

- name: Upload digests
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: digests-${{ env.PLATFORM_TUPLE }}
path: /tmp/digests/*
@@ -119,17 +113,17 @@ jobs:
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true

- uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
@@ -173,7 +167,7 @@ jobs:
- debian:bookworm-slim,bookworm-slim,debian-slim
- buildpack-deps:bookworm,bookworm,debian
steps:
- uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
@@ -225,7 +219,7 @@ jobs:

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
# ghcr.io prefers index level annotations
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
@@ -262,17 +256,17 @@ jobs:
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true

- uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
58 .github/workflows/build-wasm.yml
@@ -1,58 +0,0 @@
# Build ruff_wasm for npm.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
# artifacts job within `cargo-dist`.
name: "Build wasm"

on:
workflow_call:
inputs:
plan:
required: true
type: string
pull_request:
paths:
- .github/workflows/build-wasm.yml

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10

jobs:
build:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: ubuntu-latest
strategy:
matrix:
target: [web, bundler, nodejs]
fail-fast: false
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
with:
version: v0.13.1
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Run wasm-pack build"
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
- name: "Rename generated package"
run: | # Replace the package name w/ jq
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
mv /tmp/package.json crates/ruff_wasm/pkg
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
- name: "Upload wasm artifact"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: artifacts-wasm-${{ matrix.target }}
path: crates/ruff_wasm/pkg
670 .github/workflows/ci.yaml (file diff suppressed because it is too large)
15 .github/workflows/daily_fuzz.yaml
@@ -31,15 +31,15 @@ jobs:
# Don't run the cron job on forks:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
- uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Build ruff
# A debug build means the script runs slower once it gets started,
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
@@ -48,10 +48,9 @@ jobs:
run: |
# shellcheck disable=SC2046
(
uv run \
--python=3.14 \
--project=./python/py-fuzzer \
--locked \
uvx \
--python=3.12 \
--from=./python/py-fuzzer \
fuzz \
--test-executable=target/debug/ruff \
--bin=ruff \
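Assembling the `uv run` side of the hunk above, a local reproduction of the CI fuzz invocation would look roughly like this (a sketch; the flags come from the diff, the debug build step from the surrounding job):

```sh
cargo build --bin ruff   # CI deliberately uses a debug build here
uv run \
  --python=3.14 \
  --project=./python/py-fuzzer \
  --locked \
  fuzz \
  --test-executable=target/debug/ruff \
  --bin=ruff
```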
@@ -62,7 +61,7 @@ jobs:
name: Create an issue if the daily fuzz surfaced any bugs
runs-on: ubuntu-latest
needs: fuzz
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.fuzz.result != 'success' }}
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.fuzz.result == 'failure' }}
permissions:
issues: write
steps:
84 .github/workflows/mypy_primer.yaml
@@ -6,11 +6,6 @@ on:
pull_request:
paths:
- "crates/ty*/**"
- "!crates/ty_ide/**"
- "!crates/ty_server/**"
- "!crates/ty_test/**"
- "!crates/ty_completion_eval/**"
- "!crates/ty_wasm/**"
- "crates/ruff_db"
- "crates/ruff_python_ast"
- "crates/ruff_python_parser"
@@ -41,18 +36,17 @@ jobs:
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
path: ruff
fetch-depth: 0
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0

- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with:
shared-key: "mypy-primer"
workspaces: "ruff"

- name: Install Rust toolchain
@@ -61,38 +55,41 @@ jobs:
- name: Run mypy_primer
env:
PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt
CLICOLOR_FORCE: "1"
DIFF_FILE: mypy_primer.diff
run: |
cd ruff
scripts/mypy_primer.sh
echo ${{ github.event.number }} > ../pr-number

# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
# Make sure to update the bot if you rename the artifact.
- name: Upload diff
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: mypy_primer_diff
path: mypy_primer.diff

- name: Upload pr-number
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: pr-number
path: pr-number

memory_usage:
name: Run memory statistics
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
path: ruff
fetch-depth: 0
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0

- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with:
workspaces: "ruff"
shared-key: "mypy-primer"

- name: Install Rust toolchain
run: rustup show
@@ -108,58 +105,7 @@ jobs:
scripts/mypy_primer.sh

- name: Upload diff
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: mypy_primer_memory_diff
path: mypy_primer_memory.diff

# Runs mypy twice against the same ty version to catch any non-deterministic behavior (ideally).
# The job is disabled for now because there are some non-deterministic diagnostics.
mypy_primer_same_revision:
name: Run mypy_primer on same revision
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
timeout-minutes: 20
# TODO: Enable once we fixed the non-deterministic diagnostics
if: false
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
path: ruff
fetch-depth: 0
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0

- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
workspaces: "ruff"
shared-key: "mypy-primer"

- name: Install Rust toolchain
run: rustup show

- name: Run determinism check
env:
BASE_REVISION: ${{ github.event.pull_request.head.sha }}
PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt
CLICOLOR_FORCE: "1"
DIFF_FILE: mypy_primer_determinism.diff
run: |
cd ruff
scripts/mypy_primer.sh

- name: Check for non-determinism
run: |
# Remove ANSI color codes for checking
sed -e 's/\x1b\[[0-9;]*m//g' mypy_primer_determinism.diff > mypy_primer_determinism_clean.diff

# Check if there are any differences (non-determinism)
if [ -s mypy_primer_determinism_clean.diff ]; then
echo "ERROR: Non-deterministic output detected!"
echo "The following differences were found when running ty twice on the same commit:"
cat mypy_primer_determinism_clean.diff
exit 1
else
echo "✓ Output is deterministic"
fi
122 .github/workflows/mypy_primer_comment.yaml (new file)
@@ -0,0 +1,122 @@
name: PR comment (mypy_primer)

on: # zizmor: ignore[dangerous-triggers]
workflow_run:
workflows: [Run mypy_primer]
types: [completed]
workflow_dispatch:
inputs:
workflow_run_id:
description: The mypy_primer workflow that triggers the workflow run
required: true

jobs:
comment:
runs-on: ubuntu-24.04
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: Download PR number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true

- name: Parse pull request number
id: pr-number
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download mypy_primer results"
id: download-mypy_primer_diff
if: steps.pr-number.outputs.pr-number
with:
name: mypy_primer_diff
workflow: mypy_primer.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/mypy_primer_diff
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download mypy_primer memory results"
id: download-mypy_primer_memory_diff
if: steps.pr-number.outputs.pr-number
with:
name: mypy_primer_memory_diff
workflow: mypy_primer.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/mypy_primer_memory_diff
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true

- name: Generate comment content
id: generate-comment
if: ${{ steps.download-mypy_primer_diff.outputs.found_artifact == 'true' && steps.download-mypy_primer_memory_diff.outputs.found_artifact == 'true' }}
run: |
# Guard against malicious mypy_primer results that symlink to a secret
# file on this runner
if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]] || [[ -L pr/mypy_primer_memory_diff/mypy_primer_memory.diff ]]
then
echo "Error: mypy_primer.diff and mypy_primer_memory.diff cannot be a symlink"
exit 1
fi

# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment mypy_primer -->' >> comment.txt

echo '## `mypy_primer` results' >> comment.txt
if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then
echo '<details>' >> comment.txt
echo '<summary>Changes were detected when running on open source projects</summary>' >> comment.txt
echo '' >> comment.txt
echo '```diff' >> comment.txt
cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt
echo '```' >> comment.txt
echo '</details>' >> comment.txt
else
echo 'No ecosystem changes detected ✅' >> comment.txt
fi

if [ -s "pr/mypy_primer_memory_diff/mypy_primer_memory.diff" ]; then
echo '<details>' >> comment.txt
echo '<summary>Memory usage changes were detected when running on open source projects</summary>' >> comment.txt
echo '' >> comment.txt
echo '```diff' >> comment.txt
cat pr/mypy_primer_memory_diff/mypy_primer_memory.diff >> comment.txt
echo '```' >> comment.txt
echo '</details>' >> comment.txt
else
echo 'No memory usage changes detected ✅' >> comment.txt
fi

echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"

- name: Find existing comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment mypy_primer -->"

- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}
body-path: comment.txt
edit-mode: replace
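The `comment<<EOF … EOF` lines in the workflow above rely on GitHub Actions' multi-line output syntax; a minimal standalone sketch of the pattern (step id and output name are arbitrary here, not taken from the diff):

```yaml
- name: Emit a multi-line output
  id: demo
  run: |
    {
      echo 'comment<<EOF'
      cat comment.txt        # any multi-line content
      echo 'EOF'
    } >> "$GITHUB_OUTPUT"
  # Later steps can read it as ${{ steps.demo.outputs.comment }}
```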
88 .github/workflows/pr-comment.yaml (new file)
@@ -0,0 +1,88 @@
name: Ecosystem check comment

on:
workflow_run:
workflows: [CI]
types: [completed]
workflow_dispatch:
inputs:
workflow_run_id:
description: The ecosystem workflow that triggers the workflow run
required: true

jobs:
comment:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: Download pull request number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true

- name: Parse pull request number
id: pr-number
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download ecosystem results"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
with:
name: ecosystem-result
workflow: ci.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/ecosystem
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true

- name: Generate comment content
id: generate-comment
if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
run: |
# Guard against malicious ecosystem results that symlink to a secret
# file on this runner
if [[ -L pr/ecosystem/ecosystem-result ]]
then
echo "Error: ecosystem-result cannot be a symlink"
exit 1
fi

# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment ecosystem -->' >> comment.txt

echo '## `ruff-ecosystem` results' >> comment.txt
cat pr/ecosystem/ecosystem-result >> comment.txt
echo "" >> comment.txt

echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"

- name: Find existing comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment ecosystem -->"

- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}
body-path: comment.txt
edit-mode: replace
29 .github/workflows/publish-docs.yml
@@ -17,19 +17,18 @@ on:
required: true
type: string

permissions:
contents: read

jobs:
mkdocs:
runs-on: ubuntu-latest
env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ inputs.ref }}
persist-credentials: true

- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: 3.12

@@ -60,12 +59,23 @@ jobs:
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV"
echo "timestamp=$timestamp" >> "$GITHUB_ENV"

- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}

- name: "Install Rust toolchain"
run: rustup show

- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: pip install -r docs/requirements-insiders.txt

- name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: pip install -r docs/requirements.txt

- name: "Copy README File"
@@ -73,8 +83,13 @@ jobs:
python scripts/transform_readme.py --target mkdocs
python scripts/generate_mkdocs.py

- name: "Build Insiders docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: mkdocs build --strict -f mkdocs.insiders.yml

- name: "Build docs"
run: mkdocs build --strict -f mkdocs.yml
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml

- name: "Clone docs repo"
run: git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
13 .github/workflows/publish-playground.yml
@@ -18,26 +18,25 @@ env:
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10

permissions: {}

jobs:
publish:
runs-on: ubuntu-latest
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
- uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version: 24
package-manager-cache: false
node-version: 22
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Install Node dependencies"
run: npm ci --ignore-scripts
run: npm ci
working-directory: playground
- name: "Run TypeScript checks"
run: npm run check
4 .github/workflows/publish-pypi.yml
@@ -22,8 +22,8 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
- uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
pattern: wheels-*
path: wheels
9 .github/workflows/publish-ty-playground.yml
@@ -30,18 +30,17 @@ jobs:
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
- uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version: 24
package-manager-cache: false
node-version: 22
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Install Node dependencies"
run: npm ci --ignore-scripts
run: npm ci
working-directory: playground
- name: "Run TypeScript checks"
run: npm run check
39 .github/workflows/publish-wasm.yml
@@ -1,18 +1,25 @@
# Publish ruff_wasm to npm.
# Build and publish ruff-api for wasm.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish
# job within `cargo-dist`.
name: "Publish wasm"
name: "Build and publish wasm"

on:
workflow_dispatch:
workflow_call:
inputs:
plan:
required: true
type: string

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10

jobs:
publish:
ruff_wasm:
runs-on: ubuntu-latest
permissions:
contents: read
@@ -22,19 +29,31 @@ jobs:
target: [web, bundler, nodejs]
fail-fast: false
steps:
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
name: artifacts-wasm-${{ matrix.target }}
path: pkg
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
with:
node-version: 24
version: v0.13.1
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Run wasm-pack build"
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
- name: "Rename generated package"
run: | # Replace the package name w/ jq
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
mv /tmp/package.json crates/ruff_wasm/pkg
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
- uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version: 22
registry-url: "https://registry.npmjs.org"
- name: "Publish (dry-run)"
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
run: npm publish --dry-run pkg
run: npm publish --dry-run crates/ruff_wasm/pkg
- name: "Publish"
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
run: npm publish --provenance --access public pkg
run: npm publish --provenance --access public crates/ruff_wasm/pkg
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
46 .github/workflows/release.yml
@@ -1,6 +1,7 @@
# This file was autogenerated by dist: https://axodotdev.github.io/cargo-dist
# This file was autogenerated by dist: https://github.com/astral-sh/cargo-dist
#
# Copyright 2022-2024, axodotdev
# Copyright 2025 Astral Software Inc.
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
@@ -60,7 +61,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
with:
persist-credentials: false
submodules: recursive
@@ -68,9 +69,9 @@ jobs:
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.2/cargo-dist-installer.sh | sh"
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.5-prerelease.1/cargo-dist-installer.sh | sh"
- name: Cache dist
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
with:
name: cargo-dist-cache
path: ~/.cargo/bin/dist
@@ -86,7 +87,7 @@ jobs:
cat plan-dist-manifest.json
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
with:
name: artifacts-plan-dist-manifest
path: plan-dist-manifest.json
@@ -112,40 +113,30 @@ jobs:
"contents": "read"
"packages": "write"

custom-build-wasm:
needs:
- plan
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
uses: ./.github/workflows/build-wasm.yml
with:
plan: ${{ needs.plan.outputs.val }}
secrets: inherit

# Build and package all the platform-agnostic(ish) things
build-global-artifacts:
needs:
- plan
- custom-build-binaries
- custom-build-docker
- custom-build-wasm
runs-on: "depot-ubuntu-latest-4"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
with:
persist-credentials: false
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
- name: Fetch local artifacts
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
with:
pattern: artifacts-*
path: target/distrib/
@@ -163,7 +154,7 @@ jobs:

cp dist-manifest.json "$BUILD_MANIFEST_NAME"
- name: "Upload artifacts"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
with:
name: artifacts-build-global
path: |
@@ -175,29 +166,28 @@ jobs:
- plan
- custom-build-binaries
- custom-build-docker
- custom-build-wasm
- build-global-artifacts
# Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine)
if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') && (needs.custom-build-wasm.result == 'skipped' || needs.custom-build-wasm.result == 'success') }}
# Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
runs-on: "depot-ubuntu-latest-4"
outputs:
val: ${{ steps.host.outputs.manifest }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
with:
persist-credentials: false
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
# Fetch artifacts from scratch-storage
- name: Fetch artifacts
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
with:
pattern: artifacts-*
path: target/distrib/
@@ -211,7 +201,7 @@ jobs:
cat dist-manifest.json
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
with:
# Overwrite the previous copy
name: artifacts-dist-manifest
@@ -261,13 +251,13 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
with:
persist-credentials: false
submodules: recursive
# Create a GitHub Release while uploading all files to it
- name: "Download GitHub Artifacts"
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
with:
pattern: artifacts-*
path: artifacts
50
.github/workflows/sync_typeshed.yaml
vendored
50
.github/workflows/sync_typeshed.yaml
vendored
@@ -16,7 +16,8 @@ name: Sync typeshed
|
||||
# 3. Once the Windows worker is done, a MacOS worker:
|
||||
# a. Checks out the branch created by the Linux worker
|
||||
# b. Syncs all docstrings available on MacOS that are not available on Linux or Windows
|
||||
# c. Formats the code again
|
||||
# c. Attempts to update any snapshots that might have changed
|
||||
# (this sub-step is allowed to fail)
|
||||
# d. Commits the changes and pushes them to the same upstream branch
|
||||
# e. Creates a PR against the `main` branch using the branch all three workers have pushed to
|
||||
# 4. If any of steps 1-3 failed, an issue is created in the `astral-sh/ruff` repository
|
||||
@@ -61,12 +62,12 @@ jobs:
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
name: Checkout Ruff
|
||||
with:
|
||||
path: ruff
|
||||
persist-credentials: true
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
name: Checkout typeshed
|
||||
with:
|
||||
repository: python/typeshed
|
||||
@@ -76,7 +77,7 @@ jobs:
|
||||
run: |
|
||||
git config --global user.name typeshedbot
|
||||
git config --global user.email '<>'
|
||||
- uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
|
||||
- uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
|
||||
- name: Sync typeshed stubs
|
||||
run: |
|
||||
rm -rf "ruff/${VENDORED_TYPESHED}"
|
||||
@@ -125,12 +126,12 @@ jobs:
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
name: Checkout Ruff
|
||||
with:
|
||||
persist-credentials: true
|
||||
ref: ${{ env.UPSTREAM_BRANCH}}
|
||||
- uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
|
||||
- uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config --global user.name typeshedbot
|
||||
@@ -164,12 +165,12 @@ jobs:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
name: Checkout Ruff
|
||||
with:
|
||||
persist-credentials: true
|
||||
ref: ${{ env.UPSTREAM_BRANCH}}
|
||||
- uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
|
||||
- uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config --global user.name typeshedbot
|
||||
@@ -197,6 +198,37 @@ jobs:
|
||||
run: |
|
||||
rm "${VENDORED_TYPESHED}/pyproject.toml"
|
||||
git commit -am "Remove pyproject.toml file"
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
- name: "Install Rust toolchain"
|
||||
if: ${{ success() }}
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
if: ${{ success() }}
|
||||
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
|
||||
- name: "Install cargo nextest"
|
||||
if: ${{ success() }}
|
||||
uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
- name: "Install cargo insta"
|
||||
if: ${{ success() }}
|
||||
uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
|
||||
with:
|
||||
tool: cargo-insta
|
||||
- name: Update snapshots
|
||||
if: ${{ success() }}
|
||||
run: |
|
||||
# The `cargo insta` docs indicate that `--unreferenced=delete` might be a good option,
|
||||
# but from local testing it appears to just revert all changes made by `cargo insta test --accept`.
|
||||
#
|
||||
# If there were only snapshot-related failures, `cargo insta test --accept` will have exit code 0,
|
||||
# but if there were also other mdtest failures (for example), it will return a nonzero exit code.
|
||||
# We don't care about other tests failing here, we just want snapshots updated where possible,
|
||||
# so we use `|| true` here to ignore the exit code.
|
||||
cargo insta test --accept --color=always --all-features --test-runner=nextest || true
|
||||
- name: Commit snapshot changes
|
||||
if: ${{ success() }}
|
||||
run: git commit -am "Update snapshots" || echo "No snapshot changes to commit"
|
||||
- name: Push changes upstream and create a PR
|
||||
if: ${{ success() }}
|
||||
run: |
|
||||
@@ -208,7 +240,7 @@ jobs:
|
||||
name: Create an issue if the typeshed sync failed
|
||||
runs-on: ubuntu-latest
|
||||
needs: [sync, docstrings-windows, docstrings-macos-and-pr]
|
||||
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && (needs.sync.result != 'success' || needs.docstrings-windows.result != 'success' || needs.docstrings-macos-and-pr.result != 'success') }}
|
||||
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && (needs.sync.result == 'failure' || needs.docstrings-windows.result == 'failure' || needs.docstrings-macos-and-pr.result == 'failure') }}
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
|
||||
61
.github/workflows/ty-ecosystem-analyzer.yaml
vendored
61
.github/workflows/ty-ecosystem-analyzer.yaml
vendored
@@ -4,13 +4,7 @@ permissions: {}
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
# The default for `pull_request` is to trigger on `synchronize`, `opened` and `reopened`.
|
||||
# We also add `labeled` here so that the workflow triggers when a label is initially added.
|
||||
types:
|
||||
- labeled
|
||||
- synchronize
|
||||
- opened
|
||||
- reopened
|
||||
types: [labeled]
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
|
||||
@@ -23,29 +17,27 @@ env:
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
RUST_BACKTRACE: 1
|
||||
REF_NAME: ${{ github.ref_name }}
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
|
||||
jobs:
|
||||
ty-ecosystem-analyzer:
|
||||
name: Compute diagnostic diff
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
|
||||
timeout-minutes: 20
|
||||
if: contains( github.event.pull_request.labels.*.name, 'ecosystem-analyzer')
|
||||
if: contains(github.event.label.name, 'ecosystem-analyzer')
|
||||
steps:
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
path: ruff
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
|
||||
with:
|
||||
enable-cache: true
|
||||
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
|
||||
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
with:
|
||||
workspaces: "ruff"
|
||||
lookup-only: false
|
||||
|
||||
- name: Install Rust toolchain
|
||||
run: rustup show
|
||||
@@ -72,7 +64,7 @@ jobs:
|
||||
|
||||
cd ..
|
||||
|
||||
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@2e1816eac09c90140b1ba51d19afc5f59da460f5"
|
||||
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@908758da02a73ef3f3308e1dbb2248510029bbe4"
|
||||
|
||||
ecosystem-analyzer \
|
||||
--repository ruff \
|
||||
@@ -117,30 +109,45 @@ jobs:
|
||||
|
||||
cat diff-statistics.md >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
|
||||
# Make sure to update the bot if you rename the artifact.
|
||||
- name: "Upload full report"
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: full-report
|
||||
path: dist/
|
||||
echo ${{ github.event.number }} > pr-number
|
||||
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
id: deploy
|
||||
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
command: pages deploy dist --project-name=ty-ecosystem --branch ${{ github.head_ref }} --commit-hash ${GITHUB_SHA}
|
||||
|
||||
- name: "Append deployment URL"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
env:
|
||||
DEPLOYMENT_URL: ${{ steps.deploy.outputs.pages-deployment-alias-url }}
|
||||
run: |
|
||||
echo >> comment.md
|
||||
echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)** ([timing results]($DEPLOYMENT_URL/timing))" >> comment.md
|
||||
|
||||
# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
|
||||
# Make sure to update the bot if you rename the artifact.
|
||||
- name: Upload comment
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: comment.md
|
||||
path: comment.md
|
||||
|
||||
- name: Upload pr-number
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: pr-number
|
||||
path: pr-number
|
||||
|
||||
- name: Upload diagnostics diff
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: diff.html
|
||||
path: dist/diff.html
|
||||
|
||||
- name: Upload timing diff
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: timing.html
|
||||
path: dist/timing.html
|
||||
|
||||
85
.github/workflows/ty-ecosystem-analyzer_comment.yaml
vendored
Normal file
85
.github/workflows/ty-ecosystem-analyzer_comment.yaml
vendored
Normal file
@@ -0,0 +1,85 @@
|
||||
name: PR comment (ty ecosystem-analyzer)
|
||||
|
||||
on: # zizmor: ignore[dangerous-triggers]
|
||||
workflow_run:
|
||||
workflows: [ty ecosystem-analyzer]
|
||||
types: [completed]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
workflow_run_id:
|
||||
description: The ty ecosystem-analyzer workflow that triggers the workflow run
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: Download PR number
|
||||
with:
|
||||
name: pr-number
|
||||
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Parse pull request number
|
||||
id: pr-number
|
||||
run: |
|
||||
if [[ -f pr-number ]]
|
||||
then
|
||||
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: "Download comment.md"
|
||||
id: download-comment
|
||||
if: steps.pr-number.outputs.pr-number
|
||||
with:
|
||||
name: comment.md
|
||||
workflow: ty-ecosystem-analyzer.yaml
|
||||
pr: ${{ steps.pr-number.outputs.pr-number }}
|
||||
path: pr/comment
|
||||
workflow_conclusion: completed
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Generate comment content
|
||||
id: generate-comment
|
||||
if: ${{ steps.download-comment.outputs.found_artifact == 'true' }}
|
||||
run: |
|
||||
# Guard against malicious ty ecosystem-analyzer results that symlink to a secret
|
||||
# file on this runner
|
||||
if [[ -L pr/comment/comment.md ]]
|
||||
then
|
||||
echo "Error: comment.md cannot be a symlink"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Note: this identifier is used to find the comment to update on subsequent runs
|
||||
echo '<!-- generated-comment ty ecosystem-analyzer -->' > comment.md
|
||||
echo >> comment.md
|
||||
cat pr/comment/comment.md >> comment.md
|
||||
|
||||
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
|
||||
cat comment.md >> "$GITHUB_OUTPUT"
|
||||
echo 'EOF' >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Find existing comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
|
||||
if: steps.generate-comment.outcome == 'success'
|
||||
id: find-comment
|
||||
with:
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
comment-author: "github-actions[bot]"
|
||||
body-includes: "<!-- generated-comment ty ecosystem-analyzer -->"
|
||||
|
||||
- name: Create or update comment
|
||||
if: steps.find-comment.outcome == 'success'
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
|
||||
with:
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
body-path: comment.md
|
||||
edit-mode: replace
|
||||
29
.github/workflows/ty-ecosystem-report.yaml
vendored
29
.github/workflows/ty-ecosystem-report.yaml
vendored
@@ -1,7 +1,3 @@
|
||||
# This workflow is a cron job that generates a report describing
|
||||
# all diagnostics ty emits across the whole ecosystem. The report
|
||||
# is uploaded to https://ty-ecosystem-ext.pages.dev/ on a weekly basis.
|
||||
|
||||
name: ty ecosystem-report
|
||||
|
||||
permissions: {}
|
||||
@@ -18,6 +14,7 @@ env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
RUST_BACKTRACE: 1
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
|
||||
jobs:
|
||||
ty-ecosystem-report:
|
||||
@@ -25,21 +22,18 @@ jobs:
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
path: ruff
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
|
||||
with:
|
||||
enable-cache: true
|
||||
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
|
||||
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
with:
|
||||
workspaces: "ruff"
|
||||
lookup-only: false
|
||||
|
||||
- name: Install Rust toolchain
|
||||
run: rustup show
|
||||
@@ -55,7 +49,7 @@ jobs:
|
||||
|
||||
cd ..
|
||||
|
||||
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@2e1816eac09c90140b1ba51d19afc5f59da460f5"
|
||||
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@908758da02a73ef3f3308e1dbb2248510029bbe4"
|
||||
|
||||
ecosystem-analyzer \
|
||||
--verbose \
|
||||
@@ -73,10 +67,11 @@ jobs:
|
||||
ecosystem-diagnostics.json \
|
||||
--output dist/index.html
|
||||
|
||||
# NOTE: astral-sh-bot uses this artifact to publish the ecosystem report.
|
||||
# Make sure to update the bot if you rename the artifact.
|
||||
- name: "Upload ecosystem report"
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
id: deploy
|
||||
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
|
||||
with:
|
||||
name: full-report
|
||||
path: dist/
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
command: pages deploy dist --project-name=ty-ecosystem --branch main --commit-hash ${GITHUB_SHA}
|
||||
|
||||
31
.github/workflows/typing_conformance.yaml
vendored
31
.github/workflows/typing_conformance.yaml
vendored
@@ -6,11 +6,6 @@ on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "crates/ty*/**"
|
||||
- "!crates/ty_ide/**"
|
||||
- "!crates/ty_server/**"
|
||||
- "!crates/ty_test/**"
|
||||
- "!crates/ty_completion_eval/**"
|
||||
- "!crates/ty_wasm/**"
|
||||
- "crates/ruff_db"
|
||||
- "crates/ruff_python_ast"
|
||||
- "crates/ruff_python_parser"
|
||||
@@ -29,7 +24,7 @@ env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
RUST_BACKTRACE: 1
|
||||
CONFORMANCE_SUITE_COMMIT: 9f6d8ced7cd1c8d92687a4e9c96d7716452e471e
|
||||
CONFORMANCE_SUITE_COMMIT: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc
|
||||
|
||||
jobs:
|
||||
typing_conformance:
|
||||
@@ -37,20 +32,20 @@ jobs:
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
path: ruff
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
repository: python/typing
|
||||
ref: ${{ env.CONFORMANCE_SUITE_COMMIT }}
|
||||
path: typing
|
||||
persist-credentials: false
|
||||
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
with:
|
||||
workspaces: "ruff"
|
||||
|
||||
@@ -59,6 +54,9 @@ jobs:
|
||||
|
||||
- name: Compute diagnostic diff
|
||||
shell: bash
|
||||
env:
|
||||
# TODO: Remove this once we fixed the remaining panics in the conformance suite.
|
||||
TY_MAX_PARALLELISM: 1
|
||||
run: |
|
||||
RUFF_DIR="$GITHUB_WORKSPACE/ruff"
|
||||
|
||||
@@ -96,20 +94,23 @@ jobs:
|
||||
touch typing_conformance_diagnostics.diff
|
||||
fi
|
||||
|
||||
echo ${{ github.event.number }} > pr-number
|
||||
echo "${CONFORMANCE_SUITE_COMMIT}" > conformance-suite-commit
|
||||
|
||||
# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
|
||||
# Make sure to update the bot if you rename the artifact.
|
||||
- name: Upload diff
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: typing_conformance_diagnostics_diff
|
||||
path: typing_conformance_diagnostics.diff
|
||||
|
||||
# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
|
||||
# Make sure to update the bot if you rename the artifact.
|
||||
- name: Upload pr-number
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: pr-number
|
||||
path: pr-number
|
||||
|
||||
- name: Upload conformance suite commit
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: conformance-suite-commit
|
||||
path: conformance-suite-commit
|
||||
|
||||
112
.github/workflows/typing_conformance_comment.yaml
vendored
Normal file
112
.github/workflows/typing_conformance_comment.yaml
vendored
Normal file
@@ -0,0 +1,112 @@
|
||||
name: PR comment (typing_conformance)
|
||||
|
||||
on: # zizmor: ignore[dangerous-triggers]
|
||||
workflow_run:
|
||||
workflows: [Run typing conformance]
|
||||
types: [completed]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
workflow_run_id:
|
||||
description: The typing_conformance workflow that triggers the workflow run
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: Download PR number
|
||||
with:
|
||||
name: pr-number
|
||||
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Parse pull request number
|
||||
id: pr-number
|
||||
run: |
|
||||
if [[ -f pr-number ]]
|
||||
then
|
||||
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: Download typing conformance suite commit
|
||||
with:
|
||||
name: conformance-suite-commit
|
||||
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: "Download typing_conformance results"
|
||||
id: download-typing_conformance_diff
|
||||
if: steps.pr-number.outputs.pr-number
|
||||
with:
|
||||
name: typing_conformance_diagnostics_diff
|
||||
workflow: typing_conformance.yaml
|
||||
pr: ${{ steps.pr-number.outputs.pr-number }}
|
||||
path: pr/typing_conformance_diagnostics_diff
|
||||
workflow_conclusion: completed
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Generate comment content
|
||||
id: generate-comment
|
||||
if: ${{ steps.download-typing_conformance_diff.outputs.found_artifact == 'true' }}
|
||||
run: |
|
||||
# Guard against malicious typing_conformance results that symlink to a secret
|
||||
# file on this runner
|
||||
if [[ -L pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff ]]
|
||||
then
|
||||
echo "Error: typing_conformance_diagnostics.diff cannot be a symlink"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Note this identifier is used to find the comment to update on
|
||||
# subsequent runs
|
||||
echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt
|
||||
|
||||
if [[ -f conformance-suite-commit ]]
|
||||
then
|
||||
echo "## Diagnostic diff on [typing conformance tests](https://github.com/python/typing/tree/$(<conformance-suite-commit)/conformance)" >> comment.txt
|
||||
else
|
||||
echo "conformance-suite-commit file not found"
|
||||
echo "## Diagnostic diff on typing conformance tests" >> comment.txt
|
||||
fi
|
||||
|
||||
if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then
|
||||
echo '<details>' >> comment.txt
|
||||
echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt
|
||||
echo '' >> comment.txt
|
||||
echo '```diff' >> comment.txt
|
||||
cat pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff >> comment.txt
|
||||
echo '```' >> comment.txt
|
||||
echo '</details>' >> comment.txt
|
||||
else
|
||||
echo 'No changes detected when running ty on typing conformance tests ✅' >> comment.txt
|
||||
fi
|
||||
|
||||
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
|
||||
cat comment.txt >> "$GITHUB_OUTPUT"
|
||||
echo 'EOF' >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Find existing comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
|
||||
if: steps.generate-comment.outcome == 'success'
|
||||
id: find-comment
|
||||
with:
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
comment-author: "github-actions[bot]"
|
||||
body-includes: "<!-- generated-comment typing_conformance_diagnostics_diff -->"
|
||||
|
||||
- name: Create or update comment
|
||||
if: steps.find-comment.outcome == 'success'
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
|
||||
with:
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
body-path: comment.txt
|
||||
edit-mode: replace
|
||||
21
.github/zizmor.yml
vendored
Normal file
21
.github/zizmor.yml
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
|
||||
# https://woodruffw.github.io/zizmor/configuration/
|
||||
#
|
||||
# TODO: can we remove the ignores here so that our workflows are more secure?
|
||||
rules:
|
||||
dangerous-triggers:
|
||||
ignore:
|
||||
- pr-comment.yaml
|
||||
cache-poisoning:
|
||||
ignore:
|
||||
- build-docker.yml
|
||||
- publish-playground.yml
|
||||
- ty-ecosystem-analyzer.yaml
|
||||
- ty-ecosystem-report.yaml
|
||||
excessive-permissions:
|
||||
# it's hard to test what the impact of removing these ignores would be
|
||||
# without actually running the release workflow...
|
||||
ignore:
|
||||
- build-docker.yml
|
||||
- publish-playground.yml
|
||||
- publish-docs.yml
|
||||
@@ -21,91 +21,31 @@ exclude: |
|
||||
)$
|
||||
|
||||
repos:
|
||||
# Priority 0: Read-only hooks; hooks that modify disjoint file types.
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v6.0.0
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
- id: check-merge-conflict
|
||||
priority: 0
|
||||
|
||||
- repo: https://github.com/abravalheri/validate-pyproject
|
||||
rev: v0.24.1
|
||||
hooks:
|
||||
- id: validate-pyproject
|
||||
priority: 0
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.41.0
|
||||
hooks:
|
||||
- id: typos
|
||||
priority: 0
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: rustfmt
|
||||
name: rustfmt
|
||||
entry: rustfmt
|
||||
language: system
|
||||
types: [rust]
|
||||
priority: 0
|
||||
|
||||
# Prettier
|
||||
- repo: https://github.com/rbubley/mirrors-prettier
|
||||
rev: v3.7.4
|
||||
hooks:
|
||||
- id: prettier
|
||||
types: [yaml]
|
||||
priority: 0
|
||||
|
||||
# zizmor detects security vulnerabilities in GitHub Actions workflows.
|
||||
# Additional configuration for the tool is found in `.github/zizmor.yml`
|
||||
- repo: https://github.com/zizmorcore/zizmor-pre-commit
|
||||
rev: v1.19.0
|
||||
hooks:
|
||||
- id: zizmor
|
||||
priority: 0
|
||||
|
||||
- repo: https://github.com/python-jsonschema/check-jsonschema
|
||||
rev: 0.36.0
|
||||
hooks:
|
||||
- id: check-github-workflows
|
||||
priority: 0
|
||||
|
||||
- repo: https://github.com/shellcheck-py/shellcheck-py
|
||||
rev: v0.11.0.1
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
priority: 0
|
||||
|
||||
- repo: https://github.com/executablebooks/mdformat
|
||||
rev: 1.0.0
|
||||
rev: 0.7.22
|
||||
hooks:
|
||||
- id: mdformat
|
||||
language: python # means renovate will also update `additional_dependencies`
|
||||
additional_dependencies:
|
||||
- mdformat-mkdocs==5.0.0
|
||||
- mdformat-footnote==0.1.2
|
||||
- mdformat-mkdocs==4.0.0
|
||||
- mdformat-footnote==0.1.1
|
||||
exclude: |
|
||||
(?x)^(
|
||||
docs/formatter/black\.md
|
||||
| docs/\w+\.md
|
||||
)$
|
||||
priority: 0
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.14.10
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
priority: 0
|
||||
- id: ruff-check
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
types_or: [python, pyi]
|
||||
require_serial: true
|
||||
priority: 1
|
||||
|
||||
# Priority 1: Second-pass fixers (e.g., markdownlint-fix runs after mdformat).
|
||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||
rev: v0.47.0
|
||||
rev: v0.45.0
|
||||
hooks:
|
||||
- id: markdownlint-fix
|
||||
exclude: |
|
||||
@@ -113,14 +53,11 @@ repos:
|
||||
docs/formatter/black\.md
|
||||
| docs/\w+\.md
|
||||
)$
|
||||
priority: 1
|
||||
|
||||
# Priority 2: blacken-docs runs after markdownlint-fix (both modify markdown).
|
||||
- repo: https://github.com/adamchainz/blacken-docs
|
||||
rev: 1.20.0
|
||||
rev: 1.19.1
|
||||
hooks:
|
||||
- id: blacken-docs
|
||||
language: python # means renovate will also update `additional_dependencies`
|
||||
args: ["--pyi", "--line-length", "130"]
|
||||
files: '^crates/.*/resources/mdtest/.*\.md'
|
||||
exclude: |
|
||||
@@ -128,27 +65,74 @@ repos:
|
||||
.*?invalid(_.+)*_syntax\.md
|
||||
)$
|
||||
additional_dependencies:
|
||||
- black==25.12.0
|
||||
priority: 2
|
||||
- black==25.1.0
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.34.0
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: cargo-fmt
|
||||
name: cargo fmt
|
||||
entry: cargo fmt --
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.12.7
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff-check
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
types_or: [python, pyi]
|
||||
require_serial: true
|
||||
|
||||
# Prettier
|
||||
- repo: https://github.com/rbubley/mirrors-prettier
|
||||
rev: v3.6.2
|
||||
hooks:
|
||||
- id: prettier
|
||||
types: [yaml]
|
||||
|
||||
# zizmor detects security vulnerabilities in GitHub Actions workflows.
|
||||
# Additional configuration for the tool is found in `.github/zizmor.yml`
|
||||
- repo: https://github.com/woodruffw/zizmor-pre-commit
|
||||
rev: v1.11.0
|
||||
hooks:
|
||||
- id: zizmor
|
||||
|
||||
- repo: https://github.com/python-jsonschema/check-jsonschema
|
||||
rev: 0.33.2
|
||||
hooks:
|
||||
- id: check-github-workflows
|
||||
|
||||
# `actionlint` hook, for verifying correct syntax in GitHub Actions workflows.
|
||||
# Some additional configuration for `actionlint` can be found in `.github/actionlint.yaml`.
|
||||
- repo: https://github.com/rhysd/actionlint
|
||||
rev: v1.7.10
|
||||
rev: v1.7.7
|
||||
hooks:
|
||||
- id: actionlint
|
||||
stages:
|
||||
# This hook is disabled by default, since it's quite slow.
|
||||
# To run all hooks *including* this hook, use `uvx prek run -a --hook-stage=manual`.
|
||||
# To run *just* this hook, use `uvx prek run -a actionlint --hook-stage=manual`.
|
||||
# To run all hooks *including* this hook, use `uvx pre-commit run -a --hook-stage=manual`.
|
||||
# To run *just* this hook, use `uvx pre-commit run -a actionlint --hook-stage=manual`.
|
||||
- manual
|
||||
args:
|
||||
- "-ignore=SC2129" # ignorable stylistic lint from shellcheck
|
||||
- "-ignore=SC2016" # another shellcheck lint: seems to have false positives?
|
||||
language: golang # means renovate will also update `additional_dependencies`
|
||||
additional_dependencies:
|
||||
# actionlint has a shellcheck integration which extracts shell scripts in `run:` steps from GitHub Actions
|
||||
# and checks these with shellcheck. This is arguably its most useful feature,
|
||||
# but the integration only works if shellcheck is installed
|
||||
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.11.1"
|
||||
priority: 0
|
||||
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"
|
||||
|
||||
- repo: https://github.com/shellcheck-py/shellcheck-py
|
||||
rev: v0.10.0.1
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
|
||||
ci:
|
||||
skip: [cargo-fmt, dev-generate-all]
|
||||
|
||||
3
.vscode/settings.json
vendored
3
.vscode/settings.json
vendored
@@ -5,6 +5,5 @@
|
||||
"rust-analyzer.check.command": "clippy",
|
||||
"search.exclude": {
|
||||
"**/*.snap": true
|
||||
},
|
||||
"ty.diagnosticMode": "openFilesOnly"
|
||||
}
|
||||
}
|
||||
|
||||
457
CHANGELOG.md
457
CHANGELOG.md
@@ -1,462 +1,5 @@
|
||||
# Changelog
|
||||
|
||||
## 0.14.11
|
||||
|
||||
Released on 2026-01-08.
|
||||
|
||||
### Preview features
|
||||
|
||||
- Consolidate diagnostics for matched disable/enable suppression comments ([#22099](https://github.com/astral-sh/ruff/pull/22099))
|
||||
- Report diagnostics for invalid/unmatched range suppression comments ([#21908](https://github.com/astral-sh/ruff/pull/21908))
|
||||
- \[`airflow`\] Passing positional argument into `airflow.lineage.hook.HookLineageCollector.create_asset` is not allowed (`AIR303`) ([#22046](https://github.com/astral-sh/ruff/pull/22046))
|
||||
- \[`refurb`\] Mark `FURB192` fix as always unsafe ([#22210](https://github.com/astral-sh/ruff/pull/22210))
|
||||
- \[`ruff`\] Add `non-empty-init-module` (`RUF067`) ([#22143](https://github.com/astral-sh/ruff/pull/22143))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix GitHub format for multi-line diagnostics ([#22108](https://github.com/astral-sh/ruff/pull/22108))
|
||||
- \[`flake8-unused-arguments`\] Mark `**kwargs` in `TypeVar` as used (`ARG001`) ([#22214](https://github.com/astral-sh/ruff/pull/22214))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Add `help:` subdiagnostics for several Ruff rules that can sometimes appear to disagree with `ty` ([#22331](https://github.com/astral-sh/ruff/pull/22331))
|
||||
- \[`pylint`\] Demote `PLW1510` fix to display-only ([#22318](https://github.com/astral-sh/ruff/pull/22318))
|
||||
- \[`pylint`\] Ignore identical members (`PLR1714`) ([#22220](https://github.com/astral-sh/ruff/pull/22220))
|
||||
- \[`pylint`\] Improve diagnostic range for `PLC0206` ([#22312](https://github.com/astral-sh/ruff/pull/22312))
|
||||
- \[`ruff`\] Improve fix title for `RUF102` invalid rule code ([#22100](https://github.com/astral-sh/ruff/pull/22100))
|
||||
- \[`flake8-simplify`\]: Avoid unnecessary builtins import for `SIM105` ([#22358](https://github.com/astral-sh/ruff/pull/22358))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Allow Python 3.15 as valid `target-version` value in preview ([#22419](https://github.com/astral-sh/ruff/pull/22419))
|
||||
- Check `required-version` before parsing rules ([#22410](https://github.com/astral-sh/ruff/pull/22410))
|
||||
- Include configured `src` directories when resolving graphs ([#22451](https://github.com/astral-sh/ruff/pull/22451))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Update `T201` suggestion to not use root logger to satisfy `LOG015` ([#22059](https://github.com/astral-sh/ruff/pull/22059))
|
||||
- Fix `iter` example in unsafe fixes doc ([#22118](https://github.com/astral-sh/ruff/pull/22118))
|
||||
- \[`flake8_print`\] better suggestion for `basicConfig` in `T201` docs ([#22101](https://github.com/astral-sh/ruff/pull/22101))
|
||||
- \[`pylint`\] Restore the fix safety docs for `PLW0133` ([#22211](https://github.com/astral-sh/ruff/pull/22211))
|
||||
- Fix Jupyter notebook discovery info for editors ([#22447](https://github.com/astral-sh/ruff/pull/22447))
|
||||
|
||||
### Contributors
|
||||
|
||||
- [@charliermarsh](https://github.com/charliermarsh)
|
||||
- [@ntBre](https://github.com/ntBre)
|
||||
- [@cenviity](https://github.com/cenviity)
|
||||
- [@njhearp](https://github.com/njhearp)
|
||||
- [@cbachhuber](https://github.com/cbachhuber)
|
||||
- [@jelle-openai](https://github.com/jelle-openai)
|
||||
- [@AlexWaygood](https://github.com/AlexWaygood)
|
||||
- [@ValdonVitija](https://github.com/ValdonVitija)
|
||||
- [@BurntSushi](https://github.com/BurntSushi)
|
||||
- [@Jkhall81](https://github.com/Jkhall81)
|
||||
- [@PeterJCLaw](https://github.com/PeterJCLaw)
|
||||
- [@harupy](https://github.com/harupy)
|
||||
- [@amyreese](https://github.com/amyreese)
|
||||
- [@sjyangkevin](https://github.com/sjyangkevin)
|
||||
- [@woodruffw](https://github.com/woodruffw)
|
||||
|
||||
## 0.14.10
|
||||
|
||||
Released on 2025-12-18.
|
||||
|
||||
### Preview features
|
||||
|
||||
- [formatter] Fluent formatting of method chains ([#21369](https://github.com/astral-sh/ruff/pull/21369))
|
||||
- [formatter] Keep lambda parameters on one line and parenthesize the body if it expands ([#21385](https://github.com/astral-sh/ruff/pull/21385))
|
||||
- \[`flake8-implicit-str-concat`\] New rule to prevent implicit string concatenation in collections (`ISC004`) ([#21972](https://github.com/astral-sh/ruff/pull/21972))
|
||||
- \[`flake8-use-pathlib`\] Make fixes unsafe when types change in compound statements (`PTH104`, `PTH105`, `PTH109`, `PTH115`) ([#22009](https://github.com/astral-sh/ruff/pull/22009))
|
||||
- \[`refurb`\] Extend support for `Path.open` (`FURB101`, `FURB103`) ([#21080](https://github.com/astral-sh/ruff/pull/21080))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`pyupgrade`\] Fix parsing named Unicode escape sequences (`UP032`) ([#21901](https://github.com/astral-sh/ruff/pull/21901))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`eradicate`\] Ignore `ruff:disable` and `ruff:enable` comments in `ERA001` ([#22038](https://github.com/astral-sh/ruff/pull/22038))
|
||||
- \[`flake8-pytest-style`\] Allow `match` and `check` keyword arguments without an expected exception type (`PT010`) ([#21964](https://github.com/astral-sh/ruff/pull/21964))
|
||||
- [syntax-errors] Annotated name cannot be global ([#20868](https://github.com/astral-sh/ruff/pull/20868))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add `uv` and `ty` to the Ruff README ([#21996](https://github.com/astral-sh/ruff/pull/21996))
|
||||
- Document known lambda formatting deviations from Black ([#21954](https://github.com/astral-sh/ruff/pull/21954))
|
||||
- Update `setup.md` ([#22024](https://github.com/astral-sh/ruff/pull/22024))
|
||||
- \[`flake8-bandit`\] Fix broken link (`S704`) ([#22039](https://github.com/astral-sh/ruff/pull/22039))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Fix playground Share button showing "Copied!" before clipboard copy completes ([#21942](https://github.com/astral-sh/ruff/pull/21942))
|
||||
|
||||
### Contributors
|
||||
|
||||
- [@dylwil3](https://github.com/dylwil3)
|
||||
- [@charliecloudberry](https://github.com/charliecloudberry)
|
||||
- [@charliermarsh](https://github.com/charliermarsh)
|
||||
- [@chirizxc](https://github.com/chirizxc)
|
||||
- [@ntBre](https://github.com/ntBre)
|
||||
- [@zanieb](https://github.com/zanieb)
|
||||
- [@amyreese](https://github.com/amyreese)
|
||||
- [@hauntsaninja](https://github.com/hauntsaninja)
|
||||
- [@11happy](https://github.com/11happy)
|
||||
- [@mahiro72](https://github.com/mahiro72)
|
||||
- [@MichaReiser](https://github.com/MichaReiser)
|
||||
- [@phongddo](https://github.com/phongddo)
|
||||
- [@PeterJCLaw](https://github.com/PeterJCLaw)
|
||||
|
||||
## 0.14.9
|
||||
|
||||
Released on 2025-12-11.
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`ruff`\] New `RUF100` diagnostics for unused range suppressions ([#21783](https://github.com/astral-sh/ruff/pull/21783))
|
||||
- \[`pylint`\] Detect subclasses of builtin exceptions (`PLW0133`) ([#21382](https://github.com/astral-sh/ruff/pull/21382))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix comment placement in lambda parameters ([#21868](https://github.com/astral-sh/ruff/pull/21868))
|
||||
- Skip over trivia tokens after re-lexing ([#21895](https://github.com/astral-sh/ruff/pull/21895))
|
||||
- \[`flake8-bandit`\] Fix false positive when using non-standard `CSafeLoader` path (S506). ([#21830](https://github.com/astral-sh/ruff/pull/21830))
|
||||
- \[`flake8-bugbear`\] Accept immutable slice default arguments (`B008`) ([#21823](https://github.com/astral-sh/ruff/pull/21823))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`pydocstyle`\] Suppress `D417` for parameters with `Unpack` annotations ([#21816](https://github.com/astral-sh/ruff/pull/21816))
|
||||
|
||||
### Performance
|
||||
|
||||
- Use `memchr` for computing line indexes ([#21838](https://github.com/astral-sh/ruff/pull/21838))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Document `*.pyw` is included by default in preview ([#21885](https://github.com/astral-sh/ruff/pull/21885))
|
||||
- Document range suppressions, reorganize suppression docs ([#21884](https://github.com/astral-sh/ruff/pull/21884))
|
||||
- Update mkdocs-material to 9.7.0 (Insiders now free) ([#21797](https://github.com/astral-sh/ruff/pull/21797))
|
||||
|
||||
### Contributors
|
||||
|
||||
- [@Avasam](https://github.com/Avasam)
|
||||
- [@MichaReiser](https://github.com/MichaReiser)
|
||||
- [@charliermarsh](https://github.com/charliermarsh)
|
||||
- [@amyreese](https://github.com/amyreese)
|
||||
- [@phongddo](https://github.com/phongddo)
|
||||
- [@prakhar1144](https://github.com/prakhar1144)
|
||||
- [@mahiro72](https://github.com/mahiro72)
|
||||
- [@ntBre](https://github.com/ntBre)
|
||||
- [@LoicRiegel](https://github.com/LoicRiegel)
|
||||
|
||||
## 0.14.8
|
||||
|
||||
Released on 2025-12-04.
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bugbear`\] Catch `yield` expressions within other statements (`B901`) ([#21200](https://github.com/astral-sh/ruff/pull/21200))
|
||||
- \[`flake8-use-pathlib`\] Mark fixes unsafe for return type changes (`PTH104`, `PTH105`, `PTH109`, `PTH115`) ([#21440](https://github.com/astral-sh/ruff/pull/21440))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix syntax error false positives for `await` outside functions ([#21763](https://github.com/astral-sh/ruff/pull/21763))
|
||||
- \[`flake8-simplify`\] Fix truthiness assumption for non-iterable arguments in tuple/list/set calls (`SIM222`, `SIM223`) ([#21479](https://github.com/astral-sh/ruff/pull/21479))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Suggest using `--output-file` option in GitLab integration ([#21706](https://github.com/astral-sh/ruff/pull/21706))
|
||||
|
||||
### Other changes
|
||||
|
||||
- [syntax-error] Default type parameter followed by non-default type parameter ([#21657](https://github.com/astral-sh/ruff/pull/21657))
|
||||
|
||||
### Contributors
|
||||
|
||||
- [@kieran-ryan](https://github.com/kieran-ryan)
|
||||
- [@11happy](https://github.com/11happy)
|
||||
- [@danparizher](https://github.com/danparizher)
|
||||
- [@ntBre](https://github.com/ntBre)
|
||||
|
||||
## 0.14.7
|
||||
|
||||
Released on 2025-11-28.
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bandit`\] Handle string literal bindings in suspicious-url-open-usage (`S310`) ([#21469](https://github.com/astral-sh/ruff/pull/21469))
|
||||
- \[`pylint`\] Fix `PLR1708` false positives on nested functions ([#21177](https://github.com/astral-sh/ruff/pull/21177))
|
||||
- \[`pylint`\] Fix suppression for empty dict without tuple key annotation (`PLE1141`) ([#21290](https://github.com/astral-sh/ruff/pull/21290))
|
||||
- \[`ruff`\] Add rule `RUF066` to detect unnecessary class properties ([#21535](https://github.com/astral-sh/ruff/pull/21535))
|
||||
- \[`ruff`\] Catch more dummy variable uses (`RUF052`) ([#19799](https://github.com/astral-sh/ruff/pull/19799))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [server] Set severity for non-rule diagnostics ([#21559](https://github.com/astral-sh/ruff/pull/21559))
|
||||
- \[`flake8-implicit-str-concat`\] Avoid invalid fix in (`ISC003`) ([#21517](https://github.com/astral-sh/ruff/pull/21517))
|
||||
- \[`parser`\] Fix panic when parsing IPython escape command expressions ([#21480](https://github.com/astral-sh/ruff/pull/21480))
|
||||
|
||||
### CLI
|
||||
|
||||
- Show partial fixability indicator in statistics output ([#21513](https://github.com/astral-sh/ruff/pull/21513))
|
||||
|
||||
### Contributors
|
||||
|
||||
- [@mikeleppane](https://github.com/mikeleppane)
|
||||
- [@senekor](https://github.com/senekor)
|
||||
- [@ShaharNaveh](https://github.com/ShaharNaveh)
|
||||
- [@JumboBear](https://github.com/JumboBear)
|
||||
- [@prakhar1144](https://github.com/prakhar1144)
|
||||
- [@tsvikas](https://github.com/tsvikas)
|
||||
- [@danparizher](https://github.com/danparizher)
|
||||
- [@chirizxc](https://github.com/chirizxc)
|
||||
- [@AlexWaygood](https://github.com/AlexWaygood)
|
||||
- [@MichaReiser](https://github.com/MichaReiser)
|
||||
|
||||
## 0.14.6
|
||||
|
||||
Released on 2025-11-21.
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bandit`\] Support new PySNMP API paths (`S508`, `S509`) ([#21374](https://github.com/astral-sh/ruff/pull/21374))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Adjust own-line comment placement between branches ([#21185](https://github.com/astral-sh/ruff/pull/21185))
|
||||
- Avoid syntax error when formatting attribute expressions with outer parentheses, parenthesized value, and trailing comment on value ([#20418](https://github.com/astral-sh/ruff/pull/20418))
|
||||
- Fix panic when formatting comments in unary expressions ([#21501](https://github.com/astral-sh/ruff/pull/21501))
|
||||
- Respect `fmt: skip` for compound statements on a single line ([#20633](https://github.com/astral-sh/ruff/pull/20633))
|
||||
- \[`refurb`\] Fix `FURB103` autofix ([#21454](https://github.com/astral-sh/ruff/pull/21454))
|
||||
- \[`ruff`\] Fix false positive for complex conversion specifiers in `logging-eager-conversion` (`RUF065`) ([#21464](https://github.com/astral-sh/ruff/pull/21464))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`ruff`\] Avoid false positive on `ClassVar` reassignment (`RUF012`) ([#21478](https://github.com/astral-sh/ruff/pull/21478))
|
||||
|
||||
### CLI
|
||||
|
||||
- Render hyperlinks for lint errors ([#21514](https://github.com/astral-sh/ruff/pull/21514))
|
||||
- Add a `ruff analyze` option to skip over imports in `TYPE_CHECKING` blocks ([#21472](https://github.com/astral-sh/ruff/pull/21472))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Limit `eglot-format` hook to eglot-managed Python buffers ([#21459](https://github.com/astral-sh/ruff/pull/21459))
|
||||
- Mention `force-exclude` in "Configuration > Python file discovery" ([#21500](https://github.com/astral-sh/ruff/pull/21500))
|
||||
|
||||
### Contributors
|
||||
|
||||
- [@ntBre](https://github.com/ntBre)
|
||||
- [@dylwil3](https://github.com/dylwil3)
|
||||
- [@gauthsvenkat](https://github.com/gauthsvenkat)
|
||||
- [@MichaReiser](https://github.com/MichaReiser)
|
||||
- [@thamer](https://github.com/thamer)
|
||||
- [@Ruchir28](https://github.com/Ruchir28)
|
||||
- [@thejcannon](https://github.com/thejcannon)
|
||||
- [@danparizher](https://github.com/danparizher)
|
||||
- [@chirizxc](https://github.com/chirizxc)
|
||||
|
||||
## 0.14.5
|
||||
|
||||
Released on 2025-11-13.
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-simplify`\] Apply `SIM113` when index variable is of type `int` ([#21395](https://github.com/astral-sh/ruff/pull/21395))
|
||||
- \[`pydoclint`\] Fix false positive when Sphinx directives follow a "Raises" section (`DOC502`) ([#20535](https://github.com/astral-sh/ruff/pull/20535))
|
||||
- \[`pydoclint`\] Support NumPy-style comma-separated parameters (`DOC102`) ([#20972](https://github.com/astral-sh/ruff/pull/20972))
|
||||
- \[`refurb`\] Auto-fix annotated assignments (`FURB101`) ([#21278](https://github.com/astral-sh/ruff/pull/21278))
|
||||
- \[`ruff`\] Ignore `str()` when not used for simple conversion (`RUF065`) ([#21330](https://github.com/astral-sh/ruff/pull/21330))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix syntax error false positive on alternative `match` patterns ([#21362](https://github.com/astral-sh/ruff/pull/21362))
|
||||
- \[`flake8-simplify`\] Fix false positive for iterable initializers with generator arguments (`SIM222`) ([#21187](https://github.com/astral-sh/ruff/pull/21187))
|
||||
- \[`pyupgrade`\] Fix false positive on relative imports from local `.builtins` module (`UP029`) ([#21309](https://github.com/astral-sh/ruff/pull/21309))
|
||||
- \[`pyupgrade`\] Consistently set the deprecated tag (`UP035`) ([#21396](https://github.com/astral-sh/ruff/pull/21396))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`refurb`\] Detect empty f-strings (`FURB105`) ([#21348](https://github.com/astral-sh/ruff/pull/21348))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add option to provide a reason to `--add-noqa` ([#21294](https://github.com/astral-sh/ruff/pull/21294))
|
||||
- Add upstream linter URL to `ruff linter --output-format=json` ([#21316](https://github.com/astral-sh/ruff/pull/21316))
|
||||
- Add color to `--help` ([#21337](https://github.com/astral-sh/ruff/pull/21337))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add a new "Opening a PR" section to the contribution guide ([#21298](https://github.com/astral-sh/ruff/pull/21298))
|
||||
- Added the PyScripter IDE to the list of "Who is using Ruff?" ([#21402](https://github.com/astral-sh/ruff/pull/21402))
|
||||
- Update PyCharm setup instructions ([#21409](https://github.com/astral-sh/ruff/pull/21409))
|
||||
- \[`flake8-annotations`\] Add link to `allow-star-arg-any` option (`ANN401`) ([#21326](https://github.com/astral-sh/ruff/pull/21326))
|
||||
|
||||
### Other changes
|
||||
|
||||
- \[`configuration`\] Improve error message when `line-length` exceeds `u16::MAX` ([#21329](https://github.com/astral-sh/ruff/pull/21329))
|
||||
|
||||
### Contributors
|
||||
|
||||
- [@njhearp](https://github.com/njhearp)
|
||||
- [@11happy](https://github.com/11happy)
|
||||
- [@hugovk](https://github.com/hugovk)
|
||||
- [@Gankra](https://github.com/Gankra)
|
||||
- [@ntBre](https://github.com/ntBre)
|
||||
- [@pyscripter](https://github.com/pyscripter)
|
||||
- [@danparizher](https://github.com/danparizher)
|
||||
- [@MichaReiser](https://github.com/MichaReiser)
|
||||
- [@henryiii](https://github.com/henryiii)
|
||||
- [@charliecloudberry](https://github.com/charliecloudberry)
|
||||
|
||||
## 0.14.4
|
||||
|
||||
Released on 2025-11-06.
|
||||
|
||||
### Preview features
|
||||
|
||||
- [formatter] Allow newlines after function headers without docstrings ([#21110](https://github.com/astral-sh/ruff/pull/21110))
|
||||
- [formatter] Avoid extra parentheses for long `match` patterns with `as` captures ([#21176](https://github.com/astral-sh/ruff/pull/21176))
|
||||
- \[`refurb`\] Expand fix safety for keyword arguments and `Decimal`s (`FURB164`) ([#21259](https://github.com/astral-sh/ruff/pull/21259))
|
||||
- \[`refurb`\] Preserve argument ordering in autofix (`FURB103`) ([#20790](https://github.com/astral-sh/ruff/pull/20790))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [server] Fix missing diagnostics for notebooks ([#21156](https://github.com/astral-sh/ruff/pull/21156))
|
||||
- \[`flake8-bugbear`\] Ignore non-NFKC attribute names in `B009` and `B010` ([#21131](https://github.com/astral-sh/ruff/pull/21131))
|
||||
- \[`refurb`\] Fix false negative for underscores before sign in `Decimal` constructor (`FURB157`) ([#21190](https://github.com/astral-sh/ruff/pull/21190))
|
||||
- \[`ruff`\] Fix false positives on starred arguments (`RUF057`) ([#21256](https://github.com/astral-sh/ruff/pull/21256))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`airflow`\] extend deprecated argument `concurrency` in `airflow..DAG` (`AIR301`) ([#21220](https://github.com/astral-sh/ruff/pull/21220))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Improve `extend` docs ([#21135](https://github.com/astral-sh/ruff/pull/21135))
|
||||
- \[`flake8-comprehensions`\] Fix typo in `C416` documentation ([#21184](https://github.com/astral-sh/ruff/pull/21184))
|
||||
- Revise Ruff setup instructions for Zed editor ([#20935](https://github.com/astral-sh/ruff/pull/20935))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Make `ruff analyze graph` work with jupyter notebooks ([#21161](https://github.com/astral-sh/ruff/pull/21161))

### Contributors

- [@chirizxc](https://github.com/chirizxc)
- [@Lee-W](https://github.com/Lee-W)
- [@musicinmybrain](https://github.com/musicinmybrain)
- [@MichaReiser](https://github.com/MichaReiser)
- [@tjkuson](https://github.com/tjkuson)
- [@danparizher](https://github.com/danparizher)
- [@renovate](https://github.com/renovate)
- [@ntBre](https://github.com/ntBre)
- [@gauthsvenkat](https://github.com/gauthsvenkat)
- [@LoicRiegel](https://github.com/LoicRiegel)

## 0.14.3

Released on 2025-10-30.

### Preview features

- Respect `--output-format` with `--watch` ([#21097](https://github.com/astral-sh/ruff/pull/21097))
- \[`pydoclint`\] Fix false positive on explicit exception re-raising (`DOC501`, `DOC502`) ([#21011](https://github.com/astral-sh/ruff/pull/21011))
- \[`pyflakes`\] Revert to stable behavior if imports for module lie in alternate branches for `F401` ([#20878](https://github.com/astral-sh/ruff/pull/20878))
- \[`pylint`\] Implement `stop-iteration-return` (`PLR1708`) ([#20733](https://github.com/astral-sh/ruff/pull/20733))
- \[`ruff`\] Add support for additional eager conversion patterns (`RUF065`) ([#20657](https://github.com/astral-sh/ruff/pull/20657))

### Bug fixes

- Fix finding keyword range for clause header after statement ending with semicolon ([#21067](https://github.com/astral-sh/ruff/pull/21067))
- Fix syntax error false positive on nested alternative patterns ([#21104](https://github.com/astral-sh/ruff/pull/21104))
- \[`ISC001`\] Fix panic when string literals are unclosed ([#21034](https://github.com/astral-sh/ruff/pull/21034))
- \[`flake8-django`\] Apply `DJ001` to annotated fields ([#20907](https://github.com/astral-sh/ruff/pull/20907))
- \[`flake8-pyi`\] Fix `PYI034` to not trigger on metaclasses (`PYI034`) ([#20881](https://github.com/astral-sh/ruff/pull/20881))
- \[`flake8-type-checking`\] Fix `TC003` false positive with `future-annotations` ([#21125](https://github.com/astral-sh/ruff/pull/21125))
- \[`pyflakes`\] Fix false positive for `__class__` in lambda expressions within class definitions (`F821`) ([#20564](https://github.com/astral-sh/ruff/pull/20564))
- \[`pyupgrade`\] Fix false positive for `TypeVar` with default on Python \<3.13 (`UP046`,`UP047`) ([#21045](https://github.com/astral-sh/ruff/pull/21045))

### Rule changes

- Add missing docstring sections to the numpy list ([#20931](https://github.com/astral-sh/ruff/pull/20931))
- \[`airflow`\] Extend `airflow.models..Param` check (`AIR311`) ([#21043](https://github.com/astral-sh/ruff/pull/21043))
- \[`airflow`\] Warn that `airflow....DAG.create_dagrun` has been removed (`AIR301`) ([#21093](https://github.com/astral-sh/ruff/pull/21093))
- \[`refurb`\] Preserve digit separators in `Decimal` constructor (`FURB157`) ([#20588](https://github.com/astral-sh/ruff/pull/20588))

### Server

- Avoid sending an unnecessary "clear diagnostics" message for clients supporting pull diagnostics ([#21105](https://github.com/astral-sh/ruff/pull/21105))

### Documentation

- \[`flake8-bandit`\] Fix correct example for `S308` ([#21128](https://github.com/astral-sh/ruff/pull/21128))

### Other changes

- Clearer error message when `line-length` goes beyond threshold ([#21072](https://github.com/astral-sh/ruff/pull/21072))

### Contributors

- [@danparizher](https://github.com/danparizher)
- [@jvacek](https://github.com/jvacek)
- [@ntBre](https://github.com/ntBre)
- [@augustelalande](https://github.com/augustelalande)
- [@prakhar1144](https://github.com/prakhar1144)
- [@TaKO8Ki](https://github.com/TaKO8Ki)
- [@dylwil3](https://github.com/dylwil3)
- [@fatelei](https://github.com/fatelei)
- [@ShaharNaveh](https://github.com/ShaharNaveh)
- [@Lee-W](https://github.com/Lee-W)

## 0.14.2

Released on 2025-10-23.

### Preview features

- \[`flake8-gettext`\] Resolve qualified names and built-in bindings (`INT001`, `INT002`, `INT003`) ([#19045](https://github.com/astral-sh/ruff/pull/19045))

### Bug fixes

- Avoid reusing nested, interpolated quotes before Python 3.12 ([#20930](https://github.com/astral-sh/ruff/pull/20930))
- Catch syntax errors in nested interpolations before Python 3.12 ([#20949](https://github.com/astral-sh/ruff/pull/20949))
- \[`fastapi`\] Handle ellipsis defaults in `FAST002` autofix ([#20810](https://github.com/astral-sh/ruff/pull/20810))
- \[`flake8-simplify`\] Skip `SIM911` when unknown arguments are present ([#20697](https://github.com/astral-sh/ruff/pull/20697))
- \[`pyupgrade`\] Always parenthesize assignment expressions in fix for `f-string` (`UP032`) ([#21003](https://github.com/astral-sh/ruff/pull/21003))
- \[`pyupgrade`\] Fix `UP032` conversion for decimal ints with underscores ([#21022](https://github.com/astral-sh/ruff/pull/21022))
- \[`fastapi`\] Skip autofix for keyword and `__debug__` path params (`FAST003`) ([#20960](https://github.com/astral-sh/ruff/pull/20960))

### Rule changes

- \[`flake8-bugbear`\] Skip `B905` and `B912` for fewer than two iterables and no starred arguments ([#20998](https://github.com/astral-sh/ruff/pull/20998))
- \[`ruff`\] Use `DiagnosticTag` for more `pyflakes` and `pandas` rules ([#20801](https://github.com/astral-sh/ruff/pull/20801))

### CLI

- Improve JSON output from `ruff rule` ([#20168](https://github.com/astral-sh/ruff/pull/20168))

### Documentation

- Add source to testimonial ([#20971](https://github.com/astral-sh/ruff/pull/20971))
- Document when a rule was added ([#21035](https://github.com/astral-sh/ruff/pull/21035))

### Other changes

- [syntax-errors] Name is parameter and global ([#20426](https://github.com/astral-sh/ruff/pull/20426))
- [syntax-errors] Alternative `match` patterns bind different names ([#20682](https://github.com/astral-sh/ruff/pull/20682))

### Contributors

- [@hengky-kurniawan-1](https://github.com/hengky-kurniawan-1)
- [@ShalokShalom](https://github.com/ShalokShalom)
- [@robsdedude](https://github.com/robsdedude)
- [@LoicRiegel](https://github.com/LoicRiegel)
- [@TaKO8Ki](https://github.com/TaKO8Ki)
- [@dylwil3](https://github.com/dylwil3)
- [@11happy](https://github.com/11happy)
- [@ntBre](https://github.com/ntBre)

## 0.14.1

Released on 2025-10-16.

71 CLAUDE.md
@@ -1,71 +0,0 @@
# Ruff Repository

This repository contains both Ruff (a Python linter and formatter) and ty (a Python type checker). The crates follow a naming convention: `ruff_*` for Ruff-specific code and `ty_*` for ty-specific code. ty reuses several Ruff crates, including the Python parser (`ruff_python_parser`) and AST definitions (`ruff_python_ast`).

## Running Tests

Run all tests (using `nextest` for faster execution):

```sh
cargo nextest run
```

For faster test execution, use the `fast-test` profile which enables optimizations while retaining debug info:

```sh
cargo nextest run --cargo-profile fast-test
```

Run tests for a specific crate:

```sh
cargo nextest run -p ty_python_semantic
```

Run a specific mdtest (use a substring of the test name):

```sh
MDTEST_TEST_FILTER="<filter>" cargo nextest run -p ty_python_semantic mdtest
```

Update snapshots after running tests:

```sh
cargo insta accept
```

## Running Clippy

```sh
cargo clippy --workspace --all-targets --all-features -- -D warnings
```

## Running Debug Builds

Use debug builds (not `--release`) when developing, as release builds lack debug assertions and have slower compile times.

Run Ruff:

```sh
cargo run --bin ruff -- check path/to/file.py
```

Run ty:

```sh
cargo run --bin ty -- check path/to/file.py
```

## Pull Requests

When working on ty, PR titles should start with `[ty]` and be tagged with the `ty` GitHub label.

## Development Guidelines

- All changes must be tested. If you're not testing your changes, you're not done.
- Get your tests to pass. If you didn't run the tests, your code does not work.
- Follow existing code style. Check neighboring files for patterns.
- Always run `uvx prek run -a` at the end of a task.
- Avoid writing significant amounts of new code. This is often a sign that we're missing an existing method or mechanism that could help solve the problem. Look for existing utilities first.
- Avoid falling back to patterns that require `panic!`, `unreachable!`, or `.unwrap()`. Instead, try to encode those constraints in the type system.
- Prefer let chains (`if let` combined with `&&`) over nested `if let` statements to reduce indentation and improve readability; a sketch follows this list.
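
A minimal sketch of the last two guidelines, using a made-up helper that reads the first line of an optional file path (the function and argument names are illustrative only, not part of the codebase):

```rust
use std::fs;

// Nested `if let`: each check adds another level of indentation.
fn first_line_nested(path: Option<&str>) -> Option<String> {
    if let Some(path) = path {
        if let Ok(contents) = fs::read_to_string(path) {
            return contents.lines().next().map(str::to_owned);
        }
    }
    None
}

// Let chain: the same logic stays flat, and returning `Option`
// instead of calling `.unwrap()` keeps the failure case in the types.
fn first_line_chained(path: Option<&str>) -> Option<String> {
    if let Some(path) = path
        && let Ok(contents) = fs::read_to_string(path)
    {
        return contents.lines().next().map(str::to_owned);
    }
    None
}
```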
@@ -53,12 +53,12 @@ cargo install cargo-insta
You'll need [uv](https://docs.astral.sh/uv/getting-started/installation/) (or `pipx` and `pip`) to
run Python utility commands.

You can optionally install hooks to automatically run the validation checks
You can optionally install pre-commit hooks to automatically run the validation checks
when making a commit:

```shell
uv tool install prek
prek install
uv tool install pre-commit
pre-commit install
```

We recommend [nextest](https://nexte.st/) to run Ruff's test suite (via `cargo nextest run`),
@@ -85,7 +85,7 @@ and that it passes both the lint and test validation checks:
```shell
cargo clippy --workspace --all-targets --all-features -- -D warnings # Rust linting
RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
uvx prek run -a # Rust and Python formatting, Markdown and Python linting, etc.
uvx pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
```

These checks will run on GitHub Actions when you open your pull request, but running them locally
@@ -280,57 +280,15 @@ Note that plugin-specific configuration options are defined in their own modules

Finally, regenerate the documentation and generated code with `cargo dev generate-all`.

### Opening a PR

After you finish your changes, the next step is to open a PR. By default, two
sections will be filled into the PR body: the summary and the test plan.

#### The summary

The summary is intended to give us as maintainers information about your PR.
This should typically include a link to the relevant issue(s) you're addressing
in your PR, as well as a summary of the issue and your approach to fixing it. If
you have any questions about your approach or design, or if you considered
alternative approaches, that can also be helpful to include.

AI can be helpful in generating both the code and summary of your PR, but a
successful contribution should still be carefully reviewed by you and the
summary editorialized before submitting a PR. A great summary is thorough but
also succinct and gives us the context we need to review your PR.

You can find examples of excellent issues and PRs by searching for the
[`great writeup`](https://github.com/astral-sh/ruff/issues?q=label%3A%22great%20writeup%22)
label.

#### The test plan

The test plan is likely to be shorter than the summary and can be as simple as
"Added new snapshot tests for `RUF123`," at least for rule bugs. For LSP or some
types of CLI changes, in particular, it can also be helpful to include
screenshots or recordings of your change in action.

#### Ecosystem report

After opening the PR, an ecosystem report will be run as part of CI. This shows
a diff of linter and formatter behavior before and after the changes in your PR.
Going through these changes and reporting your findings in the PR summary or an
additional comment help us to review your PR more efficiently. It's also a great
way to find new test cases to incorporate into your PR if you identify any
issues.

#### PR status

To help us know when your PR is ready for review again, please either move your
PR back to a draft while working on it (marking it ready for review afterwards
will ping the previous reviewers) or explicitly re-request a review. This helps
us to avoid re-reviewing a PR while you're still working on it and also to
prioritize PRs that are definitely ready for review.

You can also thumbs-up or mark as resolved any comments we leave to let us know
you addressed them.

## MkDocs

> [!NOTE]
>
> The documentation uses Material for MkDocs Insiders, which is closed-source software.
> This means only members of the Astral organization can preview the documentation exactly as it
> will appear in production.
> Outside contributors can still preview the documentation, but there will be some differences. Consult [the Material for MkDocs documentation](https://squidfunk.github.io/mkdocs-material/insiders/benefits/#features) for which features are exclusively available in the insiders version.

To preview any changes to the documentation locally:

1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
@@ -344,7 +302,11 @@ To preview any changes to the documentation locally:
1. Run the development server with:

```shell
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.yml
# For contributors.
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml

# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
```

The documentation should then be available locally at
@@ -381,7 +343,7 @@ Commit each step of this process separately for easier review.

- Often labels will be missing from pull requests they will need to be manually organized into the proper section
- Changes should be edited to be user-facing descriptions, avoiding internal details
- Square brackets (eg, `[ruff]` project name) will be automatically escaped by `prek`
- Square brackets (eg, `[ruff]` project name) will be automatically escaped by `pre-commit`

Additionally, for minor releases:

879 Cargo.lock generated
File diff suppressed because it is too large

62 Cargo.toml
@@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
# Please update rustfmt.toml when bumping the Rust edition
edition = "2024"
rust-version = "1.90"
rust-version = "1.88"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -45,7 +45,6 @@ ty = { path = "crates/ty" }
ty_combine = { path = "crates/ty_combine" }
ty_completion_eval = { path = "crates/ty_completion_eval" }
ty_ide = { path = "crates/ty_ide" }
ty_module_resolver = { path = "crates/ty_module_resolver" }
ty_project = { path = "crates/ty_project", default-features = false }
ty_python_semantic = { path = "crates/ty_python_semantic" }
ty_server = { path = "crates/ty_server" }
@@ -58,8 +57,8 @@ anstream = { version = "0.6.18" }
anstyle = { version = "1.0.10" }
anyhow = { version = "1.0.80" }
arc-swap = { version = "1.7.1" }
argfile = { version = "0.2.0" }
assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "2.0.0" }
bitflags = { version = "2.5.0" }
bitvec = { version = "1.0.1", default-features = false, features = [
@@ -71,31 +70,30 @@ camino = { version = "1.1.7" }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.6.0" }
clearscreen = { version = "4.0.0" }
csv = { version = "1.3.1" }
divan = { package = "codspeed-divan-compat", version = "4.0.4" }
codspeed-criterion-compat = { version = "4.0.4", default-features = false }
colored = { version = "3.0.0" }
compact_str = "0.9.0"
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
criterion = { version = "0.8.0", default-features = false }
compact_str = "0.9.0"
criterion = { version = "0.7.0", default-features = false }
crossbeam = { version = "0.8.4" }
csv = { version = "1.3.1" }
dashmap = { version = "6.0.1" }
datatest-stable = { version = "0.3.3" }
divan = { package = "codspeed-divan-compat", version = "4.0.4" }
drop_bomb = { version = "0.1.5" }
dir-test = { version = "0.4.0" }
dunce = { version = "1.0.5" }
etcetera = { version = "0.11.0" }
drop_bomb = { version = "0.1.5" }
etcetera = { version = "0.10.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
get-size2 = { version = "0.7.3", features = [
getrandom = { version = "0.3.1" }
get-size2 = { version = "0.7.0", features = [
"derive",
"smallvec",
"hashbrown",
"compact-str",
"ordermap"
] }
getrandom = { version = "0.3.1" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
@@ -105,7 +103,7 @@ hashbrown = { version = "0.16.0", default-features = false, features = [
"inline-more",
] }
heck = "0.5.0"
ignore = { version = "0.4.24" }
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.6.0" }
@@ -117,8 +115,8 @@ is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.14.0" }
jiff = { version = "0.2.0" }
jod-thread = { version = "1.0.0" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "1.0.0" }
libc = { version = "0.2.153" }
libcst = { version = "1.8.4", default-features = false }
log = { version = "0.4.17" }
@@ -126,12 +124,12 @@ lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
"proposed",
] }
matchit = { version = "0.9.0" }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "8.0.0" }
ordermap = { version = "1.0.0" }
ordermap = { version = "0.5.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
@@ -140,8 +138,6 @@ pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.13.4" }
quick-junit = { version = "0.5.0" }
quickcheck = { version = "1.0.3", default-features = false }
quickcheck_macros = { version = "1.0.0" }
quote = { version = "1.0.23" }
rand = { version = "0.9.0" }
rayon = { version = "1.10.0" }
@@ -150,13 +146,13 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "9860ff6ca0f1f8f3a8d6b832020002790b501254", default-features = false, features = [
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "ef9f9329be6923acd050c8dddd172e3bc93e8051", default-features = false, features = [
"compact_str",
"macros",
"salsa_unstable",
"inventory",
] }
schemars = { version = "1.0.4" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.4" }
@@ -177,7 +173,6 @@ snapbox = { version = "0.6.0", features = [
static_assertions = "1.1.0"
strum = { version = "0.27.0", features = ["strum_macros"] }
strum_macros = { version = "0.27.0" }
supports-hyperlinks = { version = "3.1.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
@@ -198,9 +193,9 @@ tryfn = { version = "0.2.1" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
unicode-normalization = { version = "0.1.23" }
unicode-width = { version = "0.2.0" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics"] }
walkdir = { version = "2.3.2" }
@@ -210,13 +205,8 @@ wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }

[workspace.metadata.cargo-shear]
ignored = [
"getrandom",
"ruff_options_metadata",
"uuid",
"get-size2",
"ty_completion_eval",
]
ignored = ["getrandom", "ruff_options_metadata", "uuid", "get-size2", "ty_completion_eval"]


[workspace.lints.rust]
unsafe_code = "warn"
@@ -276,6 +266,7 @@ if_not_else = "allow"
# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
large_stack_arrays = "allow"


[profile.release]
lto = "fat"
codegen-units = 16
@@ -290,12 +281,6 @@ codegen-units = 1
[profile.release.package.salsa]
codegen-units = 1

# Profile to build a minimally sized binary for ruff/ty
[profile.minimal-size]
inherits = "release"
opt-level = "z"
codegen-units = 1

[profile.dev.package.insta]
opt-level = 3

@@ -335,11 +320,6 @@ strip = false
debug = "full"
lto = false

# Profile for faster iteration: applies minimal optimizations for faster tests.
[profile.fast-test]
inherits = "dev"
opt-level = 1

# The profile that 'cargo dist' will build with.
[profile.dist]
inherits = "release"

17 README.md
@@ -57,11 +57,8 @@ Ruff is extremely actively developed and used in major open-source projects like

...and [many more](#whos-using-ruff).

Ruff is backed by [Astral](https://astral.sh), the creators of
[uv](https://github.com/astral-sh/uv) and [ty](https://github.com/astral-sh/ty).

Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff), or the
original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).

## Testimonials

@@ -92,7 +89,8 @@ creator of [isort](https://github.com/PyCQA/isort):
> Just switched my first project to Ruff. Only one downside so far: it's so fast I couldn't believe
> it was working till I intentionally introduced some errors.

[**Tim Abbott**](https://github.com/zulip/zulip/pull/23431#issuecomment-1302557034), lead developer of [Zulip](https://github.com/zulip/zulip) (also [here](https://github.com/astral-sh/ruff/issues/465#issuecomment-1317400028)):
[**Tim Abbott**](https://github.com/astral-sh/ruff/issues/465#issuecomment-1317400028), lead
developer of [Zulip](https://github.com/zulip/zulip):

> This is just ridiculously fast... `ruff` is amazing.

@@ -150,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.14.11/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.11/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.14.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.1/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -184,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.14.11
rev: v0.14.1
hooks:
# Run the linter.
- id: ruff-check
@@ -494,7 +492,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [PyTorch](https://github.com/pytorch/pytorch)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
- [PyScripter](https://github.com/pyscripter/pyscripter)
- [PyVista](https://github.com/pyvista/pyvista)
- [Reflex](https://github.com/reflex-dev/reflex)
- [River](https://github.com/online-ml/river)
@@ -4,7 +4,6 @@ extend-exclude = [
|
||||
"crates/ty_vendored/vendor/**/*",
|
||||
"**/resources/**/*",
|
||||
"**/snapshots/**/*",
|
||||
"crates/ruff_linter/src/rules/flake8_implicit_str_concat/rules/collection_literal.rs",
|
||||
# Completion tests tend to have a lot of incomplete
|
||||
# words naturally. It's annoying to have to make all
|
||||
# of them actually words. So just ignore typos here.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff"
|
||||
version = "0.14.11"
|
||||
version = "0.14.1"
|
||||
publish = true
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
@@ -12,13 +12,6 @@ license = { workspace = true }
|
||||
readme = "../../README.md"
|
||||
default-run = "ruff"
|
||||
|
||||
[package.metadata.cargo-shear]
|
||||
# Used via macro expansion.
|
||||
ignored = ["jiff"]
|
||||
|
||||
[package.metadata.dist]
|
||||
dist = true
|
||||
|
||||
[dependencies]
|
||||
ruff_cache = { workspace = true }
|
||||
ruff_db = { workspace = true, default-features = false, features = ["os"] }
|
||||
@@ -48,7 +41,6 @@ colored = { workspace = true }
|
||||
filetime = { workspace = true }
|
||||
globwalk = { workspace = true }
|
||||
ignore = { workspace = true }
|
||||
indexmap = { workspace = true }
|
||||
is-macro = { workspace = true }
|
||||
itertools = { workspace = true }
|
||||
jiff = { workspace = true }
|
||||
@@ -69,12 +61,6 @@ tracing = { workspace = true, features = ["log"] }
|
||||
walkdir = { workspace = true }
|
||||
wild = { workspace = true }
|
||||
|
||||
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dependencies]
|
||||
tikv-jemallocator = { workspace = true }
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
mimalloc = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
# Enable test rules during development
|
||||
ruff_linter = { workspace = true, features = ["clap", "test-rules"] }
|
||||
@@ -90,5 +76,18 @@ ruff_python_trivia = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
test-case = { workspace = true }
|
||||
|
||||
[package.metadata.cargo-shear]
|
||||
# Used via macro expansion.
|
||||
ignored = ["jiff"]
|
||||
|
||||
[package.metadata.dist]
|
||||
dist = true
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
mimalloc = { workspace = true }
|
||||
|
||||
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dependencies]
|
||||
tikv-jemallocator = { workspace = true }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
@@ -7,10 +7,8 @@ use std::sync::Arc;
|
||||
|
||||
use crate::commands::completions::config::{OptionString, OptionStringParser};
|
||||
use anyhow::bail;
|
||||
use clap::builder::Styles;
|
||||
use clap::builder::styling::{AnsiColor, Effects};
|
||||
use clap::builder::{TypedValueParser, ValueParserFactory};
|
||||
use clap::{Parser, Subcommand};
|
||||
use clap::{Parser, Subcommand, command};
|
||||
use colored::Colorize;
|
||||
use itertools::Itertools;
|
||||
use path_absolutize::path_dedot;
|
||||
@@ -80,13 +78,6 @@ impl GlobalConfigArgs {
|
||||
}
|
||||
}
|
||||
|
||||
// Configures Clap v3-style help menu colors
|
||||
const STYLES: Styles = Styles::styled()
|
||||
.header(AnsiColor::Green.on_default().effects(Effects::BOLD))
|
||||
.usage(AnsiColor::Green.on_default().effects(Effects::BOLD))
|
||||
.literal(AnsiColor::Cyan.on_default().effects(Effects::BOLD))
|
||||
.placeholder(AnsiColor::Cyan.on_default());
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[command(
|
||||
author,
|
||||
@@ -95,7 +86,6 @@ const STYLES: Styles = Styles::styled()
|
||||
after_help = "For help with a specific command, see: `ruff help <command>`."
|
||||
)]
|
||||
#[command(version)]
|
||||
#[command(styles = STYLES)]
|
||||
pub struct Args {
|
||||
#[command(subcommand)]
|
||||
pub(crate) command: Command,
|
||||
@@ -167,7 +157,6 @@ pub enum AnalyzeCommand {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, clap::Parser)]
|
||||
#[expect(clippy::struct_excessive_bools)]
|
||||
pub struct AnalyzeGraphCommand {
|
||||
/// List of files or directories to include.
|
||||
#[clap(help = "List of files or directories to include [default: .]")]
|
||||
@@ -194,12 +183,6 @@ pub struct AnalyzeGraphCommand {
|
||||
/// Path to a virtual environment to use for resolving additional dependencies
|
||||
#[arg(long)]
|
||||
python: Option<PathBuf>,
|
||||
/// Include imports that are only used for type checking (i.e., imports within `if TYPE_CHECKING:` blocks).
|
||||
/// Use `--no-type-checking-imports` to exclude imports that are only used for type checking.
|
||||
#[arg(long, overrides_with("no_type_checking_imports"))]
|
||||
type_checking_imports: bool,
|
||||
#[arg(long, overrides_with("type_checking_imports"), hide = true)]
|
||||
no_type_checking_imports: bool,
|
||||
}
|
||||
|
||||
// The `Parser` derive is for ruff_dev, for ruff `Args` would be sufficient
|
||||
@@ -422,13 +405,8 @@ pub struct CheckCommand {
|
||||
)]
|
||||
pub statistics: bool,
|
||||
/// Enable automatic additions of `noqa` directives to failing lines.
|
||||
/// Optionally provide a reason to append after the codes.
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "REASON",
|
||||
default_missing_value = "",
|
||||
num_args = 0..=1,
|
||||
require_equals = true,
|
||||
// conflicts_with = "add_noqa",
|
||||
conflicts_with = "show_files",
|
||||
conflicts_with = "show_settings",
|
||||
@@ -440,7 +418,7 @@ pub struct CheckCommand {
|
||||
conflicts_with = "fix",
|
||||
conflicts_with = "diff",
|
||||
)]
|
||||
pub add_noqa: Option<String>,
|
||||
pub add_noqa: bool,
|
||||
/// See the files Ruff will be run against with the current settings.
|
||||
#[arg(
|
||||
long,
|
||||
@@ -846,10 +824,6 @@ impl AnalyzeGraphCommand {
|
||||
string_imports_min_dots: self.min_dots,
|
||||
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
|
||||
target_version: self.target_version.map(ast::PythonVersion::from),
|
||||
type_checking_imports: resolve_bool_arg(
|
||||
self.type_checking_imports,
|
||||
self.no_type_checking_imports,
|
||||
),
|
||||
..ExplicitConfigOverrides::default()
|
||||
};
|
||||
|
||||
@@ -1073,7 +1047,7 @@ Possible choices:
|
||||
/// etc.).
|
||||
#[expect(clippy::struct_excessive_bools)]
|
||||
pub struct CheckArguments {
|
||||
pub add_noqa: Option<String>,
|
||||
pub add_noqa: bool,
|
||||
pub diff: bool,
|
||||
pub exit_non_zero_on_fix: bool,
|
||||
pub exit_zero: bool,
|
||||
@@ -1346,7 +1320,6 @@ struct ExplicitConfigOverrides {
|
||||
extension: Option<Vec<ExtensionPair>>,
|
||||
detect_string_imports: Option<bool>,
|
||||
string_imports_min_dots: Option<usize>,
|
||||
type_checking_imports: Option<bool>,
|
||||
}
|
||||
|
||||
impl ConfigurationTransformer for ExplicitConfigOverrides {
|
||||
@@ -1437,9 +1410,6 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
|
||||
if let Some(string_imports_min_dots) = &self.string_imports_min_dots {
|
||||
config.analyze.string_imports_min_dots = Some(*string_imports_min_dots);
|
||||
}
|
||||
if let Some(type_checking_imports) = &self.type_checking_imports {
|
||||
config.analyze.type_checking_imports = Some(*type_checking_imports);
|
||||
}
|
||||
|
||||
config
|
||||
}
|
||||
|
||||
@@ -21,7 +21,6 @@ pub(crate) fn add_noqa(
|
||||
files: &[PathBuf],
|
||||
pyproject_config: &PyprojectConfig,
|
||||
config_arguments: &ConfigArguments,
|
||||
reason: Option<&str>,
|
||||
) -> Result<usize> {
|
||||
// Collect all the files to check.
|
||||
let start = Instant::now();
|
||||
@@ -77,14 +76,7 @@ pub(crate) fn add_noqa(
|
||||
return None;
|
||||
}
|
||||
};
|
||||
match add_noqa_to_path(
|
||||
path,
|
||||
package,
|
||||
&source_kind,
|
||||
source_type,
|
||||
&settings.linter,
|
||||
reason,
|
||||
) {
|
||||
match add_noqa_to_path(path, package, &source_kind, source_type, &settings.linter) {
|
||||
Ok(count) => Some(count),
|
||||
Err(e) => {
|
||||
error!("Failed to add noqa to {}: {e}", path.display());
|
||||
|
||||
@@ -2,17 +2,15 @@ use crate::args::{AnalyzeGraphArgs, ConfigArguments};
|
||||
use crate::resolve::resolve;
|
||||
use crate::{ExitStatus, resolve_default_files};
|
||||
use anyhow::Result;
|
||||
use indexmap::IndexSet;
|
||||
use log::{debug, warn};
|
||||
use path_absolutize::CWD;
|
||||
use ruff_db::system::{SystemPath, SystemPathBuf};
|
||||
use ruff_graph::{Direction, ImportMap, ModuleDb, ModuleImports};
|
||||
use ruff_linter::package::PackageRoot;
|
||||
use ruff_linter::source_kind::SourceKind;
|
||||
use ruff_linter::{warn_user, warn_user_once};
|
||||
use ruff_python_ast::{PySourceType, SourceType};
|
||||
use ruff_workspace::resolver::{ResolvedFile, match_exclusion, python_files_in_path};
|
||||
use rustc_hash::{FxBuildHasher, FxHashMap};
|
||||
use rustc_hash::FxHashMap;
|
||||
use std::io::Write;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{Arc, Mutex};
|
||||
@@ -60,34 +58,17 @@ pub(crate) fn analyze_graph(
|
||||
})
|
||||
.collect::<FxHashMap<_, _>>();
|
||||
|
||||
// Create a database from the source roots, combining configured `src` paths with detected
|
||||
// package roots. Configured paths are added first so they take precedence, and duplicates
|
||||
// are removed.
|
||||
let mut src_roots: IndexSet<SystemPathBuf, FxBuildHasher> = IndexSet::default();
|
||||
|
||||
// Add configured `src` paths first (for precedence), filtering to only include existing
|
||||
// directories.
|
||||
src_roots.extend(
|
||||
pyproject_config
|
||||
.settings
|
||||
.linter
|
||||
.src
|
||||
.iter()
|
||||
.filter(|path| path.is_dir())
|
||||
.filter_map(|path| SystemPathBuf::from_path_buf(path.clone()).ok()),
|
||||
);
|
||||
|
||||
// Add detected package roots.
|
||||
src_roots.extend(
|
||||
package_roots
|
||||
.values()
|
||||
.filter_map(|package| package.as_deref())
|
||||
.filter_map(|path| path.parent())
|
||||
.filter_map(|path| SystemPathBuf::from_path_buf(path.to_path_buf()).ok()),
|
||||
);
|
||||
// Create a database from the source roots.
|
||||
let src_roots = package_roots
|
||||
.values()
|
||||
.filter_map(|package| package.as_deref())
|
||||
.filter_map(|package| package.parent())
|
||||
.map(Path::to_path_buf)
|
||||
.filter_map(|path| SystemPathBuf::from_path_buf(path).ok())
|
||||
.collect();
|
||||
|
||||
let db = ModuleDb::from_src_roots(
|
||||
src_roots.into_iter().collect(),
|
||||
src_roots,
|
||||
pyproject_config
|
||||
.settings
|
||||
.analyze
|
||||
@@ -123,7 +104,6 @@ pub(crate) fn analyze_graph(
|
||||
let settings = resolver.resolve(path);
|
||||
let string_imports = settings.analyze.string_imports;
|
||||
let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();
|
||||
let type_checking_imports = settings.analyze.type_checking_imports;
|
||||
|
||||
// Skip excluded files.
|
||||
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
|
||||
@@ -147,6 +127,10 @@ pub(crate) fn analyze_graph(
|
||||
},
|
||||
Some(language) => PySourceType::from(language),
|
||||
};
|
||||
if matches!(source_type, PySourceType::Ipynb) {
|
||||
debug!("Ignoring Jupyter notebook: {}", path.display());
|
||||
continue;
|
||||
}
|
||||
|
||||
// Convert to system paths.
|
||||
let Ok(package) = package.map(SystemPathBuf::from_path_buf).transpose() else {
|
||||
@@ -163,35 +147,13 @@ pub(crate) fn analyze_graph(
|
||||
let root = root.clone();
|
||||
let result = inner_result.clone();
|
||||
scope.spawn(move |_| {
|
||||
// Extract source code (handles both .py and .ipynb files)
|
||||
let source_kind = match SourceKind::from_path(path.as_std_path(), source_type) {
|
||||
Ok(Some(source_kind)) => source_kind,
|
||||
Ok(None) => {
|
||||
debug!("Skipping non-Python notebook: {path}");
|
||||
return;
|
||||
}
|
||||
Err(err) => {
|
||||
warn!("Failed to read source for {path}: {err}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let source_code = source_kind.source_code();
|
||||
|
||||
// Identify any imports via static analysis.
|
||||
let mut imports = ModuleImports::detect(
|
||||
&db,
|
||||
source_code,
|
||||
source_type,
|
||||
&path,
|
||||
package.as_deref(),
|
||||
string_imports,
|
||||
type_checking_imports,
|
||||
)
|
||||
.unwrap_or_else(|err| {
|
||||
warn!("Failed to generate import map for {path}: {err}");
|
||||
ModuleImports::default()
|
||||
});
|
||||
let mut imports =
|
||||
ModuleImports::detect(&db, &path, package.as_deref(), string_imports)
|
||||
.unwrap_or_else(|err| {
|
||||
warn!("Failed to generate import map for {path}: {err}");
|
||||
ModuleImports::default()
|
||||
});
|
||||
|
||||
debug!("Discovered {} imports for {}", imports.len(), path);
|
||||
|
||||
|
||||
@@ -370,7 +370,7 @@ pub(crate) fn format_source(
|
||||
let line_index = LineIndex::from_source_text(unformatted);
|
||||
let byte_range = range.to_text_range(unformatted, &line_index);
|
||||
format_range(unformatted, byte_range, options).map(|formatted_range| {
|
||||
let mut formatted = unformatted.clone();
|
||||
let mut formatted = unformatted.to_string();
|
||||
formatted.replace_range(
|
||||
std::ops::Range::<usize>::from(formatted_range.source_range()),
|
||||
formatted_range.as_code(),
|
||||
@@ -879,7 +879,19 @@ impl From<&FormatCommandError> for Diagnostic {
|
||||
| FormatCommandError::Write(_, source_error) => {
|
||||
Diagnostic::new(DiagnosticId::Io, Severity::Error, source_error)
|
||||
}
|
||||
FormatCommandError::Format(_, format_module_error) => format_module_error.into(),
|
||||
FormatCommandError::Format(_, format_module_error) => match format_module_error {
|
||||
FormatModuleError::ParseError(parse_error) => Diagnostic::new(
|
||||
DiagnosticId::InternalError,
|
||||
Severity::Error,
|
||||
&parse_error.error,
|
||||
),
|
||||
FormatModuleError::FormatError(format_error) => {
|
||||
Diagnostic::new(DiagnosticId::InternalError, Severity::Error, format_error)
|
||||
}
|
||||
FormatModuleError::PrintError(print_error) => {
|
||||
Diagnostic::new(DiagnosticId::InternalError, Severity::Error, print_error)
|
||||
}
|
||||
},
|
||||
FormatCommandError::RangeFormatNotebook(_) => Diagnostic::new(
|
||||
DiagnosticId::InvalidCliOption,
|
||||
Severity::Error,
|
||||
@@ -1305,7 +1317,7 @@ mod tests {
|
||||
settings.add_filter(r"(Panicked at) [^:]+:\d+:\d+", "$1 <location>");
|
||||
let _s = settings.bind_to_scope();
|
||||
|
||||
assert_snapshot!(str::from_utf8(&buf)?, @"
|
||||
assert_snapshot!(str::from_utf8(&buf)?, @r"
|
||||
io: test.py: Permission denied
|
||||
--> test.py:1:1
|
||||
|
||||
|
||||
@@ -16,8 +16,6 @@ struct LinterInfo {
|
||||
prefix: &'static str,
|
||||
name: &'static str,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
url: Option<&'static str>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
categories: Option<Vec<LinterCategoryInfo>>,
|
||||
}
|
||||
|
||||
@@ -52,7 +50,6 @@ pub(crate) fn linter(format: HelpFormat) -> Result<()> {
|
||||
.map(|linter_info| LinterInfo {
|
||||
prefix: linter_info.common_prefix(),
|
||||
name: linter_info.name(),
|
||||
url: linter_info.url(),
|
||||
categories: linter_info.upstream_categories().map(|cats| {
|
||||
cats.iter()
|
||||
.map(|c| LinterCategoryInfo {
|
||||
|
||||
@@ -7,7 +7,6 @@ use serde::{Serialize, Serializer};
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
use ruff_linter::FixAvailability;
|
||||
use ruff_linter::codes::RuleGroup;
|
||||
use ruff_linter::registry::{Linter, Rule, RuleNamespace};
|
||||
|
||||
use crate::args::HelpFormat;
|
||||
@@ -20,12 +19,9 @@ struct Explanation<'a> {
|
||||
summary: &'a str,
|
||||
message_formats: &'a [&'a str],
|
||||
fix: String,
|
||||
fix_availability: FixAvailability,
|
||||
#[expect(clippy::struct_field_names)]
|
||||
explanation: Option<&'a str>,
|
||||
preview: bool,
|
||||
status: RuleGroup,
|
||||
source_location: SourceLocation,
|
||||
}
|
||||
|
||||
impl<'a> Explanation<'a> {
|
||||
@@ -40,14 +36,8 @@ impl<'a> Explanation<'a> {
|
||||
summary: rule.message_formats()[0],
|
||||
message_formats: rule.message_formats(),
|
||||
fix,
|
||||
fix_availability: rule.fixable(),
|
||||
explanation: rule.explanation(),
|
||||
preview: rule.is_preview(),
|
||||
status: rule.group(),
|
||||
source_location: SourceLocation {
|
||||
file: rule.file(),
|
||||
line: rule.line(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -132,14 +122,3 @@ pub(crate) fn rules(format: HelpFormat) -> Result<()> {
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// The location of the rule's implementation in the Ruff source tree, relative to the repository
|
||||
/// root.
|
||||
///
|
||||
/// For most rules this will point to the `#[derive(ViolationMetadata)]` line above the rule's
|
||||
/// struct.
|
||||
#[derive(Serialize)]
|
||||
struct SourceLocation {
|
||||
file: &'static str,
|
||||
line: u32,
|
||||
}
|
||||
|
||||
@@ -29,10 +29,10 @@ pub(crate) fn show_settings(
|
||||
bail!("No files found under the given path");
|
||||
};
|
||||
|
||||
let (settings, config_path) = resolver.resolve_with_path(&path);
|
||||
let settings = resolver.resolve(&path);
|
||||
|
||||
writeln!(writer, "Resolved settings for: \"{}\"", path.display())?;
|
||||
if let Some(settings_path) = config_path {
|
||||
if let Some(settings_path) = pyproject_config.path.as_ref() {
|
||||
writeln!(writer, "Settings path: \"{}\"", settings_path.display())?;
|
||||
}
|
||||
write!(writer, "{settings}")?;
|
||||
|
||||
@@ -4,3 +4,4 @@ source: crates/ruff/src/commands/check.rs
|
||||
/home/ferris/project/code.py:1:1: E902 Permission denied (os error 13)
|
||||
/home/ferris/project/notebook.ipynb:1:1: E902 Permission denied (os error 13)
|
||||
/home/ferris/project/pyproject.toml:1:1: E902 Permission denied (os error 13)
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ use std::sync::mpsc::channel;
|
||||
use anyhow::Result;
|
||||
use clap::CommandFactory;
|
||||
use colored::Colorize;
|
||||
use log::error;
|
||||
use log::{error, warn};
|
||||
use notify::{RecursiveMode, Watcher, recommended_watcher};
|
||||
|
||||
use args::{GlobalConfigArgs, ServerCommand};
|
||||
@@ -319,20 +319,12 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
|
||||
warn_user!("Detected debug build without --no-cache.");
|
||||
}
|
||||
|
||||
if let Some(reason) = &cli.add_noqa {
|
||||
if cli.add_noqa {
|
||||
if !fix_mode.is_generate() {
|
||||
warn_user!("--fix is incompatible with --add-noqa.");
|
||||
}
|
||||
if reason.contains(['\n', '\r']) {
|
||||
return Err(anyhow::anyhow!(
|
||||
"--add-noqa <reason> cannot contain newline characters"
|
||||
));
|
||||
}
|
||||
|
||||
let reason_opt = (!reason.is_empty()).then_some(reason.as_str());
|
||||
|
||||
let modifications =
|
||||
commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments, reason_opt)?;
|
||||
commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?;
|
||||
if modifications > 0 && config_arguments.log_level >= LogLevel::Default {
|
||||
let s = if modifications == 1 { "" } else { "s" };
|
||||
#[expect(clippy::print_stderr)]
|
||||
|
||||
@@ -9,7 +9,9 @@ use itertools::{Itertools, iterate};
|
||||
use ruff_linter::linter::FixTable;
|
||||
use serde::Serialize;
|
||||
|
||||
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, SecondaryCode};
|
||||
use ruff_db::diagnostic::{
|
||||
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, SecondaryCode,
|
||||
};
|
||||
use ruff_linter::fs::relativize_path;
|
||||
use ruff_linter::logging::LogLevel;
|
||||
use ruff_linter::message::{EmitterContext, render_diagnostics};
|
||||
@@ -34,21 +36,9 @@ struct ExpandedStatistics<'a> {
|
||||
code: Option<&'a SecondaryCode>,
|
||||
name: &'static str,
|
||||
count: usize,
|
||||
#[serde(rename = "fixable")]
|
||||
all_fixable: bool,
|
||||
fixable_count: usize,
|
||||
fixable: bool,
|
||||
}
|
||||
|
||||
impl ExpandedStatistics<'_> {
|
||||
fn any_fixable(&self) -> bool {
|
||||
self.fixable_count > 0
|
||||
}
|
||||
}
|
||||
|
||||
/// Accumulator type for grouping diagnostics by code.
|
||||
/// Format: (`code`, `representative_diagnostic`, `total_count`, `fixable_count`)
|
||||
type DiagnosticGroup<'a> = (Option<&'a SecondaryCode>, &'a Diagnostic, usize, usize);
|
||||
|
||||
pub(crate) struct Printer {
|
||||
format: OutputFormat,
|
||||
log_level: LogLevel,
|
||||
@@ -145,7 +135,7 @@ impl Printer {
|
||||
if fixables.applicable > 0 {
|
||||
writeln!(
|
||||
writer,
|
||||
"{fix_prefix} {} fixable with the `--fix` option.",
|
||||
"{fix_prefix} {} fixable with the --fix option.",
|
||||
fixables.applicable
|
||||
)?;
|
||||
}
|
||||
@@ -268,41 +258,35 @@ impl Printer {
|
||||
diagnostics: &Diagnostics,
|
||||
writer: &mut dyn Write,
|
||||
) -> Result<()> {
|
||||
let required_applicability = self.unsafe_fixes.required_applicability();
|
||||
let statistics: Vec<ExpandedStatistics> = diagnostics
|
||||
.inner
|
||||
.iter()
|
||||
.sorted_by_key(|diagnostic| diagnostic.secondary_code())
|
||||
.fold(vec![], |mut acc: Vec<DiagnosticGroup>, diagnostic| {
|
||||
let is_fixable = diagnostic
|
||||
.fix()
|
||||
.is_some_and(|fix| fix.applies(required_applicability));
|
||||
let code = diagnostic.secondary_code();
|
||||
|
||||
if let Some((prev_code, _prev_message, count, fixable_count)) = acc.last_mut() {
|
||||
if *prev_code == code {
|
||||
*count += 1;
|
||||
if is_fixable {
|
||||
*fixable_count += 1;
|
||||
.map(|message| (message.secondary_code(), message))
|
||||
.sorted_by_key(|(code, message)| (*code, message.fixable()))
|
||||
.fold(
|
||||
vec![],
|
||||
|mut acc: Vec<((Option<&SecondaryCode>, &Diagnostic), usize)>, (code, message)| {
|
||||
if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
|
||||
if *prev_code == code {
|
||||
*count += 1;
|
||||
return acc;
|
||||
}
|
||||
return acc;
|
||||
}
|
||||
}
|
||||
acc.push((code, diagnostic, 1, usize::from(is_fixable)));
|
||||
acc
|
||||
})
|
||||
.iter()
|
||||
.map(
|
||||
|&(code, message, count, fixable_count)| ExpandedStatistics {
|
||||
code,
|
||||
name: message.name(),
|
||||
count,
|
||||
// Backward compatibility: `fixable` is true only when all violations are fixable.
|
||||
// See: https://github.com/astral-sh/ruff/pull/21513
|
||||
all_fixable: fixable_count == count,
|
||||
fixable_count,
|
||||
acc.push(((code, message), 1));
|
||||
acc
|
||||
},
|
||||
)
|
||||
.iter()
|
||||
.map(|&((code, message), count)| ExpandedStatistics {
|
||||
code,
|
||||
name: message.name(),
|
||||
count,
|
||||
fixable: if let Some(fix) = message.fix() {
|
||||
fix.applies(self.unsafe_fixes.required_applicability())
|
||||
} else {
|
||||
false
|
||||
},
|
||||
})
|
||||
.sorted_by_key(|statistic| Reverse(statistic.count))
|
||||
.collect();
|
||||
|
||||
@@ -326,14 +310,13 @@ impl Printer {
|
||||
.map(|statistic| statistic.code.map_or(0, |s| s.len()))
|
||||
.max()
|
||||
.unwrap();
|
||||
let any_fixable = statistics.iter().any(ExpandedStatistics::any_fixable);
|
||||
let any_fixable = statistics.iter().any(|statistic| statistic.fixable);
|
||||
|
||||
let all_fixable = format!("[{}] ", "*".cyan());
|
||||
let partially_fixable = format!("[{}] ", "-".cyan());
|
||||
let fixable = format!("[{}] ", "*".cyan());
|
||||
let unfixable = "[ ] ";
|
||||
|
||||
// By default, we mimic Flake8's `--statistics` format.
|
||||
for statistic in &statistics {
|
||||
for statistic in statistics {
|
||||
writeln!(
|
||||
writer,
|
||||
"{:>count_width$}\t{:<code_width$}\t{}{}",
|
||||
@@ -345,10 +328,8 @@ impl Printer {
|
||||
.red()
|
||||
.bold(),
|
||||
if any_fixable {
|
||||
if statistic.all_fixable {
|
||||
&all_fixable
|
||||
} else if statistic.any_fixable() {
|
||||
&partially_fixable
|
||||
if statistic.fixable {
|
||||
&fixable
|
||||
} else {
|
||||
unfixable
|
||||
}
|
||||
@@ -409,18 +390,21 @@ impl Printer {
|
||||
|
||||
let context = EmitterContext::new(&diagnostics.notebook_indexes);
|
||||
let format = if preview {
|
||||
self.format
|
||||
DiagnosticFormat::Full
|
||||
} else {
|
||||
OutputFormat::Concise
|
||||
DiagnosticFormat::Concise
|
||||
};
|
||||
let config = DisplayDiagnosticConfig::default()
|
||||
.preview(preview)
|
||||
.hide_severity(true)
|
||||
.color(!cfg!(test) && colored::control::SHOULD_COLORIZE.should_colorize())
|
||||
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
|
||||
.with_fix_applicability(self.unsafe_fixes.required_applicability())
|
||||
.show_fix_diff(preview);
|
||||
render_diagnostics(writer, format, config, &context, &diagnostics.inner)?;
|
||||
.format(format)
|
||||
.with_fix_applicability(self.unsafe_fixes.required_applicability());
|
||||
write!(
|
||||
writer,
|
||||
"{}",
|
||||
DisplayDiagnostics::new(&context, &config, &diagnostics.inner)
|
||||
)?;
|
||||
}
|
||||
writer.flush()?;
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
---
|
||||
source: crates/ruff/src/version.rs
|
||||
expression: version
|
||||
snapshot_kind: text
|
||||
---
|
||||
0.0.0
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
---
|
||||
source: crates/ruff/src/version.rs
|
||||
expression: version
|
||||
snapshot_kind: text
|
||||
---
|
||||
0.0.0 (53b0f5d92 2023-10-19)
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
---
|
||||
source: crates/ruff/src/version.rs
|
||||
expression: version
|
||||
snapshot_kind: text
|
||||
---
|
||||
0.0.0+24 (53b0f5d92 2023-10-19)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
---
|
||||
source: crates/ruff/src/version.rs
|
||||
expression: version
|
||||
snapshot_kind: text
|
||||
---
|
||||
{
|
||||
"version": "0.0.0",
|
||||
|
||||
@@ -132,29 +132,29 @@ fn dependents() -> Result<()> {
|
||||
insta::with_settings!({
|
||||
filters => INSTA_FILTERS.to_vec(),
|
||||
}, {
|
||||
assert_cmd_snapshot!(command().arg("--direction").arg("dependents").current_dir(&root), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [],
|
||||
"ruff/b.py": [
|
||||
"ruff/a.py"
|
||||
],
|
||||
"ruff/c.py": [
|
||||
"ruff/b.py"
|
||||
],
|
||||
"ruff/d.py": [
|
||||
"ruff/c.py"
|
||||
],
|
||||
"ruff/e.py": [
|
||||
"ruff/d.py"
|
||||
]
|
||||
}
|
||||
assert_cmd_snapshot!(command().arg("--direction").arg("dependents").current_dir(&root), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [],
|
||||
"ruff/b.py": [
|
||||
"ruff/a.py"
|
||||
],
|
||||
"ruff/c.py": [
|
||||
"ruff/b.py"
|
||||
],
|
||||
"ruff/d.py": [
|
||||
"ruff/c.py"
|
||||
],
|
||||
"ruff/e.py": [
|
||||
"ruff/d.py"
|
||||
]
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"#);
|
||||
----- stderr -----
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
@@ -184,21 +184,21 @@ fn string_detection() -> Result<()> {
|
||||
insta::with_settings!({
|
||||
filters => INSTA_FILTERS.to_vec(),
|
||||
}, {
|
||||
assert_cmd_snapshot!(command().current_dir(&root), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [
|
||||
"ruff/b.py"
|
||||
],
|
||||
"ruff/b.py": [],
|
||||
"ruff/c.py": []
|
||||
}
|
||||
assert_cmd_snapshot!(command().current_dir(&root), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [
|
||||
"ruff/b.py"
|
||||
],
|
||||
"ruff/b.py": [],
|
||||
"ruff/c.py": []
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"#);
|
||||
----- stderr -----
|
||||
"###);
|
||||
});
|
||||
|
||||
insta::with_settings!({
|
||||
@@ -319,7 +319,7 @@ fn globs() -> Result<()> {
|
||||
insta::with_settings!({
|
||||
filters => INSTA_FILTERS.to_vec(),
|
||||
}, {
|
||||
assert_cmd_snapshot!(command().current_dir(&root), @r#"
|
||||
assert_cmd_snapshot!(command().current_dir(&root), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -340,7 +340,7 @@ fn globs() -> Result<()> {
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"#);
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
@@ -368,7 +368,7 @@ fn exclude() -> Result<()> {
|
||||
insta::with_settings!({
|
||||
filters => INSTA_FILTERS.to_vec(),
|
||||
}, {
|
||||
assert_cmd_snapshot!(command().current_dir(&root), @r#"
|
||||
assert_cmd_snapshot!(command().current_dir(&root), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -381,7 +381,7 @@ fn exclude() -> Result<()> {
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"#);
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
@@ -421,7 +421,7 @@ fn wildcard() -> Result<()> {
|
||||
insta::with_settings!({
|
||||
filters => INSTA_FILTERS.to_vec(),
|
||||
}, {
|
||||
assert_cmd_snapshot!(command().current_dir(&root), @r#"
|
||||
assert_cmd_snapshot!(command().current_dir(&root), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -443,7 +443,7 @@ fn wildcard() -> Result<()> {
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"#);
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
@@ -639,7 +639,7 @@ fn venv() -> Result<()> {
|
||||
}, {
|
||||
assert_cmd_snapshot!(
|
||||
command().args(["--python", "none"]).arg("packages/albatross").current_dir(&root),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
@@ -653,248 +653,3 @@ fn venv() -> Result<()> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn notebook_basic() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let root = ChildPath::new(tempdir.path());
|
||||
|
||||
root.child("ruff").child("__init__.py").write_str("")?;
|
||||
root.child("ruff")
|
||||
.child("a.py")
|
||||
.write_str(indoc::indoc! {r#"
|
||||
def helper():
|
||||
pass
|
||||
"#})?;
|
||||
|
||||
// Create a basic notebook with a simple import
|
||||
root.child("notebook.ipynb").write_str(indoc::indoc! {r#"
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from ruff.a import helper"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"language_info": {
|
||||
"name": "python",
|
||||
"version": "3.12.0"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
"#})?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => INSTA_FILTERS.to_vec(),
|
||||
}, {
|
||||
assert_cmd_snapshot!(command().current_dir(&root), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"notebook.ipynb": [
|
||||
"ruff/a.py"
|
||||
],
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": []
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"#);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test that the `src` configuration option is respected.
|
||||
///
|
||||
/// This is useful for monorepos where there are multiple source directories that need to be
|
||||
/// included in the module resolution search path.
|
||||
#[test]
|
||||
fn src_option() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let root = ChildPath::new(tempdir.path());
|
||||
|
||||
// Create a lib directory with a package.
|
||||
root.child("lib")
|
||||
.child("mylib")
|
||||
.child("__init__.py")
|
||||
.write_str("def helper(): pass")?;
|
||||
|
||||
// Create an app directory with a file that imports from mylib.
|
||||
root.child("app").child("__init__.py").write_str("")?;
|
||||
root.child("app")
|
||||
.child("main.py")
|
||||
.write_str("from mylib import helper")?;
|
||||
|
||||
// Without src configured, the import from mylib won't resolve.
|
||||
insta::with_settings!({
|
||||
filters => INSTA_FILTERS.to_vec(),
|
||||
}, {
|
||||
assert_cmd_snapshot!(command().arg("app").current_dir(&root), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"app/__init__.py": [],
|
||||
"app/main.py": []
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"#);
|
||||
});
|
||||
|
||||
// With src = ["lib"], the import should resolve.
|
||||
root.child("ruff.toml").write_str(indoc::indoc! {r#"
|
||||
src = ["lib"]
|
||||
"#})?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => INSTA_FILTERS.to_vec(),
|
||||
}, {
|
||||
assert_cmd_snapshot!(command().arg("app").current_dir(&root), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"app/__init__.py": [],
|
||||
"app/main.py": [
|
||||
"lib/mylib/__init__.py"
|
||||
]
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"#);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test that glob patterns in `src` are expanded.
#[test]
fn src_glob_expansion() -> Result<()> {
    let tempdir = TempDir::new()?;
    let root = ChildPath::new(tempdir.path());

    // Create multiple lib directories with packages.
    root.child("libs")
        .child("lib_a")
        .child("pkg_a")
        .child("__init__.py")
        .write_str("def func_a(): pass")?;
    root.child("libs")
        .child("lib_b")
        .child("pkg_b")
        .child("__init__.py")
        .write_str("def func_b(): pass")?;

    // Create an app that imports from both packages.
    root.child("app").child("__init__.py").write_str("")?;
    root.child("app")
        .child("main.py")
        .write_str("from pkg_a import func_a\nfrom pkg_b import func_b")?;

    // Use a glob pattern to include all lib directories.
    root.child("ruff.toml").write_str(indoc::indoc! {r#"
        src = ["libs/*"]
    "#})?;

    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(command().arg("app").current_dir(&root), @r#"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "app/__init__.py": [],
          "app/main.py": [
            "libs/lib_a/pkg_a/__init__.py",
            "libs/lib_b/pkg_b/__init__.py"
          ]
        }

        ----- stderr -----
        "#);
    });

    Ok(())
}

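A sketch for clarity (not part of the diff): with the fixture layout above, the glob matches `libs/lib_a` and `libs/lib_b`, so the configuration behaves like this explicit form.

    // Equivalent explicit configuration for this particular fixture layout.
    root.child("ruff.toml").write_str(indoc::indoc! {r#"
        src = ["libs/lib_a", "libs/lib_b"]
    "#})?;
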
#[test]
fn notebook_with_magic() -> Result<()> {
    let tempdir = TempDir::new()?;
    let root = ChildPath::new(tempdir.path());

    root.child("ruff").child("__init__.py").write_str("")?;
    root.child("ruff")
        .child("a.py")
        .write_str(indoc::indoc! {r#"
            def helper():
                pass
        "#})?;

    // Create a notebook with IPython magic commands and imports
    root.child("notebook.ipynb").write_str(indoc::indoc! {r#"
        {
          "cells": [
            {
              "cell_type": "code",
              "execution_count": null,
              "metadata": {},
              "outputs": [],
              "source": [
                "%load_ext autoreload\n",
                "%autoreload 2"
              ]
            },
            {
              "cell_type": "code",
              "execution_count": null,
              "metadata": {},
              "outputs": [],
              "source": [
                "from ruff.a import helper"
              ]
            }
          ],
          "metadata": {
            "language_info": {
              "name": "python",
              "version": "3.12.0"
            }
          },
          "nbformat": 4,
          "nbformat_minor": 5
        }
    "#})?;

    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(command().current_dir(&root), @r#"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "notebook.ipynb": [
            "ruff/a.py"
          ],
          "ruff/__init__.py": [],
          "ruff/a.py": []
        }

        ----- stderr -----
        "#);
    });

    Ok(())
}

@@ -1,193 +0,0 @@
use std::process::Command;

use insta_cmd::assert_cmd_snapshot;

use crate::CliTest;

#[test]
fn type_checking_imports() -> anyhow::Result<()> {
    let test = AnalyzeTest::with_files([
        ("ruff/__init__.py", ""),
        (
            "ruff/a.py",
            r#"
from typing import TYPE_CHECKING

import ruff.b

if TYPE_CHECKING:
    import ruff.c
"#,
        ),
        (
            "ruff/b.py",
            r#"
if TYPE_CHECKING:
    from ruff import c
"#,
        ),
        ("ruff/c.py", ""),
    ])?;

    assert_cmd_snapshot!(test.command(), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    {
      "ruff/__init__.py": [],
      "ruff/a.py": [
        "ruff/b.py",
        "ruff/c.py"
      ],
      "ruff/b.py": [
        "ruff/c.py"
      ],
      "ruff/c.py": []
    }

    ----- stderr -----
    "#);

    assert_cmd_snapshot!(
        test.command()
            .arg("--no-type-checking-imports"),
        @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    {
      "ruff/__init__.py": [],
      "ruff/a.py": [
        "ruff/b.py"
      ],
      "ruff/b.py": [],
      "ruff/c.py": []
    }

    ----- stderr -----
    "#
    );

    Ok(())
}

#[test]
fn type_checking_imports_from_config() -> anyhow::Result<()> {
    let test = AnalyzeTest::with_files([
        ("ruff/__init__.py", ""),
        (
            "ruff/a.py",
            r#"
from typing import TYPE_CHECKING

import ruff.b

if TYPE_CHECKING:
    import ruff.c
"#,
        ),
        (
            "ruff/b.py",
            r#"
if TYPE_CHECKING:
    from ruff import c
"#,
        ),
        ("ruff/c.py", ""),
        (
            "ruff.toml",
            r#"
[analyze]
type-checking-imports = false
"#,
        ),
    ])?;

    assert_cmd_snapshot!(test.command(), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    {
      "ruff/__init__.py": [],
      "ruff/a.py": [
        "ruff/b.py"
      ],
      "ruff/b.py": [],
      "ruff/c.py": []
    }

    ----- stderr -----
    "#);

    test.write_file(
        "ruff.toml",
        r#"
[analyze]
type-checking-imports = true
"#,
    )?;

    assert_cmd_snapshot!(test.command(), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    {
      "ruff/__init__.py": [],
      "ruff/a.py": [
        "ruff/b.py",
        "ruff/c.py"
      ],
      "ruff/b.py": [
        "ruff/c.py"
      ],
      "ruff/c.py": []
    }

    ----- stderr -----
    "#
    );

    Ok(())
}

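Taken together, the two tests above exercise both ways of excluding `if TYPE_CHECKING:` imports from the graph. A condensed sketch, reusing only calls that already appear in those tests:

    // Per invocation, as in `type_checking_imports`:
    assert_cmd_snapshot!(test.command().arg("--no-type-checking-imports"));

    // Via configuration, as in `type_checking_imports_from_config`:
    test.write_file("ruff.toml", "[analyze]\ntype-checking-imports = false\n")?;
    assert_cmd_snapshot!(test.command());
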
struct AnalyzeTest {
    cli_test: CliTest,
}

impl AnalyzeTest {
    pub(crate) fn new() -> anyhow::Result<Self> {
        Ok(Self {
            cli_test: CliTest::with_settings(|_, mut settings| {
                settings.add_filter(r#"\\\\"#, "/");
                settings
            })?,
        })
    }

    fn with_files<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<Self> {
        let case = Self::new()?;
        case.write_files(files)?;
        Ok(case)
    }

    #[expect(unused)]
    fn with_file(path: impl AsRef<std::path::Path>, content: &str) -> anyhow::Result<Self> {
        let fixture = Self::new()?;
        fixture.write_file(path, content)?;
        Ok(fixture)
    }

    fn command(&self) -> Command {
        let mut command = self.cli_test.command();
        command.arg("analyze").arg("graph").arg("--preview");
        command
    }
}

impl std::ops::Deref for AnalyzeTest {
    type Target = CliTest;

    fn deref(&self) -> &Self::Target {
        &self.cli_test
    }
}

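A minimal usage sketch of this helper (file names illustrative): `with_files` writes the fixture tree, `Deref` exposes the underlying `CliTest` methods such as `write_file`, and `command()` pre-populates the `analyze graph --preview` invocation.

    let test = AnalyzeTest::with_files([("pkg/__init__.py", ""), ("pkg/util.py", "import pkg")])?;
    test.write_file("pkg/extra.py", "import pkg.util")?;
    assert_cmd_snapshot!(test.command());
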
File diff suppressed because it is too large
@@ -15,8 +15,6 @@ use std::{
};
use tempfile::TempDir;

mod analyze_graph;
mod format;
mod lint;

const BIN_NAME: &str = "ruff";
@@ -59,14 +57,6 @@ impl CliTest {
        Self::with_settings(|_, settings| settings)
    }

    pub(crate) fn with_files<'a>(
        files: impl IntoIterator<Item = (&'a str, &'a str)>,
    ) -> anyhow::Result<Self> {
        let case = Self::new()?;
        case.write_files(files)?;
        Ok(case)
    }

    pub(crate) fn with_settings(
        setup_settings: impl FnOnce(&Path, insta::Settings) -> insta::Settings,
    ) -> Result<Self> {
@@ -152,16 +142,6 @@ impl CliTest {
        Ok(())
    }

    pub(crate) fn write_files<'a>(
        &self,
        files: impl IntoIterator<Item = (&'a str, &'a str)>,
    ) -> Result<()> {
        for file in files {
            self.write_file(file.0, file.1)?;
        }
        Ok(())
    }

    /// Returns the path to the test directory root.
    pub(crate) fn root(&self) -> &Path {
        &self.project_dir
@@ -194,10 +174,4 @@ impl CliTest {

        command
    }

    pub(crate) fn format_command(&self) -> Command {
        let mut command = self.command();
        command.args(["format", "--no-cache"]);
        command
    }
}

@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
@@ -17,6 +17,7 @@ info:
|
||||
- "--fix"
|
||||
- "-"
|
||||
stdin: "1"
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: false
|
||||
exit_code: 2
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -12,6 +12,7 @@ info:
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -9,6 +9,7 @@ info:
|
||||
- concise
|
||||
- "--show-settings"
|
||||
- test.py
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -125,7 +126,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
|
||||
linter.flake8_copyright.author = none
|
||||
linter.flake8_copyright.min_file_size = 0
|
||||
linter.flake8_errmsg.max_string_length = 0
|
||||
linter.flake8_gettext.function_names = [
|
||||
linter.flake8_gettext.functions_names = [
|
||||
_,
|
||||
gettext,
|
||||
ngettext,
|
||||
@@ -261,7 +262,6 @@ linter.pylint.max_locals = 15
|
||||
linter.pylint.max_nested_blocks = 5
|
||||
linter.pyupgrade.keep_runtime_typing = false
|
||||
linter.ruff.parenthesize_tuple_in_subscript = false
|
||||
linter.ruff.strictly_empty_init_modules = false
|
||||
|
||||
# Formatter Settings
|
||||
formatter.exclude = []
|
||||
@@ -284,6 +284,5 @@ analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -12,6 +12,7 @@ info:
|
||||
- UP007
|
||||
- test.py
|
||||
- "-"
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -127,7 +128,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
|
||||
linter.flake8_copyright.author = none
|
||||
linter.flake8_copyright.min_file_size = 0
|
||||
linter.flake8_errmsg.max_string_length = 0
|
||||
linter.flake8_gettext.function_names = [
|
||||
linter.flake8_gettext.functions_names = [
|
||||
_,
|
||||
gettext,
|
||||
ngettext,
|
||||
@@ -263,7 +264,6 @@ linter.pylint.max_locals = 15
|
||||
linter.pylint.max_nested_blocks = 5
|
||||
linter.pyupgrade.keep_runtime_typing = false
|
||||
linter.ruff.parenthesize_tuple_in_subscript = false
|
||||
linter.ruff.strictly_empty_init_modules = false
|
||||
|
||||
# Formatter Settings
|
||||
formatter.exclude = []
|
||||
@@ -286,6 +286,5 @@ analyze.target_version = 3.11
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -13,6 +13,7 @@ info:
|
||||
- UP007
|
||||
- test.py
|
||||
- "-"
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -129,7 +130,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
|
||||
linter.flake8_copyright.author = none
|
||||
linter.flake8_copyright.min_file_size = 0
|
||||
linter.flake8_errmsg.max_string_length = 0
|
||||
linter.flake8_gettext.function_names = [
|
||||
linter.flake8_gettext.functions_names = [
|
||||
_,
|
||||
gettext,
|
||||
ngettext,
|
||||
@@ -265,7 +266,6 @@ linter.pylint.max_locals = 15
|
||||
linter.pylint.max_nested_blocks = 5
|
||||
linter.pyupgrade.keep_runtime_typing = false
|
||||
linter.ruff.parenthesize_tuple_in_subscript = false
|
||||
linter.ruff.strictly_empty_init_modules = false
|
||||
|
||||
# Formatter Settings
|
||||
formatter.exclude = []
|
||||
@@ -288,6 +288,5 @@ analyze.target_version = 3.11
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -14,6 +14,7 @@ info:
|
||||
- py310
|
||||
- test.py
|
||||
- "-"
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -129,7 +130,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
|
||||
linter.flake8_copyright.author = none
|
||||
linter.flake8_copyright.min_file_size = 0
|
||||
linter.flake8_errmsg.max_string_length = 0
|
||||
linter.flake8_gettext.function_names = [
|
||||
linter.flake8_gettext.functions_names = [
|
||||
_,
|
||||
gettext,
|
||||
ngettext,
|
||||
@@ -265,7 +266,6 @@ linter.pylint.max_locals = 15
|
||||
linter.pylint.max_nested_blocks = 5
|
||||
linter.pyupgrade.keep_runtime_typing = false
|
||||
linter.ruff.parenthesize_tuple_in_subscript = false
|
||||
linter.ruff.strictly_empty_init_modules = false
|
||||
|
||||
# Formatter Settings
|
||||
formatter.exclude = []
|
||||
@@ -288,6 +288,5 @@ analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -11,6 +11,7 @@ info:
|
||||
- "--select"
|
||||
- UP007
|
||||
- foo/test.py
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -126,7 +127,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
|
||||
linter.flake8_copyright.author = none
|
||||
linter.flake8_copyright.min_file_size = 0
|
||||
linter.flake8_errmsg.max_string_length = 0
|
||||
linter.flake8_gettext.function_names = [
|
||||
linter.flake8_gettext.functions_names = [
|
||||
_,
|
||||
gettext,
|
||||
ngettext,
|
||||
@@ -262,7 +263,6 @@ linter.pylint.max_locals = 15
|
||||
linter.pylint.max_nested_blocks = 5
|
||||
linter.pyupgrade.keep_runtime_typing = false
|
||||
linter.ruff.parenthesize_tuple_in_subscript = false
|
||||
linter.ruff.strictly_empty_init_modules = false
|
||||
|
||||
# Formatter Settings
|
||||
formatter.exclude = []
|
||||
@@ -285,6 +285,5 @@ analyze.target_version = 3.11
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -11,12 +11,12 @@ info:
|
||||
- "--select"
|
||||
- UP007
|
||||
- foo/test.py
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
Resolved settings for: "[TMP]/foo/test.py"
|
||||
Settings path: "[TMP]/foo/pyproject.toml"
|
||||
|
||||
# General Settings
|
||||
cache_dir = "[TMP]/foo/.ruff_cache"
|
||||
@@ -127,7 +127,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
|
||||
linter.flake8_copyright.author = none
|
||||
linter.flake8_copyright.min_file_size = 0
|
||||
linter.flake8_errmsg.max_string_length = 0
|
||||
linter.flake8_gettext.function_names = [
|
||||
linter.flake8_gettext.functions_names = [
|
||||
_,
|
||||
gettext,
|
||||
ngettext,
|
||||
@@ -263,7 +263,6 @@ linter.pylint.max_locals = 15
|
||||
linter.pylint.max_nested_blocks = 5
|
||||
linter.pyupgrade.keep_runtime_typing = false
|
||||
linter.ruff.parenthesize_tuple_in_subscript = false
|
||||
linter.ruff.strictly_empty_init_modules = false
|
||||
|
||||
# Formatter Settings
|
||||
formatter.exclude = []
|
||||
@@ -286,6 +285,5 @@ analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -125,7 +125,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
|
||||
linter.flake8_copyright.author = none
|
||||
linter.flake8_copyright.min_file_size = 0
|
||||
linter.flake8_errmsg.max_string_length = 0
|
||||
linter.flake8_gettext.function_names = [
|
||||
linter.flake8_gettext.functions_names = [
|
||||
_,
|
||||
gettext,
|
||||
ngettext,
|
||||
@@ -261,7 +261,6 @@ linter.pylint.max_locals = 15
|
||||
linter.pylint.max_nested_blocks = 5
|
||||
linter.pyupgrade.keep_runtime_typing = false
|
||||
linter.ruff.parenthesize_tuple_in_subscript = false
|
||||
linter.ruff.strictly_empty_init_modules = false
|
||||
|
||||
# Formatter Settings
|
||||
formatter.exclude = []
|
||||
@@ -284,6 +283,5 @@ analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -125,7 +125,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
|
||||
linter.flake8_copyright.author = none
|
||||
linter.flake8_copyright.min_file_size = 0
|
||||
linter.flake8_errmsg.max_string_length = 0
|
||||
linter.flake8_gettext.function_names = [
|
||||
linter.flake8_gettext.functions_names = [
|
||||
_,
|
||||
gettext,
|
||||
ngettext,
|
||||
@@ -261,7 +261,6 @@ linter.pylint.max_locals = 15
|
||||
linter.pylint.max_nested_blocks = 5
|
||||
linter.pyupgrade.keep_runtime_typing = false
|
||||
linter.ruff.parenthesize_tuple_in_subscript = false
|
||||
linter.ruff.strictly_empty_init_modules = false
|
||||
|
||||
# Formatter Settings
|
||||
formatter.exclude = []
|
||||
@@ -284,6 +283,5 @@ analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -9,6 +9,7 @@ info:
|
||||
- concise
|
||||
- test.py
|
||||
- "--show-settings"
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -125,7 +126,7 @@ linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|
|
||||
linter.flake8_copyright.author = none
|
||||
linter.flake8_copyright.min_file_size = 0
|
||||
linter.flake8_errmsg.max_string_length = 0
|
||||
linter.flake8_gettext.function_names = [
|
||||
linter.flake8_gettext.functions_names = [
|
||||
_,
|
||||
gettext,
|
||||
ngettext,
|
||||
@@ -261,7 +262,6 @@ linter.pylint.max_locals = 15
|
||||
linter.pylint.max_nested_blocks = 5
|
||||
linter.pyupgrade.keep_runtime_typing = false
|
||||
linter.ruff.parenthesize_tuple_in_subscript = false
|
||||
linter.ruff.strictly_empty_init_modules = false
|
||||
|
||||
# Formatter Settings
|
||||
formatter.exclude = []
|
||||
@@ -284,6 +284,5 @@ analyze.target_version = 3.11
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/lint.rs
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
||||
@@ -18,13 +18,13 @@ fn check_in_deleted_directory_errors() {
|
||||
set_current_dir(&temp_path).unwrap();
|
||||
drop(temp_dir);
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)).arg("check"), @"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)).arg("check"), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
ruff failed
|
||||
Cause: Working directory does not exist
|
||||
");
|
||||
----- stderr -----
|
||||
ruff failed
|
||||
Cause: Working directory does not exist
|
||||
"###);
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -97,7 +97,7 @@ impl<'a> RuffCheck<'a> {
|
||||
fn stdin_success() {
|
||||
let mut cmd = RuffCheck::default().args([]).build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin(""), @"
|
||||
.pass_stdin(""), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -111,7 +111,7 @@ fn stdin_success() {
|
||||
fn stdin_error() {
|
||||
let mut cmd = RuffCheck::default().args([]).build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("import os\n"), @"
|
||||
.pass_stdin("import os\n"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -136,7 +136,7 @@ fn stdin_filename() {
|
||||
.args(["--stdin-filename", "F401.py"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("import os\n"), @"
|
||||
.pass_stdin("import os\n"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -172,7 +172,7 @@ import bar # unused import
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["check", "--isolated", "--no-cache", "--select", "F401"]).current_dir(tempdir.path()), @"
|
||||
.args(["check", "--isolated", "--no-cache", "--select", "F401"]).current_dir(tempdir.path()), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -208,7 +208,7 @@ fn check_warn_stdin_filename_with_files() {
|
||||
.filename("foo.py")
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("import os\n"), @"
|
||||
.pass_stdin("import os\n"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -235,7 +235,7 @@ fn stdin_source_type_py() {
|
||||
.args(["--stdin-filename", "TCH.py"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("import os\n"), @"
|
||||
.pass_stdin("import os\n"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -261,7 +261,7 @@ fn stdin_source_type_pyi() {
|
||||
.args(["--stdin-filename", "TCH.pyi", "--select", "TCH"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("import os\n"), @"
|
||||
.pass_stdin("import os\n"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -294,7 +294,7 @@ fn stdin_json() {
|
||||
fn stdin_fix_py() {
|
||||
let mut cmd = RuffCheck::default().args(["--fix"]).build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("import os\nimport sys\n\nprint(sys.version)\n"), @"
|
||||
.pass_stdin("import os\nimport sys\n\nprint(sys.version)\n"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -572,7 +572,7 @@ fn stdin_override_parser_ipynb() {
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}"#), @"
|
||||
}"#), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -610,7 +610,7 @@ fn stdin_override_parser_py() {
|
||||
])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("import os\n"), @"
|
||||
.pass_stdin("import os\n"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -633,7 +633,7 @@ fn stdin_override_parser_py() {
|
||||
fn stdin_fix_when_not_fixable_should_still_print_contents() {
|
||||
let mut cmd = RuffCheck::default().args(["--fix"]).build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("import os\nimport sys\n\nif (1, 2):\n print(sys.version)\n"), @"
|
||||
.pass_stdin("import os\nimport sys\n\nif (1, 2):\n print(sys.version)\n"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -654,14 +654,14 @@ fn stdin_fix_when_not_fixable_should_still_print_contents() {
|
||||
|
|
||||
|
||||
Found 2 errors (1 fixed, 1 remaining).
|
||||
");
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn stdin_fix_when_no_issues_should_still_print_contents() {
|
||||
let mut cmd = RuffCheck::default().args(["--fix"]).build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("import sys\n\nprint(sys.version)\n"), @"
|
||||
.pass_stdin("import sys\n\nprint(sys.version)\n"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -805,7 +805,7 @@ fn stdin_format_jupyter() {
|
||||
fn stdin_parse_error() {
|
||||
let mut cmd = RuffCheck::default().build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("from foo import\n"), @"
|
||||
.pass_stdin("from foo import\n"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -826,7 +826,7 @@ fn stdin_parse_error() {
|
||||
fn stdin_multiple_parse_error() {
|
||||
let mut cmd = RuffCheck::default().build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("from foo import\nbar =\n"), @"
|
||||
.pass_stdin("from foo import\nbar =\n"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -857,7 +857,7 @@ fn parse_error_not_included() {
|
||||
// Parse errors are always shown
|
||||
let mut cmd = RuffCheck::default().args(["--select=I"]).build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("foo =\n"), @"
|
||||
.pass_stdin("foo =\n"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -878,7 +878,7 @@ fn parse_error_not_included() {
|
||||
fn full_output_preview() {
|
||||
let mut cmd = RuffCheck::default().args(["--preview"]).build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("l = 1"), @"
|
||||
.pass_stdin("l = 1"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -907,7 +907,7 @@ preview = true
|
||||
",
|
||||
)?;
|
||||
let mut cmd = RuffCheck::default().config(&pyproject_toml).build();
|
||||
assert_cmd_snapshot!(cmd.pass_stdin("l = 1"), @"
|
||||
assert_cmd_snapshot!(cmd.pass_stdin("l = 1"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -929,7 +929,7 @@ preview = true
|
||||
fn full_output_format() {
|
||||
let mut cmd = RuffCheck::default().output_format("full").build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("l = 1"), @"
|
||||
.pass_stdin("l = 1"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -951,51 +951,9 @@ fn rule_f401() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "F401"]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rule_f401_output_json() {
|
||||
insta::with_settings!({filters => vec![
|
||||
(r#"("file": ")[^"]+(",)"#, "$1<FILE>$2"),
|
||||
]}, {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "F401", "--output-format", "json"]));
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rule_f401_output_text() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "F401", "--output-format", "text"]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rule_invalid_rule_name() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404"]), @"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: invalid value 'RUF404' for '[RULE]'
|
||||
|
||||
For more information, try '--help'.
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rule_invalid_rule_name_output_json() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404", "--output-format", "json"]), @"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: invalid value 'RUF404' for '[RULE]'
|
||||
|
||||
For more information, try '--help'.
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rule_invalid_rule_name_output_text() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404", "--output-format", "text"]), @"
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404"]), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
@@ -1016,7 +974,7 @@ fn show_statistics() {
|
||||
.pass_stdin(r#"
|
||||
def mvce(keys, values):
|
||||
return {key: value for key, value in zip(keys, values)}
|
||||
"#), @"
|
||||
"#), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1037,13 +995,13 @@ fn show_statistics_unsafe_fixes() {
|
||||
.pass_stdin(r#"
|
||||
def mvce(keys, values):
|
||||
return {key: value for key, value in zip(keys, values)}
|
||||
"#), @"
|
||||
"#), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
1 C416 [*] unnecessary-comprehension
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
[*] 1 fixable with the --fix option.
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
@@ -1073,8 +1031,7 @@ def mvce(keys, values):
|
||||
"code": "C416",
|
||||
"name": "unnecessary-comprehension",
|
||||
"count": 1,
|
||||
"fixable": false,
|
||||
"fixable_count": 0
|
||||
"fixable": false
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1107,8 +1064,7 @@ def mvce(keys, values):
|
||||
"code": "C416",
|
||||
"name": "unnecessary-comprehension",
|
||||
"count": 1,
|
||||
"fixable": true,
|
||||
"fixable_count": 1
|
||||
"fixable": true
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1116,54 +1072,6 @@ def mvce(keys, values):
|
||||
"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn show_statistics_json_partial_fix() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args([
|
||||
"--select",
|
||||
"UP035",
|
||||
"--statistics",
|
||||
"--output-format",
|
||||
"json",
|
||||
])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("from typing import List, AsyncGenerator"), @r#"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
[
|
||||
{
|
||||
"code": "UP035",
|
||||
"name": "deprecated-import",
|
||||
"count": 2,
|
||||
"fixable": false,
|
||||
"fixable_count": 1
|
||||
}
|
||||
]
|
||||
|
||||
----- stderr -----
|
||||
"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn show_statistics_partial_fix() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "UP035", "--statistics"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("from typing import List, AsyncGenerator"), @"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
2 UP035 [-] deprecated-import
|
||||
Found 2 errors.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn show_statistics_syntax_errors() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
@@ -1173,7 +1081,7 @@ fn show_statistics_syntax_errors() {
|
||||
// ParseError
|
||||
assert_cmd_snapshot!(
|
||||
cmd.pass_stdin("x ="),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1186,7 +1094,7 @@ fn show_statistics_syntax_errors() {
|
||||
// match before 3.10, UnsupportedSyntaxError
|
||||
assert_cmd_snapshot!(
|
||||
cmd.pass_stdin("match 2:\n case 1: ..."),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1199,7 +1107,7 @@ fn show_statistics_syntax_errors() {
|
||||
// rebound comprehension variable, SemanticSyntaxError
|
||||
assert_cmd_snapshot!(
|
||||
cmd.pass_stdin("[x := 1 for x in range(0)]"),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1216,7 +1124,7 @@ fn preview_enabled_prefix() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "RUF9", "--output-format=concise", "--preview"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1238,7 +1146,7 @@ fn preview_enabled_all() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "ALL", "--output-format=concise", "--preview"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1265,7 +1173,7 @@ fn preview_enabled_direct() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "RUF911", "--output-format=concise", "--preview"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1282,7 +1190,7 @@ fn preview_disabled_direct() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "RUF911", "--output-format=concise"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1299,7 +1207,7 @@ fn preview_disabled_prefix_empty() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "RUF91", "--output-format=concise"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1316,7 +1224,7 @@ fn preview_disabled_does_not_warn_for_empty_ignore_selections() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--ignore", "RUF9", "--output-format=concise"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1332,7 +1240,7 @@ fn preview_disabled_does_not_warn_for_empty_fixable_selections() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--fixable", "RUF9", "--output-format=concise"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1354,7 +1262,7 @@ fn preview_group_selector() {
|
||||
])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("I=42\n"), @"
|
||||
.pass_stdin("I=42\n"), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
@@ -1379,7 +1287,7 @@ fn preview_enabled_group_ignore() {
|
||||
"--output-format=concise",
|
||||
])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1400,7 +1308,7 @@ fn preview_enabled_group_ignore() {
|
||||
fn removed_direct() {
|
||||
// Selection of a removed rule should fail
|
||||
let mut cmd = RuffCheck::default().args(["--select", "RUF931"]).build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
@@ -1418,7 +1326,7 @@ fn removed_direct_multiple() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "RUF930", "--select", "RUF931"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
@@ -1436,7 +1344,7 @@ fn removed_indirect() {
|
||||
// Selection _including_ a removed rule without matching should not fail
|
||||
// nor should the rule be used
|
||||
let mut cmd = RuffCheck::default().args(["--select", "RUF93"]).build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1449,7 +1357,7 @@ fn removed_indirect() {
|
||||
#[test]
|
||||
fn removed_ignore_direct() {
|
||||
let mut cmd = RuffCheck::default().args(["--ignore", "UP027"]).build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1466,7 +1374,7 @@ fn removed_ignore_multiple_direct() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--ignore", "UP027", "--ignore", "PLR1706"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1482,7 +1390,7 @@ fn removed_ignore_multiple_direct() {
|
||||
#[test]
|
||||
fn removed_ignore_remapped_direct() {
|
||||
let mut cmd = RuffCheck::default().args(["--ignore", "PGH001"]).build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1498,7 +1406,7 @@ fn removed_ignore_indirect() {
|
||||
// `PLR170` includes removed rules but should not select or warn
|
||||
// since it is not a "direct" selection
|
||||
let mut cmd = RuffCheck::default().args(["--ignore", "PLR170"]).build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1512,7 +1420,7 @@ fn removed_ignore_indirect() {
|
||||
fn redirect_direct() {
|
||||
// Selection of a redirected rule directly should use the new rule and warn
|
||||
let mut cmd = RuffCheck::default().args(["--select", "RUF940"]).build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1531,7 +1439,7 @@ fn redirect_indirect() {
|
||||
// Selection _including_ a redirected rule without matching should not fail
|
||||
// nor should the rule be used
|
||||
let mut cmd = RuffCheck::default().args(["--select", "RUF94"]).build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1546,7 +1454,7 @@ fn redirect_prefix() {
|
||||
// Selection using a redirected prefix should switch to all rules in the
|
||||
// new prefix
|
||||
let mut cmd = RuffCheck::default().args(["--select", "RUF96"]).build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1565,7 +1473,7 @@ fn deprecated_direct() {
|
||||
// Selection of a deprecated rule without preview enabled should still work
|
||||
// but a warning should be displayed
|
||||
let mut cmd = RuffCheck::default().args(["--select", "RUF920"]).build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1586,7 +1494,7 @@ fn deprecated_multiple_direct() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "RUF920", "--select", "RUF921"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1609,7 +1517,7 @@ fn deprecated_indirect() {
|
||||
// `RUF92` includes deprecated rules but should not warn
|
||||
// since it is not a "direct" selection
|
||||
let mut cmd = RuffCheck::default().args(["--select", "RUF92"]).build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1625,7 +1533,7 @@ fn deprecated_direct_preview_enabled() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "RUF920", "--preview"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
@@ -1642,7 +1550,7 @@ fn deprecated_indirect_preview_enabled() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "RUF92", "--preview"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1659,7 +1567,7 @@ fn deprecated_multiple_direct_preview_enabled() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "RUF920", "--select", "RUF921", "--preview"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
@@ -1720,7 +1628,7 @@ fn unreadable_dir() -> Result<()> {
|
||||
.filename(unreadable_dir.to_str().unwrap())
|
||||
.args([])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
assert_cmd_snapshot!(cmd, @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1728,7 +1636,7 @@ fn unreadable_dir() -> Result<()> {
|
||||
|
||||
----- stderr -----
|
||||
warning: Encountered error: Permission denied (os error 13)
|
||||
");
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1758,7 +1666,7 @@ fn check_input_from_argfile() -> Result<()> {
|
||||
(file_a_path.display().to_string().as_str(), "/path/to/a.py"),
|
||||
]}, {
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin(""), @"
|
||||
.pass_stdin(""), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1787,17 +1695,17 @@ fn missing_argfile_reports_error() {
|
||||
insta::with_settings!({filters => vec![
|
||||
("The system cannot find the file specified.", "No such file or directory")
|
||||
]}, {
|
||||
assert_cmd_snapshot!(cmd, @"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
assert_cmd_snapshot!(cmd, @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
ruff failed
|
||||
Cause: Failed to read CLI arguments from files
|
||||
Cause: failed to open file `!.txt`
|
||||
Cause: No such file or directory (os error 2)
|
||||
");
|
||||
----- stderr -----
|
||||
ruff failed
|
||||
Cause: Failed to read CLI arguments from files
|
||||
Cause: failed to open file `!.txt`
|
||||
Cause: No such file or directory (os error 2)
|
||||
");
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1807,7 +1715,7 @@ fn check_hints_hidden_unsafe_fixes() {
|
||||
.args(["--select", "RUF901,RUF902"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1829,7 +1737,7 @@ fn check_hints_hidden_unsafe_fixes_with_no_safe_fixes() {
|
||||
let mut cmd = RuffCheck::default().args(["--select", "RUF902"]).build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("x = {'a': 1, 'a': 1}\n"),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1849,7 +1757,7 @@ fn check_no_hint_for_hidden_unsafe_fixes_when_disabled() {
|
||||
.args(["--select", "RUF901,RUF902", "--no-unsafe-fixes"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1860,7 +1768,7 @@ fn check_no_hint_for_hidden_unsafe_fixes_when_disabled() {
|
||||
--> -:1:1
|
||||
|
||||
Found 2 errors.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
[*] 1 fixable with the --fix option.
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
@@ -1873,7 +1781,7 @@ fn check_no_hint_for_hidden_unsafe_fixes_with_no_safe_fixes_when_disabled() {
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("x = {'a': 1, 'a': 1}\n"),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1892,7 +1800,7 @@ fn check_shows_unsafe_fixes_with_opt_in() {
|
||||
.args(["--select", "RUF901,RUF902", "--unsafe-fixes"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1903,7 +1811,7 @@ fn check_shows_unsafe_fixes_with_opt_in() {
|
||||
--> -:1:1
|
||||
|
||||
Found 2 errors.
|
||||
[*] 2 fixable with the `--fix` option.
|
||||
[*] 2 fixable with the --fix option.
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
@@ -1915,7 +1823,7 @@ fn fix_applies_safe_fixes_by_default() {
|
||||
.args(["--select", "RUF901,RUF902", "--fix"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1936,7 +1844,7 @@ fn fix_applies_unsafe_fixes_with_opt_in() {
|
||||
.args(["--select", "RUF901,RUF902", "--fix", "--unsafe-fixes"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -1955,7 +1863,7 @@ fn fix_does_not_apply_display_only_fixes() {
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("def add_to_list(item, some_list=[]): ..."),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1975,7 +1883,7 @@ fn fix_does_not_apply_display_only_fixes_with_unsafe_fixes_enabled() {
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("def add_to_list(item, some_list=[]): ..."),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1994,7 +1902,7 @@ fn fix_only_unsafe_fixes_available() {
|
||||
.args(["--select", "RUF902", "--fix"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -2014,7 +1922,7 @@ fn fix_only_flag_applies_safe_fixes_by_default() {
|
||||
.args(["--select", "RUF901,RUF902", "--fix-only"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -2031,7 +1939,7 @@ fn fix_only_flag_applies_unsafe_fixes_with_opt_in() {
|
||||
.args(["--select", "RUF901,RUF902", "--fix-only", "--unsafe-fixes"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -2049,7 +1957,7 @@ fn diff_shows_safe_fixes_by_default() {
|
||||
.args(["--select", "RUF901,RUF902", "--diff"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -2069,7 +1977,7 @@ fn diff_shows_unsafe_fixes_with_opt_in() {
|
||||
.args(["--select", "RUF901,RUF902", "--diff", "--unsafe-fixes"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -2091,7 +1999,7 @@ fn diff_does_not_show_display_only_fixes_with_unsafe_fixes_enabled() {
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("def add_to_list(item, some_list=[]): ..."),
|
||||
@"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -2106,7 +2014,7 @@ fn diff_only_unsafe_fixes_available() {
|
||||
.args(["--select", "RUF902", "--diff"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -2134,7 +2042,7 @@ extend-unsafe-fixes = ["RUF901"]
|
||||
.args(["--select", "RUF901,RUF902"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -2170,7 +2078,7 @@ extend-safe-fixes = ["RUF902"]
|
||||
.args(["--select", "RUF901,RUF902"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -2208,7 +2116,7 @@ extend-safe-fixes = ["RUF902"]
|
||||
.args(["--select", "RUF901,RUF902"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd,
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -2248,7 +2156,7 @@ extend-safe-fixes = ["RUF9"]
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("x = {'a': 1, 'a': 1}\nprint(('foo'))\nprint(str('foo'))\nisinstance(x, (int, str))\n"),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -2307,7 +2215,7 @@ def log(x, base) -> float:
|
||||
.args(["--select", "D41"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin(stdin), @"
|
||||
.pass_stdin(stdin), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -2360,7 +2268,7 @@ select = ["RUF017"]
|
||||
let mut cmd = RuffCheck::default().config(&ruff_toml).build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("x = [1, 2, 3]\ny = [4, 5, 6]\nsum([x, y], [])"),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -2401,7 +2309,7 @@ unfixable = ["RUF"]
|
||||
let mut cmd = RuffCheck::default().config(&ruff_toml).build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("x = [1, 2, 3]\ny = [4, 5, 6]\nsum([x, y], [])"),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -2431,7 +2339,7 @@ fn pyproject_toml_stdin_syntax_error() {
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
cmd.pass_stdin("[project"),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -2457,7 +2365,7 @@ fn pyproject_toml_stdin_schema_error() {
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
cmd.pass_stdin("[project]\nname = 1"),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -2484,7 +2392,7 @@ fn pyproject_toml_stdin_no_applicable_rules_selected() {
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
cmd.pass_stdin("[project"),
|
||||
@"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -2503,7 +2411,7 @@ fn pyproject_toml_stdin_no_applicable_rules_selected_2() {
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
cmd.pass_stdin("[project"),
|
||||
@"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -2522,7 +2430,7 @@ fn pyproject_toml_stdin_no_errors() {
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
cmd.pass_stdin(r#"[project]\nname = "ruff"\nversion = "0.0.0""#),
|
||||
@"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -2547,7 +2455,7 @@ fn pyproject_toml_stdin_schema_error_fix() {
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
cmd.pass_stdin("[project]\nname = 1"),
|
||||
@"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -2581,7 +2489,7 @@ fn pyproject_toml_stdin_schema_error_fix_only() {
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
cmd.pass_stdin("[project]\nname = 1"),
|
||||
@"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -2607,7 +2515,7 @@ fn pyproject_toml_stdin_schema_error_fix_diff() {
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
cmd.pass_stdin("[project]\nname = 1"),
|
||||
@"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
@@ -29,7 +29,7 @@ fn check_project_include_defaults() {
|
||||
filters => TEST_FILTERS.to_vec()
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @"
|
||||
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -53,7 +53,7 @@ fn check_project_respects_direct_paths() {
|
||||
filters => TEST_FILTERS.to_vec()
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["check", "--show-files", "b.py"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @"
|
||||
.args(["check", "--show-files", "b.py"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -72,7 +72,7 @@ fn check_project_respects_subdirectory_includes() {
|
||||
filters => TEST_FILTERS.to_vec()
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["check", "--show-files", "subdirectory"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @"
|
||||
.args(["check", "--show-files", "subdirectory"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -91,7 +91,7 @@ fn check_project_from_project_subdirectory_respects_includes() {
|
||||
filters => TEST_FILTERS.to_vec()
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test/subdirectory")), @"
|
||||
.args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test/subdirectory")), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
@@ -50,56 +50,6 @@ ignore = [
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn display_settings_from_nested_directory() -> anyhow::Result<()> {
|
||||
let tempdir = TempDir::new().context("Failed to create temp directory.")?;
|
||||
|
||||
// Tempdir path's on macos are symlinks, which doesn't play nicely with
|
||||
// our snapshot filtering.
|
||||
let project_dir =
|
||||
dunce::canonicalize(tempdir.path()).context("Failed to canonical tempdir path.")?;
|
||||
|
||||
// Root pyproject.toml.
|
||||
std::fs::write(
|
||||
project_dir.join("pyproject.toml"),
|
||||
r#"
|
||||
[tool.ruff]
|
||||
line-length = 100
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = ["E", "F"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
// Create a subdirectory with its own pyproject.toml.
|
||||
let subdir = project_dir.join("subdir");
|
||||
std::fs::create_dir(&subdir)?;
|
||||
|
||||
std::fs::write(
|
||||
subdir.join("pyproject.toml"),
|
||||
r#"
|
||||
[tool.ruff]
|
||||
line-length = 120
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = ["E", "F", "I"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
std::fs::write(subdir.join("test.py"), r#"import os"#).context("Failed to write test.py.")?;
|
||||
|
||||
insta::with_settings!({filters => vec![
|
||||
(&*tempdir_filter(&project_dir), "<temp_dir>/"),
|
||||
(r#"\\(\w\w|\s|\.|")"#, "/$1"),
|
||||
]}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["check", "--show-settings", "subdir/test.py"])
|
||||
.current_dir(&project_dir));
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn tempdir_filter(project_dir: &Path) -> String {
|
||||
format!(r#"{}\\?/?"#, regex::escape(project_dir.to_str().unwrap()))
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
source: crates/ruff/tests/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
source: crates/ruff/tests/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
source: crates/ruff/tests/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
source: crates/ruff/tests/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
@@ -14,6 +14,6 @@ info:
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
::error title=Ruff (unformatted),file=[TMP]/input.py,line=1,endLine=2::input.py:1:1: unformatted: File would be reformatted
|
||||
::error title=Ruff (unformatted),file=[TMP]/input.py,line=1,col=1,endLine=2,endColumn=1::input.py:1:1: unformatted: File would be reformatted
|
||||
|
||||
----- stderr -----
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
source: crates/ruff/tests/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
source: crates/ruff/tests/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
@@ -7,7 +7,6 @@ info:
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- grouped
|
||||
- "--preview"
|
||||
- "--check"
|
||||
- input.py
|
||||
---
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
source: crates/ruff/tests/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
source: crates/ruff/tests/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
source: crates/ruff/tests/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
source: crates/ruff/tests/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
@@ -7,7 +7,6 @@ info:
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- pylint
|
||||
- "--preview"
|
||||
- "--check"
|
||||
- input.py
|
||||
---
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
source: crates/ruff/tests/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
source: crates/ruff/tests/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
Some files were not shown because too many files have changed in this diff.