Compare commits
3 Commits
0.12.0...implicit-s
| Author | SHA1 | Date |
|---|---|---|
|  | dc24d01b2e |  |
|  | 5a9d656bc4 |  |
|  | 33184dc6a4 |  |
@@ -1,14 +1,3 @@
[alias]
dev = "run --package ruff_dev --bin ruff_dev"
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"

# statically link the C runtime so the executable does not depend on
# that shared/dynamic library.
#
# See: https://github.com/astral-sh/ruff/issues/11503
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]

[target.'wasm32-unknown-unknown']
# See https://docs.rs/getrandom/latest/getrandom/#webassembly-support
rustflags = ["--cfg", 'getrandom_backend="wasm_js"']
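For reference, the `[alias]` table above defines repo-local cargo subcommands, and each per-target `rustflags` entry can be reproduced ad hoc through the `RUSTFLAGS` environment variable. A minimal sketch (run from the repository root; the expansions are cargo's standard alias behavior):

```sh
cargo dev          # expands to: cargo run --package ruff_dev --bin ruff_dev
cargo benchmark    # expands to: cargo bench -p ruff_benchmark --bench linter --bench formatter --

# One-off equivalents of the per-target rustflags, without the config file:
RUSTFLAGS="-C target-feature=+crt-static" cargo build --target x86_64-pc-windows-msvc
RUSTFLAGS='--cfg getrandom_backend="wasm_js"' cargo build --target wasm32-unknown-unknown
```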
@@ -6,10 +6,3 @@ failure-output = "immediate-final"
fail-fast = false

status-level = "skip"

# Mark tests that take longer than 1s as slow.
# Terminate after 60s as a stop-gap measure to terminate on deadlock.
slow-timeout = { period = "1s", terminate-after = 60 }

# Show slow jobs in the final summary
final-status-level = "slow"
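A short usage note on the hunk above (assuming it is a nextest profile configuration, which the `failure-output` context line suggests): the `slow-timeout` table flags a test as slow after one 1s period and terminates it after 60 such periods.

```sh
# nextest reads the profile automatically; no extra flags are needed:
cargo nextest run
# output marks tests exceeding 1s as SLOW; a test still running at 60s is killed.
```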
@@ -20,7 +20,7 @@
"extensions": [
  "ms-python.python",
  "rust-lang.rust-analyzer",
  "fill-labs.dependi",
  "serayuzgur.crates",
  "tamasfe.even-better-toml",
  "Swellaby.vscode-rust-test-adapter",
  "charliermarsh.ruff"
@@ -17,7 +17,4 @@ indent_size = 4
trim_trailing_whitespace = false

[*.md]
max_line_length = 100

[*.toml]
indent_size = 4
max_line_length = 100
22 .gitattributes vendored
@@ -2,29 +2,9 @@

crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf

crates/ruff_python_formatter/resources/test/fixtures/ruff/f-string-carriage-return-newline.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@f-string-carriage-return-newline.py.snap text eol=crlf

crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf

crates/ruff_python_parser/resources/invalid/re_lexing/line_continuation_windows_eol.py text eol=crlf
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_windows_eol.py text eol=crlf
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text eol=cr

crates/ruff_linter/resources/test/fixtures/ruff/RUF046_CR.py text eol=cr
crates/ruff_linter/resources/test/fixtures/ruff/RUF046_LF.py text eol=lf

crates/ruff_linter/resources/test/fixtures/pyupgrade/UP018_CR.py text eol=cr
crates/ruff_linter/resources/test/fixtures/pyupgrade/UP018_LF.py text eol=lf

crates/ruff_python_parser/resources/inline linguist-generated=true

ruff.schema.json -diff linguist-generated=true text=auto eol=lf
ty.schema.json -diff linguist-generated=true text=auto eol=lf
crates/ruff_python_ast/src/generated.rs -diff linguist-generated=true text=auto eol=lf
crates/ruff_python_formatter/src/generated.rs -diff linguist-generated=true text=auto eol=lf
ruff.schema.json linguist-generated=true text=auto eol=lf
*.md.snap linguist-language=Markdown
19 .github/CODEOWNERS vendored
@@ -5,20 +5,5 @@
# - The '*' pattern is global owners.
# - Order is important. The last matching pattern has the most precedence.

/crates/ruff_notebook/ @dhruvmanila
/crates/ruff_formatter/ @MichaReiser
/crates/ruff_python_formatter/ @MichaReiser
/crates/ruff_python_parser/ @MichaReiser @dhruvmanila
/crates/ruff_annotate_snippets/ @BurntSushi

# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood

# Script for fuzzing the parser/ty etc.
/python/py-fuzzer/ @AlexWaygood

# ty
/crates/ty* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/scripts/ty_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ty_python_semantic @carljm @AlexWaygood @sharkdp @dcreager
# Jupyter
/crates/ruff_linter/src/jupyter/ @dhruvmanila
10 .github/ISSUE_TEMPLATE.md vendored Normal file
@@ -0,0 +1,10 @@
<!--
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.

If you're filing a bug report, please consider including the following information:

* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
* The current Ruff version (`ruff --version`).
-->
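As a concrete instance of what the template asks for (the file path here is a placeholder), a minimal report invocation might look like:

```sh
ruff check /path/to/file.py --fix --isolated   # --isolated ignores all configuration files
ruff --version                                 # include this output in the report
```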
31 .github/ISSUE_TEMPLATE/1_bug_report.yaml vendored
@@ -1,31 +0,0 @@
name: Bug report
description: Report an error or unexpected behavior
body:
  - type: markdown
    attributes:
      value: |
        Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.

        **Before reporting, please make sure to search through [existing issues](https://github.com/astral-sh/ruff/issues?q=is:issue+is:open+label:bug) (including [closed](https://github.com/astral-sh/ruff/issues?q=is:issue%20state:closed%20label:bug)).**

  - type: textarea
    attributes:
      label: Summary
      description: |
        A clear and concise description of the bug, including a minimal reproducible example.

        Be sure to include the command you invoked (e.g., `ruff check /path/to/file.py --fix`), ideally including the `--isolated` flag and
        the current Ruff settings (e.g., relevant sections from your `pyproject.toml`).

        If possible, try to include the [playground](https://play.ruff.rs) link that reproduces this issue.

    validations:
      required: true

  - type: input
    attributes:
      label: Version
      description: What version of ruff are you using? (see `ruff version`)
      placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
    validations:
      required: false
10 .github/ISSUE_TEMPLATE/2_rule_request.yaml vendored
@@ -1,10 +0,0 @@
name: Rule request
description: Anything related to lint rules (proposing new rules, changes to existing rules, auto-fixes, etc.)
body:
  - type: textarea
    attributes:
      label: Summary
      description: |
        A clear and concise description of the relevant request. If applicable, please describe the current behavior as well.
    validations:
      required: true
18 .github/ISSUE_TEMPLATE/3_question.yaml vendored
@@ -1,18 +0,0 @@
name: Question
description: Ask a question about Ruff
labels: ["question"]
body:
  - type: textarea
    attributes:
      label: Question
      description: Describe your question in detail.
    validations:
      required: true

  - type: input
    attributes:
      label: Version
      description: What version of ruff are you using? (see `ruff version`)
      placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
    validations:
      required: false
11 .github/ISSUE_TEMPLATE/config.yml vendored
@@ -1,11 +0,0 @@
blank_issues_enabled: true
contact_links:
  - name: Report an issue with ty
    url: https://github.com/astral-sh/ty/issues/new/choose
    about: Please report issues for our type checker ty in the ty repository.
  - name: Documentation
    url: https://docs.astral.sh/ruff
    about: Please consult the documentation before creating an issue.
  - name: Community
    url: https://discord.com/invite/astral-sh
    about: Join our Discord community to ask questions and collaborate.
5 .github/PULL_REQUEST_TEMPLATE.md vendored
@@ -1,9 +1,8 @@
<!--
Thank you for contributing to Ruff/ty! To help us out with reviewing, please consider the following:
Thank you for contributing to Ruff! To help us out with reviewing, please consider the following:

- Does this pull request include a summary of the change? (See below.)
- Does this pull request include a descriptive title? (Please prefix with `[ty]` for ty pull
  requests.)
- Does this pull request include a descriptive title?
- Does this pull request include references to any relevant issues?
-->
11 .github/actionlint.yaml vendored
@@ -1,11 +0,0 @@
# Configuration for the actionlint tool, which we run via pre-commit
# to verify the correctness of the syntax in our GitHub Actions workflows.

self-hosted-runner:
  # Various runners we use that aren't recognized out-of-the-box by actionlint:
  labels:
    - depot-ubuntu-latest-8
    - depot-ubuntu-22.04-16
    - depot-ubuntu-22.04-32
    - github-windows-2025-x86_64-8
    - github-windows-2025-x86_64-16
21 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,21 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
    labels: ["internal"]
    groups:
      actions:
        patterns:
          - "*"
    ignore:
      # The latest versions of these are not compatible with our release workflow
      - dependency-name: "actions/upload-artifact"
      - dependency-name: "actions/download-artifact"

  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "weekly"
    labels: ["internal"]
8 .github/mypy-primer-ty.toml vendored
@@ -1,8 +0,0 @@
#:schema ../ty.schema.json
# Configuration overrides for the mypy primer run

# Enable off-by-default rules.
[rules]
possibly-unresolved-reference = "warn"
unused-ignore-comment = "warn"
division-by-zero = "warn"
115 .github/renovate.json5 vendored
@@ -1,115 +0,0 @@
{
  $schema: "https://docs.renovatebot.com/renovate-schema.json",
  dependencyDashboard: true,
  suppressNotifications: ["prEditedNotification"],
  extends: ["config:recommended"],
  labels: ["internal"],
  schedule: ["before 4am on Monday"],
  semanticCommits: "disabled",
  separateMajorMinor: false,
  prHourlyLimit: 10,
  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
  cargo: {
    // See https://docs.renovatebot.com/configuration-options/#rangestrategy
    rangeStrategy: "update-lockfile",
  },
  pep621: {
    // The default for this package manager is to only search for `pyproject.toml` files
    // found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
    fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
  },
  pip_requirements: {
    // The default for this package manager is to run on all requirements.txt files:
    // https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
    // `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
    // outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
    // which takes a regex string, `ignorePaths` takes a glob string, so we have to use
    // a "negative glob pattern".
    // See:
    // - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
    // - https://docs.renovatebot.com/configuration-options/#ignorepaths
    // - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
    ignorePaths: ["!docs/requirements*.txt"]
  },
  npm: {
    // The default for this package manager is to only search for `package.json` files
    // found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
    fileMatch: ["^playground/.*package\\.json$"],
  },
  "pre-commit": {
    enabled: true,
  },
  packageRules: [
    // Pin GitHub Actions to immutable SHAs.
    {
      matchDepTypes: ["action"],
      pinDigests: true,
    },
    // Annotate GitHub Actions SHAs with a SemVer version.
    {
      extends: ["helpers:pinGitHubActionDigests"],
      extractVersion: "^(?<version>v?\\d+\\.\\d+\\.\\d+)$",
      versioning: "regex:^v?(?<major>\\d+)(\\.(?<minor>\\d+)\\.(?<patch>\\d+))?$",
    },
    {
      // Group upload/download artifact updates, the versions are dependent
      groupName: "Artifact GitHub Actions dependencies",
      matchManagers: ["github-actions"],
      matchDatasources: ["gitea-tags", "github-tags"],
      matchPackageNames: ["actions/.*-artifact"],
      description: "Weekly update of artifact-related GitHub Actions dependencies",
    },
    {
      // This package rule disables updates for GitHub runners:
      // we'd only pin them to a specific version
      // if there was a deliberate reason to do so
      groupName: "GitHub runners",
      matchManagers: ["github-actions"],
      matchDatasources: ["github-runners"],
      description: "Disable PRs updating GitHub runners (e.g. 'runs-on: macos-14')",
      enabled: false,
    },
    {
      // Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
      // See: https://github.com/astral-sh/uv/issues/3642
      matchPackageNames: ["zip"],
      matchManagers: ["cargo"],
      enabled: false,
    },
    {
      // `mkdocs-material` requires a manual update to keep the version in sync
      // with `mkdocs-material-insider`.
      // See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
      matchManagers: ["pip_requirements"],
      matchPackageNames: ["mkdocs-material"],
      enabled: false,
    },
    {
      groupName: "pre-commit dependencies",
      matchManagers: ["pre-commit"],
      description: "Weekly update of pre-commit dependencies",
    },
    {
      groupName: "NPM Development dependencies",
      matchManagers: ["npm"],
      matchDepTypes: ["devDependencies"],
      description: "Weekly update of NPM development dependencies",
    },
    {
      groupName: "Monaco",
      matchManagers: ["npm"],
      matchPackageNames: ["monaco"],
      description: "Weekly update of the Monaco editor",
    },
    {
      groupName: "strum",
      matchManagers: ["cargo"],
      matchPackageNames: ["strum"],
      description: "Weekly update of strum dependencies",
    }
  ],
  vulnerabilityAlerts: {
    commitMessageSuffix: "",
    labels: ["internal", "security"],
  },
}
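To sanity-check the `extractVersion` pattern in the Renovate config above, one can run it over sample tag names; a sketch with made-up tags:

```sh
# Only full three-component tags match, so pinned SHAs get a SemVer annotation
# while tags like "v4" or "release-1" are skipped:
printf 'v4.6.2\n4.6.2\nv4\nrelease-1\n' | grep -E '^v?[0-9]+\.[0-9]+\.[0-9]+$'
# -> v4.6.2
#    4.6.2
```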
480 .github/workflows/build-binaries.yml vendored
@@ -1,480 +0,0 @@
# Build ruff on all platforms.
#
# Generates both wheels (for PyPI) and archived binaries (for GitHub releases).
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
# artifacts job within `cargo-dist`.
name: "Build binaries"

on:
  workflow_call:
    inputs:
      plan:
        required: true
        type: string
  pull_request:
    paths:
      # When we change pyproject.toml, we want to ensure that the maturin builds still work.
      - pyproject.toml
      # And when we change this workflow itself...
      - .github/workflows/build-binaries.yml

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions: {}

env:
  PACKAGE_NAME: ruff
  MODULE_NAME: ruff
  PYTHON_VERSION: "3.13"
  CARGO_INCREMENTAL: 0
  CARGO_NET_RETRY: 10
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10

jobs:
  sdist:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          submodules: recursive
          persist-credentials: false
      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build sdist"
        uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
        with:
          command: sdist
          args: --out dist
      - name: "Test sdist"
        run: |
          pip install dist/"${PACKAGE_NAME}"-*.tar.gz --force-reinstall
          "${MODULE_NAME}" --help
          python -m "${MODULE_NAME}" --help
      - name: "Upload sdist"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: wheels-sdist
          path: dist

  macos-x86_64:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: macos-14
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          submodules: recursive
          persist-credentials: false
      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels - x86_64"
        uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
        with:
          target: x86_64
          args: --release --locked --out dist
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: wheels-macos-x86_64
          path: dist
      - name: "Archive binary"
        run: |
          TARGET=x86_64-apple-darwin
          ARCHIVE_NAME=ruff-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

          mkdir -p $ARCHIVE_NAME
          cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: artifacts-macos-x86_64
          path: |
            *.tar.gz
            *.sha256

  macos-aarch64:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: macos-14
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          submodules: recursive
          persist-credentials: false
      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: arm64
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels - aarch64"
        uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
        with:
          target: aarch64
          args: --release --locked --out dist
      - name: "Test wheel - aarch64"
        run: |
          pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
          ruff --help
          python -m ruff --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: wheels-aarch64-apple-darwin
          path: dist
      - name: "Archive binary"
        run: |
          TARGET=aarch64-apple-darwin
          ARCHIVE_NAME=ruff-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

          mkdir -p $ARCHIVE_NAME
          cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: artifacts-aarch64-apple-darwin
          path: |
            *.tar.gz
            *.sha256

  windows:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: windows-latest
    strategy:
      matrix:
        platform:
          - target: x86_64-pc-windows-msvc
            arch: x64
          - target: i686-pc-windows-msvc
            arch: x86
          - target: aarch64-pc-windows-msvc
            arch: x64
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          submodules: recursive
          persist-credentials: false
      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: ${{ matrix.platform.arch }}
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels"
        uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
        with:
          target: ${{ matrix.platform.target }}
          args: --release --locked --out dist
        env:
          # aarch64 build fails, see https://github.com/PyO3/maturin/issues/2110
          XWIN_VERSION: 16
      - name: "Test wheel"
        if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
        shell: bash
        run: |
          python -m pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
          "${MODULE_NAME}" --help
          python -m "${MODULE_NAME}" --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: wheels-${{ matrix.platform.target }}
          path: dist
      - name: "Archive binary"
        shell: bash
        run: |
          ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
          7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
          sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: artifacts-${{ matrix.platform.target }}
          path: |
            *.zip
            *.sha256

  linux:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target:
          - x86_64-unknown-linux-gnu
          - i686-unknown-linux-gnu
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          submodules: recursive
          persist-credentials: false
      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels"
        uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
        with:
          target: ${{ matrix.target }}
          manylinux: auto
          args: --release --locked --out dist
      - name: "Test wheel"
        if: ${{ startsWith(matrix.target, 'x86_64') }}
        run: |
          pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
          "${MODULE_NAME}" --help
          python -m "${MODULE_NAME}" --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: wheels-${{ matrix.target }}
          path: dist
      - name: "Archive binary"
        shell: bash
        run: |
          set -euo pipefail

          TARGET=${{ matrix.target }}
          ARCHIVE_NAME=ruff-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

          mkdir -p $ARCHIVE_NAME
          cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: artifacts-${{ matrix.target }}
          path: |
            *.tar.gz
            *.sha256

  linux-cross:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform:
          - target: aarch64-unknown-linux-gnu
            arch: aarch64
            # see https://github.com/astral-sh/ruff/issues/3791
            # and https://github.com/gnzlbg/jemallocator/issues/170#issuecomment-1503228963
            maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
          - target: armv7-unknown-linux-gnueabihf
            arch: armv7
          - target: s390x-unknown-linux-gnu
            arch: s390x
          - target: powerpc64le-unknown-linux-gnu
            arch: ppc64le
            # see https://github.com/astral-sh/ruff/issues/10073
            maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
          - target: powerpc64-unknown-linux-gnu
            arch: ppc64
            # see https://github.com/astral-sh/ruff/issues/10073
            maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
          - target: arm-unknown-linux-musleabihf
            arch: arm

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          submodules: recursive
          persist-credentials: false
      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels"
        uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
        with:
          target: ${{ matrix.platform.target }}
          manylinux: auto
          docker-options: ${{ matrix.platform.maturin_docker_options }}
          args: --release --locked --out dist
      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
        if: ${{ matrix.platform.arch != 'ppc64' && matrix.platform.arch != 'ppc64le'}}
        name: Test wheel
        with:
          arch: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }}
          distro: ${{ matrix.platform.arch == 'arm' && 'bullseye' || 'ubuntu20.04' }}
          githubToken: ${{ github.token }}
          install: |
            apt-get update
            apt-get install -y --no-install-recommends python3 python3-pip
            pip3 install -U pip
          run: |
            pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
            ruff --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: wheels-${{ matrix.platform.target }}
          path: dist
      - name: "Archive binary"
        shell: bash
        run: |
          set -euo pipefail

          TARGET=${{ matrix.platform.target }}
          ARCHIVE_NAME=ruff-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

          mkdir -p $ARCHIVE_NAME
          cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: artifacts-${{ matrix.platform.target }}
          path: |
            *.tar.gz
            *.sha256

  musllinux:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target:
          - x86_64-unknown-linux-musl
          - i686-unknown-linux-musl
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          submodules: recursive
          persist-credentials: false
      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels"
        uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
        with:
          target: ${{ matrix.target }}
          manylinux: musllinux_1_2
          args: --release --locked --out dist
      - name: "Test wheel"
        if: matrix.target == 'x86_64-unknown-linux-musl'
        uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # v3
        with:
          image: alpine:latest
          options: -v ${{ github.workspace }}:/io -w /io
          run: |
            apk add python3
            python -m venv .venv
            .venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
            .venv/bin/${{ env.MODULE_NAME }} --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: wheels-${{ matrix.target }}
          path: dist
      - name: "Archive binary"
        shell: bash
        run: |
          set -euo pipefail

          TARGET=${{ matrix.target }}
          ARCHIVE_NAME=ruff-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

          mkdir -p $ARCHIVE_NAME
          cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: artifacts-${{ matrix.target }}
          path: |
            *.tar.gz
            *.sha256

  musllinux-cross:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform:
          - target: aarch64-unknown-linux-musl
            arch: aarch64
            maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
          - target: armv7-unknown-linux-musleabihf
            arch: armv7

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          submodules: recursive
          persist-credentials: false
      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels"
        uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
        with:
          target: ${{ matrix.platform.target }}
          manylinux: musllinux_1_2
          args: --release --locked --out dist
          docker-options: ${{ matrix.platform.maturin_docker_options }}
      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
        name: Test wheel
        with:
          arch: ${{ matrix.platform.arch }}
          distro: alpine_latest
          githubToken: ${{ github.token }}
          install: |
            apk add python3
          run: |
            python -m venv .venv
            .venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
            .venv/bin/${{ env.MODULE_NAME }} --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: wheels-${{ matrix.platform.target }}
          path: dist
      - name: "Archive binary"
        shell: bash
        run: |
          set -euo pipefail

          TARGET=${{ matrix.platform.target }}
          ARCHIVE_NAME=ruff-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

          mkdir -p $ARCHIVE_NAME
          cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: artifacts-${{ matrix.platform.target }}
          path: |
            *.tar.gz
            *.sha256
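Every "Archive binary" step above writes a `.sha256` file next to the tarball via `shasum -a 256`. A consumer-side sketch of verifying and unpacking one of these artifacts (the macOS target is picked as an example):

```sh
shasum -a 256 -c ruff-x86_64-apple-darwin.tar.gz.sha256   # verify the recorded checksum
tar xzf ruff-x86_64-apple-darwin.tar.gz                   # unpack the archive
./ruff-x86_64-apple-darwin/ruff --version                 # smoke-test the binary
```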
299 .github/workflows/build-docker.yml vendored
@@ -1,299 +0,0 @@
# Build and publish a Docker image.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
# artifacts job within `cargo-dist`.
#
# TODO(charlie): Ideally, the publish step would happen as a publish job within `cargo-dist`, but
# sharing the built image as an artifact between jobs is challenging.
name: "[ruff] Build Docker image"

on:
  workflow_call:
    inputs:
      plan:
        required: true
        type: string
  pull_request:
    paths:
      - .github/workflows/build-docker.yml

env:
  RUFF_BASE_IMG: ghcr.io/${{ github.repository_owner }}/ruff

jobs:
  docker-build:
    name: Build Docker image (ghcr.io/astral-sh/ruff) for ${{ matrix.platform }}
    runs-on: ubuntu-latest
    environment:
      name: release
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          submodules: recursive
          persist-credentials: false

      - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Check tag consistency
        if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
        env:
          TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
        run: |
          version=$(grep -m 1 "^version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
          if [ "${TAG}" != "${version}" ]; then
            echo "The input tag does not match the version from pyproject.toml:" >&2
            echo "${TAG}" >&2
            echo "${version}" >&2
            exit 1
          else
            echo "Releasing ${version}"
          fi

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
        with:
          images: ${{ env.RUFF_BASE_IMG }}
          # Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
          tags: |
            type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
            type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}

      - name: Normalize Platform Pair (replace / with -)
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_TUPLE=${platform//\//-}" >> "$GITHUB_ENV"

      # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
        with:
          context: .
          platforms: ${{ matrix.platform }}
          cache-from: type=gha,scope=ruff-${{ env.PLATFORM_TUPLE }}
          cache-to: type=gha,mode=min,scope=ruff-${{ env.PLATFORM_TUPLE }}
          labels: ${{ steps.meta.outputs.labels }}
          outputs: type=image,name=${{ env.RUFF_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}

      - name: Export digests
        env:
          digest: ${{ steps.build.outputs.digest }}
        run: |
          mkdir -p /tmp/digests
          touch "/tmp/digests/${digest#sha256:}"

      - name: Upload digests
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: digests-${{ env.PLATFORM_TUPLE }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1

  docker-publish:
    name: Publish Docker image (ghcr.io/astral-sh/ruff)
    runs-on: ubuntu-latest
    environment:
      name: release
    needs:
      - docker-build
    if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
    steps:
      - name: Download digests
        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true

      - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
        with:
          images: ${{ env.RUFF_BASE_IMG }}
          # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
          tags: |
            type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
            type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}

      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
      - name: Create manifest list and push
        working-directory: /tmp/digests
        # The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
        # The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
        # The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
        run: |
          # shellcheck disable=SC2046
          docker buildx imagetools create \
            $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf "${RUFF_BASE_IMG}@sha256:%s " *)

  docker-publish-extra:
    name: Publish additional Docker image based on ${{ matrix.image-mapping }}
    runs-on: ubuntu-latest
    environment:
      name: release
    needs:
      - docker-publish
    if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
    strategy:
      fail-fast: false
      matrix:
        # Mapping of base image followed by a comma followed by one or more base tags (comma separated)
        # Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
        image-mapping:
          - alpine:3.21,alpine3.21,alpine
          - debian:bookworm-slim,bookworm-slim,debian-slim
          - buildpack-deps:bookworm,bookworm,debian
    steps:
      - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate Dynamic Dockerfile Tags
        shell: bash
        env:
          TAG_VALUE: ${{ fromJson(inputs.plan).announcement_tag }}
        run: |
          set -euo pipefail

          # Extract the image and tags from the matrix variable
          IFS=',' read -r BASE_IMAGE BASE_TAGS <<< "${{ matrix.image-mapping }}"

          # Generate Dockerfile content
          cat <<EOF > Dockerfile
          FROM ${BASE_IMAGE}
          COPY --from=${RUFF_BASE_IMG}:latest /ruff /usr/local/bin/ruff
          ENTRYPOINT []
          CMD ["/usr/local/bin/ruff"]
          EOF

          # Initialize a variable to store all tag docker metadata patterns
          TAG_PATTERNS=""

          # Loop through all base tags and append its docker metadata pattern to the list
          # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
          IFS=','; for TAG in ${BASE_TAGS}; do
            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${TAG_VALUE}\n"
            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${TAG_VALUE}\n"
            TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
          done

          # Remove the trailing newline from the pattern list
          TAG_PATTERNS="${TAG_PATTERNS%\\n}"

          # Export image cache name
          echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> "$GITHUB_ENV"

          # Export tag patterns using the multiline env var syntax
          {
            echo "TAG_PATTERNS<<EOF"
            echo -e "${TAG_PATTERNS}"
            echo EOF
          } >> "$GITHUB_ENV"

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
        # ghcr.io prefers index level annotations
        env:
          DOCKER_METADATA_ANNOTATIONS_LEVELS: index
        with:
          images: ${{ env.RUFF_BASE_IMG }}
          flavor: |
            latest=false
          tags: |
            ${{ env.TAG_PATTERNS }}

      - name: Build and push
        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          # We do not really need to cache here as the Dockerfile is tiny
          #cache-from: type=gha,scope=ruff-${{ env.IMAGE_REF }}
          #cache-to: type=gha,mode=min,scope=ruff-${{ env.IMAGE_REF }}
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          annotations: ${{ steps.meta.outputs.annotations }}

  # This is effectively a duplicate of `docker-publish` to make https://github.com/astral-sh/ruff/pkgs/container/ruff
  # show the ruff base image first since GitHub always shows the last updated image digests
  # This works by annotating the original digests (previously non-annotated) which triggers an update to ghcr.io
  docker-republish:
    name: Annotate Docker image (ghcr.io/astral-sh/ruff)
    runs-on: ubuntu-latest
    environment:
      name: release
    needs:
      - docker-publish-extra
    if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
    steps:
      - name: Download digests
        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true

      - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
        env:
          DOCKER_METADATA_ANNOTATIONS_LEVELS: index
        with:
          images: ${{ env.RUFF_BASE_IMG }}
          # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
          tags: |
            type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
            type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}

      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
      - name: Create manifest list and push
        working-directory: /tmp/digests
        # The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
        # The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
        # The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
        # The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
        run: |
          readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done

          # shellcheck disable=SC2046
          docker buildx imagetools create \
            "${annotations[@]}" \
            $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf "${RUFF_BASE_IMG}@sha256:%s " *)
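The comments on the two "Create manifest list and push" steps describe a command-line expansion that is easy to check in isolation; a sketch with made-up tag values, plus the comma-splitting used by "Generate Dynamic Dockerfile Tags":

```sh
meta='{"tags":["ghcr.io/astral-sh/ruff:0.12.0","ghcr.io/astral-sh/ruff:0.12"]}'
jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$meta"
# -> -t ghcr.io/astral-sh/ruff:0.12.0 -t ghcr.io/astral-sh/ruff:0.12

IFS=',' read -r BASE_IMAGE BASE_TAGS <<< "alpine:3.21,alpine3.21,alpine"
echo "$BASE_IMAGE"   # -> alpine:3.21  (the image)
echo "$BASE_TAGS"    # -> alpine3.21,alpine  (the remaining tags, still comma-joined)
```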
714 .github/workflows/ci.yaml vendored
File diff suppressed because it is too large
78 .github/workflows/daily_fuzz.yaml vendored
@@ -1,78 +0,0 @@
name: Daily parser fuzz

on:
  workflow_dispatch:
  schedule:
    - cron: "0 0 * * *"
  pull_request:
    paths:
      - ".github/workflows/daily_fuzz.yaml"

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

env:
  CARGO_INCREMENTAL: 0
  CARGO_NET_RETRY: 10
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10
  PACKAGE_NAME: ruff
  FORCE_COLOR: 1

jobs:
  fuzz:
    name: Fuzz
    runs-on: ubuntu-latest
    timeout-minutes: 20
    # Don't run the cron job on forks:
    if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
      - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Install mold"
        uses: rui314/setup-mold@b3958095189f34b95d402a680b6e96b7f194f7b9 # v1
      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: Build ruff
        # A debug build means the script runs slower once it gets started,
        # but this is outweighed by the fact that a release build takes *much* longer to compile in CI
        run: cargo build --locked
      - name: Fuzz
        run: |
          # shellcheck disable=SC2046
          (
            uvx \
              --python=3.12 \
              --from=./python/py-fuzzer \
              fuzz \
              --test-executable=target/debug/ruff \
              --bin=ruff \
              $(shuf -i 0-9999999999999999999 -n 1000)
          )

  create-issue-on-failure:
    name: Create an issue if the daily fuzz surfaced any bugs
    runs-on: ubuntu-latest
    needs: fuzz
    if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.fuzz.result == 'failure' }}
    permissions:
      issues: write
    steps:
      - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            await github.rest.issues.create({
              owner: "astral-sh",
              repo: "ruff",
              title: `Daily parser fuzz failed on ${new Date().toDateString()}`,
              body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
              labels: ["bug", "parser", "fuzzer"],
            })
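The `$(shuf ...)` expansion in the Fuzz step feeds py-fuzzer 1000 random integer seeds as positional arguments; a scaled-down sketch of the same idiom:

```sh
shuf -i 0-9999 -n 5
# -> five distinct integers from [0, 9999], one per line;
#    the workflow draws 1000 seeds from a much larger range.
```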
55 .github/workflows/docs.yaml vendored Normal file
@@ -0,0 +1,55 @@
name: mkdocs

on:
  workflow_dispatch:
    inputs:
      ref:
        description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
        default: ""
        type: string
  release:
    types: [published]

jobs:
  mkdocs:
    runs-on: ubuntu-latest
    env:
      CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
      MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.ref }}
      - uses: actions/setup-python@v5
      - name: "Add SSH key"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        uses: webfactory/ssh-agent@v0.9.0
        with:
          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
      - name: "Install Rust toolchain"
        run: rustup show
      - uses: Swatinem/rust-cache@v2
      - name: "Install Insiders dependencies"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        run: pip install -r docs/requirements-insiders.txt
      - name: "Install dependencies"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
        run: pip install -r docs/requirements.txt
      - name: "Copy README File"
        run: |
          python scripts/transform_readme.py --target mkdocs
          python scripts/generate_mkdocs.py
      - name: "Build Insiders docs"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        run: mkdocs build --strict -f mkdocs.insiders.yml
      - name: "Build docs"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
        run: mkdocs build --strict -f mkdocs.public.yml
      - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
        uses: cloudflare/wrangler-action@v3.4.1
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
          # `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
          command: pages deploy site --project-name=astral-docs --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
101 .github/workflows/mypy_primer.yaml vendored
@@ -1,101 +0,0 @@
name: Run mypy_primer

permissions: {}

on:
  pull_request:
    paths:
      - "crates/ty*/**"
      - "crates/ruff_db"
      - "crates/ruff_python_ast"
      - "crates/ruff_python_parser"
      - ".github/workflows/mypy_primer.yaml"
      - ".github/workflows/mypy_primer_comment.yaml"
      - "Cargo.lock"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
  cancel-in-progress: true

env:
  CARGO_INCREMENTAL: 0
  CARGO_NET_RETRY: 10
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10
  RUST_BACKTRACE: 1

jobs:
  mypy_primer:
    name: Run mypy_primer
    runs-on: depot-ubuntu-22.04-32
    timeout-minutes: 20
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          path: ruff
          fetch-depth: 0
          persist-credentials: false

      - name: Install the latest version of uv
        uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0

      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
        with:
          workspaces: "ruff"

      - name: Install Rust toolchain
        run: rustup show

      - name: Run mypy_primer
        shell: bash
        run: |
          cd ruff

          echo "Enabling mypy primer specific configuration overloads (see .github/mypy-primer-ty.toml)"
          mkdir -p ~/.config/ty
          cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml

          PRIMER_SELECTOR="$(paste -s -d'|' crates/ty_python_semantic/resources/primer/good.txt)"

          echo "new commit"
          git rev-list --format=%s --max-count=1 "$GITHUB_SHA"

          MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
          git checkout -b base_commit "$MERGE_BASE"
          echo "base commit"
          git rev-list --format=%s --max-count=1 base_commit

          cd ..

          echo "Project selector: $PRIMER_SELECTOR"
          # Allow the exit code to be 0 or 1, only fail for actual mypy_primer crashes/bugs
          uvx \
            --from="git+https://github.com/hauntsaninja/mypy_primer@01a7ca325f674433c58e02416a867178d1571128" \
            mypy_primer \
            --repo ruff \
            --type-checker ty \
            --old base_commit \
            --new "$GITHUB_SHA" \
            --project-selector "/($PRIMER_SELECTOR)\$" \
            --output concise \
            --debug > mypy_primer.diff || [ $? -eq 1 ]

          # Output diff with ANSI color codes
          cat mypy_primer.diff

          # Remove ANSI color codes before uploading
          sed -ie 's/\x1b\[[0-9;]*m//g' mypy_primer.diff

          echo ${{ github.event.number }} > pr-number

      - name: Upload diff
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: mypy_primer_diff
          path: mypy_primer.diff

      - name: Upload pr-number
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pr-number
          path: pr-number
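Two shell idioms in the run block above are worth unpacking: `paste -s -d'|'` joins the project list into a single regex alternation, and the `sed` call strips ANSI color escapes before upload. A sketch with hypothetical project names:

```sh
printf 'black\nmypy\n' > good.txt
PRIMER_SELECTOR="$(paste -s -d'|' good.txt)"
echo "/($PRIMER_SELECTOR)\$"   # -> /(black|mypy)$

printf '\x1b[31mred text\x1b[0m\n' | sed -e 's/\x1b\[[0-9;]*m//g'   # -> red text
```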
97 .github/workflows/mypy_primer_comment.yaml vendored
@@ -1,97 +0,0 @@
name: PR comment (mypy_primer)

on: # zizmor: ignore[dangerous-triggers]
  workflow_run:
    workflows: [Run mypy_primer]
    types: [completed]
  workflow_dispatch:
    inputs:
      workflow_run_id:
        description: The mypy_primer workflow that triggers the workflow run
        required: true

jobs:
  comment:
    runs-on: ubuntu-24.04
    permissions:
      pull-requests: write
    steps:
      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: Download PR number
        with:
          name: pr-number
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Parse pull request number
        id: pr-number
        run: |
          if [[ -f pr-number ]]
          then
            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
          fi

      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
        name: "Download mypy_primer results"
        id: download-mypy_primer_diff
        if: steps.pr-number.outputs.pr-number
        with:
          name: mypy_primer_diff
          workflow: mypy_primer.yaml
          pr: ${{ steps.pr-number.outputs.pr-number }}
          path: pr/mypy_primer_diff
          workflow_conclusion: completed
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Generate comment content
        id: generate-comment
        if: steps.download-mypy_primer_diff.outputs.found_artifact == 'true'
        run: |
          # Guard against malicious mypy_primer results that symlink to a secret
          # file on this runner
          if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]]
          then
            echo "Error: mypy_primer.diff cannot be a symlink"
            exit 1
          fi

          # Note this identifier is used to find the comment to update on
          # subsequent runs
          echo '<!-- generated-comment mypy_primer -->' >> comment.txt

          echo '## `mypy_primer` results' >> comment.txt
          if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then
            echo '<details>' >> comment.txt
            echo '<summary>Changes were detected when running on open source projects</summary>' >> comment.txt
            echo '' >> comment.txt
            echo '```diff' >> comment.txt
            cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt
            echo '```' >> comment.txt
            echo '</details>' >> comment.txt
          else
            echo 'No ecosystem changes detected ✅' >> comment.txt
          fi

          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
          cat comment.txt >> "$GITHUB_OUTPUT"
          echo 'EOF' >> "$GITHUB_OUTPUT"

      - name: Find existing comment
        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
        if: steps.generate-comment.outcome == 'success'
        id: find-comment
        with:
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          comment-author: "github-actions[bot]"
          body-includes: "<!-- generated-comment mypy_primer -->"

      - name: Create or update comment
        if: steps.find-comment.outcome == 'success'
        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
        with:
          comment-id: ${{ steps.find-comment.outputs.comment-id }}
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          body-path: comment.txt
          edit-mode: replace
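The "Generate comment content" step above writes a multi-line value to a step output. Plain `name=value` lines in `$GITHUB_OUTPUT` only carry a single line, so multi-line values use GitHub's documented `name<<DELIMITER` heredoc form. A runnable sketch, with `GITHUB_OUTPUT` faked so it works outside of CI:

```bash
#!/usr/bin/env bash
# Sketch of the multiline step-output pattern. GITHUB_OUTPUT is
# substituted with a temp file when run outside a workflow.
GITHUB_OUTPUT="${GITHUB_OUTPUT:-/tmp/github_output.txt}"

printf 'line one\nline two\n' > comment.txt

{
  echo 'comment<<EOF'   # open the multi-line value
  cat comment.txt       # the value itself, any number of lines
  echo 'EOF'            # close it; EOF must not appear in the body
} >> "$GITHUB_OUTPUT"
```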
29 .github/workflows/notify-dependents.yml vendored
@@ -1,29 +0,0 @@
# Notify downstream repositories of a new release.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
# job within `cargo-dist`.
name: "[ruff] Notify dependents"

on:
  workflow_call:
    inputs:
      plan:
        required: true
        type: string

jobs:
  update-dependents:
    name: Notify dependents
    runs-on: ubuntu-latest
    steps:
      - name: "Update pre-commit mirror"
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
          script: |
            github.rest.actions.createWorkflowDispatch({
              owner: 'astral-sh',
              repo: 'ruff-pre-commit',
              workflow_id: 'main.yml',
              ref: 'main',
            })
48 .github/workflows/playground.yaml vendored Normal file
@@ -0,0 +1,48 @@
name: "[Playground] Release"

on:
  workflow_dispatch:
  release:
    types: [published]

env:
  CARGO_INCREMENTAL: 0
  CARGO_NET_RETRY: 10
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10

jobs:
  publish:
    runs-on: ubuntu-latest
    env:
      CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
    steps:
      - uses: actions/checkout@v4
      - name: "Install Rust toolchain"
        run: rustup target add wasm32-unknown-unknown
      - uses: actions/setup-node@v4
        with:
          node-version: 18
          cache: "npm"
          cache-dependency-path: playground/package-lock.json
      - uses: jetli/wasm-pack-action@v0.4.0
      - uses: jetli/wasm-bindgen-action@v0.2.0
      - name: "Run wasm-pack"
        run: wasm-pack build --target web --out-dir ../../playground/src/pkg crates/ruff_wasm
      - name: "Install Node dependencies"
        run: npm ci
        working-directory: playground
      - name: "Run TypeScript checks"
        run: npm run check
        working-directory: playground
      - name: "Build JavaScript bundle"
        run: npm run build
        working-directory: playground
      - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
        uses: cloudflare/wrangler-action@v3.4.1
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
          # `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
          command: pages deploy playground/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
31 .github/workflows/pr-comment.yaml vendored
@@ -10,29 +10,29 @@ on:
        description: The ecosystem workflow that triggers the workflow run
        required: true

permissions:
  pull-requests: write

jobs:
  comment:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
      - uses: dawidd6/action-download-artifact@v3
        name: Download pull request number
        with:
          name: pr-number
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Parse pull request number
        id: pr-number
        run: |
          if [[ -f pr-number ]]
          then
            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
            echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
          fi

      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
      - uses: dawidd6/action-download-artifact@v3
        name: "Download ecosystem results"
        id: download-ecosystem-result
        if: steps.pr-number.outputs.pr-number
@@ -43,20 +43,11 @@ jobs:
          path: pr/ecosystem
          workflow_conclusion: completed
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Generate comment content
        id: generate-comment
        if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
        run: |
          # Guard against malicious ecosystem results that symlink to a secret
          # file on this runner
          if [[ -L pr/ecosystem/ecosystem-result ]]
          then
            echo "Error: ecosystem-result cannot be a symlink"
            exit 1
          fi

          # Note this identifier is used to find the comment to update on
          # subsequent runs
          echo '<!-- generated-comment ecosystem -->' >> comment.txt
@@ -65,12 +56,12 @@ jobs:
          cat pr/ecosystem/ecosystem-result >> comment.txt
          echo "" >> comment.txt

          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
          cat comment.txt >> "$GITHUB_OUTPUT"
          echo 'EOF' >> "$GITHUB_OUTPUT"
          echo 'comment<<EOF' >> $GITHUB_OUTPUT
          cat comment.txt >> $GITHUB_OUTPUT
          echo 'EOF' >> $GITHUB_OUTPUT

      - name: Find existing comment
        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
        uses: peter-evans/find-comment@v3
        if: steps.generate-comment.outcome == 'success'
        id: find-comment
        with:
@@ -80,7 +71,7 @@ jobs:

      - name: Create or update comment
        if: steps.find-comment.outcome == 'success'
        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
        uses: peter-evans/create-or-update-comment@v4
        with:
          comment-id: ${{ steps.find-comment.outputs.comment-id }}
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
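One of the changes in the hunks above is purely about quoting: `>> "$GITHUB_OUTPUT"` versus `>> $GITHUB_OUTPUT`. Unquoted, the expansion is subject to word splitting, so any path containing whitespace breaks the redirect. A small sketch of the failure mode, using an assumed path with a space purely for illustration:

```bash
#!/usr/bin/env bash
# Why the quotes matter: a hypothetical runner path with a space.
GITHUB_OUTPUT="/tmp/run dir/output.txt"   # assumed path, for illustration
mkdir -p "/tmp/run dir"

echo "pr-number=123" >> "$GITHUB_OUTPUT"  # ok: expands to one word, exact path
# echo "pr-number=123" >> $GITHUB_OUTPUT  # would split into two words and
#                                         # fail with "ambiguous redirect"
```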
144 .github/workflows/publish-docs.yml vendored
@@ -1,144 +0,0 @@
# Publish the Ruff documentation.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
# job within `cargo-dist`.
name: mkdocs

on:
  workflow_dispatch:
    inputs:
      ref:
        description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
        default: ""
        type: string
  workflow_call:
    inputs:
      plan:
        required: true
        type: string

jobs:
  mkdocs:
    runs-on: ubuntu-latest
    env:
      MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          ref: ${{ inputs.ref }}
          persist-credentials: true

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: 3.12

      - name: "Set docs version"
        env:
          version: ${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}
        run: |
          # if version is missing, use 'latest'
          if [ -z "$version" ]; then
            echo "Using 'latest' as version"
            version="latest"
          fi

          # Use version as display name for now
          display_name="$version"

          echo "version=$version" >> "$GITHUB_ENV"
          echo "display_name=$display_name" >> "$GITHUB_ENV"

      - name: "Set branch name"
        run: |
          timestamp="$(date +%s)"

          # create branch_display_name from display_name by replacing all
          # characters disallowed in git branch names with hyphens
          branch_display_name="$(echo "${display_name}" | tr -c '[:alnum:]._' '-' | tr -s '-')"

          echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV"
          echo "timestamp=$timestamp" >> "$GITHUB_ENV"

      - name: "Add SSH key"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
        with:
          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}

      - name: "Install Rust toolchain"
        run: rustup show

      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

      - name: "Install Insiders dependencies"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        run: pip install -r docs/requirements-insiders.txt

      - name: "Install dependencies"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
        run: pip install -r docs/requirements.txt

      - name: "Copy README File"
        run: |
          python scripts/transform_readme.py --target mkdocs
          python scripts/generate_mkdocs.py

      - name: "Build Insiders docs"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        run: mkdocs build --strict -f mkdocs.insiders.yml

      - name: "Build docs"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
        run: mkdocs build --strict -f mkdocs.public.yml

      - name: "Clone docs repo"
        run: git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs

      - name: "Copy docs"
        run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/

      - name: "Commit docs"
        working-directory: astral-docs
        run: |
          git config user.name "astral-docs-bot"
          git config user.email "176161322+astral-docs-bot@users.noreply.github.com"

          git checkout -b "${branch_name}"
          git add site/ruff
          git commit -m "Update ruff documentation for $version"

      - name: "Create Pull Request"
        working-directory: astral-docs
        env:
          GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
        run: |
          # set the PR title
          pull_request_title="Update ruff documentation for ${display_name}"

          # Delete any existing pull requests that are open for this version
          # by checking against pull_request_title because the new PR will
          # supersede the old one.
          gh pr list --state open --json title,number --jq ".[] | select(.title == \"${pull_request_title}\") | .number" | \
            xargs -I {} gh pr close {}

          # push the branch to GitHub
          git push origin "${branch_name}"

          # create the PR
          gh pr create \
            --base=main \
            --head="${branch_name}" \
            --title="${pull_request_title}" \
            --body="Automated documentation update for ${display_name}" \
            --label="documentation"

      - name: "Merge Pull Request"
        if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
        working-directory: astral-docs
        env:
          GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
        run: |
          # auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
          # give the PR a few seconds to be created before trying to auto-merge it
          sleep 10
          gh pr merge --squash "${branch_name}"
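The close-superseded-PRs line in the "Create Pull Request" step depends on two details that are easy to get wrong: single quotes around the `--jq` program stop the shell from expanding `$pull_request_title` (so the filter compares titles against that literal string), and the `--json` field list must include every field the filter reads, here both `title` and `number`. A runnable sketch of the corrected pattern, with an example title; it assumes an authenticated `gh` CLI in a repo directory:

```bash
#!/usr/bin/env bash
# Close every open PR whose title matches exactly.
pull_request_title="Update ruff documentation for v0.0.0"  # example title

gh pr list --state open --json title,number \
  --jq ".[] | select(.title == \"${pull_request_title}\") | .number" |
  xargs -I {} gh pr close {}
```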
54 .github/workflows/publish-playground.yml vendored
@@ -1,54 +0,0 @@
# Publish the Ruff playground.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
# job within `cargo-dist`.
name: "[Playground] Release"

on:
  workflow_dispatch:
  workflow_call:
    inputs:
      plan:
        required: true
        type: string

env:
  CARGO_INCREMENTAL: 0
  CARGO_NET_RETRY: 10
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10

jobs:
  publish:
    runs-on: ubuntu-latest
    env:
      CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
      - name: "Install Rust toolchain"
        run: rustup target add wasm32-unknown-unknown
      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
        with:
          node-version: 22
          cache: "npm"
          cache-dependency-path: playground/package-lock.json
      - uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
      - name: "Install Node dependencies"
        run: npm ci
        working-directory: playground
      - name: "Run TypeScript checks"
        run: npm run check
        working-directory: playground
      - name: "Build Ruff playground"
        run: npm run build --workspace ruff-playground
        working-directory: playground
      - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
        uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
          # `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
          command: pages deploy playground/ruff/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
32 .github/workflows/publish-pypi.yml vendored
@@ -1,32 +0,0 @@
# Publish a release to PyPI.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job
# within `cargo-dist`.
name: "[ruff] Publish to PyPI"

on:
  workflow_call:
    inputs:
      plan:
        required: true
        type: string

jobs:
  pypi-publish:
    name: Upload to PyPI
    runs-on: ubuntu-latest
    environment:
      name: release
    permissions:
      # For PyPI's trusted publishing.
      id-token: write
    steps:
      - name: "Install uv"
        uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
      - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
        with:
          pattern: wheels-*
          path: wheels
          merge-multiple: true
      - name: Publish to PyPI
        run: uv publish -v wheels/*
58 .github/workflows/publish-ty-playground.yml vendored
@@ -1,58 +0,0 @@
# Publish the ty playground.
name: "[ty Playground] Release"

permissions: {}

on:
  push:
    branches: [main]
    paths:
      - "crates/ty*/**"
      - "crates/ruff_db/**"
      - "crates/ruff_python_ast/**"
      - "crates/ruff_python_parser/**"
      - "playground/**"
      - ".github/workflows/publish-ty-playground.yml"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}
  cancel-in-progress: true

env:
  CARGO_INCREMENTAL: 0
  CARGO_NET_RETRY: 10
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10

jobs:
  publish:
    runs-on: ubuntu-latest
    env:
      CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
      - name: "Install Rust toolchain"
        run: rustup target add wasm32-unknown-unknown
      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
        with:
          node-version: 22
      - uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
      - name: "Install Node dependencies"
        run: npm ci
        working-directory: playground
      - name: "Run TypeScript checks"
        run: npm run check
        working-directory: playground
      - name: "Build ty playground"
        run: npm run build --workspace ty-playground
        working-directory: playground
      - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
        uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
          # `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
          command: pages deploy playground/ty/dist --project-name=ty-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
59 .github/workflows/publish-wasm.yml vendored
@@ -1,59 +0,0 @@
# Build and publish ruff-api for wasm.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish
# job within `cargo-dist`.
name: "Build and publish wasm"

on:
  workflow_dispatch:
  workflow_call:
    inputs:
      plan:
        required: true
        type: string

env:
  CARGO_INCREMENTAL: 0
  CARGO_NET_RETRY: 10
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10

jobs:
  ruff_wasm:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write
    strategy:
      matrix:
        target: [web, bundler, nodejs]
      fail-fast: false
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
      - name: "Install Rust toolchain"
        run: rustup target add wasm32-unknown-unknown
      - uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
        with:
          version: v0.13.1
      - uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
      - name: "Run wasm-pack build"
        run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
      - name: "Rename generated package"
        run: | # Replace the package name w/ jq
          jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
          mv /tmp/package.json crates/ruff_wasm/pkg
      - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
        with:
          node-version: 20
          registry-url: "https://registry.npmjs.org"
      - name: "Publish (dry-run)"
        if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
        run: npm publish --dry-run crates/ruff_wasm/pkg
      - name: "Publish"
        if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
        run: npm publish --provenance --access public crates/ruff_wasm/pkg
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
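The "Rename generated package" step edits `package.json` with jq, which cannot modify a file in place; hence the write-to-temp-then-move dance. A sketch of the same edit using `--arg` instead of interpolating the name into the jq program (a variant, not what the workflow does, but it sidesteps quoting problems when the value contains special characters); paths are illustrative:

```bash
#!/usr/bin/env bash
# In-place JSON edit via temp file, with the new name passed as a
# jq variable rather than spliced into the program text.
pkg=crates/ruff_wasm/pkg/package.json
target=web  # one of the matrix targets

jq --arg name "@astral-sh/ruff-wasm-${target}" '.name = $name' "$pkg" > /tmp/package.json
mv /tmp/package.json "$pkg"
```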
593 .github/workflows/release.yaml vendored Normal file
@@ -0,0 +1,593 @@
name: "[ruff] Release"

on:
  workflow_dispatch:
    inputs:
      tag:
        description: "The version to tag, without the leading 'v'. If omitted, will initiate a dry run (no uploads)."
        type: string
      sha:
        description: "The full sha of the commit to be released. If omitted, the latest commit on the default branch will be used."
        default: ""
        type: string
  pull_request:
    paths:
      # When we change pyproject.toml, we want to ensure that the maturin builds still work
      - pyproject.toml
      # And when we change this workflow itself...
      - .github/workflows/release.yaml

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  PACKAGE_NAME: ruff
  PYTHON_VERSION: "3.11"
  CARGO_INCREMENTAL: 0
  CARGO_NET_RETRY: 10
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10

jobs:
  sdist:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.sha }}
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build sdist"
        uses: PyO3/maturin-action@v1
        with:
          command: sdist
          args: --out dist
      - name: "Test sdist"
        run: |
          pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
          ruff --help
          python -m ruff --help
      - name: "Upload sdist"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  macos-x86_64:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.sha }}
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels - x86_64"
        uses: PyO3/maturin-action@v1
        with:
          target: x86_64
          args: --release --locked --out dist
      - name: "Test wheel - x86_64"
        run: |
          pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
          ruff --help
          python -m ruff --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist
      - name: "Archive binary"
        run: |
          ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz
          tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@v3
        with:
          name: binaries
          path: |
            *.tar.gz
            *.sha256

  macos-universal:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.sha }}
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels - universal2"
        uses: PyO3/maturin-action@v1
        with:
          args: --release --locked --target universal2-apple-darwin --out dist
      - name: "Test wheel - universal2"
        run: |
          pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
          ruff --help
          python -m ruff --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist
      - name: "Archive binary"
        run: |
          ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz
          tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@v3
        with:
          name: binaries
          path: |
            *.tar.gz
            *.sha256

  windows:
    runs-on: windows-latest
    strategy:
      matrix:
        platform:
          - target: x86_64-pc-windows-msvc
            arch: x64
          - target: i686-pc-windows-msvc
            arch: x86
          - target: aarch64-pc-windows-msvc
            arch: x64
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.sha }}
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: ${{ matrix.platform.arch }}
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.platform.target }}
          args: --release --locked --out dist
      - name: "Test wheel"
        if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
        shell: bash
        run: |
          python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
          ruff --help
          python -m ruff --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist
      - name: "Archive binary"
        shell: bash
        run: |
          ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip
          7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
          sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@v3
        with:
          name: binaries
          path: |
            *.zip
            *.sha256

  linux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target:
          - x86_64-unknown-linux-gnu
          - i686-unknown-linux-gnu
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.sha }}
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          manylinux: auto
          args: --release --locked --out dist
      - name: "Test wheel"
        if: ${{ startsWith(matrix.target, 'x86_64') }}
        run: |
          pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
          ruff --help
          python -m ruff --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist
      - name: "Archive binary"
        run: |
          ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
          tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@v3
        with:
          name: binaries
          path: |
            *.tar.gz
            *.sha256

  linux-cross:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform:
          - target: aarch64-unknown-linux-gnu
            arch: aarch64
            # see https://github.com/astral-sh/ruff/issues/3791
            # and https://github.com/gnzlbg/jemallocator/issues/170#issuecomment-1503228963
            maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
          - target: armv7-unknown-linux-gnueabihf
            arch: armv7
          - target: s390x-unknown-linux-gnu
            arch: s390x
          - target: powerpc64le-unknown-linux-gnu
            arch: ppc64le
          - target: powerpc64-unknown-linux-gnu
            arch: ppc64

    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.sha }}
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.platform.target }}
          manylinux: auto
          docker-options: ${{ matrix.platform.maturin_docker_options }}
          args: --release --locked --out dist
      - uses: uraimo/run-on-arch-action@v2
        if: matrix.platform.arch != 'ppc64'
        name: Test wheel
        with:
          arch: ${{ matrix.platform.arch }}
          distro: ubuntu20.04
          githubToken: ${{ github.token }}
          install: |
            apt-get update
            apt-get install -y --no-install-recommends python3 python3-pip
            pip3 install -U pip
          run: |
            pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
            ruff --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist
      - name: "Archive binary"
        run: |
          ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
          tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@v3
        with:
          name: binaries
          path: |
            *.tar.gz
            *.sha256

  musllinux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target:
          - x86_64-unknown-linux-musl
          - i686-unknown-linux-musl
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.sha }}
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          manylinux: musllinux_1_2
          args: --release --locked --out dist
      - name: "Test wheel"
        if: matrix.target == 'x86_64-unknown-linux-musl'
        uses: addnab/docker-run-action@v3
        with:
          image: alpine:latest
          options: -v ${{ github.workspace }}:/io -w /io
          run: |
            apk add python3
            python -m venv .venv
            .venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
            .venv/bin/ruff check --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist
      - name: "Archive binary"
        run: |
          ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
          tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@v3
        with:
          name: binaries
          path: |
            *.tar.gz
            *.sha256

  musllinux-cross:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform:
          - target: aarch64-unknown-linux-musl
            arch: aarch64
            maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
          - target: armv7-unknown-linux-musleabihf
            arch: armv7

    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.sha }}
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.platform.target }}
          manylinux: musllinux_1_2
          args: --release --locked --out dist
          docker-options: ${{ matrix.platform.maturin_docker_options }}
      - uses: uraimo/run-on-arch-action@v2
        name: Test wheel
        with:
          arch: ${{ matrix.platform.arch }}
          distro: alpine_latest
          githubToken: ${{ github.token }}
          install: |
            apk add python3
          run: |
            python -m venv .venv
            .venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
            .venv/bin/ruff check --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist
      - name: "Archive binary"
        run: |
          ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
          tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
      - name: "Upload binary"
        uses: actions/upload-artifact@v3
        with:
          name: binaries
          path: |
            *.tar.gz
            *.sha256

  validate-tag:
    name: Validate tag
    runs-on: ubuntu-latest
    # If you don't set an input tag, it's a dry run (no uploads).
    if: ${{ inputs.tag }}
    steps:
      - uses: actions/checkout@v4
        with:
          ref: main # We checkout the main branch to check for the commit
      - name: Check main branch
        if: ${{ inputs.sha }}
        run: |
          # Fetch the main branch since a shallow checkout is used by default
          git fetch origin main --unshallow
          if ! git branch --contains ${{ inputs.sha }} | grep -E '(^|\s)main$'; then
            echo "The specified sha is not on the main branch" >&2
            exit 1
          fi
      - name: Check tag consistency
        run: |
          # Switch to the commit we want to release
          git checkout ${{ inputs.sha }}
          version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
          if [ "${{ inputs.tag }}" != "${version}" ]; then
            echo "The input tag does not match the version from pyproject.toml:" >&2
            echo "${{ inputs.tag }}" >&2
            echo "${version}" >&2
            exit 1
          else
            echo "Releasing ${version}"
          fi

  upload-release:
    name: Upload to PyPI
    runs-on: ubuntu-latest
    needs:
      - macos-universal
      - macos-x86_64
      - windows
      - linux
      - linux-cross
      - musllinux
      - musllinux-cross
      - validate-tag
    # If you don't set an input tag, it's a dry run (no uploads).
    if: ${{ inputs.tag }}
    environment:
      name: release
    permissions:
      # For PyPI's trusted publishing
      id-token: write
    steps:
      - uses: actions/download-artifact@v3
        with:
          name: wheels
          path: wheels
      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          skip-existing: true
          packages-dir: wheels
          verbose: true

  tag-release:
    name: Tag release
    runs-on: ubuntu-latest
    needs: upload-release
    # If you don't set an input tag, it's a dry run (no uploads).
    if: ${{ inputs.tag }}
    permissions:
      # For git tag
      contents: write
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.sha }}
      - name: git tag
        run: |
          git config user.email "hey@astral.sh"
          git config user.name "Ruff Release CI"
          git tag -m "v${{ inputs.tag }}" "v${{ inputs.tag }}"
          # If there is a duplicate tag, this will fail. The publish to PyPI action will have been a noop (due to skip
          # existing), so we make a non-destructive exit here
          git push --tags

  publish-release:
    name: Publish to GitHub
    runs-on: ubuntu-latest
    needs: tag-release
    # If you don't set an input tag, it's a dry run (no uploads).
    if: ${{ inputs.tag }}
    permissions:
      # For GitHub release publishing
      contents: write
    steps:
      - uses: actions/download-artifact@v3
        with:
          name: binaries
          path: binaries
      - name: "Publish to GitHub"
        uses: softprops/action-gh-release@v1
        with:
          draft: true
          files: binaries/*
          tag_name: v${{ inputs.tag }}

  docker-publish:
    # This action doesn't need to wait on any other task, it's easy to re-tag if something failed and we're validating
    # the tag here also
    name: Push Docker image ghcr.io/astral-sh/ruff
    runs-on: ubuntu-latest
    environment:
      name: release
    permissions:
      # For the docker push
      packages: write
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.sha }}

      - uses: docker/setup-buildx-action@v3

      - uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/astral-sh/ruff

      - name: Check tag consistency
        # Unlike validate-tag we don't check if the commit is on the main branch, but it seems good enough since we can
        # change docker tags
        if: ${{ inputs.tag }}
        run: |
          version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
          if [ "${{ inputs.tag }}" != "${version}" ]; then
            echo "The input tag does not match the version from pyproject.toml:" >&2
            echo "${{ inputs.tag }}" >&2
            echo "${version}" >&2
            exit 1
          else
            echo "Releasing ${version}"
          fi

      - name: "Build and push Docker image"
        uses: docker/build-push-action@v5
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          # Reuse the builder
          cache-from: type=gha
          cache-to: type=gha,mode=max
          push: ${{ inputs.tag != '' }}
          tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ inputs.tag || 'dry-run' }}
          labels: ${{ steps.meta.outputs.labels }}

  # After the release has been published, we update downstream repositories
  # This is separate because if this fails the release is still fine, we just need to do some manual workflow triggers
  update-dependents:
    name: Update dependents
    runs-on: ubuntu-latest
    needs: publish-release
    steps:
      - name: "Update pre-commit mirror"
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
          script: |
            github.rest.actions.createWorkflowDispatch({
              owner: 'astral-sh',
              repo: 'ruff-pre-commit',
              workflow_id: 'main.yml',
              ref: 'main',
            })
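Both `validate-tag` and `docker-publish` above refuse to release when the dispatched tag disagrees with `pyproject.toml`. A sketch of that check as a standalone script, runnable locally from the repository root, with `TAG` standing in for the workflow's `inputs.tag`; the grep/sed extraction mirrors the workflow's own lines:

```bash
#!/usr/bin/env bash
# check-tag.sh: fail unless <tag> matches the version in pyproject.toml.
set -euo pipefail
TAG="${1:?usage: check-tag.sh <tag-without-leading-v>}"

version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "$TAG" != "$version" ]; then
  echo "The input tag does not match the version from pyproject.toml:" >&2
  echo "  tag:     $TAG" >&2
  echo "  version: $version" >&2
  exit 1
fi
echo "Releasing $version"
```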
306 .github/workflows/release.yml vendored
@@ -1,306 +0,0 @@
# This file was autogenerated by dist: https://github.com/astral-sh/cargo-dist
#
# Copyright 2022-2024, axodotdev
# Copyright 2025 Astral Software Inc.
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
#
# * checks for a Git Tag that looks like a release
# * builds artifacts with dist (archives, installers, hashes)
# * uploads those artifacts to temporary workflow zip
# * on success, uploads the artifacts to a GitHub Release
#
# Note that the GitHub Release will be created with a generated
# title/body based on your changelogs.

name: Release
permissions:
  "contents": "write"

# This task will run whenever you workflow_dispatch with a tag that looks like a version
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
#
# If PACKAGE_NAME is specified, then the announcement will be for that
# package (erroring out if it doesn't have the given version or isn't dist-able).
#
# If PACKAGE_NAME isn't specified, then the announcement will be for all
# (dist-able) packages in the workspace with that version (this mode is
# intended for workspaces with only one dist-able package, or with all dist-able
# packages versioned/released in lockstep).
#
# If you push multiple tags at once, separate instances of this workflow will
# spin up, creating an independent announcement for each one. However, GitHub
# will hard limit this to 3 tags per commit, as it will assume more tags is a
# mistake.
#
# If there's a prerelease-style suffix to the version, then the release(s)
# will be marked as a prerelease.
on:
  pull_request:
  workflow_dispatch:
    inputs:
      tag:
        description: Release Tag
        required: true
        default: dry-run
        type: string

jobs:
  # Run 'dist plan' (or host) to determine what tasks we need to do
  plan:
    runs-on: "depot-ubuntu-latest-4"
    outputs:
      val: ${{ steps.plan.outputs.manifest }}
      tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
      tag-flag: ${{ inputs.tag && inputs.tag != 'dry-run' && format('--tag={0}', inputs.tag) || '' }}
      publishing: ${{ inputs.tag && inputs.tag != 'dry-run' }}
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:
      - uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
        with:
          persist-credentials: false
          submodules: recursive
      - name: Install dist
        # we specify bash to get pipefail; it guards against the `curl` command
        # failing. otherwise `sh` won't catch that `curl` returned non-0
        shell: bash
        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.5-prerelease.1/cargo-dist-installer.sh | sh"
      - name: Cache dist
        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
        with:
          name: cargo-dist-cache
          path: ~/.cargo/bin/dist
      # sure would be cool if github gave us proper conditionals...
      # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
      # functionality based on whether this is a pull_request, and whether it's from a fork.
      # (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
      # but also really annoying to build CI around when it needs secrets to work right.)
      - id: plan
        run: |
          dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
          echo "dist ran successfully"
          cat plan-dist-manifest.json
          echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
      - name: "Upload dist-manifest.json"
        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
        with:
          name: artifacts-plan-dist-manifest
          path: plan-dist-manifest.json

  custom-build-binaries:
    needs:
      - plan
    if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
    uses: ./.github/workflows/build-binaries.yml
    with:
      plan: ${{ needs.plan.outputs.val }}
    secrets: inherit

  custom-build-docker:
    needs:
      - plan
    if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
    uses: ./.github/workflows/build-docker.yml
    with:
      plan: ${{ needs.plan.outputs.val }}
    secrets: inherit
    permissions:
      "contents": "read"
      "packages": "write"

  # Build and package all the platform-agnostic(ish) things
  build-global-artifacts:
    needs:
      - plan
      - custom-build-binaries
      - custom-build-docker
    runs-on: "depot-ubuntu-latest-4"
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
    steps:
      - uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
        with:
          persist-credentials: false
          submodules: recursive
      - name: Install cached dist
        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
        with:
          name: cargo-dist-cache
          path: ~/.cargo/bin/
      - run: chmod +x ~/.cargo/bin/dist
      # Get all the local artifacts for the global tasks to use (for e.g. checksums)
      - name: Fetch local artifacts
        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
        with:
          pattern: artifacts-*
          path: target/distrib/
          merge-multiple: true
      - id: cargo-dist
        shell: bash
        run: |
          dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
          echo "dist ran successfully"

          # Parse out what we just built and upload it to scratch storage
          echo "paths<<EOF" >> "$GITHUB_OUTPUT"
          jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
          echo "EOF" >> "$GITHUB_OUTPUT"

          cp dist-manifest.json "$BUILD_MANIFEST_NAME"
      - name: "Upload artifacts"
        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
        with:
          name: artifacts-build-global
          path: |
            ${{ steps.cargo-dist.outputs.paths }}
            ${{ env.BUILD_MANIFEST_NAME }}
  # Determines if we should publish/announce
  host:
    needs:
      - plan
      - custom-build-binaries
      - custom-build-docker
      - build-global-artifacts
    # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
    if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    runs-on: "depot-ubuntu-latest-4"
    outputs:
      val: ${{ steps.host.outputs.manifest }}
    steps:
      - uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
        with:
          persist-credentials: false
          submodules: recursive
      - name: Install cached dist
        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
        with:
          name: cargo-dist-cache
          path: ~/.cargo/bin/
      - run: chmod +x ~/.cargo/bin/dist
      # Fetch artifacts from scratch-storage
      - name: Fetch artifacts
        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
        with:
          pattern: artifacts-*
          path: target/distrib/
          merge-multiple: true
      # This is a harmless no-op for GitHub Releases, hosting for that happens in "announce"
      - id: host
        shell: bash
        run: |
          dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
          echo "artifacts uploaded and released successfully"
          cat dist-manifest.json
          echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
      - name: "Upload dist-manifest.json"
        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
        with:
          # Overwrite the previous copy
          name: artifacts-dist-manifest
          path: dist-manifest.json

  custom-publish-pypi:
    needs:
      - plan
      - host
    if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
    uses: ./.github/workflows/publish-pypi.yml
    with:
      plan: ${{ needs.plan.outputs.val }}
    secrets: inherit
    # publish jobs get escalated permissions
    permissions:
      "id-token": "write"
      "packages": "write"

  custom-publish-wasm:
    needs:
      - plan
      - host
    if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
    uses: ./.github/workflows/publish-wasm.yml
    with:
      plan: ${{ needs.plan.outputs.val }}
    secrets: inherit
    # publish jobs get escalated permissions
    permissions:
      "contents": "read"
      "id-token": "write"
      "packages": "write"

  # Create a GitHub Release while uploading all files to it
  announce:
    needs:
      - plan
      - host
      - custom-publish-pypi
      - custom-publish-wasm
    # use "always() && ..." to allow us to wait for all publish jobs while
    # still allowing individual publish jobs to skip themselves (for prereleases).
    # "host" however must run to completion, no skipping allowed!
    if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
    runs-on: "depot-ubuntu-latest-4"
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:
      - uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
        with:
          persist-credentials: false
          submodules: recursive
      # Create a GitHub Release while uploading all files to it
      - name: "Download GitHub Artifacts"
        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
        with:
          pattern: artifacts-*
          path: artifacts
          merge-multiple: true
      - name: Cleanup
        run: |
          # Remove the granular manifests
          rm -f artifacts/*-dist-manifest.json
      - name: Create GitHub Release
        env:
          PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"
          ANNOUNCEMENT_TITLE: "${{ fromJson(needs.host.outputs.val).announcement_title }}"
          ANNOUNCEMENT_BODY: "${{ fromJson(needs.host.outputs.val).announcement_github_body }}"
          RELEASE_COMMIT: "${{ github.sha }}"
        run: |
          # Write and read notes from a file to avoid quoting breaking things
          echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt

          gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*

  custom-notify-dependents:
    needs:
      - plan
      - announce
    uses: ./.github/workflows/notify-dependents.yml
    with:
      plan: ${{ needs.plan.outputs.val }}
    secrets: inherit

  custom-publish-docs:
    needs:
      - plan
      - announce
    uses: ./.github/workflows/publish-docs.yml
    with:
      plan: ${{ needs.plan.outputs.val }}
    secrets: inherit

  custom-publish-playground:
    needs:
      - plan
      - announce
    uses: ./.github/workflows/publish-playground.yml
    with:
      plan: ${{ needs.plan.outputs.val }}
    secrets: inherit
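The dist jobs above shuttle a JSON manifest between jobs by compacting it onto one line with `jq -c` (single-line values fit the `name=value` output form) and extract the built file list with `jq --raw-output`, which needs the multi-line heredoc form. A self-contained sketch; the manifest here is a stand-in for real `dist ... --output-format=json` output:

```bash
#!/usr/bin/env bash
# Sketch of the manifest-to-step-output plumbing. GITHUB_OUTPUT is
# faked so this runs outside of CI.
GITHUB_OUTPUT="${GITHUB_OUTPUT:-/tmp/github_output.txt}"

cat > dist-manifest.json <<'EOF'
{"upload_files": ["ruff-x86_64.tar.gz", "ruff-x86_64.tar.gz.sha256"]}
EOF

# One-line JSON is safe as a plain name=value output.
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"

# A multi-line path list needs the heredoc form.
{
  echo "paths<<EOF"
  jq --raw-output ".upload_files[]" dist-manifest.json
  echo "EOF"
} >> "$GITHUB_OUTPUT"
```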
83 .github/workflows/sync_typeshed.yaml vendored
@@ -1,83 +0,0 @@
name: Sync typeshed

on:
  workflow_dispatch:
  schedule:
    # Run on the 1st and the 15th of every month:
    - cron: "0 0 1,15 * *"

env:
  FORCE_COLOR: 1
  GH_TOKEN: ${{ github.token }}

jobs:
  sync:
    name: Sync typeshed
    runs-on: ubuntu-latest
    timeout-minutes: 20
    # Don't run the cron job on forks:
    if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
    permissions:
      contents: write
      pull-requests: write
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        name: Checkout Ruff
        with:
          path: ruff
          persist-credentials: true
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        name: Checkout typeshed
        with:
          repository: python/typeshed
          path: typeshed
          persist-credentials: false
      - name: Setup git
        run: |
          git config --global user.name typeshedbot
          git config --global user.email '<>'
      - name: Sync typeshed
        id: sync
        run: |
          rm -rf ruff/crates/ty_vendored/vendor/typeshed
          mkdir ruff/crates/ty_vendored/vendor/typeshed
          cp typeshed/README.md ruff/crates/ty_vendored/vendor/typeshed
          cp typeshed/LICENSE ruff/crates/ty_vendored/vendor/typeshed
          cp -r typeshed/stdlib ruff/crates/ty_vendored/vendor/typeshed/stdlib
          rm -rf ruff/crates/ty_vendored/vendor/typeshed/stdlib/@tests
          git -C typeshed rev-parse HEAD > ruff/crates/ty_vendored/vendor/typeshed/source_commit.txt
      - name: Commit the changes
        id: commit
        if: ${{ steps.sync.outcome == 'success' }}
        run: |
          cd ruff
          git checkout -b typeshedbot/sync-typeshed
          git add .
          git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
      - name: Create a PR
        if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
        run: |
          cd ruff
          git push --force origin typeshedbot/sync-typeshed
          gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
          gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"

  create-issue-on-failure:
    name: Create an issue if the typeshed sync failed
    runs-on: ubuntu-latest
    needs: [sync]
    if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
    permissions:
      issues: write
    steps:
      - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            await github.rest.issues.create({
              owner: "astral-sh",
              repo: "ruff",
              title: `Automated typeshed sync failed on ${new Date().toDateString()}`,
              body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
              labels: ["bug", "ty"],
            })
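Two small idioms in the sync job are worth spelling out: committing only when the staged tree actually changed (so a no-op sync does not fail the step), and bailing out when a PR for the head branch already exists (`--jq length` prints the match count, and the `grep 1 && exit 0` succeeds when that count is 1, the most a single head branch can have open). A sketch under the assumption of a checked-out repo and an authenticated `gh` CLI:

```bash
#!/usr/bin/env bash
# Commit only if something is staged; `git diff --staged --quiet`
# exits 0 when there are no staged changes, skipping the commit.
git add .
git diff --staged --quiet || git commit -m "Sync typeshed"

# Skip PR creation when one is already open for this head branch.
gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed \
  --json id --jq length | grep 1 && exit 0
gh pr create --title "[ty] Sync vendored typeshed stubs" \
  --body "Close and reopen this PR to trigger CI" --label "ty"
```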
19 .github/zizmor.yml vendored
@@ -1,19 +0,0 @@
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
# https://woodruffw.github.io/zizmor/configuration/
#
# TODO: can we remove the ignores here so that our workflows are more secure?
rules:
  dangerous-triggers:
    ignore:
      - pr-comment.yaml
  cache-poisoning:
    ignore:
      - build-docker.yml
      - publish-playground.yml
  excessive-permissions:
    # it's hard to test what the impact of removing these ignores would be
    # without actually running the release workflow...
    ignore:
      - build-docker.yml
      - publish-playground.yml
      - publish-docs.yml
.gitignore
@@ -21,18 +21,6 @@ flamegraph.svg
# `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
/target*

# samply profiles
profile.json

# tracing-flame traces
tracing.folded
tracing-flamechart.svg
tracing-flamegraph.svg

# insta
*.rs.pending-snap


###
# Rust.gitignore
###
@@ -104,7 +92,6 @@ coverage.xml
.hypothesis/
.pytest_cache/
cover/
repos/

# Translations
*.mo

@@ -14,22 +14,7 @@ MD041: false
# MD013/line-length
MD013: false

# MD014/commands-show-output
MD014: false

# MD024/no-duplicate-heading
MD024:
  # Allow when nested under different parents e.g. CHANGELOG.md
  siblings_only: true

# MD046/code-block-style
#
# Ignore this because it conflicts with the code block style used in content
# tabs of mkdocs-material which is to add a blank line after the content title.
#
# Ref: https://github.com/astral-sh/ruff/pull/15011#issuecomment-2544790854
MD046: false

# Link text should be descriptive
# Disallows link text like *here* which is annoying.
MD059: false
allow_different_nesting: true

@@ -1,17 +1,9 @@
fail_fast: false
fail_fast: true

exclude: |
  (?x)^(
    .github/workflows/release.yml|
    crates/ty_vendored/vendor/.*|
    crates/ty_project/resources/.*|
    crates/ty_python_semantic/resources/corpus/.*|
    crates/ty/docs/(configuration|rules|cli).md|
    crates/ruff_benchmark/resources/.*|
    crates/ruff_linter/resources/.*|
    crates/ruff_linter/src/rules/.*/snapshots/.*|
    crates/ruff_notebook/resources/.*|
    crates/ruff_server/resources/.*|
    crates/ruff/resources/.*|
    crates/ruff_python_formatter/resources/.*|
    crates/ruff_python_formatter/tests/snapshots/.*|
@@ -20,23 +12,18 @@ exclude: |
  )$

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: check-merge-conflict

  - repo: https://github.com/abravalheri/validate-pyproject
    rev: v0.24.1
    rev: v0.15
    hooks:
      - id: validate-pyproject

  - repo: https://github.com/executablebooks/mdformat
    rev: 0.7.22
    rev: 0.7.17
    hooks:
      - id: mdformat
        additional_dependencies:
          - mdformat-mkdocs==4.0.0
          - mdformat-footnote==0.1.1
          - mdformat-mkdocs
          - mdformat-admon
        exclude: |
          (?x)^(
            docs/formatter/black\.md
@@ -44,7 +31,7 @@ repos:
          )$

  - repo: https://github.com/igorshubovych/markdownlint-cli
    rev: v0.45.0
    rev: v0.37.0
    hooks:
      - id: markdownlint-fix
        exclude: |
@@ -53,21 +40,8 @@ repos:
          | docs/\w+\.md
          )$

  - repo: https://github.com/adamchainz/blacken-docs
    rev: 1.19.1
    hooks:
      - id: blacken-docs
        args: ["--pyi", "--line-length", "130"]
        files: '^crates/.*/resources/mdtest/.*\.md'
        exclude: |
          (?x)^(
            .*?invalid(_.+)*_syntax\.md
          )$
        additional_dependencies:
          - black==25.1.0

  - repo: https://github.com/crate-ci/typos
    rev: v1.33.1
    rev: v1.16.22
    hooks:
      - id: typos

@@ -81,52 +55,25 @@ repos:
        pass_filenames: false # This makes it a lot faster

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.11.13
    rev: v0.1.4
    hooks:
      - id: ruff-format
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix]
        types_or: [python, pyi]
        require_serial: true
        exclude: |
          (?x)^(
            crates/ruff_linter/resources/.*|
            crates/ruff_python_formatter/resources/.*
          )$

  # Prettier
  - repo: https://github.com/rbubley/mirrors-prettier
    rev: v3.5.3
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: v3.0.3
    hooks:
      - id: prettier
        types: [yaml]

  # zizmor detects security vulnerabilities in GitHub Actions workflows.
  # Additional configuration for the tool is found in `.github/zizmor.yml`
  - repo: https://github.com/woodruffw/zizmor-pre-commit
    rev: v1.9.0
    hooks:
      - id: zizmor

  - repo: https://github.com/python-jsonschema/check-jsonschema
    rev: 0.33.0
    hooks:
      - id: check-github-workflows

  # `actionlint` hook, for verifying correct syntax in GitHub Actions workflows.
  # Some additional configuration for `actionlint` can be found in `.github/actionlint.yaml`.
  - repo: https://github.com/rhysd/actionlint
    rev: v1.7.7
    hooks:
      - id: actionlint
        stages:
          # This hook is disabled by default, since it's quite slow.
          # To run all hooks *including* this hook, use `uvx pre-commit run -a --hook-stage=manual`.
          # To run *just* this hook, use `uvx pre-commit run -a actionlint --hook-stage=manual`.
          - manual
        args:
          - "-ignore=SC2129" # ignorable stylistic lint from shellcheck
          - "-ignore=SC2016" # another shellcheck lint: seems to have false positives?
        additional_dependencies:
          # actionlint has a shellcheck integration which extracts shell scripts in `run:` steps from GitHub Actions
          # and checks these with shellcheck. This is arguably its most useful feature,
          # but the integration only works if shellcheck is installed
          - "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"

ci:
  skip: [cargo-fmt, dev-generate-all]

@@ -1,2 +0,0 @@
# Auto-generated by `cargo-dist`.
.github/workflows/release.yml

.vscode/extensions.json
@@ -1,5 +0,0 @@
{
    "recommendations": [
        "rust-lang.rust-analyzer"
    ]
}

.vscode/settings.json
@@ -1,6 +0,0 @@
{
    "rust-analyzer.check.extraArgs": [
        "--all-features"
    ],
    "rust-analyzer.check.command": "clippy",
}

@@ -1,227 +1,5 @@
# Breaking Changes

## 0.12.0

- **Detection of more syntax errors**

    Ruff now detects version-related syntax errors, such as the use of the `match`
    statement on Python versions before 3.10, and syntax errors emitted by
    CPython's compiler, such as irrefutable `match` patterns before the final
    `case` arm. (A minimal example follows this list.)

- **New default Python version handling for syntax errors**

    Ruff will default to the _latest_ supported Python version (3.13) when
    checking for the version-related syntax errors mentioned above to prevent
    false positives in projects without a Python version configured. The default
    in all other cases, like applying lint rules, is unchanged and remains at the
    minimum supported Python version (3.9).

- **Updated f-string formatting**

    Ruff now formats multi-line f-strings with format specifiers to avoid adding a
    line break after the format specifier. This addresses a change to the Python
    grammar in version 3.13.4 that made such a line break a syntax error.

- **`rust-toolchain.toml` is no longer included in source distributions**

    The `rust-toolchain.toml` is used to specify a higher Rust version than Ruff's
    minimum supported Rust version (MSRV) for development and building release
    artifacts. However, when present in source distributions, it would also cause
    downstream package maintainers to pull in the same Rust toolchain, even if
    their available toolchain was MSRV-compatible.

- **[`suspicious-xmle-tree-usage`](https://docs.astral.sh/ruff/rules/suspicious-xmle-tree-usage/)
    (`S320`) has been removed**
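
For illustration (this example is not part of the original changelog): with an assumed
`target-version = "py39"`, Ruff 0.12 now reports a version-related syntax error for the
`match` statement below, since `match` only exists on Python 3.10+:

```python
# Hypothetical example file, checked with `target-version = "py39"` (assumed).
def describe(status: int) -> str:
    match status:  # reported: the `match` statement requires Python 3.10+
        case 404:
            return "Not Found"
        case _:
            return "Something else"
```
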
## 0.11.0

This is a follow-up to release 0.10.0. Because of a mistake in the release process, the `requires-python` inference changes were not included in that release. Ruff 0.11.0 now includes this change as well as the stabilization of the preview behavior for `PGH004`.

- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))

    In previous versions of Ruff, you could specify your Python version with:

    - The `target-version` option in a `ruff.toml` file or the `[tool.ruff]` section of a pyproject.toml file.
    - The `project.requires-python` field in a `pyproject.toml` file with a `[tool.ruff]` section.

    These options worked well in most cases, and are still recommended for fine control of the Python version. However, because of the way Ruff discovers config files, `pyproject.toml` files without a `[tool.ruff]` section would be ignored, including the `requires-python` setting. Ruff would then use the default Python version (3.9 as of this writing) instead, which is surprising when you've attempted to request another version.

    In v0.10, config discovery has been updated to address this issue (a rough sketch follows this list):

    - If Ruff finds a `ruff.toml` file without a `target-version`, it will check
      for a `pyproject.toml` file in the same directory and respect its
      `requires-python` version, even if it does not contain a `[tool.ruff]`
      section.
    - If Ruff finds a user-level configuration, the `requires-python` field of the closest `pyproject.toml` in a parent directory will take precedence.
    - If there is no config file (`ruff.toml` or `pyproject.toml` with a
      `[tool.ruff]` section) in the directory of the file being checked, Ruff will
      search for the closest `pyproject.toml` in the parent directories and use its
      `requires-python` setting.
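
As a rough illustration of the "closest `pyproject.toml`" lookup described above (a
Python sketch only; Ruff's actual implementation is written in Rust and also handles
user-level configuration and `[tool.ruff]` sections):

```python
from pathlib import Path


def closest_pyproject(start: Path) -> Path | None:
    """Walk up from `start`, returning the closest pyproject.toml; its
    `requires-python` field would then supply the inferred Python version."""
    for directory in [start, *start.parents]:
        candidate = directory / "pyproject.toml"
        if candidate.is_file():
            return candidate
    return None
```
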
## 0.10.0

- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))

    Because of a mistake in the release process, the `requires-python` inference changes are not included in this release and instead shipped as part of 0.11.0.
    You can find a description of this change in the 0.11.0 section.

- **Updated `TYPE_CHECKING` behavior** ([#16669](https://github.com/astral-sh/ruff/pull/16669))

    Previously, Ruff only recognized typechecking blocks that tested the `typing.TYPE_CHECKING` symbol. Now, Ruff recognizes any local variable named `TYPE_CHECKING`. This release also removes support for the legacy `if 0:` and `if False:` typechecking checks. Use a local `TYPE_CHECKING` variable instead (see the sketch after this list).

- **More robust noqa parsing** ([#16483](https://github.com/astral-sh/ruff/pull/16483))

    The syntax for both file-level and in-line suppression comments has been unified and made more robust to certain errors. In most cases, this will result in more suppression comments being read by Ruff, but there are a few instances where previously read comments will now log an error to the user instead. Please refer to the documentation on [_Error suppression_](https://docs.astral.sh/ruff/linter/#error-suppression) for the full specification.

- **Avoid unnecessary parentheses around with statements with a single context manager and a trailing comment** ([#14005](https://github.com/astral-sh/ruff/pull/14005))

    This change fixes a bug in the formatter where it introduced unnecessary parentheses around with statements with a single context manager and a trailing comment. This change may result in a change in formatting for some users.

- **Bump alpine default tag to 3.21 for derived Docker images** ([#16456](https://github.com/astral-sh/ruff/pull/16456))

    Alpine 3.21 was released in Dec 2024 and is used in the official Alpine-based Python images. Now the ruff:alpine image will use 3.21 instead of 3.20 and ruff:alpine3.20 will no longer be updated.

- **\[`unsafe-markup-use`\]: `RUF035` has been recoded to `S704`** ([#15957](https://github.com/astral-sh/ruff/pull/15957))
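
For illustration (not part of the original changelog), the pattern below is now
recognized as a typechecking block, while the legacy forms are not; `expensive_module`
is a placeholder name:

```python
TYPE_CHECKING = False  # any local variable named TYPE_CHECKING is recognized
if TYPE_CHECKING:
    import expensive_module  # placeholder; only imported by type checkers

if False:  # legacy form: no longer treated as a typechecking block
    import expensive_module
```
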
## 0.9.0

Ruff now formats your code according to the 2025 style guide. As a result, your code might now get formatted differently. See the [changelog](./CHANGELOG.md#090) for a detailed list of changes.

## 0.8.0

- **Default to Python 3.9**

    Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))

- **Changed location of `pydoclint` diagnostics**

    [`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first-line of the problematic docstring. Previously, this was not the case.

    If you've opted into these preview rules but have them suppressed using
    [`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
    some places, this change may mean that you need to move the `noqa` suppression
    comments. Most users should be unaffected by this change.

- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**

    Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).

    This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.

- **Changes to the line width calculation**

    Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).

## 0.7.0

- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
    ([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
    This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
    See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
    `TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
    the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
    [`lint.pyflakes.allow-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
    instead.

## 0.6.0

- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))

- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).

- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).

    You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):

    ```toml
    [tool.ruff.lint.per-file-ignores]
    "*.ipynb" = ["E501"] # disable line-too-long in notebooks
    ```

    If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
    section-specific `exclude` option to do so. For example, the following would only lint Jupyter
    Notebook files and not format them:

    ```toml
    [tool.ruff.format]
    exclude = ["*.ipynb"]
    ```

    And, conversely, the following would only format Jupyter Notebook files and not lint them:

    ```toml
    [tool.ruff.lint]
    exclude = ["*.ipynb"]
    ```

    You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:

    ```toml
    [tool.ruff]
    extend-exclude = ["*.ipynb"]
    ```

## 0.5.0

- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
- Selecting `ALL` now excludes deprecated rules
- The released archives now include an extra level of nesting, which can be removed with `--strip-components=1` when untarring.
- The release artifact's file name no longer includes the version tag. This enables users to install via `/latest` URLs on GitHub.

## 0.3.0

### Ruff 2024.2 style

The formatter now formats code according to the Ruff 2024.2 style guide. Read the [changelog](./CHANGELOG.md#030) for a detailed list of stabilized style changes.

### `isort`: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))

Previously, Ruff used one or two blank lines (or the number configured by `isort.lines-after-imports`) after imports in typing stub files (`.pyi` files).
The [typing style guide for stubs](https://typing.readthedocs.io/en/latest/source/stubs.html#style-guide) recommends using at most 1 blank line for grouping.
As of this release, `isort` now always uses one blank line after imports in stub files, the same as the formatter.

### `build` is no longer excluded by default ([#10093](https://github.com/astral-sh/ruff/pull/10093))

Ruff maintains a list of directories and files that are excluded by default. This list now consists of the following patterns:

- `.bzr`
- `.direnv`
- `.eggs`
- `.git`
- `.git-rewrite`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `dist`
- `node_modules`
- `site-packages`
- `venv`

Previously, the `build` directory was included in this list. However, the `build` directory tends to be a not-unpopular directory
name, and excluding it by default caused confusion. Ruff now no longer excludes `build` except if it is excluded by a `.gitignore` file
or because it is listed in `extend-exclude`.

### `--format` is no longer a valid `rule` or `linter` command option

Previously, `ruff rule` and `ruff linter` accepted the `--format <FORMAT>` option as an alias for `--output-format`. Ruff no longer
supports this alias. Please use `ruff rule --output-format <FORMAT>` and `ruff linter --output-format <FORMAT>` instead.

## 0.1.9

### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))

@@ -280,7 +58,7 @@ flag or `unsafe-fixes` configuration option can be used to enable unsafe fixes.

See the [docs](https://docs.astral.sh/ruff/configuration/#fix-safety) for details.

### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))

Previously, Ruff enabled all implemented rules in Pycodestyle (`E`) by default. Ruff now only includes the
Pycodestyle prefixes `E4`, `E7`, and `E9` to exclude rules that conflict with automatic formatters. Consequently,
@@ -293,8 +71,8 @@ This change only affects those using Ruff under its default rule set. Users that

### Remove support for emoji identifiers ([#7212](https://github.com/astral-sh/ruff/pull/7212))

Previously, Ruff supported non-standards-compliant emoji identifiers such as `📦 = 1`.
We decided to remove this non-standard language extension. Ruff now reports syntax errors for invalid emoji identifiers in your code, the same as CPython.
Previously, Ruff supported the non-standard compliant emoji identifiers e.g. `📦 = 1`.
We decided to remove this non-standard language extension, and Ruff now reports syntax errors for emoji identifiers in your code, the same as CPython.
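
For example (an illustration, not part of the original text), CPython itself rejects
such an identifier, and Ruff now reports the same error:

```python
# Raises SyntaxError: `📦` is not a valid Python identifier.
compile("📦 = 1", "<example>", "exec")
```
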
### Improved GitLab fingerprints ([#7203](https://github.com/astral-sh/ruff/pull/7203))

CHANGELOG.md
File diff suppressed because it is too large
@@ -71,7 +71,8 @@ representative at an online or offline event.
## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at <hey@astral.sh>.
reported to the community leaders responsible for enforcement at
<charlie.r.marsh@gmail.com>.
All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the

CONTRIBUTING.md
@@ -2,25 +2,58 @@

Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.

> [!NOTE]
>
> This guide is for Ruff. If you're looking to contribute to ty, please see [the ty contributing
> guide](https://github.com/astral-sh/ruff/blob/main/crates/ty/CONTRIBUTING.md).

- [The Basics](#the-basics)
    - [Prerequisites](#prerequisites)
    - [Development](#development)
    - [Project Structure](#project-structure)
    - [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
        - [Rule naming convention](#rule-naming-convention)
        - [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
    - [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
- [MkDocs](#mkdocs)
- [Release Process](#release-process)
    - [Creating a new release](#creating-a-new-release)
- [Ecosystem CI](#ecosystem-ci)
- [Benchmarking and Profiling](#benchmarking-and-profiling)
    - [CPython Benchmark](#cpython-benchmark)
    - [Microbenchmarks](#microbenchmarks)
        - [Benchmark-driven Development](#benchmark-driven-development)
        - [PR Summary](#pr-summary)
        - [Tips](#tips)
    - [Profiling Projects](#profiling-projects)
        - [Linux](#linux)
        - [Mac](#mac)
- [`cargo dev`](#cargo-dev)
- [Subsystems](#subsystems)
    - [Compilation Pipeline](#compilation-pipeline)
    - [Import Categorization](#import-categorization)
        - [Project root](#project-root)
        - [Package root](#package-root)
        - [Import categorization](#import-categorization-1)

## The Basics

Ruff welcomes contributions in the form of pull requests.
Ruff welcomes contributions in the form of Pull Requests.

For small changes (e.g., bug fixes), feel free to submit a PR.

For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
You can also join us on [Discord](https://discord.com/invite/astral-sh) to discuss your idea with the
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5) to discuss your idea with the
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
that are ready for contributions.

If you're looking for a place to start, we recommend implementing a new lint rule (see:
[_Adding a new lint rule_](#example-adding-a-new-lint-rule)), which will allow you to learn from and
pattern-match against the examples in the existing codebase. Many lint rules are inspired by
existing Python plugins, which can be used as a reference implementation.

As a concrete example: consider taking on one of the rules from the [`flake8-pyi`](https://github.com/astral-sh/ruff/issues/848)
plugin, and looking to the originating [Python source](https://github.com/PyCQA/flake8-pyi) for
guidance.

If you have suggestions on how we might improve the contributing documentation, [let us know](https://github.com/astral-sh/ruff/discussions/5693)!

### Prerequisites

@@ -34,14 +67,16 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
cargo install cargo-insta
```

You'll need [uv](https://docs.astral.sh/uv/getting-started/installation/) (or `pipx` and `pip`) to
run Python utility commands.
And you'll need pre-commit to run some validation checks:

```shell
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
```

You can optionally install pre-commit hooks to automatically run the validation checks
when making a commit:

```shell
uv tool install pre-commit
pre-commit install
```

@@ -69,14 +104,12 @@ and that it passes both the lint and test validation checks:
```shell
cargo clippy --workspace --all-targets --all-features -- -D warnings # Rust linting
RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
uvx pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
```

These checks will run on GitHub Actions when you open your pull request, but running them locally
These checks will run on GitHub Actions when you open your Pull Request, but running them locally
will save you time and expedite the merge process.

If you're using VS Code, you can also install the recommended [rust-analyzer](https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer) extension to get these checks while editing.

Note that many code changes also require updating the snapshot tests, which is done interactively
after running `cargo test` like so:

@@ -84,14 +117,7 @@ after running `cargo test` like so:
cargo insta review
```

If your pull request relates to a specific lint rule, include the category and rule code in the
title, as in the following examples:

- \[`flake8-bugbear`\] Avoid false positive for usage after `continue` (`B031`)
- \[`flake8-simplify`\] Detect implicit `else` cases in `needless-bool` (`SIM103`)
- \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)

Your pull request will be reviewed by a maintainer, which may involve a few rounds of iteration
Your Pull Request will be reviewed by a maintainer, which may involve a few rounds of iteration
prior to merging.

### Project Structure

@@ -99,8 +125,8 @@ prior to merging.
Ruff is structured as a monorepo with a [flat crate structure](https://matklad.github.io/2021/08/22/large-rust-workspaces.html),
such that all crates are contained in a flat `crates` directory.

The vast majority of the code, including all lint rules, lives in the `ruff_linter` crate (located
at `crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
The vast majority of the code, including all lint rules, lives in the `ruff` crate (located at
`crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.

At the time of writing, the repository includes the following crates:

@@ -144,7 +170,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
1. Create a file for your rule (e.g., `crates/ruff_linter/src/rules/flake8_bugbear/rules/assert_false.rs`).

1. In that file, define a violation struct (e.g., `pub struct AssertFalse`). You can grep for
    `#[derive(ViolationMetadata)]` to see examples.
    `#[violation]` to see examples.

1. In that file, define a function that adds the violation to the diagnostic list as appropriate
    (e.g., `pub(crate) fn assert_false`) based on whatever inputs are required for the rule (e.g.,
@@ -173,14 +199,11 @@ and calling out to lint rule analyzer functions as it goes.
If you need to inspect the AST, you can run `cargo dev print-ast` with a Python file. Grep
for the `Diagnostic::new` invocations to understand how other, similar rules are implemented.

Once you're satisfied with your code, add tests for your rule
(see: [rule testing](#rule-testing-fixtures-and-snapshots)), and regenerate the documentation and
associated assets (like our JSON Schema) with `cargo dev generate-all`.
Once you're satisfied with your code, add tests for your rule. See [rule testing](#rule-testing-fixtures-and-snapshots)
for more details.

Finally, submit a pull request, and include the category, rule name, and rule code in the title, as
in:

> \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)
Finally, regenerate the documentation and other generated assets (like our JSON Schema) with:
`cargo dev generate-all`.

#### Rule naming convention

@@ -254,7 +277,7 @@ These represent, respectively: the schema used to parse the `pyproject.toml` fil
intermediate representation; and the final, internal representation used to power Ruff.

To add a new configuration option, you'll likely want to modify these latter few files (along with
`args.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
`arg.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
`dummy_variable_rgx`, which defines a regular expression to match against acceptable unused
variables (e.g., `_`).

@@ -270,24 +293,30 @@ To preview any changes to the documentation locally:

1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).

1. Install MkDocs and Material for MkDocs with:

    ```shell
    pip install -r docs/requirements.txt
    ```

1. Generate the MkDocs site with:

    ```shell
    uv run --no-project --isolated --with-requirements docs/requirements.txt scripts/generate_mkdocs.py
    python scripts/generate_mkdocs.py
    ```

1. Run the development server with:

    ```shell
    # For contributors.
    uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
    mkdocs serve -f mkdocs.public.yml

    # For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
    uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
    mkdocs serve -f mkdocs.insiders.yml
    ```

The documentation should then be available locally at
[http://127.0.0.1:8000/ruff/](http://127.0.0.1:8000/ruff/).
[http://127.0.0.1:8000/docs/](http://127.0.0.1:8000/docs/).

## Release Process

@@ -300,64 +329,42 @@ even patch releases may contain [non-backwards-compatible changes](https://semve

### Creating a new release

1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
We use an experimental in-house tool for managing releases.

1. Run `./scripts/release.sh`; this command will:

    - Generate a temporary virtual environment with `rooster`
1. Install `rooster`: `pip install git+https://github.com/zanieb/rooster@main`
1. Run `rooster release`; this command will:
    - Generate a changelog entry in `CHANGELOG.md`
    - Update versions in `pyproject.toml` and `Cargo.toml`
    - Update references to versions in the `README.md` and documentation
    - Display contributors for the release

1. The changelog should then be editorialized for consistency

    - Labels will often be missing from pull requests, so they will need to be manually organized into the proper section
    - Changes should be edited to be user-facing descriptions, avoiding internal details

1. Highlight any breaking changes in `BREAKING_CHANGES.md`

1. Run `cargo check`. This should update the lock file with new versions.

1. Create a pull request with the changelog and version updates

1. Merge the PR

1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:

1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yaml) with:
    - The new version number (without starting `v`)

    - The commit hash of the merged release pull request on `main`
1. The release workflow will do the following:

    1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
        uploaded anything, you can restart after pushing a fix. If you just need to rerun the build,
        make sure you're [re-running all the failed
        jobs](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-failed-jobs-in-a-workflow) and not just a single failed job.
        uploaded anything, you can restart after pushing a fix.
    1. Upload to PyPI.
    1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
        after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
    1. Attach artifacts to draft GitHub release
    1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
        downstream jobs manually if needed.

1. Verify the GitHub release:

    1. The Changelog should match the content of `CHANGELOG.md`
    1. Append the contributors from the `scripts/release.sh` script

1. Publish the GitHub release
1. Open the draft release in the GitHub release section
1. Copy the changelog for the release into the GitHub release
    - See previous releases for formatting of section headers
1. Generate the contributor list with `rooster contributors` and add to the release notes
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).

    1. One can determine if an update is needed when
        `git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
    1. Once run successfully, you should follow the link in the output to create a PR.

1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
    [`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
    the release instructions in those repositories. `ruff-lsp` should always be updated
    before `ruff-vscode`.

    This step is generally not required for a patch release, but should always be done
    for a minor release.
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.

## Ecosystem CI

@@ -365,21 +372,13 @@ GitHub Actions will run your changes against a number of real-world projects fro
report on any linter or formatter differences. You can also run those checks locally via:

```shell
uvx --from ./python/ruff-ecosystem ruff-ecosystem check ruff "./target/debug/ruff"
uvx --from ./python/ruff-ecosystem ruff-ecosystem format ruff "./target/debug/ruff"
pip install -e ./python/ruff-ecosystem
ruff-ecosystem check ruff "./target/debug/ruff"
ruff-ecosystem format ruff "./target/debug/ruff"
```

See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.

## Upgrading Rust

1. Change the `channel` in `./rust-toolchain.toml` to the new Rust version (`<latest>`)
1. Change the `rust-version` in the `./Cargo.toml` to `<latest> - 2` (e.g. 1.84 if the latest is 1.86)
1. Run `cargo clippy --fix --allow-dirty --allow-staged` to fix new clippy warnings
1. Create and merge the PR
1. Bump the Rust version in Ruff's conda forge recipe. See [this PR](https://github.com/conda-forge/ruff-feedstock/pull/266) for an example.
1. Enjoy the new Rust version!

## Benchmarking and Profiling

We have several ways of benchmarking and profiling Ruff:
@@ -388,7 +387,7 @@ We have several ways of benchmarking and profiling Ruff:
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects

> **Note**
> \[!NOTE\]
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
> applications, like web browsers). You may also want to switch your CPU to a "performance"
> mode, if it exists, especially when benchmarking short-lived processes.

@@ -402,18 +401,12 @@ which makes it a good target for benchmarking.
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
```

Install `hyperfine`:

```shell
cargo install hyperfine
```

To benchmark the release build:

```shell
cargo build --release --bin ruff && hyperfine --warmup 10 \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
cargo build --release && hyperfine --warmup 10 \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"

Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
  Time (mean ± σ):     293.8 ms ±   3.2 ms    [User: 2384.6 ms, System: 90.3 ms]
@@ -432,7 +425,7 @@ To benchmark against the ecosystem's existing tools:

```shell
hyperfine --ignore-failure --warmup 5 \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
  "pyflakes crates/ruff_linter/resources/test/cpython" \
  "autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
  "pycodestyle crates/ruff_linter/resources/test/cpython" \
@@ -478,10 +471,10 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:

```shell
cargo build --release && hyperfine --warmup 10 \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
```

You can run `uv venv --project ./scripts/benchmarks`, activate the venv and then run `uv sync --project ./scripts/benchmarks` to create a working environment for the
You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
above. All reported benchmarks were computed using the versions specified by
`./scripts/benchmarks/pyproject.toml` on Python 3.11.

@@ -535,12 +528,10 @@ You can run the benchmarks with
cargo benchmark
```

`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`

#### Benchmark-driven Development

Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
`--save-baseline=<name>` to store an initial baseline benchmark (e.g., on `main`) and then use
`--save-baseline=<name>` to store an initial baseline benchmark (e.g. on `main`) and then use
`--benchmark=<name>` to compare against that benchmark. Criterion will print a message telling you
if the benchmark improved/regressed compared to that baseline.

@@ -575,7 +566,7 @@ cargo install critcmp

#### Tips

- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
    to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
@@ -610,7 +601,8 @@ Then convert the recorded profile

perf script -F +pid > /tmp/test.perf
```

You can now view the converted file with [firefox profiler](https://profiler.firefox.com/). To learn more about Firefox profiler, read the [Firefox profiler profiling-guide](https://profiler.firefox.com/docs/#/./guide-perf-profiling).
You can now view the converted file with [firefox profiler](https://profiler.firefox.com/), with a
more in-depth guide [here](https://profiler.firefox.com/docs/#/./guide-perf-profiling)

An alternative is to convert the perf data to `flamegraph.svg` using
[flamegraph](https://github.com/flamegraph-rs/flamegraph) (`cargo install flamegraph`):
@@ -644,11 +636,11 @@ Otherwise, follow the instructions from the linux section.
`cargo dev` is a shortcut for `cargo run --package ruff_dev --bin ruff_dev`. You can run some useful
utils with it:

- `cargo dev print-ast <file>`: Print the AST of a python file using Ruff's
    [Python parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser).
    For `if True: pass # comment`, you can see the syntax tree, the byte offsets for start and
    stop of each node and also how the `:` token, the comment and whitespace are not represented
    anymore:
- `cargo dev print-ast <file>`: Print the AST of a python file using the
    [RustPython parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser) that is
    mainly used in Ruff. For `if True: pass # comment`, you can see the syntax tree, the byte offsets
    for start and stop of each node and also how the `:` token, the comment and whitespace are not
    represented anymore:

    ```text
    [
@@ -691,9 +683,9 @@ utils with it:
    23 Newline 24
    ```

- `cargo dev print-cst <file>`: Print the CST of a Python file using
- `cargo dev print-cst <file>`: Print the CST of a python file using
    [LibCST](https://github.com/Instagram/LibCST), which is used in addition to the RustPython parser
    in Ruff. For example, for `if True: pass # comment`, everything, including the whitespace, is represented:
    in Ruff. E.g. for `if True: pass # comment` everything including the whitespace is represented:

    ```text
    Module {
@@ -821,8 +813,8 @@ To understand Ruff's import categorization system, we first need to define two c
"project root".)
- "Package root": The top-most directory defining the Python package that includes a given Python
    file. To find the package root for a given Python file, traverse up its parent directories until
    you reach a parent directory that doesn't contain an `__init__.py` file (and isn't in a subtree
    marked as a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
    you reach a parent directory that doesn't contain an `__init__.py` file (and isn't marked as
    a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
    just before that, i.e., the first directory in the package.

For example, given:

@@ -876,7 +868,7 @@ each configuration file.

The package root is used to determine a file's "module path". Consider, again, `baz.py`. In that
case, `./my_project/src/foo` was identified as the package root, so the module path for `baz.py`
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
(inclusive of the root itself). The module path can be thought of as "the path you would use to
import the module" (e.g., `import foo.bar.baz`).
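
A minimal Python sketch of the two definitions above (illustrative only; `package_root`
and `module_path` are hypothetical helpers, not Ruff's actual Rust implementation, and
the sketch ignores namespace packages):

```python
from pathlib import Path


def package_root(path: Path) -> Path:
    """Walk upwards while the parent directory still contains an `__init__.py`."""
    current = path.parent
    while (current.parent / "__init__.py").is_file():
        current = current.parent
    return current


def module_path(path: Path) -> str:
    """E.g. ./my_project/src/foo/bar/baz.py -> "foo.bar.baz"."""
    root = package_root(path)  # e.g. ./my_project/src/foo
    dotted = path.relative_to(root.parent).with_suffix("")
    return ".".join(dotted.parts)  # inclusive of the root itself
```
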

@@ -911,11 +903,15 @@ There are three ways in which an import can be categorized as "first-party":
    package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
    automatically. This check is as simple as comparing the first segment of the current file's
    module path to the first segment of the import.
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
    sets the directories to scan when identifying first-party imports. The algorithm is
    straightforward: given an import, like `import foo`, iterate over the directories enumerated in
    the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
    file `foo.py` (see the sketch after this section).

    By default, `src` is set to the project root, along with the `"src"` subdirectory in the project root.
    This ensures that Ruff supports both flat and "src" layouts out of the box.
    By default, `src` is set to the project root. In the above example, we'd want to set
    `src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
    as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
    unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
    the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
    explicitly.
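
A sketch of the `src`-based check described above (illustrative Python only;
`is_first_party` is a hypothetical helper that assumes an already-resolved list of
`src` directories):

```python
from pathlib import Path


def is_first_party(import_name: str, src_roots: list[Path]) -> bool:
    """`import foo.bar` is first-party if some src root contains `foo/` or `foo.py`."""
    head = import_name.split(".")[0]
    return any(
        (root / head).is_dir() or (root / f"{head}.py").is_file()
        for root in src_roots
    )
```
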
Cargo.lock
File diff suppressed because it is too large

Cargo.toml
@@ -3,9 +3,8 @@ members = ["crates/*"]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
# Please update rustfmt.toml when bumping the Rust edition
|
||||
edition = "2024"
|
||||
rust-version = "1.85"
|
||||
edition = "2021"
|
||||
rust-version = "1.71"
|
||||
homepage = "https://docs.astral.sh/ruff"
|
||||
documentation = "https://docs.astral.sh/ruff"
|
||||
repository = "https://github.com/astral-sh/ruff"
|
||||
@@ -13,192 +12,108 @@ authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
||||
license = "MIT"
|
||||
|
||||
[workspace.dependencies]
|
||||
ruff = { path = "crates/ruff" }
|
||||
ruff_annotate_snippets = { path = "crates/ruff_annotate_snippets" }
|
||||
ruff_cache = { path = "crates/ruff_cache" }
|
||||
ruff_db = { path = "crates/ruff_db", default-features = false }
|
||||
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
|
||||
ruff_formatter = { path = "crates/ruff_formatter" }
|
||||
ruff_graph = { path = "crates/ruff_graph" }
|
||||
ruff_index = { path = "crates/ruff_index" }
|
||||
ruff_linter = { path = "crates/ruff_linter" }
|
||||
ruff_macros = { path = "crates/ruff_macros" }
|
||||
ruff_notebook = { path = "crates/ruff_notebook" }
|
||||
ruff_options_metadata = { path = "crates/ruff_options_metadata" }
|
||||
ruff_python_ast = { path = "crates/ruff_python_ast" }
|
||||
ruff_python_codegen = { path = "crates/ruff_python_codegen" }
|
||||
ruff_python_formatter = { path = "crates/ruff_python_formatter" }
|
||||
ruff_python_index = { path = "crates/ruff_python_index" }
|
||||
ruff_python_literal = { path = "crates/ruff_python_literal" }
|
||||
ruff_python_parser = { path = "crates/ruff_python_parser" }
|
||||
ruff_python_semantic = { path = "crates/ruff_python_semantic" }
|
||||
ruff_python_stdlib = { path = "crates/ruff_python_stdlib" }
|
||||
ruff_python_trivia = { path = "crates/ruff_python_trivia" }
|
||||
ruff_server = { path = "crates/ruff_server" }
|
||||
ruff_source_file = { path = "crates/ruff_source_file" }
|
||||
ruff_text_size = { path = "crates/ruff_text_size" }
|
||||
ruff_workspace = { path = "crates/ruff_workspace" }
|
||||
|
||||
ty = { path = "crates/ty" }
|
||||
ty_ide = { path = "crates/ty_ide" }
|
||||
ty_project = { path = "crates/ty_project", default-features = false }
|
||||
ty_python_semantic = { path = "crates/ty_python_semantic" }
|
||||
ty_server = { path = "crates/ty_server" }
|
||||
ty_test = { path = "crates/ty_test" }
|
||||
ty_vendored = { path = "crates/ty_vendored" }
|
||||
|
||||
aho-corasick = { version = "1.1.3" }
|
||||
anstream = { version = "0.6.18" }
|
||||
anstyle = { version = "1.0.10" }
|
||||
anyhow = { version = "1.0.80" }
|
||||
arc-swap = { version = "1.7.1" }
|
||||
assert_fs = { version = "1.1.0" }
|
||||
argfile = { version = "0.2.0" }
|
||||
bincode = { version = "2.0.0" }
|
||||
bitflags = { version = "2.5.0" }
|
||||
bstr = { version = "1.9.1" }
|
||||
aho-corasick = { version = "1.1.2" }
|
||||
annotate-snippets = { version = "0.9.2", features = ["color"] }
|
||||
anyhow = { version = "1.0.79" }
|
||||
argfile = { version = "0.1.6" }
|
||||
assert_cmd = { version = "2.0.13" }
|
||||
bincode = { version = "1.3.3" }
|
||||
bitflags = { version = "2.4.1" }
|
||||
bstr = { version = "1.9.0" }
|
||||
cachedir = { version = "0.3.1" }
|
||||
camino = { version = "1.1.7" }
|
||||
clap = { version = "4.5.3", features = ["derive"] }
|
||||
clap_complete_command = { version = "0.6.0" }
|
||||
clearscreen = { version = "4.0.0" }
|
||||
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
|
||||
colored = { version = "3.0.0" }
|
||||
chrono = { version = "0.4.34", default-features = false, features = ["clock"] }
|
||||
clap = { version = "4.4.18", features = ["derive"] }
|
||||
clap_complete_command = { version = "0.5.1" }
|
||||
clearscreen = { version = "2.0.0" }
|
||||
codspeed-criterion-compat = { version = "2.3.3", default-features = false }
|
||||
colored = { version = "2.1.0" }
|
||||
configparser = { version = "3.0.3" }
|
||||
console_error_panic_hook = { version = "0.1.7" }
|
||||
console_log = { version = "1.0.0" }
|
||||
countme = { version = "3.0.1" }
|
||||
compact_str = "0.9.0"
|
||||
criterion = { version = "0.6.0", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
dir-test = { version = "0.4.0" }
dunce = { version = "1.0.5" }
countme = { version = "3.0.1" }
criterion = { version = "0.5.1", default-features = false }
dirs = { version = "5.0.0" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
etcetera = { version = "0.10.0" }
fern = { version = "0.7.0" }
env_logger = { version = "0.10.1" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
fs-err = { version = "2.11.0" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
hashbrown = { version = "0.15.0", default-features = false, features = [
    "raw-entry",
    "equivalent",
    "inline-more",
] }
heck = "0.5.0"
hexf-parse = { version = "0.2.1" }
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.6.0" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
insta-cmd = { version = "0.6.0" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.34.0", features = ["filters", "glob"] }
insta-cmd = { version = "0.4.0" }
is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.14.0" }
jiff = { version = "0.2.0" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "1.0.0" }
libc = { version = "0.2.153" }
itertools = { version = "0.12.1" }
js-sys = { version = "0.3.67" }
lalrpop-util = { version = "0.20.0", default-features = false }
lexical-parse-float = { version = "0.8.0", features = ["format"] }
libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
    "proposed",
] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "8.0.0" }
ordermap = { version = "0.5.0" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.7.1" }
pep440_rs = { version = "0.4.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.13.4" }
quick-junit = { version = "0.5.0" }
proc-macro2 = { version = "1.0.78" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.3.5" }
quote = { version = "1.0.23" }
rand = { version = "0.9.0" }
rayon = { version = "1.10.0" }
rand = { version = "0.8.5" }
rayon = { version = "1.8.1" }
regex = { version = "1.10.2" }
regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "09627e450566f894956710a3fd923dc80462ae6d" }
result-like = { version = "0.5.0" }
rustc-hash = { version = "1.1.0" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.4" }
seahash = { version = "4.1.0" }
semver = { version = "1.0.21" }
serde = { version = "1.0.196", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.3" }
serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = [
    "macros",
] }
serde_with = { version = "3.6.0", default-features = false, features = ["macros"] }
shellexpand = { version = "3.0.0" }
shlex = { version = "1.3.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
snapbox = { version = "0.6.0", features = [
    "diff",
    "term-svg",
    "cmd",
    "examples",
] }
smallvec = { version = "1.13.1" }
static_assertions = "1.1.0"
strum = { version = "0.27.0", features = ["strum_macros"] }
strum_macros = { version = "0.27.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
strum = { version = "0.25.0", features = ["strum_macros"] }
strum_macros = { version = "0.25.3" }
syn = { version = "2.0.40" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "2.0.0" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.8.11" }
thiserror = { version = "1.0.57" }
tikv-jemallocator = { version = "0.5.0" }
toml = { version = "0.8.9" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-log = { version = "0.2.0" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
    "env-filter",
    "fmt",
    "ansi",
    "smallvec",
] }
tryfn = { version = "0.2.1" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.2.0" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.1" }
ureq = { version = "2.9.1" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = [
    "v4",
    "fast-rng",
    "macro-diagnostics",
] }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wasm-bindgen = { version = "0.2.84" }
wasm-bindgen-test = { version = "0.3.40" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }

[workspace.metadata.cargo-shear]
ignored = ["getrandom", "ruff_options_metadata", "uuid"]

[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
unexpected_cfgs = { level = "warn", check-cfg = [
    "cfg(fuzzing)",
    "cfg(codspeed)",
] }

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
@@ -207,18 +122,14 @@ char_lit_as_u8 = "allow"
collapsible_else_if = "allow"
collapsible_if = "allow"
implicit_hasher = "allow"
map_unwrap_or = "allow"
match_same_arms = "allow"
missing_errors_doc = "allow"
missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
needless_continue = "allow" # An explicit `continue` can be more readable, especially if the alternative is an empty block.
unnecessary_debug_formatting = "allow" # Too many instances; the `Display` form also doesn't quote the path, which is often desired in logs, where we use paths most.
# Without the hashes we run into a `rustfmt` bug in some snapshot tests; see #13250.
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
print_stdout = "warn"
@@ -231,13 +142,6 @@ get_unwrap = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
# Nursery rules
redundant_clone = "warn"
debug_assert_with_mut_call = "warn"
unused_peekable = "warn"

# Diagnostics are not actionable: enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
large_stack_arrays = "allow"
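Member crates don't repeat these tables; each one opts into the workspace-level lints from its own manifest. This is standard Cargo behavior rather than anything specific to this diff:

```toml
# In a member crate's Cargo.toml (e.g., crates/ruff/Cargo.toml):
[lints]
workspace = true
```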

[profile.release]
# Note that we set these explicitly, and these values
@@ -262,9 +166,6 @@ opt-level = 3
[profile.dev.package.similar]
opt-level = 3

[profile.dev.package.salsa]
opt-level = 3

# Reduce complexity of a parser function that would trigger a locals limit in a wasm tool.
# https://github.com/bytecodealliance/wasm-tools/blob/b5c3d98e40590512a3b12470ef358d5c7b983b15/crates/wasmparser/src/limits.rs#L29
[profile.dev.package.ruff_python_parser]
@@ -275,7 +176,3 @@ opt-level = 1
[profile.profiling]
inherits = "release"
debug = 1

# The profile that 'cargo dist' will build with.
[profile.dist]
inherits = "release"

@@ -1,4 +1,4 @@
FROM --platform=$BUILDPLATFORM ubuntu AS build
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME

25
LICENSE
@@ -1371,28 +1371,3 @@ are:
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

- pydoclint, licensed as follows:
"""
MIT License

Copyright (c) 2023 jsh9

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

136
README.md
@@ -4,12 +4,11 @@

[](https://github.com/astral-sh/ruff)
[](https://pypi.python.org/pypi/ruff)
[](https://github.com/astral-sh/ruff/blob/main/LICENSE)
[](https://pypi.python.org/pypi/ruff)
[](https://pypi.python.org/pypi/ruff)
[](https://github.com/astral-sh/ruff/actions)
[](https://discord.com/invite/astral-sh)

[**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
[**Discord**](https://discord.gg/c9MhzV8aU5) | [**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)

An extremely fast Python linter and code formatter, written in Rust.

@@ -28,14 +27,15 @@ An extremely fast Python linter and code formatter, written in Rust.
- ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.13 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
- 🤝 Python 3.12 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
- 📏 Over [700 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
  of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/editors) for [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)

Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.
@@ -49,7 +49,6 @@ times faster than any individual tool.
Ruff is extremely actively developed and used in major open-source projects like:

- [Apache Airflow](https://github.com/apache/airflow)
- [Apache Superset](https://github.com/apache/superset)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Hugging Face](https://github.com/huggingface/transformers)
- [Pandas](https://github.com/pandas-dev/pandas)
@@ -109,47 +108,16 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
1. [Who's Using Ruff?](#whos-using-ruff)
1. [License](#license)

## Getting Started<a id="getting-started"></a>
## Getting Started

For more, see the [documentation](https://docs.astral.sh/ruff/).

### Installation

Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI.

Invoke Ruff directly with [`uvx`](https://docs.astral.sh/uv/):
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:

```shell
uvx ruff check   # Lint all files in the current directory.
uvx ruff format  # Format all files in the current directory.
```

Or install Ruff with `uv` (recommended), `pip`, or `pipx`:

```shell
# With uv.
uv tool install ruff@latest  # Install Ruff globally.
uv add --dev ruff            # Or add Ruff to your project.

# With pip.
pip install ruff

# With pipx.
pipx install ruff
```

Starting with version `0.5.0`, Ruff can be installed with our standalone installers:

```shell
# On macOS and Linux.
curl -LsSf https://astral.sh/ruff/install.sh | sh

# On Windows.
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.0/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.0/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -160,7 +128,7 @@ and with [a variety of other package managers](https://docs.astral.sh/ruff/insta
To run Ruff as a linter, try any of the following:

```shell
ruff check                          # Lint all files in the current directory (and any subdirectories).
ruff check .                        # Lint all files in the current directory (and any subdirectories).
ruff check path/to/code/            # Lint all files in `/path/to/code` (and any subdirectories).
ruff check path/to/code/*.py        # Lint all `.py` files in `/path/to/code`.
ruff check path/to/code/to/file.py  # Lint `file.py`.
@@ -170,7 +138,7 @@ ruff check @arguments.txt           # Lint using an input file, treating its con
Or, to run Ruff as a formatter:

```shell
ruff format                          # Format all files in the current directory (and any subdirectories).
ruff format .                        # Format all files in the current directory (and any subdirectories).
ruff format path/to/code/            # Format all files in `/path/to/code` (and any subdirectories).
ruff format path/to/code/*.py        # Format all `.py` files in `/path/to/code`.
ruff format path/to/code/to/file.py  # Format `file.py`.
@@ -182,7 +150,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.12.0
  rev: v0.2.1
  hooks:
    # Run the linter.
    - id: ruff
@@ -191,10 +159,11 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
    - id: ruff-format
```

Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).

Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/astral-sh/ruff-action):
[`ruff-action`](https://github.com/chartboost/ruff-action):

```yaml
name: Ruff
@@ -203,19 +172,20 @@ jobs:
  ruff:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/ruff-action@v3
      - uses: actions/checkout@v3
      - uses: chartboost/ruff-action@v1
```

### Configuration<a id="configuration"></a>
### Configuration

Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
for a complete list of all configuration options).

If left unspecified, Ruff's default configuration is equivalent to the following `ruff.toml` file:
If left unspecified, Ruff's default configuration is equivalent to:

```toml
[tool.ruff]
# Exclude a variety of commonly ignored directories.
exclude = [
    ".bzr",
@@ -250,11 +220,11 @@ exclude = [
line-length = 88
indent-width = 4

# Assume Python 3.9
target-version = "py39"
# Assume Python 3.8
target-version = "py38"

[lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
[tool.ruff.lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
select = ["E4", "E7", "E9", "F"]
ignore = []

@@ -265,7 +235,7 @@ unfixable = []
# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

[format]
[tool.ruff.format]
# Like Black, use double quotes for strings.
quote-style = "double"

@@ -279,35 +249,21 @@ skip-magic-trailing-comma = false
line-ending = "auto"
```

Note that, in a `pyproject.toml`, each section header should be prefixed with `tool.ruff`. For
example, `[lint]` should be replaced with `[tool.ruff.lint]`.
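For instance, the `[lint]` and `[format]` tables above map to the following in a `pyproject.toml`:

```toml
[tool.ruff.lint]
select = ["E4", "E7", "E9", "F"]
ignore = []

[tool.ruff.format]
quote-style = "double"
```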

Some configuration options can be provided via dedicated command-line arguments, such as those
related to rule enablement and disablement, file discovery, and logging level:
Some configuration options can be provided via the command-line, such as those related to
rule enablement and disablement, file discovery, and logging level:

```shell
ruff check --select F401 --select F403 --quiet
ruff check path/to/code/ --select F401 --select F403 --quiet
```

The remaining configuration options can be provided through a catch-all `--config` argument:

```shell
ruff check --config "lint.per-file-ignores = {'some_file.py' = ['F841']}"
```

To opt in to the latest lint rules, formatter style changes, interface updates, and more, enable
[preview mode](https://docs.astral.sh/ruff/rules/) by setting `preview = true` in your configuration
file or passing `--preview` on the command line. Preview mode enables a collection of unstable
features that may change prior to stabilization.
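Concretely, both commands accept the flag:

```shell
ruff check --preview    # lint with preview rules enabled
ruff format --preview   # format with preview style changes enabled
```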

See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.

## Rules<a id="rules"></a>
## Rules

<!-- Begin section: Rules -->

**Ruff supports over 800 lint rules**, many of which are inspired by popular tools like Flake8,
**Ruff supports over 700 lint rules**, many of which are inspired by popular tools like Flake8,
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
Rust as a first-party feature.

@@ -363,6 +319,7 @@ quality tools, including:
- [flake8-super](https://pypi.org/project/flake8-super/)
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
- [flake8-todos](https://pypi.org/project/flake8-todos/)
- [flake8-trio](https://pypi.org/project/flake8-trio/)
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/astral-sh/ruff/issues/2102))
@@ -379,21 +336,21 @@ quality tools, including:

For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).

## Contributing<a id="contributing"></a>
## Contributing

Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).

You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).

## Support<a id="support"></a>
## Support

Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).

You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).

## Acknowledgements<a id="acknowledgements"></a>
## Acknowledgements

Ruff's linter draws on both the APIs and implementation details of many other
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
@@ -417,11 +374,10 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a

Ruff is released under the MIT license.

## Who's Using Ruff?<a id="whos-using-ruff"></a>
## Who's Using Ruff?

Ruff is used by a number of major open-source projects and companies, including:

- [Albumentations](https://github.com/albumentations-team/albumentations)
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
- [Apache Airflow](https://github.com/apache/airflow)
@@ -429,16 +385,13 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Bokeh](https://github.com/bokeh/bokeh)
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
- [DVC](https://github.com/iterative/dvc)
- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
- [Dify](https://github.com/langgenius/dify)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Godot](https://github.com/godotengine/godot)
- [Gradio](https://github.com/gradio-app/gradio)
- [Great Expectations](https://github.com/great-expectations/great_expectations)
- [HTTPX](https://github.com/encode/httpx)
@@ -447,13 +400,10 @@ Ruff is used by a number of major open-source projects and companies, including:
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
  [Datasets](https://github.com/huggingface/datasets),
  [Diffusers](https://github.com/huggingface/diffusers))
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
- [JAX](https://github.com/jax-ml/jax)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [Kraken Tech](https://kraken.tech/)
- [LangChain](https://github.com/hwchase17/langchain)
- [Litestar](https://litestar.dev/)
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
@@ -463,18 +413,15 @@ Ruff is used by a number of major open-source projects and companies, including:
- Microsoft ([Semantic Kernel](https://github.com/microsoft/semantic-kernel),
  [ONNX Runtime](https://github.com/microsoft/onnxruntime),
  [LightGBM](https://github.com/microsoft/LightGBM))
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python))
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python-sdk))
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
- [Mypy](https://github.com/python/mypy)
- [Nautobot](https://github.com/nautobot/nautobot)
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
- [Neon](https://github.com/neondatabase/neon)
- [Nokia](https://nokia.com/)
- [NoneBot](https://github.com/nonebot/nonebot2)
- [NumPyro](https://github.com/pyro-ppl/numpyro)
- [ONNX](https://github.com/onnx/onnx)
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
- [Open Wine Components](https://github.com/Open-Wine-Components/umu-launcher)
- [PDM](https://github.com/pdm-project/pdm)
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
- [Pandas](https://github.com/pandas-dev/pandas)
@@ -502,7 +449,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Sphinx](https://github.com/sphinx-doc/sphinx)
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
- [Starlette](https://github.com/encode/starlette)
- [Streamlit](https://github.com/streamlit/streamlit)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Vega-Altair](https://github.com/altair-viz/altair)
- WordPress ([Openverse](https://github.com/WordPress/openverse))
@@ -538,9 +484,9 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```

## License<a id="license"></a>
## License

This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
MIT

<div align="center">
  <a target="_blank" href="https://astral.sh" style="background:none">

15
SECURITY.md
@@ -1,15 +0,0 @@
# Security policy

## Reporting a vulnerability

If you have found a possible vulnerability, please email `security at astral dot sh`.

## Bug bounties

While we sincerely appreciate and encourage reports of suspected security problems, please note that
Astral does not currently run any bug bounty programs.

## Vulnerability disclosures

Critical vulnerabilities will be disclosed via GitHub's
[security advisory](https://github.com/astral-sh/ruff/security) system.

28
_typos.toml
@@ -1,37 +1,11 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = [
    "crates/ty_vendored/vendor/**/*",
    "**/resources/**/*",
    "**/snapshots/**/*",
    # Completion tests tend to have a lot of incomplete
    # words naturally. It's annoying to have to make all
    # of them actually words. So just ignore typos here.
    "crates/ty_ide/src/completion.rs",
]
extend-exclude = ["**/resources/**/*", "**/snapshots/**/*"]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
hel = "hel"
whos = "whos"
spawnve = "spawnve"
ned = "ned"
pn = "pn" # `import panel as pn` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.
jod = "jod" # e.g., `jod-thread`
Numer = "Numer" # Library name 'NumerBlox' in "Who's Using Ruff?"

[default]
extend-ignore-re = [
    # Line ignore with trailing "spellchecker:disable-line"
    "(?Rm)^.*#\\s*spellchecker:disable-line$",
    "LICENSEs",
    # Various third-party dependencies use `typ` as a struct field name (e.g., lsp_types::LogMessageParams)
    "typ",
    # TODO: Remove this once the `TYP` redirects are removed from `rule_redirects.rs`
    "TYP",
]
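The first regex lets any individual line opt out of spell-checking with a trailing marker; for example, a hypothetical line like this would be skipped:

```python
teh_placeholder = 1  # spellchecker:disable-line
```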

[default.extend-identifiers]
"FrIeNdLy" = "FrIeNdLy"

29
clippy.toml
@@ -1,26 +1,7 @@
doc-valid-idents = [
    "..",
    "CodeQL",
    "CPython",
    "FastAPI",
    "IPython",
    "LangChain",
    "LibCST",
    "McCabe",
    "NumPy",
    "SCREAMING_SNAKE_CASE",
    "SQLAlchemy",
    "StackOverflow",
    "PyCharm",
    "SNMPv1",
    "SNMPv2",
    "SNMPv3",
    "PyFlakes",
]

ignore-interior-mutability = [
    # Interned is read-only. The wrapped `Rc` never gets updated.
    "ruff_formatter::format_element::Interned",
    # The expression is read-only.
    "ruff_python_ast::hashable::HashableExpr",
    "StackOverflow",
    "CodeQL",
    "IPython",
    "NumPy",
    "..",
]
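`doc-valid-idents` tells `clippy::doc_markdown` that these names are valid as-is in doc comments, so prose like the following isn't flagged for missing backticks (an illustrative sketch, not code from this diff):

```rust
/// Parses a NumPy-style docstring of the kind IPython renders.
///
/// Without the `doc-valid-idents` entries above, `clippy::doc_markdown`
/// would ask for `NumPy` and `IPython` to be wrapped in backticks.
pub fn parse_numpy_docstring(raw: &str) -> Vec<String> {
    raw.lines().map(str::to_owned).collect()
}
```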

@@ -1,7 +1,7 @@
[package]
name = "ruff"
version = "0.12.0"
publish = true
version = "0.2.1"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
@@ -13,37 +13,31 @@ readme = "../../README.md"
default-run = "ruff"

[dependencies]
ruff_cache = { workspace = true }
ruff_db = { workspace = true, default-features = false, features = ["os"] }
ruff_diagnostics = { workspace = true }
ruff_graph = { workspace = true, features = ["serde", "clap"] }
ruff_linter = { workspace = true, features = ["clap"] }
ruff_macros = { workspace = true }
ruff_notebook = { workspace = true }
ruff_options_metadata = { workspace = true, features = ["serde"] }
ruff_python_ast = { workspace = true }
ruff_python_formatter = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_server = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
ruff_workspace = { workspace = true }
ruff_cache = { path = "../ruff_cache" }
ruff_diagnostics = { path = "../ruff_diagnostics" }
ruff_linter = { path = "../ruff_linter", features = ["clap"] }
ruff_macros = { path = "../ruff_macros" }
ruff_notebook = { path = "../ruff_notebook" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_source_file = { path = "../ruff_source_file" }
ruff_text_size = { path = "../ruff_text_size" }
ruff_workspace = { path = "../ruff_workspace" }

anyhow = { workspace = true }
argfile = { workspace = true }
bincode = { workspace = true, features = ["serde"] }
bincode = { workspace = true }
bitflags = { workspace = true }
cachedir = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "env", "wrap_help"] }
clap_complete_command = { workspace = true }
clearscreen = { workspace = true }
colored = { workspace = true }
filetime = { workspace = true }
globwalk = { workspace = true }
ignore = { workspace = true }
is-macro = { workspace = true }
itertools = { workspace = true }
jiff = { workspace = true }
log = { workspace = true }
notify = { workspace = true }
path-absolutize = { workspace = true, features = ["once_cell_cache"] }
@@ -63,28 +57,19 @@ wild = { workspace = true }

[dev-dependencies]
# Enable test rules during development
ruff_linter = { workspace = true, features = ["clap", "test-rules"] }

assert_fs = { workspace = true }
ruff_linter = { path = "../ruff_linter", features = ["clap", "test-rules"] }
assert_cmd = { workspace = true }
# Avoid writing colored snapshots when running tests from the terminal
colored = { workspace = true, features = ["no-color"] }
indoc = { workspace = true }
colored = { workspace = true, features = ["no-color"] }
insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { workspace = true }
tempfile = { workspace = true }
test-case = { workspace = true }

[package.metadata.cargo-shear]
# Used via macro expansion.
ignored = ["jiff"]

[package.metadata.dist]
dist = true

[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true }

[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
tikv-jemallocator = { workspace = true }

[lints]

@@ -1,8 +1,4 @@
use std::{
    fs,
    path::{Path, PathBuf},
    process::Command,
};
use std::{fs, path::Path, process::Command};

fn main() {
    // The workspace root directory is not available without walking up the tree
@@ -13,8 +9,9 @@ fn main() {

    commit_info(&workspace_root);

    #[allow(clippy::disallowed_methods)]
    let target = std::env::var("TARGET").unwrap();
    println!("cargo::rustc-env=RUST_HOST_TARGET={target}");
    println!("cargo:rustc-env=RUST_HOST_TARGET={target}");
}

fn commit_info(workspace_root: &Path) {
@@ -24,22 +21,27 @@ fn commit_info(workspace_root: &Path) {
        return;
    }

    if let Some(git_head_path) = git_head(&git_dir) {
        println!("cargo:rerun-if-changed={}", git_head_path.display());
    let git_head_path = git_dir.join("HEAD");
    println!(
        "cargo:rerun-if-changed={}",
        git_head_path.as_path().display()
    );

        let git_head_contents = fs::read_to_string(git_head_path);
        if let Ok(git_head_contents) = git_head_contents {
            // The contents are either a commit or a reference in the following formats
            // - "<commit>" when the head is detached
            // - "ref <ref>" when working on a branch
            // If a commit, checking if the HEAD file has changed is sufficient
            // If a ref, we need to add the head file for that ref to rebuild on commit
            let mut git_ref_parts = git_head_contents.split_whitespace();
            git_ref_parts.next();
            if let Some(git_ref) = git_ref_parts.next() {
                let git_ref_path = git_dir.join(git_ref);
                println!("cargo:rerun-if-changed={}", git_ref_path.display());
            }
    let git_head_contents = fs::read_to_string(git_head_path);
    if let Ok(git_head_contents) = git_head_contents {
        // The contents are either a commit or a reference in the following formats
        // - "<commit>" when the head is detached
        // - "ref <ref>" when working on a branch
        // If a commit, checking if the HEAD file has changed is sufficient
        // If a ref, we need to add the head file for that ref to rebuild on commit
        let mut git_ref_parts = git_head_contents.split_whitespace();
        git_ref_parts.next();
        if let Some(git_ref) = git_ref_parts.next() {
            let git_ref_path = git_dir.join(git_ref);
            println!(
                "cargo:rerun-if-changed={}",
                git_ref_path.as_path().display()
            );
        }
    }

@@ -48,7 +50,7 @@ fn commit_info(workspace_root: &Path) {
        .arg("-1")
        .arg("--date=short")
        .arg("--abbrev=9")
        .arg("--format=%H %h %cd %(describe:tags)")
        .arg("--format=%H %h %cd %(describe)")
        .output()
    {
        Ok(output) if output.status.success() => output,
@@ -57,49 +59,22 @@ fn commit_info(workspace_root: &Path) {
    let stdout = String::from_utf8(output.stdout).unwrap();
    let mut parts = stdout.split_whitespace();
    let mut next = || parts.next().unwrap();
    println!("cargo::rustc-env=RUFF_COMMIT_HASH={}", next());
    println!("cargo::rustc-env=RUFF_COMMIT_SHORT_HASH={}", next());
    println!("cargo::rustc-env=RUFF_COMMIT_DATE={}", next());
    println!("cargo:rustc-env=RUFF_COMMIT_HASH={}", next());
    println!("cargo:rustc-env=RUFF_COMMIT_SHORT_HASH={}", next());
    println!("cargo:rustc-env=RUFF_COMMIT_DATE={}", next());

    // Describe can fail for some commits
    // https://git-scm.com/docs/pretty-formats#Documentation/pretty-formats.txt-emdescribeoptionsem
    if let Some(describe) = parts.next() {
        let mut describe_parts = describe.split('-');
        println!(
            "cargo::rustc-env=RUFF_LAST_TAG={}",
            "cargo:rustc-env=RUFF_LAST_TAG={}",
            describe_parts.next().unwrap()
        );
        // If this is the tagged commit, this component will be missing
        println!(
            "cargo::rustc-env=RUFF_LAST_TAG_DISTANCE={}",
            "cargo:rustc-env=RUFF_LAST_TAG_DISTANCE={}",
            describe_parts.next().unwrap_or("0")
        );
    }
}

fn git_head(git_dir: &Path) -> Option<PathBuf> {
    // The typical case is a standard git repository.
    let git_head_path = git_dir.join("HEAD");
    if git_head_path.exists() {
        return Some(git_head_path);
    }
    if !git_dir.is_file() {
        return None;
    }
    // If `.git/HEAD` doesn't exist and `.git` is actually a file,
    // then let's try to read it as a worktree. If it's
    // a worktree, then its contents will look like this, e.g.:
    //
    // gitdir: /home/andrew/astral/uv/main/.git/worktrees/pr2
    //
    // And the HEAD file we want to watch will be at:
    //
    // /home/andrew/astral/uv/main/.git/worktrees/pr2/HEAD
    let contents = fs::read_to_string(git_dir).ok()?;
    let (label, worktree_path) = contents.split_once(':')?;
    if label != "gitdir" {
        return None;
    }
    let worktree_path = worktree_path.trim();
    Some(PathBuf::from(worktree_path))
}

@@ -1,104 +1,55 @@
use std::cmp::Ordering;
use std::fmt::{Formatter, Write as _};
use std::fmt::Formatter;
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;

use crate::commands::completions::config::{OptionString, OptionStringParser};
use anyhow::bail;
use clap::builder::{TypedValueParser, ValueParserFactory};
use clap::{Parser, Subcommand, command};
use clap::{command, Parser};
use colored::Colorize;
use itertools::Itertools;
use path_absolutize::path_dedot;
use regex::Regex;
use ruff_graph::Direction;
use rustc_hash::FxHashMap;
use toml;

use ruff_linter::line_width::LineLength;
use ruff_linter::logging::LogLevel;
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::{
    ExtensionPair, FilePattern, OutputFormat, PatternPrefixPair, PerFileIgnore, PreviewMode,
    PythonVersion, UnsafeFixes,
    ExtensionPair, FilePattern, PatternPrefixPair, PerFileIgnore, PreviewMode, PythonVersion,
    SerializationFormat, UnsafeFixes,
};
use ruff_linter::{RuleParser, RuleSelector, RuleSelectorParser};
use ruff_options_metadata::{OptionEntry, OptionsMetadata};
use ruff_python_ast as ast;
use ruff_source_file::{LineIndex, OneIndexed, PositionEncoding};
use ruff_linter::{warn_user, RuleParser, RuleSelector, RuleSelectorParser};
use ruff_source_file::{LineIndex, OneIndexed};
use ruff_text_size::TextRange;
use ruff_workspace::configuration::{Configuration, RuleSelection};
use ruff_workspace::options::{Options, PycodestyleOptions};
use ruff_workspace::resolver::ConfigurationTransformer;
use rustc_hash::FxHashMap;
use toml;

/// All configuration options that can be passed "globally",
/// i.e., can be passed to all subcommands
#[derive(Debug, Default, Clone, clap::Args)]
pub struct GlobalConfigArgs {
    #[clap(flatten)]
    log_level_args: LogLevelArgs,
    /// Either a path to a TOML configuration file (`pyproject.toml` or `ruff.toml`),
    /// or a TOML `<KEY> = <VALUE>` pair
    /// (such as you might find in a `ruff.toml` configuration file)
    /// overriding a specific configuration option.
    /// Overrides of individual settings using this option always take precedence
    /// over all configuration files, including configuration files that were also
    /// specified using `--config`.
    #[arg(
        long,
        action = clap::ArgAction::Append,
        value_name = "CONFIG_OPTION",
        value_parser = ConfigArgumentParser,
        global = true,
        help_heading = "Global options",
    )]
    pub config: Vec<SingleConfigArgument>,
    /// Ignore all configuration files.
    //
    // Note: We can't mark this as conflicting with `--config` here
    // as `--config` can be used for specifying configuration overrides
    // as well as configuration files.
    // Specifying a configuration file conflicts with `--isolated`;
    // specifying a configuration override does not.
    // If a user specifies `ruff check --isolated --config=ruff.toml`,
    // we emit an error later on, after the initial parsing by clap.
    #[arg(long, help_heading = "Global options", global = true)]
    pub isolated: bool,
}

impl GlobalConfigArgs {
    pub fn log_level(&self) -> LogLevel {
        LogLevel::from(&self.log_level_args)
    }

    #[must_use]
    fn partition(self) -> (LogLevel, Vec<SingleConfigArgument>, bool) {
        (self.log_level(), self.config, self.isolated)
    }
}

#[derive(Debug, Parser)]
#[command(
    author,
    name = "ruff",
    about = "Ruff: An extremely fast Python linter and code formatter.",
    about = "Ruff: An extremely fast Python linter.",
    after_help = "For help with a specific command, see: `ruff help <command>`."
)]
#[command(version)]
pub struct Args {
    #[command(subcommand)]
    pub(crate) command: Command,
    pub command: Command,
    #[clap(flatten)]
    pub(crate) global_options: GlobalConfigArgs,
    pub log_level_args: LogLevelArgs,
}

#[expect(clippy::large_enum_variant)]
#[allow(clippy::large_enum_variant)]
#[derive(Debug, clap::Subcommand)]
pub enum Command {
    /// Run Ruff on the given files or directories.
    /// Run Ruff on the given files or directories (default).
    Check(CheckCommand),
    /// Explain a rule (or all rules).
    #[clap(alias = "--explain")]
    #[command(group = clap::ArgGroup::new("selector").multiple(false).required(true))]
    Rule {
        /// Rule to explain
@@ -112,37 +63,31 @@ pub enum Command {
        /// Output format
        #[arg(long, value_enum, default_value = "text")]
        output_format: HelpFormat,

        /// Output format (Deprecated: Use `--output-format` instead).
        #[arg(long, value_enum, conflicts_with = "output_format", hide = true)]
        format: Option<HelpFormat>,
    },
    /// List or describe the available configuration options.
    Config {
        /// Config key to show
        #[arg(
            value_parser = OptionStringParser,
            hide_possible_values = true
        )]
        option: Option<OptionString>,
        /// Output format
        #[arg(long, value_enum, default_value = "text")]
        output_format: HelpFormat,
    },
    Config { option: Option<String> },
    /// List all supported upstream linters.
    Linter {
        /// Output format
        #[arg(long, value_enum, default_value = "text")]
        output_format: HelpFormat,

        /// Output format (Deprecated: Use `--output-format` instead).
        #[arg(long, value_enum, conflicts_with = "output_format", hide = true)]
        format: Option<HelpFormat>,
    },
    /// Clear any caches in the current directory and any subdirectories.
    #[clap(alias = "--clean")]
    Clean,
    /// Generate shell completion.
    #[clap(hide = true)]
    #[clap(alias = "--generate-shell-completion", hide = true)]
    GenerateShellCompletion { shell: clap_complete_command::Shell },
    /// Run the Ruff formatter on the given files or directories.
    Format(FormatCommand),
    /// Run the language server.
    Server(ServerCommand),
    /// Run analysis over Python source code.
    #[clap(subcommand)]
    Analyze(AnalyzeCommand),
    /// Display Ruff's version
    Version {
        #[arg(long, value_enum, default_value = "text")]
@@ -150,41 +95,9 @@ pub enum Command {
    },
}

#[derive(Debug, Subcommand)]
pub enum AnalyzeCommand {
    /// Generate a map of Python file dependencies or dependents.
    Graph(AnalyzeGraphCommand),
}

#[derive(Clone, Debug, clap::Parser)]
pub struct AnalyzeGraphCommand {
    /// List of files or directories to include.
    #[clap(help = "List of files or directories to include [default: .]")]
    files: Vec<PathBuf>,
    /// The direction of the import map. By default, generates a dependency map, i.e., a map from
    /// file to files that it depends on. Use `--direction dependents` to generate a map from file
    /// to files that depend on it.
    #[clap(long, value_enum, default_value_t)]
    direction: Direction,
    /// Attempt to detect imports from string literals.
    #[clap(long)]
    detect_string_imports: bool,
    /// Enable preview mode. Use `--no-preview` to disable.
    #[arg(long, overrides_with("no_preview"))]
    preview: bool,
    #[clap(long, overrides_with("preview"), hide = true)]
    no_preview: bool,
    /// The minimum Python version that should be supported.
    #[arg(long, value_enum)]
    target_version: Option<PythonVersion>,
    /// Path to a virtual environment to use for resolving additional dependencies
    #[arg(long)]
    python: Option<PathBuf>,
}

// The `Parser` derive is for ruff_dev; for ruff, `Args` would be sufficient.
#[derive(Clone, Debug, clap::Parser)]
#[expect(clippy::struct_excessive_bools)]
#[allow(clippy::struct_excessive_bools)]
pub struct CheckCommand {
    /// List of files or directories to check.
    #[clap(help = "List of files or directories to check [default: .]")]
@@ -201,20 +114,26 @@ pub struct CheckCommand {
    unsafe_fixes: bool,
    #[arg(long, overrides_with("unsafe_fixes"), hide = true)]
    no_unsafe_fixes: bool,
    /// Show violations with source code.
    /// Use `--no-show-source` to disable.
    /// (Deprecated: use `--output-format=full` or `--output-format=concise` instead of `--show-source` and `--no-show-source`, respectively)
    #[arg(long, overrides_with("no_show_source"))]
    show_source: bool,
    #[clap(long, overrides_with("show_source"), hide = true)]
    no_show_source: bool,
    /// Show an enumeration of all fixed lint violations.
    /// Use `--no-show-fixes` to disable.
    #[arg(long, overrides_with("no_show_fixes"))]
    show_fixes: bool,
    #[clap(long, overrides_with("show_fixes"), hide = true)]
    no_show_fixes: bool,
    /// Avoid writing any fixed files back; instead, output a diff for each changed file to stdout, and exit 0 if there are no diffs.
    /// Implies `--fix-only`.
    /// Avoid writing any fixed files back; instead, output a diff for each changed file to stdout. Implies `--fix-only`.
    #[arg(long, conflicts_with = "show_fixes")]
    pub diff: bool,
    /// Run in watch mode by re-running whenever files change.
    #[arg(short, long)]
    pub watch: bool,
    /// Apply fixes to resolve lint violations, but don't report on, or exit non-zero for, leftover violations. Implies `--fix`.
    /// Apply fixes to resolve lint violations, but don't report on leftover violations. Implies `--fix`.
    /// Use `--no-fix-only` to disable or `--unsafe-fixes` to include unsafe fixes.
    #[arg(long, overrides_with("no_fix_only"))]
    fix_only: bool,
@@ -225,12 +144,13 @@ pub struct CheckCommand {
    ignore_noqa: bool,

    /// Output serialization format for violations.
    /// The default serialization format is "full".
    /// The default serialization format is "concise".
    /// In preview mode, the default serialization format is "full".
    #[arg(long, value_enum, env = "RUFF_OUTPUT_FORMAT")]
    pub output_format: Option<OutputFormat>,
    pub output_format: Option<SerializationFormat>,

    /// Specify file to write the linter output to (default: stdout).
    #[arg(short, long, env = "RUFF_OUTPUT_FILE")]
    #[arg(short, long)]
    pub output_file: Option<PathBuf>,
    /// The minimum Python version that should be supported.
    #[arg(long, value_enum)]
@@ -241,6 +161,20 @@ pub struct CheckCommand {
    preview: bool,
    #[clap(long, overrides_with("preview"), hide = true)]
    no_preview: bool,
    /// Either a path to a TOML configuration file (`pyproject.toml` or `ruff.toml`),
    /// or a TOML `<KEY> = <VALUE>` pair
    /// (such as you might find in a `ruff.toml` configuration file)
    /// overriding a specific configuration option.
    /// Overrides of individual settings using this option always take precedence
    /// over all configuration files, including configuration files that were also
    /// specified using `--config`.
    #[arg(
        long,
        action = clap::ArgAction::Append,
        value_name = "CONFIG_OPTION",
        value_parser = ConfigArgumentParser,
    )]
    pub config: Vec<SingleConfigArgument>,
    /// Comma-separated list of rule codes to enable (or ALL, to enable all rules).
    #[arg(
        long,
@@ -372,13 +306,24 @@ pub struct CheckCommand {
    /// Disable cache reads.
    #[arg(short, long, env = "RUFF_NO_CACHE", help_heading = "Miscellaneous")]
    pub no_cache: bool,
    /// Ignore all configuration files.
    //
    // Note: We can't mark this as conflicting with `--config` here
    // as `--config` can be used for specifying configuration overrides
    // as well as configuration files.
    // Specifying a configuration file conflicts with `--isolated`;
    // specifying a configuration override does not.
    // If a user specifies `ruff check --isolated --config=ruff.toml`,
    // we emit an error later on, after the initial parsing by clap.
    #[arg(long, help_heading = "Miscellaneous")]
    pub isolated: bool,
    /// Path to the cache directory.
    #[arg(long, env = "RUFF_CACHE_DIR", help_heading = "Miscellaneous")]
    pub cache_dir: Option<PathBuf>,
    /// The name of the file when passing it through stdin.
    #[arg(long, help_heading = "Miscellaneous")]
    pub stdin_filename: Option<PathBuf>,
    /// List of mappings from file extension to language (one of `python`, `ipynb`, `pyi`). For
    /// List of mappings from file extension to language (one of ["python", "ipynb", "pyi"]). For
    /// example, to treat `.ipy` files as IPython notebooks, use `--extension ipy:ipynb`.
    #[arg(long, value_delimiter = ',')]
    pub extension: Option<Vec<ExtensionPair>>,
@@ -398,6 +343,7 @@ pub struct CheckCommand {
        long,
        // Unsupported default-command arguments.
        conflicts_with = "diff",
        conflicts_with = "show_source",
        conflicts_with = "watch",
    )]
    pub statistics: bool,
@@ -443,10 +389,13 @@ pub struct CheckCommand {
        conflicts_with = "watch",
    )]
    pub show_settings: bool,
    /// Dev-only argument to show fixes
    #[arg(long, hide = true)]
    pub ecosystem_ci: bool,
}

#[derive(Clone, Debug, clap::Parser)]
#[expect(clippy::struct_excessive_bools)]
#[allow(clippy::struct_excessive_bools)]
pub struct FormatCommand {
    /// List of files or directories to format.
    #[clap(help = "List of files or directories to format [default: .]")]
@@ -459,6 +408,20 @@ pub struct FormatCommand {
    /// difference between the current file and how the formatted file would look.
    #[arg(long)]
    pub diff: bool,
    /// Either a path to a TOML configuration file (`pyproject.toml` or `ruff.toml`),
    /// or a TOML `<KEY> = <VALUE>` pair
    /// (such as you might find in a `ruff.toml` configuration file)
    /// overriding a specific configuration option.
    /// Overrides of individual settings using this option always take precedence
    /// over all configuration files, including configuration files that were also
    /// specified using `--config`.
    #[arg(
        long,
        action = clap::ArgAction::Append,
        value_name = "CONFIG_OPTION",
        value_parser = ConfigArgumentParser,
    )]
    pub config: Vec<SingleConfigArgument>,

    /// Disable cache reads.
    #[arg(short, long, env = "RUFF_NO_CACHE", help_heading = "Miscellaneous")]
@@ -499,10 +462,21 @@ pub struct FormatCommand {
    /// Set the line-length.
    #[arg(long, help_heading = "Format configuration")]
    pub line_length: Option<LineLength>,
    /// Ignore all configuration files.
    //
    // Note: We can't mark this as conflicting with `--config` here
    // as `--config` can be used for specifying configuration overrides
    // as well as configuration files.
    // Specifying a configuration file conflicts with `--isolated`;
    // specifying a configuration override does not.
    // If a user specifies `ruff check --isolated --config=ruff.toml`,
    // we emit an error later on, after the initial parsing by clap.
    #[arg(long, help_heading = "Miscellaneous")]
    pub isolated: bool,
    /// The name of the file when passing it through stdin.
    #[arg(long, help_heading = "Miscellaneous")]
    pub stdin_filename: Option<PathBuf>,
    /// List of mappings from file extension to language (one of `python`, `ipynb`, `pyi`). For
    /// List of mappings from file extension to language (one of ["python", "ipynb", "pyi"]). For
    /// example, to treat `.ipy` files as IPython notebooks, use `--extension ipy:ipynb`.
    #[arg(long, value_delimiter = ',')]
    pub extension: Option<Vec<ExtensionPair>>,
@@ -530,28 +504,6 @@ pub struct FormatCommand {
    /// The option can only be used when formatting a single file. Range formatting of notebooks is unsupported.
    #[clap(long, help_heading = "Editor options", verbatim_doc_comment)]
    pub range: Option<FormatRange>,

    /// Exit with a non-zero status code if any files were modified via format, even if all files were formatted successfully.
    #[arg(long, help_heading = "Miscellaneous", alias = "exit-non-zero-on-fix")]
    pub exit_non_zero_on_format: bool,
}
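The grammar of the `--range` value sits above this hunk; assuming the documented `<line>[:<column>]` endpoint form, an illustrative invocation would be:

```shell
# Format only lines 5 through 10 of a single file.
ruff format --range=5-10 path/to/file.py
```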

#[derive(Copy, Clone, Debug, clap::Parser)]
pub struct ServerCommand {
    /// Enable preview mode. Use `--no-preview` to disable.
    ///
    /// This enables unstable server features and turns on the preview mode for the linter
    /// and the formatter.
    #[arg(long, overrides_with("no_preview"))]
    preview: bool,
    #[clap(long, overrides_with("preview"), hide = true)]
    no_preview: bool,
}

impl ServerCommand {
    pub(crate) fn resolve_preview(self) -> Option<bool> {
        resolve_bool_arg(self.preview, self.no_preview)
    }
}
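`resolve_bool_arg` is defined outside the hunks shown here; a minimal sketch of the usual `--flag`/`--no-flag` resolution it performs, under that assumption:

```rust
fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
    match (yes, no) {
        (true, false) => Some(true),   // `--preview` was passed
        (false, true) => Some(false),  // `--no-preview` was passed
        (false, false) => None,        // neither flag: defer to configuration
        (true, true) => unreachable!("clap's `overrides_with` keeps the flags mutually exclusive"),
    }
}
```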

#[derive(Debug, Clone, Copy, clap::ValueEnum)]
@@ -560,8 +512,8 @@ pub enum HelpFormat {
    Json,
}

#[expect(clippy::module_name_repetitions)]
#[derive(Debug, Default, Clone, clap::Args)]
#[allow(clippy::module_name_repetitions)]
#[derive(Debug, clap::Args)]
pub struct LogLevelArgs {
    /// Enable verbose logging.
    #[arg(
@@ -609,10 +561,6 @@ impl From<&LogLevelArgs> for LogLevel {
/// Configuration-related arguments passed via the CLI.
#[derive(Default)]
pub struct ConfigArguments {
    /// Whether the user specified --isolated on the command line
    pub(crate) isolated: bool,
    /// The logging level to be used, derived from command-line arguments passed
    pub(crate) log_level: LogLevel,
    /// Path to a pyproject.toml or ruff.toml configuration file (etc.).
    /// Either 0 or 1 configuration file paths may be provided on the command line.
    config_file: Option<PathBuf>,
@@ -633,19 +581,21 @@ impl ConfigArguments {
    }

    fn from_cli_arguments(
        global_options: GlobalConfigArgs,
        config_options: Vec<SingleConfigArgument>,
        per_flag_overrides: ExplicitConfigOverrides,
        isolated: bool,
    ) -> anyhow::Result<Self> {
        let (log_level, config_options, isolated) = global_options.partition();
        let mut config_file: Option<PathBuf> = None;
        let mut overrides = Configuration::default();
        let mut new = Self {
            per_flag_overrides,
            ..Self::default()
        };

        for option in config_options {
            match option {
                SingleConfigArgument::SettingsOverride(overridden_option) => {
                    let overridden_option = Arc::try_unwrap(overridden_option)
                        .unwrap_or_else(|option| option.deref().clone());
                    overrides = overrides.combine(Configuration::from_options(
                    new.overrides = new.overrides.combine(Configuration::from_options(
                        overridden_option,
                        None,
                        &path_dedot::CWD,
@@ -664,7 +614,7 @@ The argument `--config={}` cannot be used with `--isolated`
                        path.display()
                    );
                }
                if let Some(ref config_file) = config_file {
                if let Some(ref config_file) = new.config_file {
                    let (first, second) = (config_file.display(), path.display());
                    bail!(
                        "\
@@ -675,17 +625,11 @@ You cannot specify more than one configuration file on the command line.
"
                    );
                }
                config_file = Some(path);
                new.config_file = Some(path);
            }
        }
    }
        Ok(Self {
            isolated,
            log_level,
            config_file,
            overrides,
            per_flag_overrides,
        })
        Ok(new)
    }
}

@@ -699,17 +643,16 @@ impl ConfigurationTransformer for ConfigArguments {
impl CheckCommand {
    /// Partition the CLI into command-line arguments and configuration
    /// overrides.
    pub fn partition(
        self,
        global_options: GlobalConfigArgs,
    ) -> anyhow::Result<(CheckArguments, ConfigArguments)> {
    pub fn partition(self) -> anyhow::Result<(CheckArguments, ConfigArguments)> {
        let check_arguments = CheckArguments {
            add_noqa: self.add_noqa,
            diff: self.diff,
            ecosystem_ci: self.ecosystem_ci,
            exit_non_zero_on_fix: self.exit_non_zero_on_fix,
            exit_zero: self.exit_zero,
            files: self.files,
            ignore_noqa: self.ignore_noqa,
            isolated: self.isolated,
            no_cache: self.no_cache,
            output_file: self.output_file,
            show_files: self.show_files,
@@ -735,7 +678,7 @@ impl CheckCommand {
            preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
            respect_gitignore: resolve_bool_arg(self.respect_gitignore, self.no_respect_gitignore),
            select: self.select,
            target_version: self.target_version.map(ast::PythonVersion::from),
            target_version: self.target_version,
            unfixable: self.unfixable,
            // TODO(charlie): Included in `pyproject.toml`, but not inherited.
            cache_dir: self.cache_dir,
@@ -744,13 +687,17 @@ impl CheckCommand {
            unsafe_fixes: resolve_bool_arg(self.unsafe_fixes, self.no_unsafe_fixes)
                .map(UnsafeFixes::from),
            force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
|
||||
output_format: self.output_format,
|
||||
output_format: resolve_output_format(
|
||||
self.output_format,
|
||||
resolve_bool_arg(self.show_source, self.no_show_source),
|
||||
resolve_bool_arg(self.preview, self.no_preview).unwrap_or_default(),
|
||||
),
|
||||
show_fixes: resolve_bool_arg(self.show_fixes, self.no_show_fixes),
|
||||
extension: self.extension,
|
||||
..ExplicitConfigOverrides::default()
|
||||
};
|
||||
|
||||
let config_args = ConfigArguments::from_cli_arguments(global_options, cli_overrides)?;
|
||||
let config_args =
|
||||
ConfigArguments::from_cli_arguments(self.config, cli_overrides, self.isolated)?;
|
||||
Ok((check_arguments, config_args))
|
||||
}
|
||||
}
|
||||
@@ -758,18 +705,15 @@ impl CheckCommand {
|
||||
impl FormatCommand {
|
||||
/// Partition the CLI into command-line arguments and configuration
|
||||
/// overrides.
|
||||
pub fn partition(
|
||||
self,
|
||||
global_options: GlobalConfigArgs,
|
||||
) -> anyhow::Result<(FormatArguments, ConfigArguments)> {
|
||||
pub fn partition(self) -> anyhow::Result<(FormatArguments, ConfigArguments)> {
|
||||
let format_arguments = FormatArguments {
|
||||
check: self.check,
|
||||
diff: self.diff,
|
||||
files: self.files,
|
||||
isolated: self.isolated,
|
||||
no_cache: self.no_cache,
|
||||
stdin_filename: self.stdin_filename,
|
||||
range: self.range,
|
||||
exit_non_zero_on_format: self.exit_non_zero_on_format,
|
||||
};
|
||||
|
||||
let cli_overrides = ExplicitConfigOverrides {
|
||||
@@ -778,42 +722,16 @@ impl FormatCommand {
|
||||
exclude: self.exclude,
|
||||
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
|
||||
force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
|
||||
target_version: self.target_version.map(ast::PythonVersion::from),
|
||||
target_version: self.target_version,
|
||||
cache_dir: self.cache_dir,
|
||||
extension: self.extension,
|
||||
|
||||
// Unsupported on the formatter CLI, but required on `Overrides`.
|
||||
..ExplicitConfigOverrides::default()
|
||||
};
|
||||
|
||||
let config_args = ConfigArguments::from_cli_arguments(global_options, cli_overrides)?;
|
||||
Ok((format_arguments, config_args))
|
||||
}
|
||||
}
|
||||
|
||||
impl AnalyzeGraphCommand {
|
||||
/// Partition the CLI into command-line arguments and configuration
|
||||
/// overrides.
|
||||
pub fn partition(
|
||||
self,
|
||||
global_options: GlobalConfigArgs,
|
||||
) -> anyhow::Result<(AnalyzeGraphArgs, ConfigArguments)> {
|
||||
let format_arguments = AnalyzeGraphArgs {
|
||||
files: self.files,
|
||||
direction: self.direction,
|
||||
python: self.python,
|
||||
};
|
||||
|
||||
let cli_overrides = ExplicitConfigOverrides {
|
||||
detect_string_imports: if self.detect_string_imports {
|
||||
Some(true)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
|
||||
target_version: self.target_version.map(ast::PythonVersion::from),
|
||||
..ExplicitConfigOverrides::default()
|
||||
};
|
||||
|
||||
let config_args = ConfigArguments::from_cli_arguments(global_options, cli_overrides)?;
|
||||
let config_args =
|
||||
ConfigArguments::from_cli_arguments(self.config, cli_overrides, self.isolated)?;
|
||||
Ok((format_arguments, config_args))
|
||||
}
|
||||
}
|
||||
@@ -827,34 +745,38 @@ fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
|
||||
}
|
||||
}
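The hunk above only shows the closing braces of `resolve_bool_arg`, the helper that `ServerCommand::resolve_preview` and all three `partition` functions lean on. A minimal sketch of the usual clap pattern, with the body reconstructed from the call sites in this diff rather than copied from the source:

// Sketch: collapse a pair of mutually-overriding clap flags
// (`--preview` / `--no-preview`) into a single Option<bool>.
// The body is an assumption inferred from how it is called above.
fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
    match (yes, no) {
        (true, false) => Some(true),
        (false, true) => Some(false),
        (false, false) => None,
        (true, true) => unreachable!("clap `overrides_with` keeps only the last flag"),
    }
}

With `overrides_with` declared on both flags, clap retains only the last one given, which is why the `(true, true)` arm can never be reached in practice.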

/// Enumeration of various ways in which a --config CLI flag
/// could be invalid
#[derive(Debug)]
enum InvalidConfigFlagReason {
InvalidToml(toml::de::Error),
/// It was valid TOML, but not a valid ruff config file.
/// E.g. the user tried to select a rule that doesn't exist,
/// or tried to enable a setting that doesn't exist
ValidTomlButInvalidRuffSchema(toml::de::Error),
/// It was a valid ruff config file, but the user tried to pass a
/// value for `extend` as part of the config override.
/// `extend` is special, because it affects which config files we look at
/// in the first place. We currently only parse --config overrides *after*
/// we've combined them with all the arguments from the various config files
/// that we found, so trying to override `extend` as part of a --config
/// override is forbidden.
ExtendPassedViaConfigFlag,
enum TomlParseFailureKind {
SyntaxError,
UnknownOption,
}

impl InvalidConfigFlagReason {
const fn description(&self) -> &'static str {
match self {
Self::InvalidToml(_) => "The supplied argument is not valid TOML",
Self::ValidTomlButInvalidRuffSchema(_) => {
impl std::fmt::Display for TomlParseFailureKind {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let display = match self {
Self::SyntaxError => "The supplied argument is not valid TOML",
Self::UnknownOption => {
"Could not parse the supplied argument as a `ruff.toml` configuration option"
}
Self::ExtendPassedViaConfigFlag => "Cannot include `extend` in a --config flag value",
}
};
write!(f, "{display}")
}
}

#[derive(Debug)]
struct TomlParseFailure {
kind: TomlParseFailureKind,
underlying_error: toml::de::Error,
}

impl std::fmt::Display for TomlParseFailure {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let TomlParseFailure {
kind,
underlying_error,
} = self;
let display = format!("{kind}:\n\n{underlying_error}");
write!(f, "{}", display.trim_end())
}
}
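One side of this hunk folds everything into a single enum; the other splits the failure into a coarse kind plus the underlying `toml::de::Error`, joined in `Display`. A dependency-free sketch of that second shape, with `String` standing in for the TOML error type:

use std::fmt::{self, Display, Formatter};

// Sketch of the kind-plus-source error shape; `underlying_error` is a
// stand-in for `toml::de::Error` so the sketch stays std-only.
#[derive(Debug)]
enum ParseFailureKind {
    SyntaxError,
    UnknownOption,
}

#[derive(Debug)]
struct ParseFailure {
    kind: ParseFailureKind,
    underlying_error: String,
}

impl Display for ParseFailure {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let kind = match self.kind {
            ParseFailureKind::SyntaxError => "The supplied argument is not valid TOML",
            ParseFailureKind::UnknownOption => {
                "Could not parse the supplied argument as a configuration option"
            }
        };
        // Trim, as above, so a trailing newline in the source error
        // does not leak into the rendered message.
        write!(f, "{kind}:\n\n{}", self.underlying_error.trim_end())
    }
}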

@@ -896,38 +818,27 @@ impl TypedValueParser for ConfigArgumentParser {
arg: Option<&clap::Arg>,
value: &std::ffi::OsStr,
) -> Result<Self::Value, clap::Error> {
// Convert to UTF-8.
let Some(value) = value.to_str() else {
// But respect non-UTF-8 paths.
let path_to_config_file = PathBuf::from(value);
if path_to_config_file.is_file() {
return Ok(SingleConfigArgument::FilePath(path_to_config_file));
}
return Err(clap::Error::new(clap::error::ErrorKind::InvalidUtf8));
};

// Expand environment variables and tildes.
if let Ok(path_to_config_file) =
shellexpand::full(value).map(|config| PathBuf::from(&*config))
{
if path_to_config_file.is_file() {
return Ok(SingleConfigArgument::FilePath(path_to_config_file));
}
let path_to_config_file = PathBuf::from(value);
if path_to_config_file.exists() {
return Ok(SingleConfigArgument::FilePath(path_to_config_file));
}

let config_parse_error = match toml::Table::from_str(value) {
Ok(table) => match table.try_into::<Options>() {
Ok(option) => {
if option.extend.is_none() {
return Ok(SingleConfigArgument::SettingsOverride(Arc::new(option)));
}
InvalidConfigFlagReason::ExtendPassedViaConfigFlag
}
Err(underlying_error) => {
InvalidConfigFlagReason::ValidTomlButInvalidRuffSchema(underlying_error)
}
let value = value
.to_str()
.ok_or_else(|| clap::Error::new(clap::error::ErrorKind::InvalidUtf8))?;

let toml_parse_error = match toml::Table::from_str(value) {
Ok(table) => match table.try_into() {
Ok(option) => return Ok(SingleConfigArgument::SettingsOverride(Arc::new(option))),
Err(underlying_error) => TomlParseFailure {
kind: TomlParseFailureKind::UnknownOption,
underlying_error,
},
},
Err(underlying_error) => TomlParseFailure {
kind: TomlParseFailureKind::SyntaxError,
underlying_error,
},
Err(underlying_error) => InvalidConfigFlagReason::InvalidToml(underlying_error),
};
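Both versions interpret `--config` in two stages: try the value as a path first (one side expands `~` and environment variables via `shellexpand`), and only then fall back to parsing it as inline TOML. A hedged sketch of that flow using the same `shellexpand` and `toml` crates the diff imports; `ConfigValue` is an illustrative stand-in for `SingleConfigArgument`:

use std::path::PathBuf;
use std::str::FromStr;

// Illustrative stand-in for `SingleConfigArgument`.
enum ConfigValue {
    FilePath(PathBuf),
    InlineToml(toml::Table),
}

fn interpret_config_value(value: &str) -> Result<ConfigValue, toml::de::Error> {
    // Stage 1: a path to an existing config file wins, after expanding
    // tildes and environment variables.
    if let Ok(expanded) = shellexpand::full(value) {
        let path = PathBuf::from(&*expanded);
        if path.is_file() {
            return Ok(ConfigValue::FilePath(path));
        }
    }
    // Stage 2: otherwise the value must parse as an inline TOML table.
    toml::Table::from_str(value).map(ConfigValue::InlineToml)
}

Doing the path check first matters: a filename such as `ruff.toml` is not valid inline TOML, so the fallback order decides which error message the user sees.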

let mut new_error = clap::Error::new(clap::error::ErrorKind::ValueValidation).with_cmd(cmd);
@@ -942,21 +853,6 @@ impl TypedValueParser for ConfigArgumentParser {
clap::error::ContextValue::String(value.to_string()),
);

let underlying_error = match &config_parse_error {
InvalidConfigFlagReason::ExtendPassedViaConfigFlag => {
let tip = config_parse_error.description().into();
new_error.insert(
clap::error::ContextKind::Suggested,
clap::error::ContextValue::StyledStrs(vec![tip]),
);
return Err(new_error);
}
InvalidConfigFlagReason::InvalidToml(underlying_error)
| InvalidConfigFlagReason::ValidTomlButInvalidRuffSchema(underlying_error) => {
underlying_error
}
};

// small hack so that multiline tips
// have the same indent on the left-hand side:
let tip_indent = " ".repeat(" tip: ".len());
@@ -972,73 +868,80 @@ A `--config` flag must either be a path to a `.toml` configuration file
// the user was trying to pass in a path to a configuration file
// or some inline TOML.
// We want to display the most helpful error to the user as possible.
if Path::new(value)
if std::path::Path::new(value)
.extension()
.is_some_and(|ext| ext.eq_ignore_ascii_case("toml"))
.map_or(false, |ext| ext.eq_ignore_ascii_case("toml"))
{
if !value.contains('=') {
let _ = write!(
&mut tip,
tip.push_str(&format!(
"

It looks like you were trying to pass a path to a configuration file.
The path `{value}` does not point to a configuration file"
);
}
} else if let Some((key, value)) = value.split_once('=') {
let key = key.trim_ascii();
let value = value.trim_ascii_start();

match Options::metadata().find(key) {
Some(OptionEntry::Set(set)) if !value.starts_with('{') => {
let prefixed_subfields = set
.collect_fields()
.iter()
.map(|(name, _)| format!("- `{key}.{name}`"))
.join("\n");

let _ = write!(
&mut tip,
"

`{key}` is a table of configuration options.
Did you want to override one of the table's subkeys?

Possible choices:

{prefixed_subfields}"
);
}
_ => {
let _ = write!(
&mut tip,
"\n\n{}:\n\n{underlying_error}",
config_parse_error.description()
);
}
The path `{value}` does not exist"
));
}
} else if value.contains('=') {
tip.push_str(&format!("\n\n{toml_parse_error}"));
}
let tip = tip.trim_end().to_owned().into();

new_error.insert(
clap::error::ContextKind::Suggested,
clap::error::ContextValue::StyledStrs(vec![tip]),
clap::error::ContextValue::StyledStrs(vec![tip.into()]),
);

Err(new_error)
}
}
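Both versions of `parse_ref` end the same way: attach the offending value and a suggested "tip" to a clap validation error. A sketch of that assembly, using only the clap 4 calls visible in the diff itself:

// Sketch: build a clap validation error carrying the bad value and a
// multi-line suggestion, mirroring the tail of `parse_ref` above.
fn value_validation_error(cmd: &clap::Command, value: &str, tip: String) -> clap::Error {
    let mut err = clap::Error::new(clap::error::ErrorKind::ValueValidation).with_cmd(cmd);
    err.insert(
        clap::error::ContextKind::InvalidValue,
        clap::error::ContextValue::String(value.to_string()),
    );
    err.insert(
        clap::error::ContextKind::Suggested,
        clap::error::ContextValue::StyledStrs(vec![tip.into()]),
    );
    err
}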

fn resolve_output_format(
output_format: Option<SerializationFormat>,
show_sources: Option<bool>,
preview: bool,
) -> Option<SerializationFormat> {
Some(match (output_format, show_sources) {
(Some(o), None) => o,
(Some(SerializationFormat::Grouped), Some(true)) => {
warn_user!("`--show-source` with `--output-format=grouped` is deprecated, and will not show source files. Use `--output-format=full` to show source information.");
SerializationFormat::Grouped
}
(Some(fmt), Some(true)) => {
warn_user!("The `--show-source` argument is deprecated and has been ignored in favor of `--output-format={fmt}`.");
fmt
}
(Some(fmt), Some(false)) => {
warn_user!("The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format={fmt}`.");
fmt
}
(None, Some(true)) => {
warn_user!("The `--show-source` argument is deprecated. Use `--output-format=full` instead.");
SerializationFormat::Full
}
(None, Some(false)) => {
warn_user!("The `--no-show-source` argument is deprecated. Use `--output-format=concise` instead.");
SerializationFormat::Concise
}
(None, None) => return None
}).map(|format| match format {
SerializationFormat::Text => {
warn_user!("`--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `{}`.", SerializationFormat::default(preview));
SerializationFormat::default(preview)
},
other => other
})
}
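`resolve_output_format` funnels the deprecated `--show-source`/`--no-show-source` flags into the new `--output-format` value, warning rather than failing. A std-only sketch of the same funnel with an illustrative two-variant enum:

// Sketch of the deprecation funnel: an explicit format always wins,
// legacy bool flags map onto the new enum with a warning, and the
// function stays `None` only when neither was given.
#[derive(Debug, Clone, Copy, PartialEq)]
enum OutputFormat {
    Full,
    Concise,
}

fn resolve(explicit: Option<OutputFormat>, show_source: Option<bool>) -> Option<OutputFormat> {
    match (explicit, show_source) {
        (Some(fmt), _) => Some(fmt),
        (None, Some(true)) => {
            eprintln!("warning: `--show-source` is deprecated; use `--output-format=full`");
            Some(OutputFormat::Full)
        }
        (None, Some(false)) => {
            eprintln!("warning: `--no-show-source` is deprecated; use `--output-format=concise`");
            Some(OutputFormat::Concise)
        }
        (None, None) => None,
    }
}

Keeping the legacy flags alive but advisory gives users a release cycle to migrate before the flags are removed outright.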

/// CLI settings that are distinct from configuration (commands, lists of files,
/// etc.).
#[expect(clippy::struct_excessive_bools)]
#[allow(clippy::struct_excessive_bools)]
pub struct CheckArguments {
pub add_noqa: bool,
pub diff: bool,
pub ecosystem_ci: bool,
pub exit_non_zero_on_fix: bool,
pub exit_zero: bool,
pub files: Vec<PathBuf>,
pub ignore_noqa: bool,
pub isolated: bool,
pub no_cache: bool,
pub output_file: Option<PathBuf>,
pub show_files: bool,
@@ -1050,15 +953,15 @@ pub struct CheckArguments {

/// CLI settings that are distinct from configuration (commands, lists of files,
/// etc.).
#[expect(clippy::struct_excessive_bools)]
#[allow(clippy::struct_excessive_bools)]
pub struct FormatArguments {
pub check: bool,
pub no_cache: bool,
pub diff: bool,
pub files: Vec<PathBuf>,
pub isolated: bool,
pub stdin_filename: Option<PathBuf>,
pub range: Option<FormatRange>,
pub exit_non_zero_on_format: bool,
}

/// A text range specified by line and column numbers.
@@ -1073,9 +976,8 @@ impl FormatRange {
///
/// Returns an empty range if the start range is past the end of `source`.
pub(super) fn to_text_range(self, source: &str, line_index: &LineIndex) -> TextRange {
let start_byte_offset =
line_index.offset(self.start.into(), source, PositionEncoding::Utf32);
let end_byte_offset = line_index.offset(self.end.into(), source, PositionEncoding::Utf32);
let start_byte_offset = line_index.offset(self.start.line, self.start.column, source);
let end_byte_offset = line_index.offset(self.end.line, self.end.column, source);

TextRange::new(start_byte_offset, end_byte_offset)
}
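The `to_text_range` change swaps one `LineIndex::offset` signature for another, but the underlying job is the same: turn a one-indexed line/column pair into a byte offset. A std-only sketch of that conversion (counting characters, where the diff's `PositionEncoding::Utf32` counts code points):

// Sketch: map a one-indexed (line, column) onto a byte offset.
// Returns None if the position is past the end of `source`.
fn offset(source: &str, line: usize, column: usize) -> Option<usize> {
    // Find the byte offset where `line` starts.
    let mut line_start = 0;
    for _ in 1..line {
        line_start += source[line_start..].find('\n')? + 1;
    }
    // Advance `column - 1` characters into that line.
    source[line_start..]
        .char_indices()
        .nth(column.checked_sub(1)?)
        .map(|(i, _)| line_start + i)
}

For example, `offset("a\nbc", 2, 1)` yields `Some(2)`, the byte offset of `b`; the character-based stepping is what keeps multi-byte UTF-8 input from producing offsets inside a code point.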
@@ -1126,10 +1028,10 @@ impl std::fmt::Display for FormatRangeParseError {
write!(
f,
"the start position '{start_invalid}' is greater than the end position '{end_invalid}'.\n {tip} Try switching start and end: '{end}-{start}'",
start_invalid = start.to_string().bold().yellow(),
end_invalid = end.to_string().bold().yellow(),
start = start.to_string().green().bold(),
end = end.to_string().green().bold()
start_invalid=start.to_string().bold().yellow(),
end_invalid=end.to_string().bold().yellow(),
start=start.to_string().green().bold(),
end=end.to_string().green().bold()
)
}
FormatRangeParseError::InvalidStart(inner) => inner.write(f, true),
@@ -1146,15 +1048,6 @@ pub struct LineColumn {
pub column: OneIndexed,
}

impl From<LineColumn> for ruff_source_file::SourceLocation {
fn from(value: LineColumn) -> Self {
Self {
line: value.line,
character_offset: value.column,
}
}
}

impl std::fmt::Display for LineColumn {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "{line}:{column}", line = self.line, column = self.column)
@@ -1230,53 +1123,40 @@ impl LineColumnParseError {

match self {
LineColumnParseError::ColumnParseError(inner) => {
write!(
f,
"the {range}s column is not a valid number ({inner})'\n {tip} The format is 'line:column'."
)
write!(f, "the {range}s column is not a valid number ({inner})'\n {tip} The format is 'line:column'.")
}
LineColumnParseError::LineParseError(inner) => {
write!(
f,
"the {range} line is not a valid number ({inner})\n {tip} The format is 'line:column'."
)
write!(f, "the {range} line is not a valid number ({inner})\n {tip} The format is 'line:column'.")
}
LineColumnParseError::ZeroColumnIndex { line } => {
write!(
f,
"the {range} column is 0, but it should be 1 or greater.\n {tip} The column numbers start at 1.\n {tip} Try {suggestion} instead.",
suggestion = format!("{line}:1").green().bold()
suggestion=format!("{line}:1").green().bold()
)
}
LineColumnParseError::ZeroLineIndex { column } => {
write!(
f,
"the {range} line is 0, but it should be 1 or greater.\n {tip} The line numbers start at 1.\n {tip} Try {suggestion} instead.",
suggestion = format!("1:{column}").green().bold()
suggestion=format!("1:{column}").green().bold()
)
}
LineColumnParseError::ZeroLineAndColumnIndex => {
write!(
f,
"the {range} line and column are both 0, but they should be 1 or greater.\n {tip} The line and column numbers start at 1.\n {tip} Try {suggestion} instead.",
suggestion = "1:1".to_string().green().bold()
suggestion="1:1".to_string().green().bold()
)
}
}
}
}

/// CLI settings that are distinct from configuration (commands, lists of files, etc.).
#[derive(Clone, Debug)]
pub struct AnalyzeGraphArgs {
pub files: Vec<PathBuf>,
pub direction: Direction,
pub python: Option<PathBuf>,
}

/// Configuration overrides provided via dedicated CLI flags:
/// `--line-length`, `--respect-gitignore`, etc.
#[derive(Clone, Default)]
#[allow(clippy::struct_excessive_bools)]
struct ExplicitConfigOverrides {
dummy_variable_rgx: Option<Regex>,
exclude: Option<Vec<FilePattern>>,
@@ -1293,7 +1173,7 @@ struct ExplicitConfigOverrides {
preview: Option<PreviewMode>,
respect_gitignore: Option<bool>,
select: Option<Vec<RuleSelector>>,
target_version: Option<ast::PythonVersion>,
target_version: Option<PythonVersion>,
unfixable: Option<Vec<RuleSelector>>,
// TODO(charlie): Captured in pyproject.toml as a default, but not part of `Settings`.
cache_dir: Option<PathBuf>,
@@ -1301,10 +1181,9 @@ struct ExplicitConfigOverrides {
fix_only: Option<bool>,
unsafe_fixes: Option<UnsafeFixes>,
force_exclude: Option<bool>,
output_format: Option<OutputFormat>,
output_format: Option<SerializationFormat>,
show_fixes: Option<bool>,
extension: Option<Vec<ExtensionPair>>,
detect_string_imports: Option<bool>,
}

impl ConfigurationTransformer for ExplicitConfigOverrides {
@@ -1389,9 +1268,6 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
if let Some(extension) = &self.extension {
config.extension = Some(extension.iter().cloned().collect());
}
if let Some(detect_string_imports) = &self.detect_string_imports {
config.analyze.detect_string_imports = Some(*detect_string_imports);
}

config
}
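`ExplicitConfigOverrides` implements the layering visible in the `ConfigurationTransformer` hunk above: every `Some` CLI flag overwrites the corresponding field of the loaded configuration, and every `None` leaves it untouched. A minimal sketch with illustrative field names:

// Sketch of the transform: CLI flags are Options, and only the set
// ones win over the file-based configuration.
#[derive(Default, Debug)]
struct Config {
    line_length: usize,
    preview: bool,
}

#[derive(Default)]
struct Overrides {
    line_length: Option<usize>,
    preview: Option<bool>,
}

impl Overrides {
    fn transform(&self, mut config: Config) -> Config {
        if let Some(line_length) = self.line_length {
            config.line_length = line_length;
        }
        if let Some(preview) = self.preview {
            config.preview = preview;
        }
        config
    }
}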

@@ -3,8 +3,8 @@ use std::fs::{self, File};
use std::hash::Hasher;
use std::io::{self, BufReader, Write};
use std::path::{Path, PathBuf};
use std::sync::Mutex;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Mutex;
use std::time::{Duration, SystemTime};

use anyhow::{Context, Result};
@@ -13,21 +13,21 @@ use itertools::Itertools;
use log::{debug, error};
use rayon::iter::ParallelIterator;
use rayon::iter::{IntoParallelIterator, ParallelBridge};
use ruff_linter::codes::Rule;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use tempfile::NamedTempFile;

use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_diagnostics::Fix;
use ruff_diagnostics::{DiagnosticKind, Fix};
use ruff_linter::message::Message;
use ruff_linter::package::PackageRoot;
use ruff_linter::{VERSION, warn_user};
use ruff_linter::{warn_user, VERSION};
use ruff_macros::CacheKey;
use ruff_notebook::NotebookIndex;
use ruff_python_ast::imports::ImportMap;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{Ranged, TextRange, TextSize};
use ruff_workspace::Settings;
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::resolver::Resolver;
use ruff_workspace::Settings;

use crate::diagnostics::Diagnostics;

@@ -86,7 +86,7 @@ pub(crate) struct Cache {
changes: Mutex<Vec<Change>>,
/// The "current" timestamp used as cache for the updates of
/// [`FileCache::last_seen`]
#[expect(clippy::struct_field_names)]
#[allow(clippy::struct_field_names)]
last_seen_cache: u64,
}

@@ -112,24 +112,23 @@ impl Cache {
return Cache::empty(path, package_root);
}
Err(err) => {
warn_user!("Failed to open cache file `{}`: {err}", path.display());
warn_user!("Failed to open cache file '{}': {err}", path.display());
return Cache::empty(path, package_root);
}
};

let mut package: PackageCache =
match bincode::decode_from_reader(BufReader::new(file), bincode::config::standard()) {
Ok(package) => package,
Err(err) => {
warn_user!("Failed parse cache file `{}`: {err}", path.display());
return Cache::empty(path, package_root);
}
};
let mut package: PackageCache = match bincode::deserialize_from(BufReader::new(file)) {
Ok(package) => package,
Err(err) => {
warn_user!("Failed parse cache file '{}': {err}", path.display());
return Cache::empty(path, package_root);
}
};

// Sanity check.
if package.package_root != package_root {
warn_user!(
"Different package root in cache: expected `{}`, got `{}`",
"Different package root in cache: expected '{}', got '{}'",
package_root.display(),
package.package_root.display(),
);
@@ -147,7 +146,7 @@ impl Cache {
Cache::new(path, package)
}

#[expect(clippy::cast_possible_truncation)]
#[allow(clippy::cast_possible_truncation)]
fn new(path: PathBuf, package: PackageCache) -> Self {
Cache {
path,
@@ -176,36 +175,24 @@ impl Cache {

// Serialize to in-memory buffer because hyperfine benchmark showed that it's faster than
// using a `BufWriter` and our cache files are small enough that streaming isn't necessary.
let serialized = bincode::encode_to_vec(&self.package, bincode::config::standard())
.context("Failed to serialize cache data")?;
let serialized =
bincode::serialize(&self.package).context("Failed to serialize cache data")?;
temp_file
.write_all(&serialized)
.context("Failed to write serialized cache to temporary file.")?;

if let Err(err) = temp_file.persist(&self.path) {
// On Windows, writing to the cache file can fail if the file is still open (e.g., if
// the user is running Ruff from multiple processes over the same directory).
if cfg!(windows) && err.error.kind() == io::ErrorKind::PermissionDenied {
warn_user!(
"Failed to write cache file `{}`: {}",
self.path.display(),
err.error
);
} else {
return Err(err).with_context(|| {
format!(
"Failed to rename temporary cache file to {}",
self.path.display()
)
});
}
}
temp_file.persist(&self.path).with_context(|| {
format!(
"Failed to rename temporary cache file to {}",
self.path.display()
)
})?;

Ok(())
}
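Both sides of this hunk write the serialized cache through a temporary file and then `persist` (an atomic rename) onto the final path, so readers never observe a half-written cache. A sketch of that pattern with the same `tempfile` and `anyhow` APIs the diff uses:

use std::io::Write;
use std::path::Path;

use anyhow::{Context, Result};
use tempfile::NamedTempFile;

// Sketch: atomic cache write. The temp file lives in the same directory
// as the target so the final rename stays on one filesystem.
fn store(path: &Path, bytes: &[u8]) -> Result<()> {
    let dir = path.parent().context("cache path has no parent directory")?;
    let mut temp = NamedTempFile::new_in(dir)?;
    temp.write_all(bytes)
        .context("Failed to write serialized cache to temporary file")?;
    temp.persist(path).with_context(|| {
        format!("Failed to rename temporary cache file to {}", path.display())
    })?;
    Ok(())
}

The newer side additionally downgrades a Windows `PermissionDenied` from `persist` to a warning, since another ruff process may hold the cache file open.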

/// Applies the pending changes without storing the cache to disk.
#[expect(clippy::cast_possible_truncation)]
#[allow(clippy::cast_possible_truncation)]
pub(crate) fn save(&mut self) -> bool {
/// Maximum duration for which we keep a file in cache that hasn't been seen.
const MAX_LAST_SEEN: Duration = Duration::from_secs(30 * 24 * 60 * 60); // 30 days.
@@ -312,7 +299,7 @@ impl Cache {
}

/// On disk representation of a cache of a package.
#[derive(bincode::Encode, Debug, bincode::Decode)]
#[derive(Deserialize, Debug, Serialize)]
struct PackageCache {
/// Path to the root of the package.
///
@@ -324,7 +311,7 @@ struct PackageCache {
}

/// On disk representation of the cache per source file.
#[derive(bincode::Decode, Debug, bincode::Encode)]
#[derive(Deserialize, Debug, Serialize)]
pub(crate) struct FileCache {
/// Key that determines if the cached item is still valid.
key: u64,
@@ -347,17 +334,12 @@ impl FileCache {
let file = SourceFileBuilder::new(path.to_string_lossy(), &*lint.source).finish();
lint.messages
.iter()
.map(|msg| {
Message::diagnostic(
msg.body.clone(),
msg.suggestion.clone(),
msg.range,
msg.fix.clone(),
msg.parent,
file.clone(),
msg.noqa_offset,
msg.rule,
)
.map(|msg| Message {
kind: msg.kind.clone(),
range: msg.range,
fix: msg.fix.clone(),
file: file.clone(),
noqa_offset: msg.noqa_offset,
})
.collect()
};
@@ -366,12 +348,12 @@ impl FileCache {
} else {
FxHashMap::default()
};
Diagnostics::new(messages, notebook_indexes)
Diagnostics::new(messages, lint.imports.clone(), notebook_indexes)
})
}
}

#[derive(Debug, Default, bincode::Decode, bincode::Encode)]
#[derive(Debug, Default, Deserialize, Serialize)]
struct FileCacheData {
lint: Option<LintCacheData>,
formatted: bool,
@@ -393,26 +375,24 @@ pub(crate) fn init(path: &Path) -> Result<()> {
fs::create_dir_all(path.join(VERSION))?;

// Add the CACHEDIR.TAG.
cachedir::ensure_tag(path)?;
if !cachedir::is_tagged(path)? {
cachedir::add_tag(path)?;
}

// Add the .gitignore.
match fs::OpenOptions::new()
.write(true)
.create_new(true)
.open(path.join(".gitignore"))
{
Ok(mut file) => file.write_all(b"# Automatically created by ruff.\n*\n")?,
Err(err) if err.kind() == io::ErrorKind::AlreadyExists => (),
Err(err) => return Err(err.into()),
let gitignore_path = path.join(".gitignore");
if !gitignore_path.exists() {
let mut file = fs::File::create(gitignore_path)?;
file.write_all(b"*")?;
}

Ok(())
}
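The newer `init` replaces an exists-then-create race with `create_new`, which makes the existence check and the creation a single atomic filesystem operation; two concurrent ruff processes can no longer both "win" and clobber each other. The same pattern in isolation:

use std::fs;
use std::io::{self, Write};
use std::path::Path;

// Sketch: race-free "create the file only if it does not exist yet".
fn write_gitignore(cache_dir: &Path) -> io::Result<()> {
    match fs::OpenOptions::new()
        .write(true)
        .create_new(true) // fails with AlreadyExists instead of truncating
        .open(cache_dir.join(".gitignore"))
    {
        Ok(mut file) => file.write_all(b"# Automatically created by ruff.\n*\n"),
        Err(err) if err.kind() == io::ErrorKind::AlreadyExists => Ok(()),
        Err(err) => Err(err),
    }
}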

#[derive(bincode::Decode, Debug, bincode::Encode, PartialEq)]
#[derive(Deserialize, Debug, Serialize, PartialEq)]
pub(crate) struct LintCacheData {
/// Imports made.
// pub(super) imports: ImportMap,
pub(super) imports: ImportMap,
/// Diagnostic messages.
pub(super) messages: Vec<CacheMessage>,
/// Source code of the file.
@@ -422,47 +402,41 @@ pub(crate) struct LintCacheData {
/// This will be empty if `messages` is empty.
pub(super) source: String,
/// Notebook index if this file is a Jupyter Notebook.
#[bincode(with_serde)]
pub(super) notebook_index: Option<NotebookIndex>,
}

impl LintCacheData {
pub(crate) fn from_messages(
messages: &[Message],
imports: ImportMap,
notebook_index: Option<NotebookIndex>,
) -> Self {
let source = if let Some(msg) = messages.first() {
msg.source_file().source_text().to_owned()
msg.file.source_text().to_owned()
} else {
String::new() // No messages, no need to keep the source!
};

let messages = messages
.iter()
// Parse the kebab-case rule name into a `Rule`. This will fail for syntax errors, so
// this also serves to filter them out, but we shouldn't be caching files with syntax
// errors anyway.
.filter_map(|msg| Some((msg.name().parse().ok()?, msg)))
.map(|(rule, msg)| {
.map(|msg| {
// Make sure that all message use the same source file.
assert_eq!(
msg.source_file(),
messages.first().unwrap().source_file(),
msg.file,
messages.first().unwrap().file,
"message uses a different source file"
);
CacheMessage {
rule,
body: msg.body().to_string(),
suggestion: msg.suggestion().map(ToString::to_string),
range: msg.range(),
parent: msg.parent,
fix: msg.fix().cloned(),
noqa_offset: msg.noqa_offset(),
kind: msg.kind.clone(),
range: msg.range,
fix: msg.fix.clone(),
noqa_offset: msg.noqa_offset,
}
})
.collect();

Self {
imports,
messages,
source,
notebook_index,
@@ -471,24 +445,13 @@ impl LintCacheData {
}

/// On disk representation of a diagnostic message.
#[derive(bincode::Decode, Debug, bincode::Encode, PartialEq)]
#[derive(Deserialize, Debug, Serialize, PartialEq)]
pub(super) struct CacheMessage {
/// The rule for the cached diagnostic.
#[bincode(with_serde)]
rule: Rule,
/// The message body to display to the user, to explain the diagnostic.
body: String,
/// The message to display to the user, to explain the suggested fix.
suggestion: Option<String>,
kind: DiagnosticKind,
/// Range into the message's [`FileCache::source`].
#[bincode(with_serde)]
range: TextRange,
#[bincode(with_serde)]
parent: Option<TextSize>,
#[bincode(with_serde)]
fix: Option<Fix>,
#[bincode(with_serde)]
noqa_offset: Option<TextSize>,
noqa_offset: TextSize,
}
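The derive swap above (`serde` on one side of the compare, `bincode::Encode`/`bincode::Decode` plus `#[bincode(with_serde)]` for serde-only fields on the other) implies the bincode 2 API. A hedged round-trip sketch under that assumption; `Entry` and its fields are illustrative:

use anyhow::Result;

// Sketch: bincode 2 round trip with derived Encode/Decode
// (requires bincode's "derive" feature).
#[derive(Debug, PartialEq, bincode::Encode, bincode::Decode)]
struct Entry {
    key: u64,
    formatted: bool,
}

fn round_trip(entry: &Entry) -> Result<Entry> {
    let bytes = bincode::encode_to_vec(entry, bincode::config::standard())?;
    let (decoded, _bytes_read) = bincode::decode_from_slice(&bytes, bincode::config::standard())?;
    Ok(decoded)
}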

pub(crate) trait PackageCaches {
@@ -520,7 +483,7 @@ pub(crate) struct PackageCacheMap<'a>(FxHashMap<&'a Path, Cache>);

impl<'a> PackageCacheMap<'a> {
pub(crate) fn init(
package_roots: &FxHashMap<&'a Path, Option<PackageRoot<'a>>>,
package_roots: &FxHashMap<&'a Path, Option<&'a Path>>,
resolver: &Resolver,
) -> Self {
fn init_cache(path: &Path) {
@@ -536,9 +499,7 @@ impl<'a> PackageCacheMap<'a> {
Self(
package_roots
.iter()
.map(|(package, package_root)| {
package_root.map(PackageRoot::path).unwrap_or(package)
})
.map(|(package, package_root)| package_root.unwrap_or(package))
.unique()
.par_bridge()
.map(|cache_root| {
@@ -606,23 +567,20 @@ mod tests {
use std::time::SystemTime;

use anyhow::Result;
use filetime::{FileTime, set_file_mtime};
use filetime::{set_file_mtime, FileTime};
use itertools::Itertools;
use ruff_linter::settings::LinterSettings;
use test_case::test_case;

use ruff_cache::CACHE_DIR_NAME;
use ruff_linter::message::Message;
use ruff_linter::package::PackageRoot;
use ruff_linter::settings::flags;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_python_ast::{PySourceType, PythonVersion};
use ruff_python_ast::PySourceType;
use ruff_workspace::Settings;

use crate::cache::{self, FileCache, FileCacheData, FileCacheKey};
use crate::cache::{Cache, RelativePathBuf};
use crate::commands::format::{FormatCommandError, FormatMode, FormatResult, format_path};
use crate::diagnostics::{Diagnostics, lint_path};
use crate::commands::format::{format_path, FormatCommandError, FormatMode, FormatResult};
use crate::diagnostics::{lint_path, Diagnostics};

#[test_case("../ruff_linter/resources/test/fixtures", "ruff_tests/cache_same_results_ruff_linter"; "ruff_linter_fixtures")]
#[test_case("../ruff_notebook/resources/test/fixtures", "ruff_tests/cache_same_results_ruff_notebook"; "ruff_notebook_fixtures")]
@@ -634,10 +592,6 @@ mod tests {

let settings = Settings {
cache_dir,
linter: LinterSettings {
unresolved_target_version: PythonVersion::latest().into(),
..Default::default()
},
..Settings::default()
};

@@ -672,7 +626,7 @@ mod tests {

let diagnostics = lint_path(
&path,
Some(PackageRoot::root(&package_root)),
Some(&package_root),
&settings.linter,
Some(&cache),
flags::Noqa::Enabled,
@@ -680,7 +634,11 @@ mod tests {
UnsafeFixes::Enabled,
)
.unwrap();
if diagnostics.messages.iter().any(Message::is_syntax_error) {
if diagnostics
.messages
.iter()
.any(|m| m.kind.name == "SyntaxError")
{
parse_errors.push(path.clone());
}
paths.push(path);
@@ -705,7 +663,7 @@ mod tests {

assert!(
cache.package.files.contains_key(relative_path),
"missing file from cache: `{}`",
"missing file from cache: '{}'",
relative_path.display()
);
}
@@ -714,7 +672,7 @@ mod tests {
for path in paths {
got_diagnostics += lint_path(
&path,
Some(PackageRoot::root(&package_root)),
Some(&package_root),
&settings.linter,
Some(&cache),
flags::Noqa::Enabled,
@@ -724,10 +682,7 @@ mod tests {
.unwrap();
}

assert_eq!(
expected_diagnostics, got_diagnostics,
"left == {expected_diagnostics:#?}, right == {got_diagnostics:#?}",
);
assert_eq!(expected_diagnostics, got_diagnostics);
}

#[test]
@@ -853,6 +808,7 @@ mod tests {
// Regression test for issue #3086.

#[cfg(unix)]
#[allow(clippy::items_after_statements)]
fn flip_execute_permission_bit(path: &Path) -> io::Result<()> {
use std::os::unix::fs::PermissionsExt;
let file = fs::OpenOptions::new().write(true).open(path)?;
@@ -861,6 +817,7 @@ mod tests {
}

#[cfg(windows)]
#[allow(clippy::items_after_statements)]
fn flip_read_only_permission(path: &Path) -> io::Result<()> {
let file = fs::OpenOptions::new().write(true).open(path)?;
let mut perms = file.metadata()?.permissions();
@@ -1088,7 +1045,7 @@ mod tests {
) -> Result<Diagnostics, anyhow::Error> {
lint_path(
&self.package_root.join(path),
Some(PackageRoot::root(&self.package_root)),
Some(&self.package_root),
&self.settings.linter,
Some(cache),
flags::Noqa::Enabled,

@@ -10,9 +10,7 @@ use ruff_linter::linter::add_noqa_to_path;
use ruff_linter::source_kind::SourceKind;
use ruff_linter::warn_user_once;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{
PyprojectConfig, ResolvedFile, match_exclusion, python_files_in_path,
};
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};

use crate::args::ConfigArguments;

@@ -26,7 +24,7 @@ pub(crate) fn add_noqa(
let start = Instant::now();
let (paths, resolver) = python_files_in_path(files, pyproject_config, config_arguments)?;
let duration = start.elapsed();
debug!("Identified files to lint in: {duration:?}");
debug!("Identified files to lint in: {:?}", duration);

if paths.is_empty() {
warn_user_once!("No Python files found under the given path(s)");
@@ -59,15 +57,6 @@ pub(crate) fn add_noqa(
.and_then(|parent| package_roots.get(parent))
.and_then(|package| *package);
let settings = resolver.resolve(path);
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
&& match_exclusion(
resolved_file.path(),
resolved_file.file_name(),
&settings.linter.exclude,
)
{
return None;
}
let source_kind = match SourceKind::from_path(path, source_type) {
Ok(Some(source_kind)) => source_kind,
Ok(None) => return None,
@@ -87,7 +76,7 @@ pub(crate) fn add_noqa(
.sum();

let duration = start.elapsed();
debug!("Added noqa to files in: {duration:?}");
debug!("Added noqa to files in: {:?}", duration);

Ok(modifications)
}
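`add_noqa` brackets both phases (file discovery, then rewriting) with `Instant::now()` and a `debug!` of the elapsed time. A std-only helper sketch of that timing pattern:

use std::time::Instant;

// Sketch: run a closure and log how long it took, mirroring the
// start/elapsed bookkeeping in `add_noqa` above.
fn timed<T>(label: &str, work: impl FnOnce() -> T) -> T {
    let start = Instant::now();
    let result = work();
    eprintln!("{label} in: {:?}", start.elapsed());
    result
}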

@@ -1,262 +0,0 @@
use crate::args::{AnalyzeGraphArgs, ConfigArguments};
use crate::resolve::resolve;
use crate::{ExitStatus, resolve_default_files};
use anyhow::Result;
use log::{debug, warn};
use path_absolutize::CWD;
use ruff_db::system::{SystemPath, SystemPathBuf};
use ruff_graph::{Direction, ImportMap, ModuleDb, ModuleImports};
use ruff_linter::package::PackageRoot;
use ruff_linter::{warn_user, warn_user_once};
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{ResolvedFile, match_exclusion, python_files_in_path};
use rustc_hash::FxHashMap;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};

/// Generate an import map.
pub(crate) fn analyze_graph(
args: AnalyzeGraphArgs,
config_arguments: &ConfigArguments,
) -> Result<ExitStatus> {
// Construct the "default" settings. These are used when no `pyproject.toml`
// files are present, or files are injected from outside the hierarchy.
let pyproject_config = resolve(config_arguments, None)?;
if pyproject_config.settings.analyze.preview.is_disabled() {
warn_user!("`ruff analyze graph` is experimental and may change without warning");
}

// Write all paths relative to the current working directory.
let root =
SystemPathBuf::from_path_buf(CWD.clone()).expect("Expected a UTF-8 working directory");

// Find all Python files.
let files = resolve_default_files(args.files, false);
let (paths, resolver) = python_files_in_path(&files, &pyproject_config, config_arguments)?;

if paths.is_empty() {
warn_user_once!("No Python files found under the given path(s)");
return Ok(ExitStatus::Success);
}

// Resolve all package roots.
let package_roots = resolver
.package_roots(
&paths
.iter()
.flatten()
.map(ResolvedFile::path)
.collect::<Vec<_>>(),
)
.into_iter()
.map(|(path, package)| {
(
path.to_path_buf(),
package.map(PackageRoot::path).map(Path::to_path_buf),
)
})
.collect::<FxHashMap<_, _>>();

// Create a database from the source roots.
let src_roots = package_roots
.values()
.filter_map(|package| package.as_deref())
.filter_map(|package| package.parent())
.map(Path::to_path_buf)
.filter_map(|path| SystemPathBuf::from_path_buf(path).ok())
.collect();

let db = ModuleDb::from_src_roots(
src_roots,
pyproject_config
.settings
.analyze
.target_version
.as_tuple()
.into(),
args.python
.and_then(|python| SystemPathBuf::from_path_buf(python).ok()),
)?;

let imports = {
// Create a cache for resolved globs.
let glob_resolver = Arc::new(Mutex::new(GlobResolver::default()));

// Collect and resolve the imports for each file.
let result = Arc::new(Mutex::new(Vec::new()));
let inner_result = Arc::clone(&result);
let db = db.clone();

rayon::scope(move |scope| {
for resolved_file in paths {
let Ok(resolved_file) = resolved_file else {
continue;
};

let path = resolved_file.path();
let package = path
.parent()
.and_then(|parent| package_roots.get(parent))
.and_then(Clone::clone);

// Resolve the per-file settings.
let settings = resolver.resolve(path);
let string_imports = settings.analyze.detect_string_imports;
let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();

// Skip excluded files.
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
&& match_exclusion(
resolved_file.path(),
resolved_file.file_name(),
&settings.analyze.exclude,
)
{
continue;
}

// Ignore non-Python files.
let source_type = match settings.analyze.extension.get(path) {
None => match SourceType::from(&path) {
SourceType::Python(source_type) => source_type,
SourceType::Toml(_) => {
debug!("Ignoring TOML file: {}", path.display());
continue;
}
},
Some(language) => PySourceType::from(language),
};
if matches!(source_type, PySourceType::Ipynb) {
debug!("Ignoring Jupyter notebook: {}", path.display());
continue;
}

// Convert to system paths.
let Ok(package) = package.map(SystemPathBuf::from_path_buf).transpose() else {
warn!("Failed to convert package to system path");
continue;
};
let Ok(path) = SystemPathBuf::from_path_buf(resolved_file.into_path()) else {
warn!("Failed to convert path to system path");
continue;
};

let db = db.clone();
let glob_resolver = glob_resolver.clone();
let root = root.clone();
let result = inner_result.clone();
scope.spawn(move |_| {
// Identify any imports via static analysis.
let mut imports =
ModuleImports::detect(&db, &path, package.as_deref(), string_imports)
.unwrap_or_else(|err| {
warn!("Failed to generate import map for {path}: {err}");
ModuleImports::default()
});

debug!("Discovered {} imports for {}", imports.len(), path);

// Append any imports that were statically defined in the configuration.
if let Some((root, globs)) = include_dependencies {
let mut glob_resolver = glob_resolver.lock().unwrap();
imports.extend(glob_resolver.resolve(root, globs));
}

// Convert the path (and imports) to be relative to the working directory.
let path = path
.strip_prefix(&root)
.map(SystemPath::to_path_buf)
.unwrap_or(path);
let imports = imports.relative_to(&root);

result.lock().unwrap().push((path, imports));
});
}
});
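The `rayon::scope` block above spawns one task per resolved file and funnels every result into an `Arc<Mutex<Vec<_>>>`, which is unwrapped only after the scope guarantees all tasks have finished. A self-contained sketch of that fan-out/fan-in shape:

use std::sync::{Arc, Mutex};

// Sketch: spawn one rayon task per input, collect into a shared Vec.
fn process_all(inputs: Vec<String>) -> Vec<(String, usize)> {
    let result = Arc::new(Mutex::new(Vec::new()));
    let inner = Arc::clone(&result);
    rayon::scope(move |scope| {
        for input in inputs {
            let result = inner.clone();
            scope.spawn(move |_| {
                let n = input.len(); // stand-in for per-file import detection
                result.lock().unwrap().push((input, n));
            });
        }
    });
    // `scope` has joined every task, so this is the last Arc standing.
    Arc::into_inner(result).unwrap().into_inner().unwrap()
}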

// Collect the results.
Arc::into_inner(result).unwrap().into_inner()?
};

// Generate the import map.
let import_map = match args.direction {
Direction::Dependencies => ImportMap::dependencies(imports),
Direction::Dependents => ImportMap::dependents(imports),
};

// Print to JSON.
writeln!(
std::io::stdout(),
"{}",
serde_json::to_string_pretty(&import_map)?
)?;

std::mem::forget(db);

Ok(ExitStatus::Success)
}

/// A resolver for glob sets.
#[derive(Default, Debug)]
struct GlobResolver {
cache: GlobCache,
}

impl GlobResolver {
/// Resolve a set of globs, anchored at a given root.
fn resolve(&mut self, root: PathBuf, globs: Vec<String>) -> Vec<SystemPathBuf> {
if let Some(cached) = self.cache.get(&root, &globs) {
return cached.clone();
}

let walker = match globwalk::GlobWalkerBuilder::from_patterns(&root, &globs)
.file_type(globwalk::FileType::FILE)
.build()
{
Ok(walker) => walker,
Err(err) => {
warn!("Failed to read glob walker: {err}");
return Vec::new();
}
};

let mut paths = Vec::new();
for entry in walker {
let entry = match entry {
Ok(entry) => entry,
Err(err) => {
warn!("Failed to read glob entry: {err}");
continue;
}
};
let path = match SystemPathBuf::from_path_buf(entry.into_path()) {
Ok(path) => path,
Err(err) => {
warn!("Failed to convert path to system path: {}", err.display());
continue;
}
};
paths.push(path);
}

self.cache.insert(root, globs, paths.clone());
paths
}
}

/// A cache for resolved globs.
#[derive(Default, Debug)]
struct GlobCache(FxHashMap<PathBuf, FxHashMap<Vec<String>, Vec<SystemPathBuf>>>);

impl GlobCache {
/// Insert a resolved glob.
fn insert(&mut self, root: PathBuf, globs: Vec<String>, paths: Vec<SystemPathBuf>) {
self.0.entry(root).or_default().insert(globs, paths);
}

/// Get a resolved glob.
fn get(&self, root: &Path, globs: &[String]) -> Option<&Vec<SystemPathBuf>> {
self.0.get(root).and_then(|map| map.get(globs))
}
}
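`GlobCache` memoizes glob resolution keyed by `(root, glob set)`, so identical `include-dependencies` entries across files resolve only once. A std-only sketch of the same two-level map, with the actual resolution passed in as a closure:

use std::collections::HashMap;
use std::path::PathBuf;

// Sketch: memoized glob resolution keyed by (root, glob set).
#[derive(Default)]
struct Memo(HashMap<PathBuf, HashMap<Vec<String>, Vec<PathBuf>>>);

impl Memo {
    fn get_or_insert_with(
        &mut self,
        root: PathBuf,
        globs: Vec<String>,
        resolve: impl FnOnce(&PathBuf, &[String]) -> Vec<PathBuf>,
    ) -> &Vec<PathBuf> {
        self.0
            .entry(root.clone())
            .or_default()
            .entry(globs)
            // Only invoked on a cache miss; hits return the stored paths.
            .or_insert_with_key(|globs| resolve(&root, globs))
    }
}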
@@ -11,25 +11,26 @@ use log::{debug, error, warn};
use rayon::prelude::*;
use rustc_hash::FxHashMap;

use ruff_db::panic::catch_unwind;
use ruff_linter::OldDiagnostic;
use ruff_diagnostics::Diagnostic;
use ruff_linter::message::Message;
use ruff_linter::package::PackageRoot;
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::{IOError, fs, warn_user_once};
use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::{fs, warn_user_once, IOError};
use ruff_python_ast::imports::ImportMap;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange;
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::resolver::{
PyprojectConfig, ResolvedFile, match_exclusion, python_files_in_path,
match_exclusion, python_files_in_path, PyprojectConfig, ResolvedFile,
};

use crate::args::ConfigArguments;
use crate::cache::{Cache, PackageCacheMap, PackageCaches};
use crate::diagnostics::Diagnostics;
use crate::panic::catch_unwind;

/// Run the linter over a collection of files.
#[allow(clippy::too_many_arguments)]
pub(crate) fn check(
files: &[PathBuf],
pyproject_config: &PyprojectConfig,
@@ -59,7 +60,7 @@ pub(crate) fn check(
);

// Load the caches.
let caches = if cache.is_enabled() {
let caches = if bool::from(cache) {
Some(PackageCacheMap::init(&package_roots, &resolver))
} else {
None
@@ -87,9 +88,7 @@ pub(crate) fn check(
return None;
}

let cache_root = package
.map(PackageRoot::path)
.unwrap_or_else(|| path.parent().unwrap_or(path));
let cache_root = package.unwrap_or_else(|| path.parent().unwrap_or(path));
let cache = caches.get(cache_root);

lint_path(
@@ -131,9 +130,11 @@ pub(crate) fn check(

Diagnostics::new(
vec![Message::from_diagnostic(
OldDiagnostic::new(IOError { message }, TextRange::default(), &dummy),
None,
Diagnostic::new(IOError { message }, TextRange::default()),
dummy,
TextSize::default(),
)],
ImportMap::default(),
FxHashMap::default(),
)
} else {
@@ -172,16 +173,17 @@ pub(crate) fn check(
caches.persist()?;

let duration = start.elapsed();
debug!("Checked {checked_files:?} files in: {duration:?}");
debug!("Checked {:?} files in: {:?}", checked_files, duration);

Ok(all_diagnostics)
}
||||
|
||||
/// Wraps [`lint_path`](crate::diagnostics::lint_path) in a [`catch_unwind`](std::panic::catch_unwind) and emits
|
||||
/// a diagnostic if the linting the file panics.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn lint_path(
|
||||
path: &Path,
|
||||
package: Option<PackageRoot<'_>>,
|
||||
package: Option<&Path>,
|
||||
settings: &LinterSettings,
|
||||
cache: Option<&Cache>,
|
||||
noqa: flags::Noqa,
|
||||
@@ -227,9 +229,9 @@ mod test {
|
||||
use ruff_linter::message::{Emitter, EmitterContext, TextEmitter};
|
||||
use ruff_linter::registry::Rule;
|
||||
use ruff_linter::settings::types::UnsafeFixes;
|
||||
use ruff_linter::settings::{LinterSettings, flags};
|
||||
use ruff_workspace::Settings;
|
||||
use ruff_linter::settings::{flags, LinterSettings};
|
||||
use ruff_workspace::resolver::{PyprojectConfig, PyprojectDiscoveryStrategy};
|
||||
use ruff_workspace::Settings;
|
||||
|
||||
use crate::args::ConfigArguments;
|
||||
|
||||
@@ -250,7 +252,6 @@ mod test {
|
||||
for file in [&pyproject_toml, &python_file, ¬ebook] {
|
||||
fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.truncate(true)
|
||||
.write(true)
|
||||
.mode(0o000)
|
||||
.open(file)?;
|
||||
@@ -268,7 +269,8 @@ mod test {
|
||||
|
||||
// Run
|
||||
let diagnostics = check(
|
||||
&[tempdir.path().to_path_buf()],
|
||||
// Notebooks are not included by default
|
||||
&[tempdir.path().to_path_buf(), notebook],
|
||||
&pyproject_config,
|
||||
&ConfigArguments::default(),
|
||||
flags::Cache::Disabled,
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use ruff_linter::package::PackageRoot;
|
||||
|
||||
use ruff_linter::packaging;
|
||||
use ruff_linter::settings::flags;
|
||||
use ruff_workspace::resolver::{PyprojectConfig, Resolver, match_exclusion, python_file_at_path};
|
||||
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, PyprojectConfig, Resolver};
|
||||
|
||||
use crate::args::ConfigArguments;
|
||||
use crate::diagnostics::{Diagnostics, lint_stdin};
|
||||
use crate::diagnostics::{lint_stdin, Diagnostics};
|
||||
use crate::stdin::{parrot_stdin, read_from_stdin};
|
||||
|
||||
/// Run the linter over a single file, read from `stdin`.
|
||||
@@ -42,7 +42,6 @@ pub(crate) fn check_stdin(
|
||||
let stdin = read_from_stdin()?;
|
||||
let package_root = filename.and_then(Path::parent).and_then(|path| {
|
||||
packaging::detect_package_root(path, &resolver.base_settings().linter.namespace_packages)
|
||||
.map(PackageRoot::root)
|
||||
});
|
||||
let mut diagnostics = lint_stdin(
|
||||
filename,
|
||||
|
||||
@@ -1,156 +0,0 @@
|
||||
use clap::builder::{PossibleValue, TypedValueParser, ValueParserFactory};
|
||||
use itertools::Itertools;
|
||||
use std::str::FromStr;
|
||||
|
||||
use ruff_options_metadata::{OptionField, OptionSet, OptionsMetadata, Visit};
|
||||
use ruff_workspace::options::Options;
|
||||
|
||||
#[derive(Default)]
|
||||
struct CollectOptionsVisitor {
|
||||
values: Vec<(String, String)>,
|
||||
parents: Vec<String>,
|
||||
}
|
||||
|
||||
impl IntoIterator for CollectOptionsVisitor {
|
||||
type Item = (String, String);
|
||||
type IntoIter = std::vec::IntoIter<Self::Item>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.values.into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl Visit for CollectOptionsVisitor {
|
||||
fn record_set(&mut self, name: &str, group: OptionSet) {
|
||||
let fully_qualified_name = self
|
||||
.parents
|
||||
.iter()
|
||||
.map(String::as_str)
|
||||
.chain(std::iter::once(name))
|
||||
.collect::<Vec<_>>()
|
||||
.join(".");
|
||||
|
||||
// Only add the set to completion list if it has it's own documentation.
|
||||
self.values.push((
|
||||
fully_qualified_name,
|
||||
group.documentation().unwrap_or("").to_owned(),
|
||||
));
|
||||
|
||||
self.parents.push(name.to_owned());
|
||||
group.record(self);
|
||||
self.parents.pop();
|
||||
}
|
||||
|
||||
fn record_field(&mut self, name: &str, field: OptionField) {
|
||||
let fqn = self
|
||||
.parents
|
||||
.iter()
|
||||
.map(String::as_str)
|
||||
.chain(std::iter::once(name))
|
||||
.collect::<Vec<_>>()
|
||||
.join(".");
|
||||
|
||||
self.values.push((fqn, field.doc.to_owned()));
|
||||
}
|
||||
}
|
||||
|
||||
/// Opaque type used solely to enable tab completions
|
||||
/// for `ruff option [OPTION]` command.
|
||||
#[derive(Clone, Debug)]
pub struct OptionString(String);

impl From<String> for OptionString {
fn from(s: String) -> Self {
OptionString(s)
}
}

impl From<OptionString> for String {
fn from(value: OptionString) -> Self {
value.0
}
}

impl From<&str> for OptionString {
fn from(s: &str) -> Self {
OptionString(s.to_string())
}
}

impl std::ops::Deref for OptionString {
type Target = str;

fn deref(&self) -> &Self::Target {
&self.0
}
}

impl FromStr for OptionString {
type Err = ();

fn from_str(s: &str) -> Result<Self, Self::Err> {
Options::metadata()
.has(s)
.then(|| OptionString(s.to_owned()))
.ok_or(())
}
}

#[derive(Clone)]
pub struct OptionStringParser;

impl ValueParserFactory for OptionString {
type Parser = OptionStringParser;

fn value_parser() -> Self::Parser {
OptionStringParser
}
}

impl TypedValueParser for OptionStringParser {
type Value = OptionString;

fn parse_ref(
&self,
cmd: &clap::Command,
arg: Option<&clap::Arg>,
value: &std::ffi::OsStr,
) -> Result<Self::Value, clap::Error> {
let value = value
.to_str()
.ok_or_else(|| clap::Error::new(clap::error::ErrorKind::InvalidUtf8))?;

value.parse().map_err(|()| {
let mut error = clap::Error::new(clap::error::ErrorKind::ValueValidation).with_cmd(cmd);
if let Some(arg) = arg {
error.insert(
clap::error::ContextKind::InvalidArg,
clap::error::ContextValue::String(arg.to_string()),
);
}
error.insert(
clap::error::ContextKind::InvalidValue,
clap::error::ContextValue::String(value.to_string()),
);
error
})
}

fn possible_values(&self) -> Option<Box<dyn Iterator<Item = PossibleValue> + '_>> {
let mut visitor = CollectOptionsVisitor::default();
Options::metadata().record(&mut visitor);

Some(Box::new(visitor.into_iter().map(|(name, doc)| {
let first_paragraph = doc
.lines()
.take_while(|line| !line.trim_end().is_empty())
// Replace double quotes with single quotes, to avoid clap's lack of escaping
// when creating zsh completions. This has no security implications, as it only
// affects the help string, which is never executed
.map(|s| s.replace('"', "'"))
.join(" ");

PossibleValue::new(name).help(first_paragraph)
})))
}
}
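The module above wires `ruff config` tab completion through clap's value-parser machinery: `FromStr` validates a key against `Options::metadata()`, and `possible_values` feeds the completion list. A minimal sketch of how a derive-based command would consume it (the `ConfigCommand` struct here is hypothetical; it assumes `OptionString` is in scope):

```rust
use clap::Parser;

#[derive(Parser)]
struct ConfigCommand {
    /// Because `OptionString` implements `ValueParserFactory`, clap resolves
    /// `OptionStringParser` automatically, and `possible_values` supplies the
    /// option names shown by generated shell completions.
    option: Option<OptionString>,
}

fn main() {
    let cmd = ConfigCommand::parse();
    if let Some(option) = cmd.option {
        // `Deref<Target = str>` lets the validated key be used as a plain string.
        println!("selected option: {}", &*option);
    }
}
```

Invalid keys are rejected in `parse_ref` before the command body ever runs, which is why `from_str` consults `Options::metadata().has(s)` instead of accepting arbitrary strings.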
@@ -1 +0,0 @@
pub(crate) mod config;
@@ -1,38 +1,19 @@
use anyhow::{Result, anyhow};
use anyhow::{anyhow, Result};

use crate::args::HelpFormat;

use ruff_options_metadata::OptionsMetadata;
use ruff_workspace::options::Options;
use ruff_workspace::options_base::OptionsMetadata;

#[expect(clippy::print_stdout)]
pub(crate) fn config(key: Option<&str>, format: HelpFormat) -> Result<()> {
#[allow(clippy::print_stdout)]
pub(crate) fn config(key: Option<&str>) -> Result<()> {
match key {
None => {
let metadata = Options::metadata();
match format {
HelpFormat::Text => {
println!("{metadata}");
}

HelpFormat::Json => {
println!("{}", &serde_json::to_string_pretty(&metadata)?);
}
}
}
None => print!("{}", Options::metadata()),
Some(key) => match Options::metadata().find(key) {
None => {
return Err(anyhow!("Unknown option: {key}"));
}
Some(entry) => match format {
HelpFormat::Text => {
print!("{entry}");
}

HelpFormat::Json => {
println!("{}", &serde_json::to_string_pretty(&entry)?);
}
},
Some(entry) => {
print!("{entry}");
}
},
}
Ok(())
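Both arms of `config` funnel through the same metadata lookup. A rough sketch of the `Some(key)` path (the `describe` helper is hypothetical; the imports follow the ones used above):

```rust
use anyhow::{anyhow, Result};
use ruff_options_metadata::OptionsMetadata;
use ruff_workspace::options::Options;

// Resolve a dotted option path such as "lint.select" to its rendered
// documentation, mirroring the "Unknown option" error used above.
fn describe(key: &str) -> Result<String> {
    Options::metadata()
        .find(key)
        .map(|entry| entry.to_string())
        .ok_or_else(|| anyhow!("Unknown option: {key}"))
}
```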
@@ -1,7 +1,7 @@
use std::fmt::{Display, Formatter};
use std::fs::File;
use std::io;
use std::io::{Write, stderr, stdout};
use std::io::{stderr, stdout, Write};
use std::path::{Path, PathBuf};
use std::time::Instant;

@@ -11,31 +11,29 @@ use itertools::Itertools;
use log::{error, warn};
use rayon::iter::Either::{Left, Right};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use ruff_python_parser::ParseError;
use rustc_hash::FxHashSet;
use thiserror::Error;
use tracing::debug;

use ruff_db::panic::{PanicError, catch_unwind};
use ruff_diagnostics::SourceMap;
use ruff_linter::fs;
use ruff_linter::logging::{DisplayParseError, LogLevel};
use ruff_linter::package::PackageRoot;
use ruff_linter::registry::Rule;
use ruff_linter::rules::flake8_quotes::settings::Quote;
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::warn_user_once;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_python_formatter::{FormatModuleError, QuoteStyle, format_module_source, format_range};
use ruff_python_formatter::{format_module_source, format_range, FormatModuleError, QuoteStyle};
use ruff_source_file::LineIndex;
use ruff_text_size::{TextLen, TextRange, TextSize};
use ruff_workspace::resolver::{match_exclusion, python_files_in_path, ResolvedFile, Resolver};
use ruff_workspace::FormatterSettings;
use ruff_workspace::resolver::{ResolvedFile, Resolver, match_exclusion, python_files_in_path};

use crate::args::{ConfigArguments, FormatArguments, FormatRange};
use crate::cache::{Cache, FileCacheKey, PackageCacheMap, PackageCaches};
use crate::panic::{catch_unwind, PanicError};
use crate::resolve::resolve;
use crate::{ExitStatus, resolve_default_files};
use crate::{resolve_default_files, ExitStatus};

#[derive(Debug, Copy, Clone, is_macro::Is)]
pub(crate) enum FormatMode {
@@ -63,8 +61,13 @@ impl FormatMode {
pub(crate) fn format(
cli: FormatArguments,
config_arguments: &ConfigArguments,
log_level: LogLevel,
) -> Result<ExitStatus> {
let pyproject_config = resolve(config_arguments, cli.stdin_filename.as_deref())?;
let pyproject_config = resolve(
cli.isolated,
config_arguments,
cli.stdin_filename.as_deref(),
)?;
let mode = FormatMode::from_cli(&cli);
let files = resolve_default_files(cli.files, false);
let (paths, resolver) = python_files_in_path(&files, &pyproject_config, config_arguments)?;
@@ -138,9 +141,7 @@ pub(crate) fn format(
.parent()
.and_then(|parent| package_roots.get(parent).copied())
.flatten();
let cache_root = package
.map(PackageRoot::path)
.unwrap_or_else(|| path.parent().unwrap_or(path));
let cache_root = package.unwrap_or_else(|| path.parent().unwrap_or(path));
let cache = caches.get(cache_root);

Some(
@@ -160,7 +161,7 @@ pub(crate) fn format(
}),
Err(error) => Err(FormatCommandError::Panic(
Some(resolved_file.path().to_path_buf()),
Box::new(error),
error,
)),
},
)
@@ -180,7 +181,6 @@ pub(crate) fn format(
duration
);

// Store the caches.
caches.persist()?;

// Report on any errors.
@@ -202,7 +202,7 @@ pub(crate) fn format(
}

// Report on the formatting changes.
if config_arguments.log_level >= LogLevel::Default {
if log_level >= LogLevel::Default {
if mode.is_diff() {
// Allow piping the diff to e.g. a file by writing the summary to stderr
results.write_summary(&mut stderr().lock())?;
@@ -214,11 +214,7 @@ pub(crate) fn format(
match mode {
FormatMode::Write => {
if errors.is_empty() {
if cli.exit_non_zero_on_format && results.any_formatted() {
Ok(ExitStatus::Failure)
} else {
Ok(ExitStatus::Success)
}
Ok(ExitStatus::Success)
} else {
Ok(ExitStatus::Error)
}
@@ -238,7 +234,7 @@ pub(crate) fn format(
}

/// Format the file at the given [`Path`].
#[tracing::instrument(level = "debug", skip_all, fields(path = %path.display()))]
#[tracing::instrument(level="debug", skip_all, fields(path = %path.display()))]
pub(crate) fn format_path(
path: &Path,
settings: &FormatterSettings,
@@ -346,7 +342,7 @@ pub(crate) fn format_source(
) -> Result<FormattedSource, FormatCommandError> {
match &source_kind {
SourceKind::Python(unformatted) => {
let options = settings.to_format_options(source_type, unformatted, path);
let options = settings.to_format_options(source_type, unformatted);

let formatted = if let Some(range) = range {
let line_index = LineIndex::from_source_text(unformatted);
@@ -362,7 +358,7 @@ pub(crate) fn format_source(
})
} else {
// Using `Printed::into_code` requires adding `ruff_formatter` as a direct dependency, and I suspect that Rust can optimize the closure away regardless.
#[expect(clippy::redundant_closure_for_method_calls)]
#[allow(clippy::redundant_closure_for_method_calls)]
format_module_source(unformatted, options).map(|formatted| formatted.into_code())
};

@@ -396,7 +392,7 @@ pub(crate) fn format_source(
));
}

let options = settings.to_format_options(source_type, notebook.source_code(), path);
let options = settings.to_format_options(source_type, notebook.source_code());

let mut output: Option<String> = None;
let mut last: Option<TextSize> = None;
@@ -411,12 +407,8 @@ pub(crate) fn format_source(
let formatted =
format_module_source(unformatted, options.clone()).map_err(|err| {
if let FormatModuleError::ParseError(err) = err {
// Offset the error by the start of the cell
DisplayParseError::from_source_kind(
ParseError {
error: err.error,
location: err.location.checked_add(*start).unwrap(),
},
err,
path.map(Path::to_path_buf),
source_kind,
)
@@ -540,7 +532,7 @@ impl<'a> FormatResults<'a> {
})
.sorted_unstable_by_key(|(path, _, _)| *path)
{
write!(f, "{}", unformatted.diff(formatted, Some(path)).unwrap())?;
unformatted.diff(formatted, Some(path), f)?;
}

Ok(())
@@ -635,7 +627,7 @@ impl<'a> FormatResults<'a> {
pub(crate) enum FormatCommandError {
Ignore(#[from] ignore::Error),
Parse(#[from] DisplayParseError),
Panic(Option<PathBuf>, Box<PanicError>),
Panic(Option<PathBuf>, PanicError),
Read(Option<PathBuf>, SourceError),
Format(Option<PathBuf>, FormatModuleError),
Write(Option<PathBuf>, SourceError),
@@ -797,6 +789,8 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
let mut incompatible_rules = FxHashSet::default();
for setting in resolver.settings() {
for rule in [
// The formatter might collapse implicit string concatenation on a single line.
Rule::SingleLineImplicitStringConcatenation,
// Flags missing trailing commas when each argument is on its own line:
// ```python
// def args(
@@ -805,8 +799,6 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
// pass
// ```
Rule::MissingTrailingComma,
// The formatter always removes blank lines before the docstring.
Rule::IncorrectBlankLineBeforeClass,
] {
if setting.linter.rules.enabled(rule) {
incompatible_rules.insert(rule);
@@ -820,16 +812,7 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
.map(|rule| format!("`{}`", rule.noqa_code()))
.collect();
rule_names.sort();
if let [rule] = rule_names.as_slice() {
warn_user_once!(
"The following rule may cause conflicts when used with the formatter: {rule}. To avoid unexpected behavior, we recommend disabling this rule, either by removing it from the `lint.select` or `lint.extend-select` configuration, or adding it to the `lint.ignore` configuration."
);
} else {
warn_user_once!(
"The following rules may cause conflicts when used with the formatter: {}. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `lint.select` or `lint.extend-select` configuration, or adding them to the `lint.ignore` configuration.",
rule_names.join(", ")
);
}
warn_user_once!("The following rules may cause conflicts when used with the formatter: {}. To avoid unexpected behavior, we recommend disabling these rules, either by removing them from the `select` or `extend-select` configuration, or adding them to the `ignore` configuration.", rule_names.join(", "));
}

// Next, validate settings-specific incompatibilities.
@@ -838,33 +821,14 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
if setting.linter.rules.enabled(Rule::TabIndentation)
&& setting.formatter.indent_style.is_tab()
{
warn_user_once!(
"The `format.indent-style=\"tab\"` option is incompatible with `W191`, which lints against all uses of tabs. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `\"space\"`."
);
}

if !setting
.linter
.rules
.enabled(Rule::SingleLineImplicitStringConcatenation)
&& setting
.linter
.rules
.enabled(Rule::MultiLineImplicitStringConcatenation)
&& !setting.linter.flake8_implicit_str_concat.allow_multiline
{
warn_user_once!(
"The `lint.flake8-implicit-str-concat.allow-multiline = false` option is incompatible with the formatter unless `ISC001` is enabled. We recommend enabling `ISC001` or setting `allow-multiline=true`."
);
warn_user_once!("The `format.indent-style=\"tab\"` option is incompatible with `W191`, which lints against all uses of tabs. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `\"space\"`.");
}

// Validate all rules that rely on tab styles.
if setting.linter.rules.enabled(Rule::DocstringTabIndentation)
if setting.linter.rules.enabled(Rule::IndentWithSpaces)
&& setting.formatter.indent_style.is_tab()
{
warn_user_once!(
"The `format.indent-style=\"tab\"` option is incompatible with `D206`, with requires space-based indentation. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `\"space\"`."
|
||||
);
|
||||
warn_user_once!("The `format.indent-style=\"tab\"` option is incompatible with `D206`, with requires space-based indentation. We recommend disabling these rules when using the formatter, which enforces a consistent indentation style. Alternatively, set the `format.indent-style` option to `\"space\"`.");
|
||||
}

// Validate all rules that rely on custom indent widths.
@@ -873,9 +837,7 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
Rule::IndentationWithInvalidMultipleComment,
]) && setting.formatter.indent_width.value() != 4
{
warn_user_once!(
"The `format.indent-width` option with a value other than 4 is incompatible with `E111` and `E114`. We recommend disabling these rules when using the formatter, which enforces a consistent indentation width. Alternatively, set the `format.indent-width` option to `4`."
);
warn_user_once!("The `format.indent-width` option with a value other than 4 is incompatible with `E111` and `E114`. We recommend disabling these rules when using the formatter, which enforces a consistent indentation width. Alternatively, set the `format.indent-width` option to `4`.");
}

// Validate all rules that rely on quote styles.
@@ -889,14 +851,10 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
setting.formatter.quote_style,
) {
(Quote::Double, QuoteStyle::Single) => {
warn_user_once!(
"The `flake8-quotes.inline-quotes=\"double\"` option is incompatible with the formatter's `format.quote-style=\"single\"`. We recommend disabling `Q000` and `Q003` when using the formatter, which enforces a consistent quote style. Alternatively, set both options to either `\"single\"` or `\"double\"`."
);
warn_user_once!("The `flake8-quotes.inline-quotes=\"double\"` option is incompatible with the formatter's `format.quote-style=\"single\"`. We recommend disabling `Q000` and `Q003` when using the formatter, which enforces a consistent quote style. Alternatively, set both options to either `\"single\"` or `\"double\"`.");
}
(Quote::Single, QuoteStyle::Double) => {
warn_user_once!(
"The `flake8-quotes.inline-quotes=\"single\"` option is incompatible with the formatter's `format.quote-style=\"double\"`. We recommend disabling `Q000` and `Q003` when using the formatter, which enforces a consistent quote style. Alternatively, set both options to either `\"single\"` or `\"double\"`."
);
warn_user_once!("The `flake8-quotes.inline-quotes=\"single\"` option is incompatible with the formatter's `format.quote-style=\"double\"`. We recommend disabling `Q000` and `Q003` when using the formatter, which enforces a consistent quote style. Alternatively, set both options to either `\"single\"` or `\"double\"`.");
}
_ => {}
}
@@ -904,26 +862,14 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {

if setting.linter.rules.enabled(Rule::BadQuotesMultilineString)
&& setting.linter.flake8_quotes.multiline_quotes == Quote::Single
&& matches!(
setting.formatter.quote_style,
QuoteStyle::Single | QuoteStyle::Double
)
{
warn_user_once!(
"The `flake8-quotes.multiline-quotes=\"single\"` option is incompatible with the formatter. We recommend disabling `Q001` when using the formatter, which enforces double quotes for multiline strings. Alternatively, set the `flake8-quotes.multiline-quotes` option to `\"double\"`.`"
|
||||
);
|
||||
warn_user_once!("The `flake8-quotes.multiline-quotes=\"single\"` option is incompatible with the formatter. We recommend disabling `Q001` when using the formatter, which enforces double quotes for multiline strings. Alternatively, set the `flake8-quotes.multiline-quotes` option to `\"double\"`.`");
|
||||
}

if setting.linter.rules.enabled(Rule::BadQuotesDocstring)
&& setting.linter.flake8_quotes.docstring_quotes == Quote::Single
&& matches!(
setting.formatter.quote_style,
QuoteStyle::Single | QuoteStyle::Double
)
{
warn_user_once!(
"The `flake8-quotes.docstring-quotes=\"single\"` option is incompatible with the formatter. We recommend disabling `Q002` when using the formatter, which enforces double quotes for docstrings. Alternatively, set the `flake8-quotes.docstring-quotes` option to `\"double\"`.`"
|
||||
);
|
||||
warn_user_once!("The `flake8-quotes.multiline-quotes=\"single\"` option is incompatible with the formatter. We recommend disabling `Q002` when using the formatter, which enforces double quotes for docstrings. Alternatively, set the `flake8-quotes.docstring-quotes` option to `\"double\"`.`");
|
||||
}

// Validate all isort settings.
@@ -931,16 +877,12 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
// The formatter removes empty lines if the value is larger than 2 but always inserts an empty line after imports.
// Two empty lines are okay because `isort` only uses this setting for top-level imports (not in nested blocks).
if !matches!(setting.linter.isort.lines_after_imports, 1 | 2 | -1) {
warn_user_once!(
"The isort option `isort.lines-after-imports` with a value other than `-1`, `1` or `2` is incompatible with the formatter. To avoid unexpected behavior, we recommend setting the option to one of: `2`, `1`, or `-1` (default)."
);
warn_user_once!("The isort option `isort.lines-after-imports` with a value other than `-1`, `1` or `2` is incompatible with the formatter. To avoid unexpected behavior, we recommend setting the option to one of: `2`, `1`, or `-1` (default).");
}

// Values larger than two get reduced to one line by the formatter if the import is in a nested block.
if setting.linter.isort.lines_between_types > 1 {
warn_user_once!(
"The isort option `isort.lines-between-types` with a value greater than 1 is incompatible with the formatter. To avoid unexpected behavior, we recommend setting the option to one of: `1` or `0` (default)."
);
warn_user_once!("The isort option `isort.lines-between-types` with a value greater than 1 is incompatible with the formatter. To avoid unexpected behavior, we recommend setting the option to one of: `1` or `0` (default).");
}

// isort inserts a trailing comma which the formatter preserves, but only if `skip-magic-trailing-comma` isn't false.
@@ -949,15 +891,11 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
&& !setting.linter.isort.force_single_line
{
if setting.linter.isort.force_wrap_aliases {
warn_user_once!(
"The isort option `isort.force-wrap-aliases` is incompatible with the formatter `format.skip-magic-trailing-comma=true` option. To avoid unexpected behavior, we recommend either setting `isort.force-wrap-aliases=false` or `format.skip-magic-trailing-comma=false`."
);
warn_user_once!("The isort option `isort.force-wrap-aliases` is incompatible with the formatter `format.skip-magic-trailing-comma=true` option. To avoid unexpected behavior, we recommend either setting `isort.force-wrap-aliases=false` or `format.skip-magic-trailing-comma=false`.");
}

if setting.linter.isort.split_on_trailing_comma {
warn_user_once!(
"The isort option `isort.split-on-trailing-comma` is incompatible with the formatter `format.skip-magic-trailing-comma=true` option. To avoid unexpected behavior, we recommend either setting `isort.split-on-trailing-comma=false` or `format.skip-magic-trailing-comma=false`."
);
warn_user_once!("The isort option `isort.split-on-trailing-comma` is incompatible with the formatter `format.skip-magic-trailing-comma=true` option. To avoid unexpected behavior, we recommend either setting `isort.split-on-trailing-comma=false` or `format.skip-magic-trailing-comma=false`.");
}
}
}
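Every check in `warn_incompatible_formatter_settings` follows the same shape; condensed to a sketch (names as used above, with the warning text paraphrased):

```rust
// For each resolved settings group, detect a lint rule whose output the
// formatter would immediately rewrite, and warn at most once.
for setting in resolver.settings() {
    if setting.linter.rules.enabled(Rule::TabIndentation)
        && setting.formatter.indent_style.is_tab()
    {
        warn_user_once!("`W191` conflicts with `format.indent-style=\"tab\"`.");
    }
}
```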

@@ -6,24 +6,28 @@ use log::error;

use ruff_linter::source_kind::SourceKind;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::resolver::{match_exclusion, python_file_at_path, Resolver};
use ruff_workspace::FormatterSettings;
use ruff_workspace::resolver::{Resolver, match_exclusion, python_file_at_path};

use crate::ExitStatus;
use crate::args::{ConfigArguments, FormatArguments, FormatRange};
use crate::commands::format::{
FormatCommandError, FormatMode, FormatResult, FormattedSource, format_source,
warn_incompatible_formatter_settings,
format_source, warn_incompatible_formatter_settings, FormatCommandError, FormatMode,
FormatResult, FormattedSource,
};
use crate::resolve::resolve;
use crate::stdin::{parrot_stdin, read_from_stdin};
use crate::ExitStatus;

/// Run the formatter over a single file, read from `stdin`.
pub(crate) fn format_stdin(
cli: &FormatArguments,
config_arguments: &ConfigArguments,
) -> Result<ExitStatus> {
let pyproject_config = resolve(config_arguments, cli.stdin_filename.as_deref())?;
let pyproject_config = resolve(
cli.isolated,
config_arguments,
cli.stdin_filename.as_deref(),
)?;

let mut resolver = Resolver::new(&pyproject_config);
warn_incompatible_formatter_settings(&resolver);
@@ -118,13 +122,9 @@ fn format_source_code(
}
FormatMode::Check => {}
FormatMode::Diff => {
use std::io::Write;
write!(
&mut stdout().lock(),
"{}",
source_kind.diff(formatted, path).unwrap()
)
.map_err(|err| FormatCommandError::Diff(path.map(Path::to_path_buf), err))?;
source_kind
.diff(formatted, path, &mut stdout().lock())
.map_err(|err| FormatCommandError::Diff(path.map(Path::to_path_buf), err))?;
}
},
FormattedSource::Unchanged => {

@@ -1,7 +1,4 @@
pub(crate) mod completions;

pub(crate) mod add_noqa;
pub(crate) mod analyze_graph;
pub(crate) mod check;
pub(crate) mod check_stdin;
pub(crate) mod clean;
@@ -10,7 +7,6 @@ pub(crate) mod format;
pub(crate) mod format_stdin;
pub(crate) mod linter;
pub(crate) mod rule;
pub(crate) mod server;
pub(crate) mod show_files;
pub(crate) mod show_settings;
pub(crate) mod version;

@@ -1,4 +1,3 @@
use std::fmt::Write as _;
use std::io::{self, BufWriter, Write};

use anyhow::Result;
@@ -6,7 +5,7 @@ use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use strum::IntoEnumIterator;

use ruff_linter::FixAvailability;
use ruff_diagnostics::FixAvailability;
use ruff_linter::registry::{Linter, Rule, RuleNamespace};

use crate::args::HelpFormat;
@@ -19,7 +18,7 @@ struct Explanation<'a> {
summary: &'a str,
message_formats: &'a [&'a str],
fix: String,
#[expect(clippy::struct_field_names)]
#[allow(clippy::struct_field_names)]
explanation: Option<&'a str>,
preview: bool,
}
@@ -30,30 +29,26 @@ impl<'a> Explanation<'a> {
let (linter, _) = Linter::parse_code(&code).unwrap();
let fix = rule.fixable().to_string();
Self {
name: rule.name().as_str(),
name: rule.as_ref(),
code,
linter: linter.name(),
summary: rule.message_formats()[0],
message_formats: rule.message_formats(),
fix,
explanation: rule.explanation(),
preview: rule.is_preview(),
preview: rule.is_preview() || rule.is_nursery(),
}
}
}

fn format_rule_text(rule: Rule) -> String {
let mut output = String::new();
let _ = write!(&mut output, "# {} ({})", rule.name(), rule.noqa_code());
output.push_str(&format!("# {} ({})", rule.as_ref(), rule.noqa_code()));
output.push('\n');
output.push('\n');

let (linter, _) = Linter::parse_code(&rule.noqa_code().to_string()).unwrap();
let _ = write!(
&mut output,
"Derived from the **{}** linter.",
linter.name()
);
output.push_str(&format!("Derived from the **{}** linter.", linter.name()));
output.push('\n');
output.push('\n');

@@ -67,7 +62,7 @@ fn format_rule_text(rule: Rule) -> String {
output.push('\n');
}

if rule.is_preview() {
if rule.is_preview() || rule.is_nursery() {
output.push_str(
r"This rule is in preview and is not stable. The `--preview` flag is required for use.",
);
@@ -81,7 +76,7 @@ fn format_rule_text(rule: Rule) -> String {
output.push_str("Message formats:");
for format in rule.message_formats() {
output.push('\n');
let _ = write!(&mut output, "* {format}");
output.push_str(&format!("* {format}"));
}
}
output
@@ -97,7 +92,7 @@ pub(crate) fn rule(rule: Rule, format: HelpFormat) -> Result<()> {
HelpFormat::Json => {
serde_json::to_writer_pretty(stdout, &Explanation::from_rule(&rule))?;
}
}
};
Ok(())
}
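The JSON arm above serializes the same data that `format_rule_text` renders as Markdown. A sketch of producing that payload directly (assuming the `Explanation` and `Rule` types above; the helper name is hypothetical):

```rust
// Render one rule's metadata as pretty-printed JSON, mirroring the
// `HelpFormat::Json` arm of `rule()`.
fn rule_json(rule: Rule) -> anyhow::Result<String> {
    Ok(serde_json::to_string_pretty(&Explanation::from_rule(&rule))?)
}
```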


@@ -1,7 +0,0 @@
use crate::ExitStatus;
use anyhow::Result;

pub(crate) fn run_server(preview: Option<bool>) -> Result<ExitStatus> {
ruff_server::run(preview)?;
Ok(ExitStatus::Success)
}
@@ -5,7 +5,7 @@ use anyhow::Result;
use itertools::Itertools;

use ruff_linter::warn_user_once;
use ruff_workspace::resolver::{PyprojectConfig, ResolvedFile, python_files_in_path};
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};

use crate::args::ConfigArguments;


@@ -1,10 +1,10 @@
use std::io::Write;
use std::path::PathBuf;

use anyhow::{Result, bail};
use anyhow::{bail, Result};
use itertools::Itertools;

use ruff_workspace::resolver::{PyprojectConfig, ResolvedFile, python_files_in_path};
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, ResolvedFile};

use crate::args::ConfigArguments;


@@ -16,6 +16,6 @@ pub(crate) fn version(output_format: HelpFormat) -> Result<()> {
HelpFormat::Json => {
serde_json::to_writer_pretty(stdout, &version_info)?;
}
}
};
Ok(())
}

@@ -3,29 +3,29 @@
use std::borrow::Cow;
use std::fs::File;
use std::io;
use std::io::Write;
use std::ops::{Add, AddAssign};
use std::path::Path;

use anyhow::{Context, Result};
use colored::Colorize;
use log::{debug, warn};
use log::{debug, error, warn};
use rustc_hash::FxHashMap;

use ruff_linter::OldDiagnostic;
use ruff_linter::codes::Rule;
use ruff_linter::linter::{FixTable, FixerResult, LinterResult, ParseSource, lint_fix, lint_only};
use ruff_diagnostics::Diagnostic;
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult, ParseSource};
use ruff_linter::logging::DisplayParseError;
use ruff_linter::message::Message;
use ruff_linter::package::PackageRoot;
use ruff_linter::pyproject_toml::lint_pyproject_toml;
use ruff_linter::registry::AsRule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::{IOError, fs};
use ruff_linter::{fs, IOError, SyntaxError};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::imports::ImportMap;
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange;
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::Settings;

use crate::cache::{Cache, FileCacheKey, LintCacheData};
@@ -34,17 +34,20 @@ use crate::cache::{Cache, FileCacheKey, LintCacheData};
pub(crate) struct Diagnostics {
pub(crate) messages: Vec<Message>,
pub(crate) fixed: FixMap,
pub(crate) imports: ImportMap,
pub(crate) notebook_indexes: FxHashMap<String, NotebookIndex>,
}

impl Diagnostics {
pub(crate) fn new(
messages: Vec<Message>,
imports: ImportMap,
notebook_indexes: FxHashMap<String, NotebookIndex>,
) -> Self {
Self {
messages,
fixed: FixMap::default(),
imports,
notebook_indexes,
}
}
@@ -55,57 +58,58 @@ impl Diagnostics {
path: Option<&Path>,
settings: &LinterSettings,
) -> Self {
match err {
let diagnostic = match err {
// IO errors.
SourceError::Io(_)
| SourceError::Notebook(NotebookError::Io(_) | NotebookError::Json(_)) => {
if settings.rules.enabled(Rule::IOError) {
let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
let source_file = SourceFileBuilder::new(name, "").finish();
Self::new(
vec![Message::from_diagnostic(
OldDiagnostic::new(
IOError {
message: err.to_string(),
},
TextRange::default(),
&source_file,
),
None,
)],
FxHashMap::default(),
)
} else {
match path {
Some(path) => {
warn!(
"{}{}{} {err}",
"Failed to lint ".bold(),
fs::relativize_path(path).bold(),
":".bold()
);
}
None => {
warn!("{}{} {err}", "Failed to lint".bold(), ":".bold());
}
}

Self::default()
}
Diagnostic::new(
IOError {
message: err.to_string(),
},
TextRange::default(),
)
}
// Syntax errors.
SourceError::Notebook(
NotebookError::InvalidJson(_)
| NotebookError::InvalidSchema(_)
| NotebookError::InvalidFormat(_),
) => {
let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
let dummy = SourceFileBuilder::new(name, "").finish();
Self::new(
vec![Message::syntax_error(err, TextRange::default(), dummy)],
FxHashMap::default(),
)
) => Diagnostic::new(
SyntaxError {
message: err.to_string(),
},
TextRange::default(),
),
};

if settings.rules.enabled(diagnostic.kind.rule()) {
let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
let dummy = SourceFileBuilder::new(name, "").finish();
Self::new(
vec![Message::from_diagnostic(
diagnostic,
dummy,
TextSize::default(),
)],
ImportMap::default(),
FxHashMap::default(),
)
} else {
match path {
Some(path) => {
warn!(
"{}{}{} {err}",
"Failed to lint ".bold(),
fs::relativize_path(path).bold(),
":".bold()
);
}
None => {
warn!("{}{} {err}", "Failed to lint".bold(), ":".bold());
}
}

Self::default()
}
}
}
@@ -122,6 +126,7 @@ impl Add for Diagnostics {
impl AddAssign for Diagnostics {
fn add_assign(&mut self, other: Self) {
self.messages.extend(other.messages);
self.imports.extend(other.imports);
self.fixed += other.fixed;
self.notebook_indexes.extend(other.notebook_indexes);
}
@@ -165,9 +170,9 @@ impl AddAssign for FixMap {
continue;
}
let fixed_in_file = self.0.entry(filename).or_default();
for (rule, name, count) in fixed.iter() {
for (rule, count) in fixed {
if count > 0 {
*fixed_in_file.entry(rule).or_default(name) += count;
*fixed_in_file.entry(rule).or_default() += count;
}
}
}
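The merge above skips zero counts so files without applied fixes never appear in the fix summary. The same logic as a standalone analogue (plain `HashMap`s standing in for this file's `FixMap`/`FixTable`):

```rust
use std::collections::HashMap;

// Accumulate per-rule fix counts for one file into a running total,
// dropping empty entries up front.
fn merge_counts(into: &mut HashMap<String, usize>, from: HashMap<String, usize>) {
    for (rule, count) in from {
        if count > 0 {
            *into.entry(rule).or_default() += count;
        }
    }
}
```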
@@ -177,7 +182,7 @@ impl AddAssign for FixMap {
/// Lint the source code at the given `Path`.
pub(crate) fn lint_path(
path: &Path,
package: Option<PackageRoot<'_>>,
package: Option<&Path>,
settings: &LinterSettings,
cache: Option<&Cache>,
noqa: flags::Noqa,
@@ -186,7 +191,7 @@ pub(crate) fn lint_path(
) -> Result<Diagnostics> {
// Check the cache.
let caching = match cache {
Some(cache) if noqa.is_enabled() => {
Some(cache) if noqa.into() => {
let relative_path = cache
.relative_path(path)
.expect("wrong package cache for file");
@@ -235,7 +240,7 @@ pub(crate) fn lint_path(
};
let source_file =
SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
lint_pyproject_toml(&source_file, settings)
lint_pyproject_toml(source_file, settings)
} else {
vec![]
};
@@ -259,56 +264,48 @@ pub(crate) fn lint_path(
};

// Lint the file.
let (result, transformed, fixed) =
if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
if let Ok(FixerResult {
result,
transformed,
fixed,
}) = lint_fix(
path,
package,
noqa,
unsafe_fixes,
settings,
&source_kind,
source_type,
) {
if !fixed.is_empty() {
match fix_mode {
flags::FixMode::Apply => transformed.write(&mut File::create(path)?)?,
flags::FixMode::Diff => {
write!(
&mut io::stdout().lock(),
"{}",
source_kind.diff(&transformed, Some(path)).unwrap()
)?;
}
flags::FixMode::Generate => {}
let (
LinterResult {
data: (messages, imports),
error: parse_error,
},
transformed,
fixed,
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
if let Ok(FixerResult {
result,
transformed,
fixed,
}) = lint_fix(
path,
package,
noqa,
unsafe_fixes,
settings,
&source_kind,
source_type,
) {
if !fixed.is_empty() {
match fix_mode {
flags::FixMode::Apply => transformed.write(&mut File::create(path)?)?,
flags::FixMode::Diff => {
source_kind.diff(
transformed.as_ref(),
Some(path),
&mut io::stdout().lock(),
)?;
}
flags::FixMode::Generate => {}
}
let transformed = if let Cow::Owned(transformed) = transformed {
transformed
} else {
source_kind
};
(result, transformed, fixed)
} else {
// If we fail to fix, lint the original source code.
let result = lint_only(
path,
package,
settings,
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let transformed = source_kind;
let fixed = FixTable::default();
(result, transformed, fixed)
}
let transformed = if let Cow::Owned(transformed) = transformed {
transformed
} else {
source_kind
};
(result, transformed, fixed)
} else {
// If we fail to fix, lint the original source code.
let result = lint_only(
path,
package,
@@ -319,16 +316,29 @@ pub(crate) fn lint_path(
ParseSource::None,
);
let transformed = source_kind;
let fixed = FixTable::default();
let fixed = FxHashMap::default();
(result, transformed, fixed)
};
}
} else {
let result = lint_only(
path,
package,
settings,
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let transformed = source_kind;
let fixed = FxHashMap::default();
(result, transformed, fixed)
};

let has_error = result.has_syntax_errors();
let messages = result.messages;
let imports = imports.unwrap_or_default();

if let Some((cache, relative_path, key)) = caching {
// We don't cache parsing errors.
if !has_error {
if parse_error.is_none() {
// `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
// and writing the diff to stdout, respectively). If a file has diagnostics, we
// need to avoid reading from and writing to the cache in these modes.
@@ -343,6 +353,7 @@ pub(crate) fn lint_path(
&key,
LintCacheData::from_messages(
&messages,
imports.clone(),
transformed.as_ipy_notebook().map(Notebook::index).cloned(),
),
);
@@ -350,6 +361,13 @@ pub(crate) fn lint_path(
}
}

if let Some(error) = parse_error {
error!(
"{}",
DisplayParseError::from_source_kind(error, Some(path.to_path_buf()), &transformed)
);
}

let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook.into_index())])
} else {
@@ -359,50 +377,28 @@ pub(crate) fn lint_path(
Ok(Diagnostics {
messages,
fixed: FixMap::from_iter([(fs::relativize_path(path), fixed)]),
imports,
notebook_indexes,
})
}

/// Generate `Diagnostic`s from source code content derived from stdin.
/// Generate `Diagnostic`s from source code content derived from
/// stdin.
pub(crate) fn lint_stdin(
path: Option<&Path>,
package: Option<PackageRoot<'_>>,
package: Option<&Path>,
contents: String,
settings: &Settings,
noqa: flags::Noqa,
fix_mode: flags::FixMode,
) -> Result<Diagnostics> {
// TODO(charlie): Support `pyproject.toml`.
let source_type = match path.and_then(|path| settings.linter.extension.get(path)) {
None => match path.map(SourceType::from).unwrap_or_default() {
SourceType::Python(source_type) => source_type,

SourceType::Toml(source_type) if source_type.is_pyproject() => {
if !settings
.linter
.rules
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_pyproject_toml())
{
return Ok(Diagnostics::default());
}

let path = path.unwrap();
let source_file =
SourceFileBuilder::new(path.to_string_lossy(), contents.clone()).finish();

match fix_mode {
flags::FixMode::Diff | flags::FixMode::Generate => {}
flags::FixMode::Apply => write!(&mut io::stdout().lock(), "{contents}")?,
}

return Ok(Diagnostics {
messages: lint_pyproject_toml(&source_file, &settings.linter),
fixed: FixMap::from_iter([(fs::relativize_path(path), FixTable::default())]),
notebook_indexes: FxHashMap::default(),
});
SourceType::Toml(_) => {
return Ok(Diagnostics::default());
}

SourceType::Toml(_) => return Ok(Diagnostics::default()),
},
Some(language) => PySourceType::from(language),
};
@@ -417,66 +413,48 @@ pub(crate) fn lint_stdin(
};

// Lint the inputs.
let (LinterResult { messages, .. }, transformed, fixed) =
if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
if let Ok(FixerResult {
result,
transformed,
fixed,
}) = lint_fix(
path.unwrap_or_else(|| Path::new("-")),
package,
noqa,
settings.unsafe_fixes,
&settings.linter,
&source_kind,
source_type,
) {
match fix_mode {
flags::FixMode::Apply => {
// Write the contents to stdout, regardless of whether any errors were fixed.
transformed.write(&mut io::stdout().lock())?;
}
flags::FixMode::Diff => {
// But only write a diff if it's non-empty.
if !fixed.is_empty() {
write!(
&mut io::stdout().lock(),
"{}",
source_kind.diff(&transformed, path).unwrap()
)?;
}
}
flags::FixMode::Generate => {}
let (
LinterResult {
data: (messages, imports),
error: parse_error,
},
transformed,
fixed,
) = if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
if let Ok(FixerResult {
result,
transformed,
fixed,
}) = lint_fix(
path.unwrap_or_else(|| Path::new("-")),
package,
noqa,
settings.unsafe_fixes,
&settings.linter,
&source_kind,
source_type,
) {
match fix_mode {
flags::FixMode::Apply => {
// Write the contents to stdout, regardless of whether any errors were fixed.
transformed.write(&mut io::stdout().lock())?;
}
let transformed = if let Cow::Owned(transformed) = transformed {
transformed
} else {
source_kind
};
(result, transformed, fixed)
} else {
// If we fail to fix, lint the original source code.
let result = lint_only(
path.unwrap_or_else(|| Path::new("-")),
package,
&settings.linter,
noqa,
&source_kind,
source_type,
ParseSource::None,
);

// Write the contents to stdout anyway.
if fix_mode.is_apply() {
source_kind.write(&mut io::stdout().lock())?;
flags::FixMode::Diff => {
// But only write a diff if it's non-empty.
if !fixed.is_empty() {
source_kind.diff(transformed.as_ref(), path, &mut io::stdout().lock())?;
}
}

let transformed = source_kind;
let fixed = FixTable::default();
(result, transformed, fixed)
flags::FixMode::Generate => {}
}
let transformed = if let Cow::Owned(transformed) = transformed {
transformed
} else {
source_kind
};
(result, transformed, fixed)
} else {
// If we fail to fix, lint the original source code.
let result = lint_only(
path.unwrap_or_else(|| Path::new("-")),
package,
@@ -486,10 +464,39 @@ pub(crate) fn lint_stdin(
source_type,
ParseSource::None,
);

// Write the contents to stdout anyway.
if fix_mode.is_apply() {
source_kind.write(&mut io::stdout().lock())?;
}

let transformed = source_kind;
let fixed = FixTable::default();
let fixed = FxHashMap::default();
(result, transformed, fixed)
};
}
} else {
let result = lint_only(
path.unwrap_or_else(|| Path::new("-")),
package,
&settings.linter,
noqa,
&source_kind,
source_type,
ParseSource::None,
);
let transformed = source_kind;
let fixed = FxHashMap::default();
(result, transformed, fixed)
};

let imports = imports.unwrap_or_default();

if let Some(error) = parse_error {
error!(
"{}",
DisplayParseError::from_source_kind(error, path.map(Path::to_path_buf), &transformed)
);
}

let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {
FxHashMap::from_iter([(
@@ -506,6 +513,7 @@ pub(crate) fn lint_stdin(
fs::relativize_path(path.unwrap_or_else(|| Path::new("-"))),
fixed,
)]),
imports,
notebook_indexes,
})
}

@@ -1,7 +1,7 @@
#![allow(clippy::print_stdout)]

use std::fs::File;
use std::io::{self, BufWriter, Write, stdout};
use std::io::{self, stdout, BufWriter, Write};
use std::path::{Path, PathBuf};
use std::process::ExitCode;
use std::sync::mpsc::channel;
@@ -10,24 +10,22 @@ use anyhow::Result;
use clap::CommandFactory;
use colored::Colorize;
use log::warn;
use notify::{RecursiveMode, Watcher, recommended_watcher};
use notify::{recommended_watcher, RecursiveMode, Watcher};

use args::{GlobalConfigArgs, ServerCommand};
use ruff_linter::logging::{LogLevel, set_up_logging};
use ruff_linter::logging::{set_up_logging, LogLevel};
use ruff_linter::settings::flags::FixMode;
use ruff_linter::settings::types::OutputFormat;
use ruff_linter::settings::types::SerializationFormat;
use ruff_linter::{fs, warn_user, warn_user_once};
use ruff_workspace::Settings;

use crate::args::{
AnalyzeCommand, AnalyzeGraphCommand, Args, CheckCommand, Command, FormatCommand,
};
use crate::args::{Args, CheckCommand, Command, FormatCommand, HelpFormat};
use crate::printer::{Flags as PrinterFlags, Printer};

pub mod args;
mod cache;
mod commands;
mod diagnostics;
mod panic;
mod printer;
pub mod resolve;
mod stdin;
@@ -62,19 +60,11 @@ enum ChangeKind {
/// Return the [`ChangeKind`] based on the list of modified file paths.
///
/// Returns `None` if no relevant changes were detected.
fn change_detected(event: &notify::Event) -> Option<ChangeKind> {
fn change_detected(paths: &[PathBuf]) -> Option<ChangeKind> {
// If any `.toml` files were modified, return `ChangeKind::Configuration`. Otherwise, return
// `ChangeKind::SourceFile` if any `.py`, `.pyi`, `.pyw`, or `.ipynb` files were modified.
let mut source_file = false;

if event.kind.is_access() || event.kind.is_other() {
return None;
}

if event.need_rescan() {
return Some(ChangeKind::Configuration);
}
for path in &event.paths {
for path in paths {
if let Some(suffix) = path.extension() {
match suffix.to_str() {
Some("toml") => {
@@ -111,7 +101,7 @@ fn is_stdin(files: &[PathBuf], stdin_filename: Option<&Path>) -> bool {
file == Path::new("-")
}

/// Returns the default set of files if none are provided, otherwise returns provided files.
/// Returns the default set of files if none are provided, otherwise returns `None`.
fn resolve_default_files(files: Vec<PathBuf>, is_stdin: bool) -> Vec<PathBuf> {
if files.is_empty() {
if is_stdin {
@@ -124,16 +114,25 @@ fn resolve_default_files(files: Vec<PathBuf>, is_stdin: bool) -> Vec<PathBuf> {
}
}
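A rough illustration of the intended behavior (the fallback values are assumptions inferred from the doc comment, since the hunk elides the function body):

```rust
use std::path::PathBuf;

// Explicit file lists pass through unchanged; an empty list falls back to
// stdin ("-") or the current directory (".") depending on the mode.
assert_eq!(resolve_default_files(vec![], true), vec![PathBuf::from("-")]);
assert_eq!(resolve_default_files(vec![], false), vec![PathBuf::from(".")]);

let files = vec![PathBuf::from("src/")];
assert_eq!(resolve_default_files(files.clone(), false), files);
```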

/// Get the actual value of the `format` desired from either `output_format`
/// or `format`, and warn the user if they're using the deprecated form.
fn resolve_help_output_format(output_format: HelpFormat, format: Option<HelpFormat>) -> HelpFormat {
if format.is_some() {
warn_user!("The `--format` argument is deprecated. Use `--output-format` instead.");
}
format.unwrap_or(output_format)
}
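The deprecated flag still wins when both are given, just with a warning. A sketch of the contract:

```rust
// `--format` (deprecated) takes precedence over `--output-format`,
// emitting a one-time deprecation warning via `warn_user!`.
let chosen = resolve_help_output_format(HelpFormat::Text, Some(HelpFormat::Json));
assert!(matches!(chosen, HelpFormat::Json));

// Without the deprecated flag, the new argument is used as-is.
let chosen = resolve_help_output_format(HelpFormat::Text, None);
assert!(matches!(chosen, HelpFormat::Text));
```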

pub fn run(
Args {
command,
global_options,
log_level_args,
}: Args,
) -> Result<ExitStatus> {
{
let default_panic_hook = std::panic::take_hook();
std::panic::set_hook(Box::new(move |info| {
#[expect(clippy::print_stderr)]
#[allow(clippy::print_stderr)]
{
eprintln!(
r#"
@@ -152,11 +151,12 @@ pub fn run(
}));
}

// Don't set up logging for the server command, as it has its own logging setup
// and setting the global logger can only be done once.
if !matches!(command, Command::Server { .. }) {
set_up_logging(global_options.log_level())?;
}
// Enable ANSI colors on Windows 10.
#[cfg(windows)]
assert!(colored::control::set_virtual_terminal(true).is_ok());

let log_level = LogLevel::from(&log_level_args);
set_up_logging(&log_level)?;

match command {
Command::Version { output_format } => {
@@ -166,8 +166,10 @@ pub fn run(
Command::Rule {
rule,
all,
output_format,
format,
mut output_format,
} => {
output_format = resolve_help_output_format(output_format, format);
if all {
commands::rule::rules(output_format)?;
}
@@ -176,68 +178,55 @@ pub fn run(
}
Ok(ExitStatus::Success)
}
Command::Config {
option,
output_format,
} => {
commands::config::config(option.as_deref(), output_format)?;
Command::Config { option } => {
commands::config::config(option.as_deref())?;
Ok(ExitStatus::Success)
}
Command::Linter { output_format } => {
Command::Linter {
format,
mut output_format,
} => {
output_format = resolve_help_output_format(output_format, format);
commands::linter::linter(output_format)?;
Ok(ExitStatus::Success)
}
Command::Clean => {
commands::clean::clean(global_options.log_level())?;
commands::clean::clean(log_level)?;
Ok(ExitStatus::Success)
}
Command::GenerateShellCompletion { shell } => {
shell.generate(&mut Args::command(), &mut stdout());
Ok(ExitStatus::Success)
}
Command::Check(args) => check(args, global_options),
Command::Format(args) => format(args, global_options),
Command::Server(args) => server(args),
Command::Analyze(AnalyzeCommand::Graph(args)) => analyze_graph(args, global_options),
Command::Check(args) => check(args, log_level),
Command::Format(args) => format(args, log_level),
}
}

fn format(args: FormatCommand, global_options: GlobalConfigArgs) -> Result<ExitStatus> {
let (cli, config_arguments) = args.partition(global_options)?;
fn format(args: FormatCommand, log_level: LogLevel) -> Result<ExitStatus> {
let (cli, config_arguments) = args.partition()?;

if is_stdin(&cli.files, cli.stdin_filename.as_deref()) {
commands::format_stdin::format_stdin(&cli, &config_arguments)
} else {
commands::format::format(cli, &config_arguments)
commands::format::format(cli, &config_arguments, log_level)
}
}

fn analyze_graph(
args: AnalyzeGraphCommand,
global_options: GlobalConfigArgs,
) -> Result<ExitStatus> {
let (cli, config_arguments) = args.partition(global_options)?;

commands::analyze_graph::analyze_graph(cli, &config_arguments)
}

fn server(args: ServerCommand) -> Result<ExitStatus> {
commands::server::run_server(args.resolve_preview())
}

pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<ExitStatus> {
let (cli, config_arguments) = args.partition(global_options)?;
pub fn check(args: CheckCommand, log_level: LogLevel) -> Result<ExitStatus> {
let (cli, config_arguments) = args.partition()?;

// Construct the "default" settings. These are used when no `pyproject.toml`
// files are present, or files are injected from outside of the hierarchy.
let pyproject_config = resolve::resolve(&config_arguments, cli.stdin_filename.as_deref())?;
let pyproject_config = resolve::resolve(
cli.isolated,
&config_arguments,
cli.stdin_filename.as_deref(),
)?;

let mut writer: Box<dyn Write> = match cli.output_file {
Some(path) if !cli.watch => {
colored::control::set_override(false);
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)?;
}
let file = File::create(path)?;
Box::new(BufWriter::new(file))
}
@@ -283,8 +272,8 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
// - If `--fix` or `--fix-only` is set, apply applicable fixes to the filesystem (or
// print them to stdout, if we're reading from stdin).
// - If `--diff` or `--fix-only` are set, don't print any violations (only applicable fixes)
// - By default, applicable fixes only include [`Applicability::Automatic`], but if
// `--unsafe-fixes` is set, then [`Applicability::Suggested`] fixes are included.
// - By default, applicable fixes only include [`Applicablility::Automatic`], but if
// `--unsafe-fixes` is set, then [`Applicablility::Suggested`] fixes are included.

let fix_mode = if cli.diff {
FixMode::Diff
@@ -303,6 +292,13 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
if show_fixes {
printer_flags |= PrinterFlags::SHOW_FIX_SUMMARY;
}
if cli.ecosystem_ci {
warn_user!(
"The formatting of fixes emitted by this option is a work-in-progress, subject to \
change at any time, and intended only for internal use."
);
printer_flags |= PrinterFlags::SHOW_FIX_DIFF;
}

#[cfg(debug_assertions)]
if cache {
@@ -317,9 +313,9 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
}
let modifications =
commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?;
if modifications > 0 && config_arguments.log_level >= LogLevel::Default {
if modifications > 0 && log_level >= LogLevel::Default {
let s = if modifications == 1 { "" } else { "s" };
#[expect(clippy::print_stderr)]
#[allow(clippy::print_stderr)]
{
eprintln!("Added {modifications} noqa directive{s}.");
}
@@ -329,7 +325,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi

let printer = Printer::new(
output_format,
config_arguments.log_level,
log_level,
fix_mode,
unsafe_fixes,
printer_flags,
@@ -342,10 +338,10 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
let preview = pyproject_config.settings.linter.preview.is_enabled();

if cli.watch {
if output_format != OutputFormat::default() {
if output_format != SerializationFormat::default(preview) {
warn_user!(
"`--output-format {}` is always used in watch mode.",
OutputFormat::default()
SerializationFormat::default(preview)
);
}

@@ -381,13 +377,16 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
loop {
match rx.recv() {
Ok(event) => {
let Some(change_kind) = change_detected(&event?) else {
let Some(change_kind) = change_detected(&event?.paths) else {
continue;
};

if matches!(change_kind, ChangeKind::Configuration) {
pyproject_config =
resolve::resolve(&config_arguments, cli.stdin_filename.as_deref())?;
pyproject_config = resolve::resolve(
cli.isolated,
&config_arguments,
cli.stdin_filename.as_deref(),
)?;
}
Printer::clear_screen()?;
printer.write_to_user("File change detected...\n");
@@ -481,119 +480,79 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
mod test_file_change_detector {
use std::path::PathBuf;

use crate::{ChangeKind, change_detected};
use crate::{change_detected, ChangeKind};

#[test]
fn detect_correct_file_change() {
assert_eq!(
Some(ChangeKind::Configuration),
change_detected(¬ify::Event {
|
||||
kind: notify::EventKind::Create(notify::event::CreateKind::File),
|
||||
paths: vec![
|
||||
PathBuf::from("tmp/pyproject.toml"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
],
|
||||
attrs: notify::event::EventAttributes::default(),
|
||||
}),
|
||||
change_detected(&[
|
||||
PathBuf::from("tmp/pyproject.toml"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
]),
|
||||
);
|
||||
assert_eq!(
|
||||
Some(ChangeKind::Configuration),
|
||||
change_detected(¬ify::Event {
|
||||
kind: notify::EventKind::Create(notify::event::CreateKind::File),
|
||||
paths: vec![
|
||||
PathBuf::from("pyproject.toml"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
],
|
||||
attrs: notify::event::EventAttributes::default(),
|
||||
}),
|
||||
change_detected(&[
|
||||
PathBuf::from("pyproject.toml"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
]),
|
||||
);
|
||||
assert_eq!(
|
||||
Some(ChangeKind::Configuration),
|
||||
change_detected(¬ify::Event {
|
||||
kind: notify::EventKind::Create(notify::event::CreateKind::File),
|
||||
paths: vec![
|
||||
PathBuf::from("tmp1/tmp2/tmp3/pyproject.toml"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
],
|
||||
attrs: notify::event::EventAttributes::default(),
|
||||
}),
|
||||
change_detected(&[
|
||||
PathBuf::from("tmp1/tmp2/tmp3/pyproject.toml"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
]),
|
||||
);
|
||||
assert_eq!(
|
||||
Some(ChangeKind::Configuration),
|
||||
change_detected(¬ify::Event {
|
||||
kind: notify::EventKind::Create(notify::event::CreateKind::File),
|
||||
paths: vec![
|
||||
PathBuf::from("tmp/ruff.toml"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
],
|
||||
attrs: notify::event::EventAttributes::default(),
|
||||
}),
|
||||
change_detected(&[
|
||||
PathBuf::from("tmp/ruff.toml"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
]),
|
||||
);
|
||||
assert_eq!(
|
||||
Some(ChangeKind::Configuration),
|
||||
change_detected(¬ify::Event {
|
||||
kind: notify::EventKind::Create(notify::event::CreateKind::File),
|
||||
paths: vec![
|
||||
PathBuf::from("tmp/.ruff.toml"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
],
|
||||
attrs: notify::event::EventAttributes::default(),
|
||||
}),
|
||||
change_detected(&[
|
||||
PathBuf::from("tmp/.ruff.toml"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
]),
|
||||
);
|
||||
assert_eq!(
|
||||
Some(ChangeKind::SourceFile),
|
||||
change_detected(¬ify::Event {
|
||||
kind: notify::EventKind::Create(notify::event::CreateKind::File),
|
||||
paths: vec![
|
||||
PathBuf::from("tmp/rule.py"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
],
|
||||
attrs: notify::event::EventAttributes::default(),
|
||||
}),
|
||||
change_detected(&[
|
||||
PathBuf::from("tmp/rule.py"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
]),
|
||||
);
|
||||
assert_eq!(
|
||||
Some(ChangeKind::SourceFile),
|
||||
change_detected(¬ify::Event {
|
||||
kind: notify::EventKind::Create(notify::event::CreateKind::File),
|
||||
paths: vec![
|
||||
PathBuf::from("tmp/rule.pyi"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
],
|
||||
attrs: notify::event::EventAttributes::default(),
|
||||
}),
|
||||
change_detected(&[
|
||||
PathBuf::from("tmp/rule.pyi"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
]),
|
||||
);
|
||||
assert_eq!(
|
||||
Some(ChangeKind::Configuration),
|
||||
change_detected(¬ify::Event {
|
||||
kind: notify::EventKind::Create(notify::event::CreateKind::File),
|
||||
paths: vec![
|
||||
PathBuf::from("pyproject.toml"),
|
||||
PathBuf::from("tmp/rule.py"),
|
||||
],
|
||||
attrs: notify::event::EventAttributes::default(),
|
||||
}),
|
||||
change_detected(&[
|
||||
PathBuf::from("pyproject.toml"),
|
||||
PathBuf::from("tmp/rule.py"),
|
||||
]),
|
||||
);
|
||||
assert_eq!(
|
||||
Some(ChangeKind::Configuration),
|
||||
change_detected(¬ify::Event {
|
||||
kind: notify::EventKind::Create(notify::event::CreateKind::File),
|
||||
paths: vec![
|
||||
PathBuf::from("tmp/rule.py"),
|
||||
PathBuf::from("pyproject.toml"),
|
||||
],
|
||||
attrs: notify::event::EventAttributes::default(),
|
||||
}),
|
||||
change_detected(&[
|
||||
PathBuf::from("tmp/rule.py"),
|
||||
PathBuf::from("pyproject.toml"),
|
||||
]),
|
||||
);
|
||||
assert_eq!(
|
||||
None,
|
||||
change_detected(¬ify::Event {
|
||||
kind: notify::EventKind::Create(notify::event::CreateKind::File),
|
||||
paths: vec![
|
||||
PathBuf::from("tmp/rule.js"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
],
|
||||
attrs: notify::event::EventAttributes::default(),
|
||||
}),
|
||||
change_detected(&[
|
||||
PathBuf::from("tmp/rule.js"),
|
||||
PathBuf::from("tmp/bin/ruff.rs"),
|
||||
]),
|
||||
);
|
||||
}
|
||||
}
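
The expectations above pin down the contract of `change_detected`: any path whose file name is `pyproject.toml`, `ruff.toml`, or `.ruff.toml` signals a configuration change (taking precedence over anything else in the same event), `.py`/`.pyi` paths signal a source change, and everything else is ignored. A minimal sketch consistent with these tests (not the exact implementation) could look like:

```rust
use std::path::PathBuf;

#[derive(Debug, PartialEq, Eq)]
enum ChangeKind {
    Configuration,
    SourceFile,
}

// Sketch consistent with the tests above: configuration files win over
// source files when both appear in a single batch of changed paths.
fn change_detected(paths: &[PathBuf]) -> Option<ChangeKind> {
    let mut source_file = false;
    for path in paths {
        if matches!(
            path.file_name().and_then(|name| name.to_str()),
            Some("pyproject.toml" | "ruff.toml" | ".ruff.toml")
        ) {
            return Some(ChangeKind::Configuration);
        }
        if matches!(
            path.extension().and_then(|ext| ext.to_str()),
            Some("py" | "pyi")
        ) {
            source_file = true;
        }
    }
    source_file.then_some(ChangeKind::SourceFile)
}
```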

@@ -1,11 +1,10 @@
use std::io::Write;
use std::process::ExitCode;

use clap::Parser;
use clap::{Parser, Subcommand};
use colored::Colorize;

use ruff::args::Args;
use ruff::{ExitStatus, run};
use ruff::args::{Args, Command};
use ruff::{run, ExitStatus};

#[cfg(target_os = "windows")]
#[global_allocator]
@@ -14,8 +13,6 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
#[cfg(all(
not(target_os = "windows"),
not(target_os = "openbsd"),
not(target_os = "aix"),
not(target_os = "android"),
any(
target_arch = "x86_64",
target_arch = "aarch64",
@@ -26,51 +23,54 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;

pub fn main() -> ExitCode {
// Enabled ANSI colors on Windows 10.
#[cfg(windows)]
assert!(colored::control::set_virtual_terminal(true).is_ok());
let args = wild::args_os();
let mut args =
argfile::expand_args_from(args, argfile::parse_fromfile, argfile::PREFIX).unwrap();

// support FORCE_COLOR env var
if let Some(force_color) = std::env::var_os("FORCE_COLOR") {
if !force_color.is_empty() {
colored::control::set_override(true);
// Clap doesn't support default subcommands but we want to run `check` by
// default for convenience and backwards-compatibility, so we just
// preprocess the arguments accordingly before passing them to Clap.
if let Some(arg) = args.get(1) {
if arg
.to_str()
.is_some_and(|arg| !Command::has_subcommand(rewrite_legacy_subcommand(arg)))
&& arg != "-h"
&& arg != "--help"
&& arg != "-V"
&& arg != "--version"
&& arg != "help"
{
args.insert(1, "check".into());
}
}

let args = wild::args_os();
let args = argfile::expand_args_from(args, argfile::parse_fromfile, argfile::PREFIX).unwrap();

let args = Args::parse_from(args);

match run(args) {
Ok(code) => code.into(),
Err(err) => {
#[allow(clippy::print_stderr)]
{
// Exit "gracefully" on broken pipe errors.
//
// See: https://github.com/BurntSushi/ripgrep/blob/bf63fe8f258afc09bae6caa48f0ae35eaf115005/crates/core/main.rs#L47C1-L61C14
for cause in err.chain() {
if let Some(ioerr) = cause.downcast_ref::<std::io::Error>() {
if ioerr.kind() == std::io::ErrorKind::BrokenPipe {
return ExitCode::from(0);
}
}
}

// Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
let mut stderr = std::io::stderr().lock();

// This communicates that this isn't a linter error but ruff itself hard-errored for
// some reason (e.g. failed to resolve the configuration)
writeln!(stderr, "{}", "ruff failed".red().bold()).ok();
eprintln!("{}", "ruff failed".red().bold());
// Currently we generally only see one error, but e.g. with io errors when resolving
// the configuration it is help to chain errors ("resolving configuration failed" ->
// "failed to read file: subdir/pyproject.toml")
for cause in err.chain() {
writeln!(stderr, "  {} {cause}", "Cause:".bold()).ok();
eprintln!("  {} {cause}", "Cause:".bold());
}
}
ExitStatus::Error.into()
}
}
}

fn rewrite_legacy_subcommand(cmd: &str) -> &str {
match cmd {
"--explain" => "rule",
"--clean" => "clean",
"--generate-shell-completion" => "generate-shell-completion",
cmd => cmd,
}
}
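
The comment in `main` above describes the default-subcommand trick: before handing the arguments to Clap, peek at the first positional argument and, if it is neither a recognized subcommand (after legacy rewriting) nor a help/version flag, splice in `check`. A standalone sketch of that preprocessing (the subcommand list here is a stand-in for `Command::has_subcommand`, which the real code queries from Clap):

```rust
use std::ffi::OsString;

// Stand-in for `Command::has_subcommand`; the real set comes from Clap.
const KNOWN_SUBCOMMANDS: &[&str] = &["check", "format", "server", "analyze", "rule", "clean"];

// If the first argument is not a known subcommand or a help/version
// flag, insert `check` so `ruff path/` still means `ruff check path/`.
fn insert_default_subcommand(args: &mut Vec<OsString>) {
    if let Some(first) = args.get(1).and_then(|arg| arg.to_str()) {
        let is_subcommand = KNOWN_SUBCOMMANDS.contains(&first);
        let is_meta = matches!(first, "-h" | "--help" | "-V" | "--version" | "help");
        if !is_subcommand && !is_meta {
            args.insert(1, "check".into());
        }
    }
}
```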

46
crates/ruff/src/panic.rs
Normal file
@@ -0,0 +1,46 @@
#[derive(Default, Debug)]
pub(crate) struct PanicError {
pub(crate) info: String,
pub(crate) backtrace: Option<std::backtrace::Backtrace>,
}

impl std::fmt::Display for PanicError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
writeln!(f, "{}", self.info)?;
if let Some(backtrace) = &self.backtrace {
writeln!(f, "Backtrace: {backtrace}")
} else {
Ok(())
}
}
}

thread_local! {
static LAST_PANIC: std::cell::Cell<Option<PanicError>> = std::cell::Cell::new(None);
}

/// [`catch_unwind`](std::panic::catch_unwind) wrapper that sets a custom [`set_hook`](std::panic::set_hook)
/// to extract the backtrace. The original panic-hook gets restored before returning.
pub(crate) fn catch_unwind<F, R>(f: F) -> Result<R, PanicError>
where
F: FnOnce() -> R + std::panic::UnwindSafe,
{
let prev = std::panic::take_hook();
std::panic::set_hook(Box::new(|info| {
let info = info.to_string();
let backtrace = std::backtrace::Backtrace::force_capture();
LAST_PANIC.with(|cell| {
cell.set(Some(PanicError {
info,
backtrace: Some(backtrace),
}));
});
}));

let result = std::panic::catch_unwind(f)
.map_err(|_| LAST_PANIC.with(std::cell::Cell::take).unwrap_or_default());

std::panic::set_hook(prev);

result
}
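
As a usage sketch (a hypothetical call site, not part of the diff), the wrapper behaves like `std::panic::catch_unwind` but yields a printable payload with the captured backtrace instead of an opaque `Box<dyn Any>`:

```rust
// Hypothetical call site: run a fallible closure and report a panic as
// a formatted error (panic message plus backtrace) instead of aborting.
fn checked_work(input: &str) -> usize {
    match catch_unwind(|| input.len()) {
        Ok(len) => len,
        Err(panic) => {
            // `PanicError` implements `Display`, printing the panic info
            // followed by the captured backtrace.
            eprintln!("worker panicked:\n{panic}");
            0
        }
    }
}
```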
@@ -1,25 +1,24 @@
use std::cmp::Reverse;
use std::fmt::Display;
use std::hash::Hash;
use std::io::Write;

use anyhow::Result;
use bitflags::bitflags;
use colored::Colorize;
use itertools::{Itertools, iterate};
use ruff_linter::codes::NoqaCode;
use ruff_linter::linter::FixTable;
use itertools::{iterate, Itertools};
use serde::Serialize;

use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, Message, PylintEmitter, RdjsonEmitter,
SarifEmitter, TextEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, SarifEmitter, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::registry::{AsRule, Rule};
use ruff_linter::settings::flags::{self};
use ruff_linter::settings::types::{OutputFormat, UnsafeFixes};
use ruff_linter::settings::types::{SerializationFormat, UnsafeFixes};

use crate::diagnostics::{Diagnostics, FixMap};

@@ -27,24 +26,47 @@ bitflags! {
#[derive(Default, Debug, Copy, Clone)]
pub(crate) struct Flags: u8 {
/// Whether to show violations when emitting diagnostics.
const SHOW_VIOLATIONS = 1 << 0;
const SHOW_VIOLATIONS = 0b0000_0001;
/// Whether to show a summary of the fixed violations when emitting diagnostics.
const SHOW_FIX_SUMMARY = 1 << 1;
const SHOW_FIX_SUMMARY = 0b0000_0100;
/// Whether to show a diff of each fixed violation when emitting diagnostics.
const SHOW_FIX_DIFF = 1 << 2;
const SHOW_FIX_DIFF = 0b0000_1000;
}
}
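
The shift form and the old binary literals denote the same kind of single-bit masks; the rewrite also closes the gap the old constants left at bit 1 (`0b0000_0010` was unused). A quick equivalence check:

```rust
fn main() {
    assert_eq!(1u8 << 0, 0b0000_0001);
    assert_eq!(1u8 << 1, 0b0000_0010); // previously skipped
    assert_eq!(1u8 << 2, 0b0000_0100);
}
```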

#[derive(Serialize)]
struct ExpandedStatistics {
code: Option<NoqaCode>,
name: &'static str,
struct ExpandedStatistics<'a> {
code: SerializeRuleAsCode,
message: &'a str,
count: usize,
fixable: bool,
}

struct SerializeRuleAsCode(Rule);

impl Serialize for SerializeRuleAsCode {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.0.noqa_code().to_string())
}
}

impl Display for SerializeRuleAsCode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0.noqa_code())
}
}

impl From<Rule> for SerializeRuleAsCode {
fn from(rule: Rule) -> Self {
Self(rule)
}
}

pub(crate) struct Printer {
format: OutputFormat,
format: SerializationFormat,
log_level: LogLevel,
fix_mode: flags::FixMode,
unsafe_fixes: UnsafeFixes,
@@ -53,7 +75,7 @@ pub(crate) struct Printer {

impl Printer {
pub(crate) const fn new(
format: OutputFormat,
format: SerializationFormat,
log_level: LogLevel,
fix_mode: flags::FixMode,
unsafe_fixes: UnsafeFixes,
@@ -81,7 +103,7 @@ impl Printer {
let fixed = diagnostics
.fixed
.values()
.flat_map(FixTable::counts)
.flat_map(std::collections::HashMap::values)
.sum::<usize>();

if self.flags.intersects(Flags::SHOW_VIOLATIONS) {
@@ -96,24 +118,21 @@ impl Printer {
} else if remaining > 0 {
let s = if remaining == 1 { "" } else { "s" };
writeln!(writer, "Found {remaining} error{s}.")?;
} else if remaining == 0 {
writeln!(writer, "All checks passed!")?;
}

if let Some(fixables) = fixables {
let fix_prefix = format!("[{}]", "*".cyan());

if self.unsafe_fixes.is_hint() {
if fixables.applicable > 0 && fixables.inapplicable_unsafe > 0 {
let es = if fixables.inapplicable_unsafe == 1 {
if fixables.applicable > 0 && fixables.unapplicable_unsafe > 0 {
let es = if fixables.unapplicable_unsafe == 1 {
""
} else {
"es"
};
writeln!(
writer,
writeln!(writer,
"{fix_prefix} {} fixable with the `--fix` option ({} hidden fix{es} can be enabled with the `--unsafe-fixes` option).",
fixables.applicable, fixables.inapplicable_unsafe
fixables.applicable, fixables.unapplicable_unsafe
)?;
} else if fixables.applicable > 0 {
// Only applicable fixes
@@ -123,16 +142,15 @@ impl Printer {
fixables.applicable,
)?;
} else {
// Only inapplicable fixes
let es = if fixables.inapplicable_unsafe == 1 {
// Only unapplicable fixes
let es = if fixables.unapplicable_unsafe == 1 {
""
} else {
"es"
};
writeln!(
writer,
writeln!(writer,
"No fixes available ({} hidden fix{es} can be enabled with the `--unsafe-fixes` option).",
fixables.inapplicable_unsafe
fixables.unapplicable_unsafe
)?;
}
} else {
@@ -149,7 +167,7 @@ impl Printer {
// Check if there are unapplied fixes
let unapplied = {
if let Some(fixables) = fixables {
fixables.inapplicable_unsafe
fixables.unapplicable_unsafe
} else {
0
}
@@ -160,27 +178,15 @@ impl Printer {
if fixed > 0 {
let s = if fixed == 1 { "" } else { "s" };
if self.fix_mode.is_apply() {
writeln!(
writer,
"Fixed {fixed} error{s} ({unapplied} additional fix{es} available with `--unsafe-fixes`)."
)?;
writeln!(writer, "Fixed {fixed} error{s} ({unapplied} additional fix{es} available with `--unsafe-fixes`).")?;
} else {
writeln!(
writer,
"Would fix {fixed} error{s} ({unapplied} additional fix{es} available with `--unsafe-fixes`)."
)?;
writeln!(writer, "Would fix {fixed} error{s} ({unapplied} additional fix{es} available with `--unsafe-fixes`).")?;
}
} else {
if self.fix_mode.is_apply() {
writeln!(
writer,
"No errors fixed ({unapplied} fix{es} available with `--unsafe-fixes`)."
)?;
writeln!(writer, "No errors fixed ({unapplied} fix{es} available with `--unsafe-fixes`).")?;
} else {
writeln!(
writer,
"No errors would be fixed ({unapplied} fix{es} available with `--unsafe-fixes`)."
)?;
writeln!(writer, "No errors would be fixed ({unapplied} fix{es} available with `--unsafe-fixes`).")?;
}
}
} else {
@@ -210,7 +216,10 @@ impl Printer {
if !self.flags.intersects(Flags::SHOW_VIOLATIONS) {
if matches!(
self.format,
OutputFormat::Full | OutputFormat::Concise | OutputFormat::Grouped
SerializationFormat::Text
| SerializationFormat::Full
| SerializationFormat::Concise
| SerializationFormat::Grouped
) {
if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
if !diagnostics.fixed.is_empty() {
@@ -228,23 +237,21 @@ impl Printer {
let fixables = FixableStatistics::try_from(diagnostics, self.unsafe_fixes);

match self.format {
OutputFormat::Json => {
SerializationFormat::Json => {
JsonEmitter.emit(writer, &diagnostics.messages, &context)?;
}
OutputFormat::Rdjson => {
RdjsonEmitter.emit(writer, &diagnostics.messages, &context)?;
}
OutputFormat::JsonLines => {
SerializationFormat::JsonLines => {
JsonLinesEmitter.emit(writer, &diagnostics.messages, &context)?;
}
OutputFormat::Junit => {
SerializationFormat::Junit => {
JunitEmitter.emit(writer, &diagnostics.messages, &context)?;
}
OutputFormat::Concise | OutputFormat::Full => {
SerializationFormat::Concise
| SerializationFormat::Full => {
TextEmitter::default()
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
.with_show_fix_diff(self.flags.intersects(Flags::SHOW_FIX_DIFF))
.with_show_source(self.format == OutputFormat::Full)
.with_show_source(self.format == SerializationFormat::Full)
.with_unsafe_fixes(self.unsafe_fixes)
.emit(writer, &diagnostics.messages, &context)?;

@@ -258,7 +265,7 @@ impl Printer {

self.write_summary_text(writer, diagnostics)?;
}
OutputFormat::Grouped => {
SerializationFormat::Grouped => {
GroupedEmitter::default()
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
.with_unsafe_fixes(self.unsafe_fixes)
@@ -273,21 +280,22 @@ impl Printer {
}
self.write_summary_text(writer, diagnostics)?;
}
OutputFormat::Github => {
SerializationFormat::Github => {
GithubEmitter.emit(writer, &diagnostics.messages, &context)?;
}
OutputFormat::Gitlab => {
SerializationFormat::Gitlab => {
GitlabEmitter::default().emit(writer, &diagnostics.messages, &context)?;
}
OutputFormat::Pylint => {
SerializationFormat::Pylint => {
PylintEmitter.emit(writer, &diagnostics.messages, &context)?;
}
OutputFormat::Azure => {
SerializationFormat::Azure => {
AzureEmitter.emit(writer, &diagnostics.messages, &context)?;
}
OutputFormat::Sarif => {
SerializationFormat::Sarif => {
SarifEmitter.emit(writer, &diagnostics.messages, &context)?;
}
SerializationFormat::Text => unreachable!("Text is deprecated and should have been automatically converted to the default serialization format")
}

writer.flush()?;
@@ -303,31 +311,30 @@ impl Printer {
let statistics: Vec<ExpandedStatistics> = diagnostics
.messages
.iter()
.map(|message| (message.noqa_code(), message))
.sorted_by_key(|(code, message)| (*code, message.fixable()))
.fold(
vec![],
|mut acc: Vec<((Option<NoqaCode>, &Message), usize)>, (code, message)| {
if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
if *prev_code == code {
*count += 1;
return acc;
}
.map(|message| {
(
message.kind.rule(),
&message.kind.body,
message.fix.is_some(),
)
})
.sorted()
.fold(vec![], |mut acc, (rule, body, fixable)| {
if let Some((prev_rule, _, _, count)) = acc.last_mut() {
if *prev_rule == rule {
*count += 1;
return acc;
}
acc.push(((code, message), 1));
acc
},
)
}
acc.push((rule, body, fixable, 1));
acc
})
.iter()
.map(|&((code, message), count)| ExpandedStatistics {
code,
name: message.name(),
count,
fixable: if let Some(fix) = message.fix() {
fix.applies(self.unsafe_fixes.required_applicability())
} else {
false
},
.map(|(rule, message, fixable, count)| ExpandedStatistics {
code: (*rule).into(),
count: *count,
message,
fixable: *fixable,
})
.sorted_by_key(|statistic| Reverse(statistic.count))
.collect();
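
Both sides of this hunk implement the same aggregation: a run-length count over pre-sorted keys, pushing a new row when the key changes and bumping the trailing count otherwise. A sketch of that shape on plain data (not Ruff's message types):

```rust
// After sorting, equal keys are adjacent, so one pass with a running
// counter yields one (key, count) row per distinct key.
fn count_runs(mut codes: Vec<&str>) -> Vec<(&str, usize)> {
    codes.sort_unstable();
    let mut rows: Vec<(&str, usize)> = Vec::new();
    for code in codes {
        match rows.last_mut() {
            Some((prev, count)) if *prev == code => *count += 1,
            _ => rows.push((code, 1)),
        }
    }
    rows
}

fn main() {
    let rows = count_runs(vec!["E741", "F401", "E741", "F401", "F401"]);
    assert_eq!(rows, vec![("E741", 2), ("F401", 3)]);
}
```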
@@ -337,7 +344,9 @@ impl Printer {
}

match self.format {
OutputFormat::Full | OutputFormat::Concise => {
SerializationFormat::Text
| SerializationFormat::Full
| SerializationFormat::Concise => {
// Compute the maximum number of digits in the count and code, for all messages,
// to enable pretty-printing.
let count_width = num_digits(
@@ -349,12 +358,7 @@ impl Printer {
);
let code_width = statistics
.iter()
.map(|statistic| {
statistic
.code
.map_or_else(String::new, |rule| rule.to_string())
.len()
})
.map(|statistic| statistic.code.to_string().len())
.max()
.unwrap();
let any_fixable = statistics.iter().any(|statistic| statistic.fixable);
@@ -368,11 +372,7 @@ impl Printer {
writer,
"{:>count_width$}\t{:<code_width$}\t{}{}",
statistic.count.to_string().bold(),
statistic
.code
.map_or_else(String::new, |rule| rule.to_string())
.red()
.bold(),
statistic.code.to_string().red().bold(),
if any_fixable {
if statistic.fixable {
&fixable
@@ -382,14 +382,12 @@ impl Printer {
} else {
""
},
statistic.name,
statistic.message,
)?;
}

self.write_summary_text(writer, diagnostics)?;
return Ok(());
}
OutputFormat::Json => {
SerializationFormat::Json => {
writeln!(writer, "{}", serde_json::to_string_pretty(&statistics)?)?;
}
_ => {
@@ -473,13 +471,13 @@ fn show_fix_status(fix_mode: flags::FixMode, fixables: Option<&FixableStatistics
fn print_fix_summary(writer: &mut dyn Write, fixed: &FixMap) -> Result<()> {
let total = fixed
.values()
.map(|table| table.counts().sum::<usize>())
.map(|table| table.values().sum::<usize>())
.sum::<usize>();
assert!(total > 0);
let num_digits = num_digits(
fixed
*fixed
.values()
.filter_map(|table| table.counts().max())
.filter_map(|table| table.values().max())
.max()
.unwrap(),
);
@@ -499,11 +497,12 @@ fn print_fix_summary(writer: &mut dyn Write, fixed: &FixMap) -> Result<()> {
relativize_path(filename).bold(),
":".cyan()
)?;
for (code, name, count) in table.iter().sorted_by_key(|(.., count)| Reverse(*count)) {
for (rule, count) in table.iter().sorted_by_key(|(.., count)| Reverse(*count)) {
writeln!(
writer,
"    {count:>num_digits$} × {code} ({name})",
code = code.to_string().red().bold(),
"    {count:>num_digits$} × {} ({})",
rule.noqa_code().to_string().red().bold(),
rule.as_ref(),
)?;
}
}
@@ -514,33 +513,33 @@ fn print_fix_summary(writer: &mut dyn Write, fixed: &FixMap) -> Result<()> {
#[derive(Debug)]
struct FixableStatistics {
applicable: u32,
inapplicable_unsafe: u32,
unapplicable_unsafe: u32,
}

impl FixableStatistics {
fn try_from(diagnostics: &Diagnostics, unsafe_fixes: UnsafeFixes) -> Option<Self> {
let mut applicable = 0;
let mut inapplicable_unsafe = 0;
let mut unapplicable_unsafe = 0;

for message in &diagnostics.messages {
if let Some(fix) = message.fix() {
if let Some(fix) = &message.fix {
if fix.applies(unsafe_fixes.required_applicability()) {
applicable += 1;
} else {
// Do not include inapplicable fixes at other levels that do not provide an opt-in
// Do not include unapplicable fixes at other levels that do not provide an opt-in
if fix.applicability().is_unsafe() {
inapplicable_unsafe += 1;
unapplicable_unsafe += 1;
}
}
}
}

if applicable == 0 && inapplicable_unsafe == 0 {
if applicable == 0 && unapplicable_unsafe == 0 {
None
} else {
Some(Self {
applicable,
inapplicable_unsafe,
unapplicable_unsafe,
})
}
}

@@ -1,34 +1,29 @@
use std::path::Path;
use std::path::{Path, PathBuf};

use anyhow::{Result, bail};
use anyhow::Result;
use log::debug;
use path_absolutize::path_dedot;

use ruff_workspace::configuration::Configuration;
use ruff_workspace::pyproject::{self, find_fallback_target_version};
use ruff_workspace::pyproject;
use ruff_workspace::resolver::{
ConfigurationOrigin, ConfigurationTransformer, PyprojectConfig, PyprojectDiscoveryStrategy,
resolve_root_settings,
resolve_root_settings, ConfigurationTransformer, PyprojectConfig, PyprojectDiscoveryStrategy,
Relativity,
};

use ruff_python_ast as ast;

use crate::args::ConfigArguments;

/// Resolve the relevant settings strategy and defaults for the current
/// invocation.
pub fn resolve(
isolated: bool,
config_arguments: &ConfigArguments,
stdin_filename: Option<&Path>,
) -> Result<PyprojectConfig> {
let Ok(cwd) = std::env::current_dir() else {
bail!("Working directory does not exist")
};

// First priority: if we're running in isolated mode, use the default settings.
if config_arguments.isolated {
if isolated {
let config = config_arguments.transform(Configuration::default());
let settings = config.into_settings(&cwd)?;
let settings = config.into_settings(&path_dedot::CWD)?;
debug!("Isolated mode, not reading any pyproject.toml");
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Fixed,
@@ -40,12 +35,13 @@ pub fn resolve(
// Second priority: the user specified a `pyproject.toml` file. Use that
// `pyproject.toml` for _all_ configuration, and resolve paths relative to the
// current working directory. (This matches ESLint's behavior.)
if let Some(pyproject) = config_arguments.config_file() {
let settings = resolve_root_settings(
pyproject,
config_arguments,
ConfigurationOrigin::UserSpecified,
)?;
if let Some(pyproject) = config_arguments
.config_file()
.map(|config| config.display().to_string())
.map(|config| shellexpand::full(&config).map(|config| PathBuf::from(config.as_ref())))
.transpose()?
{
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, config_arguments)?;
debug!(
"Using user-specified configuration file at: {}",
pyproject.display()
@@ -53,7 +49,7 @@ pub fn resolve(
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Fixed,
settings,
Some(pyproject.to_path_buf()),
Some(pyproject),
));
}

@@ -62,13 +58,16 @@ pub fn resolve(
// that directory. (With `Strategy::Hierarchical`, we'll end up finding
// the "closest" `pyproject.toml` file for every Python file later on,
// so these act as the "default" settings.)
if let Some(pyproject) = pyproject::find_settings_toml(stdin_filename.unwrap_or(&cwd))? {
if let Some(pyproject) = pyproject::find_settings_toml(
stdin_filename
.as_ref()
.unwrap_or(&path_dedot::CWD.as_path()),
)? {
debug!(
"Using configuration file (via parent) at: {}",
pyproject.display()
);
let settings =
resolve_root_settings(&pyproject, config_arguments, ConfigurationOrigin::Ancestor)?;
let settings = resolve_root_settings(&pyproject, Relativity::Parent, config_arguments)?;
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
settings,
@@ -81,35 +80,11 @@ pub fn resolve(
// end up the "closest" `pyproject.toml` file for every Python file later on, so
// these act as the "default" settings.)
if let Some(pyproject) = pyproject::find_user_settings_toml() {
struct FallbackTransformer<'a> {
arguments: &'a ConfigArguments,
}

impl ConfigurationTransformer for FallbackTransformer<'_> {
fn transform(&self, mut configuration: Configuration) -> Configuration {
// The `requires-python` constraint from the `pyproject.toml` takes precedence
// over the `target-version` from the user configuration.
let fallback = find_fallback_target_version(&*path_dedot::CWD);
if let Some(fallback) = fallback {
debug!("Derived `target-version` from found `requires-python`: {fallback:?}");
configuration.target_version = Some(fallback.into());
}

self.arguments.transform(configuration)
}
}

debug!(
"Using configuration file (via cwd) at: {}",
pyproject.display()
);
let settings = resolve_root_settings(
&pyproject,
&FallbackTransformer {
arguments: config_arguments,
},
ConfigurationOrigin::UserSettings,
)?;
let settings = resolve_root_settings(&pyproject, Relativity::Cwd, config_arguments)?;
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
settings,
@@ -122,21 +97,8 @@ pub fn resolve(
// "closest" `pyproject.toml` file for every Python file later on, so these act
// as the "default" settings.)
debug!("Using Ruff default settings")
let mut config = config_arguments.transform(Configuration::default());
if config.target_version.is_none() {
// If we have arrived here we know that there was no `pyproject.toml`
// containing a `[tool.ruff]` section found in an ancestral directory.
// (This is an implicit requirement in the function
// `pyproject::find_settings_toml`.)
// However, there may be a `pyproject.toml` with a `requires-python`
// specified, and that is what we look for in this step.
let fallback = find_fallback_target_version(stdin_filename.unwrap_or(&cwd));
if let Some(version) = fallback {
debug!("Derived `target-version` from found `requires-python`: {version:?}");
}
config.target_version = fallback.map(ast::PythonVersion::from);
}
let settings = config.into_settings(&cwd)?;
let config = config_arguments.transform(Configuration::default());
let settings = config.into_settings(&path_dedot::CWD)?;
Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
settings,
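
One side of the hunks above feeds the user-supplied config path through `shellexpand::full`, which expands both `~` and environment-variable references before the path is used. A small standalone sketch of that step (assuming the `shellexpand` crate):

```rust
use std::path::PathBuf;

// Expand `~` and `$VAR` references in a user-supplied config path, as
// in the branch above. `shellexpand::full` returns a Cow<str>.
fn expand_config_path(raw: &str) -> Result<PathBuf, Box<dyn std::error::Error>> {
    let expanded = shellexpand::full(raw)?;
    Ok(PathBuf::from(expanded.as_ref()))
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // `~` expands to the home directory; `$VAR` references are looked
    // up in the environment.
    let path = expand_config_path("~/.config/ruff/ruff.toml")?;
    println!("{}", path.display());
    Ok(())
}
```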

@@ -1,6 +1,5 @@
---
source: crates/ruff/src/version.rs
expression: version
snapshot_kind: text
---
0.0.0

@@ -1,6 +1,5 @@
---
source: crates/ruff/src/version.rs
expression: version
snapshot_kind: text
---
0.0.0 (53b0f5d92 2023-10-19)

@@ -1,6 +1,5 @@
---
source: crates/ruff/src/version.rs
expression: version
snapshot_kind: text
---
0.0.0+24 (53b0f5d92 2023-10-19)

@@ -1,7 +1,6 @@
---
source: crates/ruff/src/version.rs
expression: version
snapshot_kind: text
---
{
"version": "0.0.0",

@@ -70,7 +70,7 @@ pub(crate) fn version() -> VersionInfo {

#[cfg(test)]
mod tests {
use insta::{assert_json_snapshot, assert_snapshot};
use insta::{assert_display_snapshot, assert_json_snapshot};

use super::{CommitInfo, VersionInfo};

@@ -80,7 +80,7 @@ mod tests {
version: "0.0.0".to_string(),
commit_info: None,
};
assert_snapshot!(version);
assert_display_snapshot!(version);
}

#[test]
@@ -95,7 +95,7 @@ mod tests {
commits_since_last_tag: 0,
}),
};
assert_snapshot!(version);
assert_display_snapshot!(version);
}

#[test]
@@ -110,7 +110,7 @@ mod tests {
commits_since_last_tag: 24,
}),
};
assert_snapshot!(version);
assert_display_snapshot!(version);
}

#[test]

@@ -1,574 +0,0 @@
//! Tests the interaction of the `analyze graph` command.

#![cfg(not(target_arch = "wasm32"))]
#![cfg(not(windows))]

use assert_fs::prelude::*;
use std::process::Command;
use std::str;

use anyhow::Result;
use assert_fs::fixture::ChildPath;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use tempfile::TempDir;

fn command() -> Command {
let mut command = Command::new(get_cargo_bin("ruff"));
command.arg("analyze");
command.arg("graph");
command.arg("--preview");
command
}

const INSTA_FILTERS: &[(&str, &str)] = &[
// Rewrite Windows output to Unix output
(r"\\", "/"),
];

#[test]
fn dependencies() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());

root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
import ruff.b
"#})?;
root.child("ruff")
.child("b.py")
.write_str(indoc::indoc! {r#"
from ruff import c
"#})?;
root.child("ruff")
.child("c.py")
.write_str(indoc::indoc! {r#"
from . import d
"#})?;
root.child("ruff")
.child("d.py")
.write_str(indoc::indoc! {r#"
from .e import f
"#})?;
root.child("ruff")
.child("e.py")
.write_str(indoc::indoc! {r#"
def f(): pass
"#})?;

insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": [
"ruff/d.py"
],
"ruff/d.py": [
"ruff/e.py"
],
"ruff/e.py": []
}

----- stderr -----
"###);
});

Ok(())
}

#[test]
fn dependents() -> Result<()> {
let tempdir = TempDir::new()?;

let root = ChildPath::new(tempdir.path());

root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
import ruff.b
"#})?;
root.child("ruff")
.child("b.py")
.write_str(indoc::indoc! {r#"
from ruff import c
"#})?;
root.child("ruff")
.child("c.py")
.write_str(indoc::indoc! {r#"
from . import d
"#})?;
root.child("ruff")
.child("d.py")
.write_str(indoc::indoc! {r#"
from .e import f
"#})?;
root.child("ruff")
.child("e.py")
.write_str(indoc::indoc! {r#"
def f(): pass
"#})?;

insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--direction").arg("dependents").current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [],
"ruff/b.py": [
"ruff/a.py"
],
"ruff/c.py": [
"ruff/b.py"
],
"ruff/d.py": [
"ruff/c.py"
],
"ruff/e.py": [
"ruff/d.py"
]
}

----- stderr -----
"###);
});

Ok(())
}

#[test]
fn string_detection() -> Result<()> {
let tempdir = TempDir::new()?;

let root = ChildPath::new(tempdir.path());

root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
import ruff.b
"#})?;
root.child("ruff")
.child("b.py")
.write_str(indoc::indoc! {r#"
import importlib

importlib.import_module("ruff.c")
"#})?;
root.child("ruff").child("c.py").write_str("")?;

insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [],
"ruff/c.py": []
}

----- stderr -----
"###);
});

insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": []
}

----- stderr -----
"###);
});

Ok(())
}

#[test]
fn globs() -> Result<()> {
let tempdir = TempDir::new()?;

let root = ChildPath::new(tempdir.path());

root.child("ruff.toml").write_str(indoc::indoc! {r#"
[analyze]
include-dependencies = { "ruff/a.py" = ["ruff/b.py"], "ruff/b.py" = ["ruff/*.py"], "ruff/c.py" = ["*.json"] }
"#})?;

root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff").child("a.py").write_str("")?;
root.child("ruff").child("b.py").write_str("")?;
root.child("ruff").child("c.py").write_str("")?;
root.child("ruff").child("d.json").write_str("")?;

insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/__init__.py",
"ruff/a.py",
"ruff/b.py",
"ruff/c.py"
],
"ruff/c.py": [
"ruff/d.json"
]
}

----- stderr -----
"###);
});

Ok(())
}

#[test]
fn exclude() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());

root.child("ruff.toml").write_str(indoc::indoc! {r#"
[analyze]
exclude = ["ruff/c.py"]
"#})?;

root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
import ruff.b
"#})?;
root.child("ruff").child("b.py").write_str("")?;
root.child("ruff").child("c.py").write_str("")?;

insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": []
}

----- stderr -----
"###);
});

Ok(())
}

#[test]
fn wildcard() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());

root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
from ruff.b import *
"#})?;
root.child("ruff")
.child("b.py")
.write_str(indoc::indoc! {r#"
from ruff import c
"#})?;
root.child("ruff")
.child("c.py")
.write_str(indoc::indoc! {r#"
from ruff.utils import *
"#})?;

root.child("ruff")
.child("utils")
.child("__init__.py")
.write_str("from .helpers import *")?;
root.child("ruff")
.child("utils")
.child("helpers.py")
.write_str("")?;

insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": [
"ruff/utils/__init__.py"
],
"ruff/utils/__init__.py": [
"ruff/utils/helpers.py"
],
"ruff/utils/helpers.py": []
}

----- stderr -----
"###);
});

Ok(())
}

#[test]
fn nested_imports() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());

root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
match x:
case 1:
import ruff.b
"#})?;
root.child("ruff")
.child("b.py")
.write_str(indoc::indoc! {r#"
try:
import ruff.c
except ImportError as e:
import ruff.d
"#})?;
root.child("ruff")
.child("c.py")
.write_str(indoc::indoc! {r#"def c(): ..."#})?;
root.child("ruff")
.child("d.py")
.write_str(indoc::indoc! {r#"def d(): ..."#})?;

insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py",
"ruff/d.py"
],
"ruff/c.py": [],
"ruff/d.py": []
}

----- stderr -----
"#);
});

Ok(())
}

/// Test for venv resolution with the `--python` flag.
///
/// Based on the [albatross-virtual-workspace] example from the uv repo and the report in [#16598].
///
/// [albatross-virtual-workspace]: https://github.com/astral-sh/uv/tree/aa629c4a/scripts/workspaces/albatross-virtual-workspace
/// [#16598]: https://github.com/astral-sh/ruff/issues/16598
#[test]
fn venv() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());

// packages
// ├── albatross
// │ ├── check_installed_albatross.py
// │ ├── pyproject.toml
// │ └── src
// │ └── albatross
// │ └── __init__.py
// └── bird-feeder
// ├── check_installed_bird_feeder.py
// ├── pyproject.toml
// └── src
// └── bird_feeder
// └── __init__.py

let packages = root.child("packages");

let albatross = packages.child("albatross");
albatross
.child("check_installed_albatross.py")
.write_str("from albatross import fly")?;
albatross
.child("pyproject.toml")
.write_str(indoc::indoc! {r#"
[project]
name = "albatross"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["bird-feeder", "tqdm>=4,<5"]

[tool.uv.sources]
bird-feeder = { workspace = true }
"#})?;
albatross
.child("src")
.child("albatross")
.child("__init__.py")
.write_str("import tqdm; from bird_feeder import use")?;

let bird_feeder = packages.child("bird-feeder");
bird_feeder
.child("check_installed_bird_feeder.py")
.write_str("from bird_feeder import use; from albatross import fly")?;
bird_feeder
.child("pyproject.toml")
.write_str(indoc::indoc! {r#"
[project]
name = "bird-feeder"
version = "1.0.0"
requires-python = ">=3.12"
dependencies = ["anyio>=4.3.0,<5"]
"#})?;
bird_feeder
.child("src")
.child("bird_feeder")
.child("__init__.py")
.write_str("import anyio")?;

let venv = root.child(".venv");
let bin = venv.child("bin");
bin.child("python").touch()?;
let home = format!("home = {}", bin.to_string_lossy());
venv.child("pyvenv.cfg").write_str(&home)?;
let site_packages = venv.child("lib").child("python3.12").child("site-packages");
site_packages
.child("_albatross.pth")
.write_str(&albatross.join("src").to_string_lossy())?;
site_packages
.child("_bird_feeder.pth")
.write_str(&bird_feeder.join("src").to_string_lossy())?;
site_packages.child("tqdm").child("__init__.py").touch()?;

// without `--python .venv`, the result should only include dependencies within the albatross
// package
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(
command().arg("packages/albatross").current_dir(&root),
@r#"
success: true
exit_code: 0
----- stdout -----
{
"packages/albatross/check_installed_albatross.py": [
"packages/albatross/src/albatross/__init__.py"
],
"packages/albatross/src/albatross/__init__.py": []
}

----- stderr -----
"#);
});

// with `--python .venv` both workspace and third-party dependencies are included
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(
command().args(["--python", ".venv"]).arg("packages/albatross").current_dir(&root),
@r#"
success: true
exit_code: 0
----- stdout -----
{
"packages/albatross/check_installed_albatross.py": [
"packages/albatross/src/albatross/__init__.py"
],
"packages/albatross/src/albatross/__init__.py": [
".venv/lib/python3.12/site-packages/tqdm/__init__.py",
"packages/bird-feeder/src/bird_feeder/__init__.py"
]
}

----- stderr -----
"#);
});

// test the error message for a non-existent venv. it's important that the `ruff analyze graph`
// flag matches the ty flag used to generate the error message (`--python`)
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(
command().args(["--python", "none"]).arg("packages/albatross").current_dir(&root),
@r"
success: false
exit_code: 2
----- stdout -----

----- stderr -----
ruff failed
Cause: Invalid search path settings
Cause: Failed to discover the site-packages directory: Invalid `--python` argument `none`: does not point to a Python executable or a directory on disk
");
});

Ok(())
}
@@ -1,56 +0,0 @@
//! Tests for the `ruff config` subcommand.
use std::process::Command;

use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};

const BIN_NAME: &str = "ruff";

#[test]
fn lint_select() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME)).arg("config").arg("lint.select"), @r#"
success: true
exit_code: 0
----- stdout -----
A list of rule codes or prefixes to enable. Prefixes can specify exact
rules (like `F841`), entire categories (like `F`), or anything in
between.

When breaking ties between enabled and disabled rules (via `select` and
`ignore`, respectively), more specific prefixes override less
specific prefixes. `ignore` takes precedence over `select` if the
same prefix appears in both.

Default value: ["E4", "E7", "E9", "F"]
Type: list[RuleSelector]
Example usage:
```toml
# On top of the defaults (`E4`, E7`, `E9`, and `F`), enable flake8-bugbear (`B`) and flake8-quotes (`Q`).
select = ["E4", "E7", "E9", "F", "B", "Q"]
```

----- stderr -----
"#
);
}

#[test]
fn lint_select_json() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME)).arg("config").arg("lint.select").arg("--output-format").arg("json"), @r##"
success: true
exit_code: 0
----- stdout -----
{
"doc": "A list of rule codes or prefixes to enable. Prefixes can specify exact\nrules (like `F841`), entire categories (like `F`), or anything in\nbetween.\n\nWhen breaking ties between enabled and disabled rules (via `select` and\n`ignore`, respectively), more specific prefixes override less\nspecific prefixes. `ignore` takes precedence over `select` if the\nsame prefix appears in both.",
"default": "[\"E4\", \"E7\", \"E9\", \"F\"]",
"value_type": "list[RuleSelector]",
"scope": null,
"example": "# On top of the defaults (`E4`, E7`, `E9`, and `F`), enable flake8-bugbear (`B`) and flake8-quotes (`Q`).\nselect = [\"E4\", \"E7\", \"E9\", \"F\", \"B\", \"Q\"]",
"deprecated": null
}

----- stderr -----
"##
);
}
149
crates/ruff/tests/deprecation.rs
Normal file
@@ -0,0 +1,149 @@
//! A test suite that ensures deprecated command line options have appropriate warnings / behaviors

use ruff_linter::settings::types::SerializationFormat;
use std::process::Command;

use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};

const BIN_NAME: &str = "ruff";

const STDIN: &str = "l = 1";

fn ruff_check(show_source: Option<bool>, output_format: Option<String>) -> Command {
let mut cmd = Command::new(get_cargo_bin(BIN_NAME));
let output_format = output_format.unwrap_or(format!("{}", SerializationFormat::default(false)));
cmd.arg("--output-format");
cmd.arg(output_format);
cmd.arg("--no-cache");
match show_source {
Some(true) => {
cmd.arg("--show-source");
}
Some(false) => {
cmd.arg("--no-show-source");
}
None => {}
}
cmd.arg("-");

cmd
}

#[test]
fn ensure_show_source_is_deprecated() {
assert_cmd_snapshot!(ruff_check(Some(true), None).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.

----- stderr -----
warning: The `--show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
"###);
}

#[test]
fn ensure_no_show_source_is_deprecated() {
assert_cmd_snapshot!(ruff_check(Some(false), None).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.

----- stderr -----
warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
"###);
}

#[test]
fn ensure_output_format_is_deprecated() {
assert_cmd_snapshot!(ruff_check(None, Some("text".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.

----- stderr -----
warning: `--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.
"###);
}

#[test]
fn ensure_output_format_overrides_show_source() {
assert_cmd_snapshot!(ruff_check(Some(true), Some("concise".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.

----- stderr -----
warning: The `--show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
"###);
}

#[test]
fn ensure_full_output_format_overrides_no_show_source() {
assert_cmd_snapshot!(ruff_check(Some(false), Some("full".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
|
1 | l = 1
| ^ E741
|

Found 1 error.

----- stderr -----
warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=full`.
"###);
}

#[test]
fn ensure_output_format_uses_concise_over_no_show_source() {
assert_cmd_snapshot!(ruff_check(Some(false), Some("concise".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.

----- stderr -----
warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=concise`.
"###);
}

#[test]
fn ensure_deprecated_output_format_overrides_show_source() {
assert_cmd_snapshot!(ruff_check(Some(true), Some("text".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.

----- stderr -----
warning: The `--show-source` argument is deprecated and has been ignored in favor of `--output-format=text`.
warning: `--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.
"###);
}

#[test]
fn ensure_deprecated_output_format_overrides_no_show_source() {
assert_cmd_snapshot!(ruff_check(Some(false), Some("text".into())).pass_stdin(STDIN), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:1: E741 Ambiguous variable name: `l`
Found 1 error.

----- stderr -----
warning: The `--no-show-source` argument is deprecated and has been ignored in favor of `--output-format=text`.
warning: `--output-format=text` is deprecated. Use `--output-format=full` or `--output-format=concise` instead. `text` will be treated as `concise`.
"###);
}
|
||||
@@ -1,30 +0,0 @@
//! Test to verify Ruff's behavior when run from a deleted directory.
//! It has to be isolated in a separate module.
//! Tests in the same module become flaky under `cargo test`'s parallel execution
//! due to in-test working directory manipulation.

#![cfg(target_family = "unix")]

use std::env::set_current_dir;
use std::process::Command;

use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
const BIN_NAME: &str = "ruff";

#[test]
fn check_in_deleted_directory_errors() {
    let temp_dir = tempfile::tempdir().unwrap();
    let temp_path = temp_dir.path().to_path_buf();
    set_current_dir(&temp_path).unwrap();
    drop(temp_dir);

    assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)).arg("check"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    ruff failed
      Cause: Working directory does not exist
    "###);
}
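
The isolation requirement in the module comment above is worth spelling out. A minimal sketch — not part of this diff — of why in-test working-directory manipulation is racy: the cwd is process-global state, so two parallel test threads calling `set_current_dir` can observe each other's changes.

```rust
use std::env::{current_dir, set_current_dir};
use std::thread;

fn main() {
    // Two threads mutate the single process-wide working directory.
    let a = thread::spawn(|| {
        set_current_dir("/tmp").unwrap();
        // By the time we read it back, the other thread may have changed it.
        println!("thread A sees {:?}", current_dir().unwrap());
    });
    let b = thread::spawn(|| {
        set_current_dir("/").unwrap();
        println!("thread B sees {:?}", current_dir().unwrap());
    });
    a.join().unwrap();
    b.join().unwrap();
}
```

Putting the cwd-manipulating test in its own module (hence its own test binary) sidesteps the race entirely.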
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -29,7 +29,7 @@ fn check_project_include_defaults() {
        filters => TEST_FILTERS.to_vec()
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r"
            .args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
@@ -41,7 +41,7 @@ fn check_project_include_defaults() {
        ----- stderr -----
        warning: The top-level linter settings are deprecated in favour of their counterparts in the `lint` section. Please update the following options in `nested-project/pyproject.toml`:
          - 'select' -> 'lint.select'
        ");
        "###);
    });
}

@@ -53,14 +53,14 @@ fn check_project_respects_direct_paths() {
        filters => TEST_FILTERS.to_vec()
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(["check", "--show-files", "b.py"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r"
            .args(["check", "--show-files", "b.py"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        [BASEPATH]/include-test/b.py

        ----- stderr -----
        ");
        "###);
    });
}

@@ -72,14 +72,14 @@ fn check_project_respects_subdirectory_includes() {
        filters => TEST_FILTERS.to_vec()
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(["check", "--show-files", "subdirectory"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r"
            .args(["check", "--show-files", "subdirectory"]).current_dir(Path::new("./resources/test/fixtures/include-test")), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        [BASEPATH]/include-test/subdirectory/c.py

        ----- stderr -----
        ");
        "###);
    });
}

@@ -91,13 +91,13 @@ fn check_project_from_project_subdirectory_respects_includes() {
        filters => TEST_FILTERS.to_vec()
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test/subdirectory")), @r"
            .args(["check", "--show-files"]).current_dir(Path::new("./resources/test/fixtures/include-test/subdirectory")), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        [BASEPATH]/include-test/subdirectory/c.py

        ----- stderr -----
        ");
        "###);
    });
}

@@ -1,57 +1,33 @@
use anyhow::Context;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use std::path::Path;
use std::process::Command;
use tempfile::TempDir;

const BIN_NAME: &str = "ruff";

#[test]
fn display_default_settings() -> anyhow::Result<()> {
    let tempdir = TempDir::new().context("Failed to create temp directory.")?;
fn display_default_settings() {
    // Navigate from the crate directory to the workspace root.
    let base_path = Path::new(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .parent()
        .unwrap();
    let base_path = base_path.to_string_lossy();

    // Tempdir paths on macOS are symlinks, which doesn't play nicely with
    // our snapshot filtering.
    let project_dir = tempdir
        .path()
        .canonicalize()
        .context("Failed to canonicalize tempdir path.")?;
    // Escape the backslashes for the regex.
    let base_path = regex::escape(&base_path);

    std::fs::write(
        project_dir.join("pyproject.toml"),
        r#"
[project]
name = "ruff"
version = "0.9.2"
requires-python = ">=3.7"
    #[cfg(not(target_os = "windows"))]
    let test_filters = &[(base_path.as_ref(), "[BASEPATH]")];

[tool.ruff]
line-length = 100
    #[cfg(target_os = "windows")]
    let test_filters = &[
        (base_path.as_ref(), "[BASEPATH]"),
        (r#"\\+(\w\w|\s|\.|")"#, "/$1"),
    ];

[tool.ruff.lint]
ignore = [
    # Conflicts with the formatter
    "COM812", "ISC001"
]

"#,
    )?;

    std::fs::write(project_dir.join("test.py"), r#"print("Hello")"#)
        .context("Failed to write test.py.")?;

    insta::with_settings!({filters => vec![
        (&*tempdir_filter(&project_dir), "<temp_dir>/"),
        (r#"\\(\w\w|\s|\.|")"#, "/$1"),
    ]}, {
    insta::with_settings!({ filters => test_filters.to_vec() }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(["check", "--show-settings", "test.py"])
            .current_dir(project_dir));
            .args(["check", "--show-settings", "unformatted.py"]).current_dir(Path::new("./resources/test/fixtures")));
    });

    Ok(())
}

fn tempdir_filter(project_dir: &Path) -> String {
    format!(r#"{}\\?/?"#, regex::escape(project_dir.to_str().unwrap()))
}
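
The Windows-only filter above is the subtle part of this change. A minimal sketch — not part of this diff, assuming the `regex` crate, with a hypothetical sample path — of what the `\\+(\w\w|\s|\.|")` pattern with the `/$1` replacement does: it rewrites runs of escaped backslashes in debug-printed paths to forward slashes, so snapshots match across platforms.

```rust
fn main() {
    // Same pattern as the Windows branch of `test_filters` above.
    let re = regex::Regex::new(r#"\\+(\w\w|\s|\.|")"#).unwrap();
    // Debug-printed Windows paths carry doubled backslashes.
    let windows_path = r"C:\\Users\\dev\\project\\test.py";
    // Each backslash run plus its following character pair becomes "/<pair>".
    assert_eq!(
        re.replace_all(windows_path, "/$1"),
        "C:/Users/dev/project/test.py"
    );
}
```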
@@ -0,0 +1,71 @@
---
source: crates/ruff/tests/integration_test.rs
info:
  program: ruff
  args:
    - "--explain"
    - F401
---
success: true
exit_code: 0
----- stdout -----
# unused-import (F401)

Derived from the **Pyflakes** linter.

Fix is sometimes available.

## What it does
Checks for unused imports.

## Why is this bad?
Unused imports add a performance overhead at runtime, and risk creating
import cycles. They also increase the cognitive load of reading the code.

If an import statement is used to check for the availability or existence
of a module, consider using `importlib.util.find_spec` instead.

If an import statement is used to re-export a symbol as part of a module's
public interface, consider using a "redundant" import alias, which
instructs Ruff (and other tools) to respect the re-export, and avoid
marking it as unused, as in:

```python
from module import member as member
```

## Example
```python
import numpy as np  # unused import


def area(radius):
    return 3.14 * radius**2
```

Use instead:
```python
def area(radius):
    return 3.14 * radius**2
```

To check the availability of a module, use `importlib.util.find_spec`:
```python
from importlib.util import find_spec

if find_spec("numpy") is not None:
    print("numpy is installed")
else:
    print("numpy is not installed")
```

## Options
- `lint.ignore-init-module-imports`

## References
- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)
- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)
- [Typing documentation: interface conventions](https://typing.readthedocs.io/en/latest/source/libraries.html#library-interface-public-and-private-symbols)

----- stderr -----
@@ -1,100 +0,0 @@
---
source: crates/ruff/tests/integration_test.rs
info:
  program: ruff
  args:
    - rule
    - F401
---
success: true
exit_code: 0
----- stdout -----
# unused-import (F401)

Derived from the **Pyflakes** linter.

Fix is sometimes available.

## What it does
Checks for unused imports.

## Why is this bad?
Unused imports add a performance overhead at runtime, and risk creating
import cycles. They also increase the cognitive load of reading the code.

If an import statement is used to check for the availability or existence
of a module, consider using `importlib.util.find_spec` instead.

If an import statement is used to re-export a symbol as part of a module's
public interface, consider using a "redundant" import alias, which
instructs Ruff (and other tools) to respect the re-export, and avoid
marking it as unused, as in:

```python
from module import member as member
```

Alternatively, you can use `__all__` to declare a symbol as part of the module's
interface, as in:

```python
# __init__.py
import some_module

__all__ = ["some_module"]
```

## Fix safety

Fixes to remove unused imports are safe, except in `__init__.py` files.

Applying fixes to `__init__.py` files is currently in preview. The fix offered depends on the
type of the unused import. Ruff will suggest a safe fix to export first-party imports with
either a redundant alias or, if already present in the file, an `__all__` entry. If multiple
`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix
to remove third-party and standard library imports -- the fix is unsafe because the module's
interface changes.

## Example

```python
import numpy as np  # unused import


def area(radius):
    return 3.14 * radius**2
```

Use instead:

```python
def area(radius):
    return 3.14 * radius**2
```

To check the availability of a module, use `importlib.util.find_spec`:

```python
from importlib.util import find_spec

if find_spec("numpy") is not None:
    print("numpy is installed")
else:
    print("numpy is not installed")
```

## Preview
When [preview](https://docs.astral.sh/ruff/preview/) is enabled,
the criterion for determining whether an import is first-party
is stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.

## Options
- `lint.ignore-init-module-imports`
- `lint.pyflakes.allowed-unused-imports`

## References
- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)
- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)
- [Typing documentation: interface conventions](https://typing.python.org/en/latest/source/libraries.html#library-interface-public-and-private-symbols)

----- stderr -----
@@ -1,31 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - concise
    - "--config"
    - "lint.isort.required-imports = [\"import pandas\"]"
    - "--select"
    - "I002,ICN001,F401"
    - "--stdin-filename"
    - test.py
    - "--unsafe-fixes"
    - "--fix"
    - "-"
  stdin: "1"
snapshot_kind: text
---
success: false
exit_code: 2
----- stdout -----

----- stderr -----
ruff failed
  Cause: Required import specified in `lint.isort.required-imports` (I002) conflicts with the required import alias specified in either `lint.flake8-import-conventions.aliases` or `lint.flake8-import-conventions.extend-aliases` (ICN001):
  - `pandas` -> `pd`

Help: Remove the required import or alias from your configuration.
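
For context, the invocation behind a snapshot like this can be read straight off its `info.args` block. A minimal sketch — not part of this diff, mirroring the `insta_cmd` pattern used by the tests elsewhere in this diff, with a hypothetical test name:

```rust
use std::process::Command;

use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};

#[test]
fn required_import_conflicts_with_required_alias() {
    // Arguments copied from the snapshot's `info.args` above.
    let mut cmd = Command::new(get_cargo_bin("ruff"));
    cmd.args([
        "check",
        "--no-cache",
        "--output-format",
        "concise",
        "--config",
        r#"lint.isort.required-imports = ["import pandas"]"#,
        "--select",
        "I002,ICN001,F401",
        "--stdin-filename",
        "test.py",
        "--unsafe-fixes",
        "--fix",
        "-",
    ]);
    assert_cmd_snapshot!(cmd.pass_stdin("1"));
}
```

Selecting I002 with a bare `import pandas` while ICN001's default aliases require `import pandas as pd` is what makes the run fail with exit code 2.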
@@ -1,27 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - concise
    - "--config"
    - "lint.isort.required-imports = [\"import pandas as pd\"]"
    - "--select"
    - "I002,ICN001,F401"
    - "--stdin-filename"
    - test.py
    - "--unsafe-fixes"
    - "--fix"
    - "-"
  stdin: "1"
---
success: true
exit_code: 0
----- stdout -----
import pandas as pd
1
----- stderr -----
Found 1 error (1 fixed, 0 remaining).
@@ -1,22 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - concise
    - "--isolated"
    - "--select"
    - F401
    - "-"
  stdin: "\n# ruff: noqa: AAA101\nprint(\"Hello world!\")\n"
---
success: true
exit_code: 0
----- stdout -----
All checks passed!

----- stderr -----
warning: Invalid rule code provided to `# ruff: noqa` at -:2: AAA101
@@ -5,19 +5,19 @@ info:
  args:
    - check
    - "--show-settings"
    - test.py
    - unformatted.py
---
success: true
exit_code: 0
----- stdout -----
Resolved settings for: "<temp_dir>/test.py"
Settings path: "<temp_dir>/pyproject.toml"
Resolved settings for: "[BASEPATH]/crates/ruff/resources/test/fixtures/unformatted.py"
Settings path: "[BASEPATH]/pyproject.toml"

# General Settings
cache_dir = "<temp_dir>/.ruff_cache"
cache_dir = "[BASEPATH]/.ruff_cache"
fix = false
fix_only = false
output_format = full
output_format = concise
show_fixes = false
unsafe_fixes = hint

@@ -44,176 +44,178 @@ file_resolver.exclude = [
    "__pypackages__",
    "_build",
    "buck-out",
    "build",
    "dist",
    "node_modules",
    "site-packages",
    "venv",
]
file_resolver.extend_exclude = []
file_resolver.extend_exclude = [
    "crates/ruff_linter/resources/",
    "crates/ruff_python_formatter/resources/",
]
file_resolver.force_exclude = false
file_resolver.include = [
    "*.py",
    "*.pyi",
    "*.ipynb",
    "**/pyproject.toml",
]
file_resolver.extend_include = []
file_resolver.respect_gitignore = true
file_resolver.project_root = "<temp_dir>/"
file_resolver.project_root = "[BASEPATH]"

# Linter Settings
linter.exclude = []
linter.project_root = "<temp_dir>/"
linter.project_root = "[BASEPATH]"
linter.rules.enabled = [
    multiple-imports-on-one-line (E401),
    module-import-not-at-top-of-file (E402),
    multiple-statements-on-one-line-colon (E701),
    multiple-statements-on-one-line-semicolon (E702),
    useless-semicolon (E703),
    none-comparison (E711),
    true-false-comparison (E712),
    not-in-test (E713),
    not-is-test (E714),
    type-comparison (E721),
    bare-except (E722),
    lambda-assignment (E731),
    ambiguous-variable-name (E741),
    ambiguous-class-name (E742),
    ambiguous-function-name (E743),
    io-error (E902),
    unused-import (F401),
    import-shadowed-by-loop-var (F402),
    undefined-local-with-import-star (F403),
    late-future-import (F404),
    undefined-local-with-import-star-usage (F405),
    undefined-local-with-nested-import-star-usage (F406),
    future-feature-not-defined (F407),
    percent-format-invalid-format (F501),
    percent-format-expected-mapping (F502),
    percent-format-expected-sequence (F503),
    percent-format-extra-named-arguments (F504),
    percent-format-missing-argument (F505),
    percent-format-mixed-positional-and-named (F506),
    percent-format-positional-count-mismatch (F507),
    percent-format-star-requires-sequence (F508),
    percent-format-unsupported-format-character (F509),
    string-dot-format-invalid-format (F521),
    string-dot-format-extra-named-arguments (F522),
    string-dot-format-extra-positional-arguments (F523),
    string-dot-format-missing-arguments (F524),
    string-dot-format-mixing-automatic (F525),
    f-string-missing-placeholders (F541),
    multi-value-repeated-key-literal (F601),
    multi-value-repeated-key-variable (F602),
    expressions-in-star-assignment (F621),
    multiple-starred-expressions (F622),
    assert-tuple (F631),
    is-literal (F632),
    invalid-print-syntax (F633),
    if-tuple (F634),
    break-outside-loop (F701),
    continue-outside-loop (F702),
    yield-outside-function (F704),
    return-outside-function (F706),
    default-except-not-last (F707),
    forward-annotation-syntax-error (F722),
    redefined-while-unused (F811),
    undefined-name (F821),
    undefined-export (F822),
    undefined-local (F823),
    unused-variable (F841),
    unused-annotation (F842),
    raise-not-implemented (F901),
    MultipleImportsOnOneLine,
    ModuleImportNotAtTopOfFile,
    MultipleStatementsOnOneLineColon,
    MultipleStatementsOnOneLineSemicolon,
    UselessSemicolon,
    NoneComparison,
    TrueFalseComparison,
    NotInTest,
    NotIsTest,
    TypeComparison,
    BareExcept,
    LambdaAssignment,
    AmbiguousVariableName,
    AmbiguousClassName,
    AmbiguousFunctionName,
    IOError,
    SyntaxError,
    UnusedImport,
    ImportShadowedByLoopVar,
    UndefinedLocalWithImportStar,
    LateFutureImport,
    UndefinedLocalWithImportStarUsage,
    UndefinedLocalWithNestedImportStarUsage,
    FutureFeatureNotDefined,
    PercentFormatInvalidFormat,
    PercentFormatExpectedMapping,
    PercentFormatExpectedSequence,
    PercentFormatExtraNamedArguments,
    PercentFormatMissingArgument,
    PercentFormatMixedPositionalAndNamed,
    PercentFormatPositionalCountMismatch,
    PercentFormatStarRequiresSequence,
    PercentFormatUnsupportedFormatCharacter,
    StringDotFormatInvalidFormat,
    StringDotFormatExtraNamedArguments,
    StringDotFormatExtraPositionalArguments,
    StringDotFormatMissingArguments,
    StringDotFormatMixingAutomatic,
    FStringMissingPlaceholders,
    MultiValueRepeatedKeyLiteral,
    MultiValueRepeatedKeyVariable,
    ExpressionsInStarAssignment,
    MultipleStarredExpressions,
    AssertTuple,
    IsLiteral,
    InvalidPrintSyntax,
    IfTuple,
    BreakOutsideLoop,
    ContinueOutsideLoop,
    YieldOutsideFunction,
    ReturnOutsideFunction,
    DefaultExceptNotLast,
    ForwardAnnotationSyntaxError,
    RedefinedWhileUnused,
    UndefinedName,
    UndefinedExport,
    UndefinedLocal,
    UnusedVariable,
    UnusedAnnotation,
    RaiseNotImplemented,
]
linter.rules.should_fix = [
    multiple-imports-on-one-line (E401),
    module-import-not-at-top-of-file (E402),
    multiple-statements-on-one-line-colon (E701),
    multiple-statements-on-one-line-semicolon (E702),
    useless-semicolon (E703),
    none-comparison (E711),
    true-false-comparison (E712),
    not-in-test (E713),
    not-is-test (E714),
    type-comparison (E721),
    bare-except (E722),
    lambda-assignment (E731),
    ambiguous-variable-name (E741),
    ambiguous-class-name (E742),
    ambiguous-function-name (E743),
    io-error (E902),
    unused-import (F401),
    import-shadowed-by-loop-var (F402),
    undefined-local-with-import-star (F403),
    late-future-import (F404),
    undefined-local-with-import-star-usage (F405),
    undefined-local-with-nested-import-star-usage (F406),
    future-feature-not-defined (F407),
    percent-format-invalid-format (F501),
    percent-format-expected-mapping (F502),
    percent-format-expected-sequence (F503),
    percent-format-extra-named-arguments (F504),
    percent-format-missing-argument (F505),
    percent-format-mixed-positional-and-named (F506),
    percent-format-positional-count-mismatch (F507),
    percent-format-star-requires-sequence (F508),
    percent-format-unsupported-format-character (F509),
    string-dot-format-invalid-format (F521),
    string-dot-format-extra-named-arguments (F522),
    string-dot-format-extra-positional-arguments (F523),
    string-dot-format-missing-arguments (F524),
    string-dot-format-mixing-automatic (F525),
    f-string-missing-placeholders (F541),
    multi-value-repeated-key-literal (F601),
    multi-value-repeated-key-variable (F602),
    expressions-in-star-assignment (F621),
    multiple-starred-expressions (F622),
    assert-tuple (F631),
    is-literal (F632),
    invalid-print-syntax (F633),
    if-tuple (F634),
    break-outside-loop (F701),
    continue-outside-loop (F702),
    yield-outside-function (F704),
    return-outside-function (F706),
    default-except-not-last (F707),
    forward-annotation-syntax-error (F722),
    redefined-while-unused (F811),
    undefined-name (F821),
    undefined-export (F822),
    undefined-local (F823),
    unused-variable (F841),
    unused-annotation (F842),
    raise-not-implemented (F901),
    MultipleImportsOnOneLine,
    ModuleImportNotAtTopOfFile,
    MultipleStatementsOnOneLineColon,
    MultipleStatementsOnOneLineSemicolon,
    UselessSemicolon,
    NoneComparison,
    TrueFalseComparison,
    NotInTest,
    NotIsTest,
    TypeComparison,
    BareExcept,
    LambdaAssignment,
    AmbiguousVariableName,
    AmbiguousClassName,
    AmbiguousFunctionName,
    IOError,
    SyntaxError,
    UnusedImport,
    ImportShadowedByLoopVar,
    UndefinedLocalWithImportStar,
    LateFutureImport,
    UndefinedLocalWithImportStarUsage,
    UndefinedLocalWithNestedImportStarUsage,
    FutureFeatureNotDefined,
    PercentFormatInvalidFormat,
    PercentFormatExpectedMapping,
    PercentFormatExpectedSequence,
    PercentFormatExtraNamedArguments,
    PercentFormatMissingArgument,
    PercentFormatMixedPositionalAndNamed,
    PercentFormatPositionalCountMismatch,
    PercentFormatStarRequiresSequence,
    PercentFormatUnsupportedFormatCharacter,
    StringDotFormatInvalidFormat,
    StringDotFormatExtraNamedArguments,
    StringDotFormatExtraPositionalArguments,
    StringDotFormatMissingArguments,
    StringDotFormatMixingAutomatic,
    FStringMissingPlaceholders,
    MultiValueRepeatedKeyLiteral,
    MultiValueRepeatedKeyVariable,
    ExpressionsInStarAssignment,
    MultipleStarredExpressions,
    AssertTuple,
    IsLiteral,
    InvalidPrintSyntax,
    IfTuple,
    BreakOutsideLoop,
    ContinueOutsideLoop,
    YieldOutsideFunction,
    ReturnOutsideFunction,
    DefaultExceptNotLast,
    ForwardAnnotationSyntaxError,
    RedefinedWhileUnused,
    UndefinedName,
    UndefinedExport,
    UndefinedLocal,
    UnusedVariable,
    UnusedAnnotation,
    RaiseNotImplemented,
]
linter.per_file_ignores = {}
linter.safety_table.forced_safe = []
linter.safety_table.forced_unsafe = []
linter.unresolved_target_version = 3.7
linter.per_file_target_version = {}
linter.target_version = Py37
linter.preview = disabled
linter.explicit_preview_rules = false
linter.extension = ExtensionMapping({})
linter.extension.mapping = {}
linter.allowed_confusables = []
linter.builtins = []
linter.dummy_variable_rgx = ^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$
linter.external = []
linter.ignore_init_module_imports = true
linter.ignore_init_module_imports = false
linter.logger_objects = []
linter.namespace_packages = []
linter.src = [
    "<temp_dir>/",
    "<temp_dir>/src",
    "[BASEPATH]",
]
linter.tab_size = 4
linter.line_length = 100
linter.line_length = 88
linter.task_tags = [
    TODO,
    FIXME,
    XXX,
]
linter.typing_modules = []
linter.typing_extensions = true

# Linter Plugins
linter.flake8_annotations.mypy_init_return = false
@@ -227,14 +229,10 @@ linter.flake8_bandit.hardcoded_tmp_directory = [
    /dev/shm,
]
linter.flake8_bandit.check_typed_exception = false
linter.flake8_bandit.extend_markup_names = []
linter.flake8_bandit.allowed_markup_calls = []
linter.flake8_bugbear.extend_immutable_calls = []
linter.flake8_builtins.allowed_modules = []
linter.flake8_builtins.ignorelist = []
linter.flake8_builtins.strict_checking = false
linter.flake8_builtins.builtins_ignorelist = []
linter.flake8_comprehensions.allow_dict_calls_with_keyword_arguments = false
linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|,\s)\d{4})*
linter.flake8_copyright.notice_rgx = (?i)Copyright\s+(\(C\)\s+)?\d{4}(-\d{4})*
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
@@ -244,27 +242,10 @@ linter.flake8_gettext.functions_names = [
    ngettext,
]
linter.flake8_implicit_str_concat.allow_multiline = true
linter.flake8_import_conventions.aliases = {
    altair = alt,
    holoviews = hv,
    matplotlib = mpl,
    matplotlib.pyplot = plt,
    networkx = nx,
    numpy = np,
    numpy.typing = npt,
    pandas = pd,
    panel = pn,
    plotly.express = px,
    polars = pl,
    pyarrow = pa,
    seaborn = sns,
    tensorflow = tf,
    tkinter = tk,
    xml.etree.ElementTree = ET,
}
linter.flake8_import_conventions.aliases = {"matplotlib": "mpl", "matplotlib.pyplot": "plt", "pandas": "pd", "seaborn": "sns", "tensorflow": "tf", "networkx": "nx", "plotly.express": "px", "polars": "pl", "numpy": "np", "panel": "pn", "pyarrow": "pa", "altair": "alt", "tkinter": "tk", "holoviews": "hv"}
linter.flake8_import_conventions.banned_aliases = {}
linter.flake8_import_conventions.banned_from = []
linter.flake8_pytest_style.fixture_parentheses = false
linter.flake8_pytest_style.fixture_parentheses = true
linter.flake8_pytest_style.parametrize_names_type = tuple
linter.flake8_pytest_style.parametrize_values_type = list
linter.flake8_pytest_style.parametrize_values_row_type = tuple
@@ -278,7 +259,7 @@ linter.flake8_pytest_style.raises_require_match_for = [
    socket.error,
]
linter.flake8_pytest_style.raises_extend_require_match_for = []
linter.flake8_pytest_style.mark_parentheses = false
linter.flake8_pytest_style.mark_parentheses = true
linter.flake8_quotes.inline_quotes = double
linter.flake8_quotes.multiline_quotes = double
linter.flake8_quotes.docstring_quotes = double
@@ -331,7 +312,6 @@ linter.isort.section_order = [
    known { type = first_party },
    known { type = local_folder },
]
linter.isort.default_section = known { type = third_party }
linter.isort.no_sections = false
linter.isort.from_first = false
linter.isort.length_sort = false
@@ -353,11 +333,10 @@ linter.pep8_naming.ignore_names = [
]
linter.pep8_naming.classmethod_decorators = []
linter.pep8_naming.staticmethod_decorators = []
linter.pycodestyle.max_line_length = 100
linter.pycodestyle.max_line_length = 88
linter.pycodestyle.max_doc_length = none
linter.pycodestyle.ignore_overlong_task_comments = false
linter.pyflakes.extend_generics = []
linter.pyflakes.allowed_unused_imports = []
linter.pylint.allow_magic_value_types = [
    str,
    bytes,
@@ -372,14 +351,12 @@ linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false

# Formatter Settings
formatter.exclude = []
formatter.unresolved_target_version = 3.7
formatter.per_file_target_version = {}
formatter.target_version = Py37
formatter.preview = disabled
formatter.line_width = 100
formatter.line_width = 88
formatter.line_ending = auto
formatter.indent_style = space
formatter.indent_width = 4
@@ -388,12 +365,5 @@ formatter.magic_trailing_comma = respect
formatter.docstring_code_format = disabled
formatter.docstring_code_line_width = dynamic

# Analyze Settings
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.7
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

----- stderr -----

@@ -1,105 +0,0 @@
//! Tests for the --version command
use std::fs;
use std::process::Command;

use anyhow::Result;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use tempfile::TempDir;

const BIN_NAME: &str = "ruff";
const VERSION_FILTER: [(&str, &str); 1] = [(
    r"\d+\.\d+\.\d+(\+\d+)?( \(\w{9} \d\d\d\d-\d\d-\d\d\))?",
    "[VERSION]",
)];
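
`VERSION_FILTER` normalizes the version string before snapshot comparison. A minimal sketch — not part of this diff, assuming the `regex` crate, with hypothetical sample values — of what the pattern accepts: a bare version, an optional `+N` commit count, and an optional `(shorthash date)` suffix.

```rust
fn main() {
    // Same pattern as VERSION_FILTER above.
    let re = regex::Regex::new(
        r"\d+\.\d+\.\d+(\+\d+)?( \(\w{9} \d\d\d\d-\d\d-\d\d\))?",
    )
    .unwrap();
    for sample in [
        "0.12.0",
        "0.12.0+13",
        "0.12.0+13 (dc24d01b2 2024-05-28)",
    ] {
        assert!(re.is_match(sample));
        // Every variant collapses to the same placeholder in snapshots.
        println!("{} -> {}", sample, re.replace_all(sample, "[VERSION]"));
    }
}
```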

#[test]
fn version_basics() {
    insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
        assert_cmd_snapshot!(
            Command::new(get_cargo_bin(BIN_NAME)).arg("version"), @r"
        success: true
        exit_code: 0
        ----- stdout -----
        ruff [VERSION]

        ----- stderr -----
        "
        );
    });
}

/// `--config` is a global option,
/// so it's allowed to pass --config to subcommands such as `version`
/// -- the flag is simply ignored
#[test]
fn config_option_allowed_but_ignored() -> Result<()> {
    let tempdir = TempDir::new()?;
    let ruff_dot_toml = tempdir.path().join("ruff.toml");
    fs::File::create(&ruff_dot_toml)?;
    insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
        assert_cmd_snapshot!(
            Command::new(get_cargo_bin(BIN_NAME))
                .arg("version")
                .arg("--config")
                .arg(&ruff_dot_toml)
                .args(["--config", "lint.isort.extra-standard-library = ['foo', 'bar']"]), @r"
        success: true
        exit_code: 0
        ----- stdout -----
        ruff [VERSION]

        ----- stderr -----
        "
        );
    });
    Ok(())
}
#[test]
fn config_option_ignored_but_validated() {
    insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
        assert_cmd_snapshot!(
            Command::new(get_cargo_bin(BIN_NAME))
                .arg("version")
                .args(["--config", "foo = bar"]), @r#"
        success: false
        exit_code: 2
        ----- stdout -----

        ----- stderr -----
        error: invalid value 'foo = bar' for '--config <CONFIG_OPTION>'

        tip: A `--config` flag must either be a path to a `.toml` configuration file
             or a TOML `<KEY> = <VALUE>` pair overriding a specific configuration
             option

        The supplied argument is not valid TOML:

        TOML parse error at line 1, column 7
          |
        1 | foo = bar
          |       ^
        invalid string
        expected `"`, `'`

        For more information, try '--help'.
        "#
        );
    });
}

/// `--isolated` is also a global option,
#[test]
fn isolated_option_allowed() {
    insta::with_settings!({filters => VERSION_FILTER.to_vec()}, {
        assert_cmd_snapshot!(
            Command::new(get_cargo_bin(BIN_NAME)).arg("version").arg("--isolated"), @r"
        success: true
        exit_code: 0
        ----- stdout -----
        ruff [VERSION]

        ----- stderr -----
        "
        );
    });
}
Some files were not shown because too many files have changed in this diff