Compare commits
5 Commits
support-py...jane/docs/

| Author | SHA1 | Date |
|---|---|---|
|  | 7f9c31720b |  |
|  | 6034a4e0ba |  |
|  | e7a7da9ae9 |  |
|  | 2fda0038d8 |  |
|  | 800ff0a693 |  |
@@ -8,7 +8,3 @@ benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
# See: https://github.com/astral-sh/ruff/issues/11503
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]

[target.'wasm32-unknown-unknown']
# See https://docs.rs/getrandom/latest/getrandom/#webassembly-support
rustflags = ["--cfg", 'getrandom_backend="wasm_js"']
@@ -6,10 +6,3 @@ failure-output = "immediate-final"
fail-fast = false

status-level = "skip"

# Mark tests that take longer than 1s as slow.
# Terminate after 60s as a stop-gap measure to terminate on deadlock.
slow-timeout = { period = "1s", terminate-after = 60 }

# Show slow jobs in the final summary
final-status-level = "slow"
@@ -20,7 +20,7 @@
  "extensions": [
    "ms-python.python",
    "rust-lang.rust-analyzer",
    "fill-labs.dependi",
    "serayuzgur.crates",
    "tamasfe.even-better-toml",
    "Swellaby.vscode-rust-test-adapter",
    "charliermarsh.ruff"
@@ -17,7 +17,4 @@ indent_size = 4
trim_trailing_whitespace = false

[*.md]
max_line_length = 100

[*.toml]
indent_size = 4
max_line_length = 100
.gitattributes: 4 changes (vendored)
@@ -14,7 +14,5 @@ crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text

crates/ruff_python_parser/resources/inline linguist-generated=true

ruff.schema.json -diff linguist-generated=true text=auto eol=lf
crates/ruff_python_ast/src/generated.rs -diff linguist-generated=true text=auto eol=lf
crates/ruff_python_formatter/src/generated.rs -diff linguist-generated=true text=auto eol=lf
ruff.schema.json linguist-generated=true text=auto eol=lf
*.md.snap linguist-language=Markdown
.github/CODEOWNERS: 10 changes (vendored)
@@ -9,16 +9,12 @@
/crates/ruff_formatter/ @MichaReiser
/crates/ruff_python_formatter/ @MichaReiser
/crates/ruff_python_parser/ @MichaReiser @dhruvmanila
/crates/ruff_annotate_snippets/ @BurntSushi

# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood

# Script for fuzzing the parser/red-knot etc.
/python/py-fuzzer/ @AlexWaygood
# Script for fuzzing the parser
/scripts/fuzz-parser/ @AlexWaygood

# red-knot
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/scripts/knot_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/red_knot_python_semantic @carljm @AlexWaygood @sharkdp @dcreager
/crates/red_knot/ @carljm @MichaReiser
.github/ISSUE_TEMPLATE.md: 12 changes (vendored, new file)
@@ -0,0 +1,12 @@
<!--
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.

If you're filing a bug report, please consider including the following information:

* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
  e.g. "RUF001", "unused variable", "Jupyter notebook"
* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
* The current Ruff version (`ruff --version`).
-->
.github/ISSUE_TEMPLATE/1_bug_report.yaml: 31 changes (vendored)
@@ -1,31 +0,0 @@
name: Bug report
description: Report an error or unexpected behavior
body:
  - type: markdown
    attributes:
      value: |
        Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.

        **Before reporting, please make sure to search through [existing issues](https://github.com/astral-sh/ruff/issues?q=is:issue+is:open+label:bug) (including [closed](https://github.com/astral-sh/ruff/issues?q=is:issue%20state:closed%20label:bug)).**

  - type: textarea
    attributes:
      label: Summary
      description: |
        A clear and concise description of the bug, including a minimal reproducible example.

        Be sure to include the command you invoked (e.g., `ruff check /path/to/file.py --fix`), ideally including the `--isolated` flag and
        the current Ruff settings (e.g., relevant sections from your `pyproject.toml`).

        If possible, try to include the [playground](https://play.ruff.rs) link that reproduces this issue.

    validations:
      required: true

  - type: input
    attributes:
      label: Version
      description: What version of ruff are you using? (see `ruff version`)
      placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
    validations:
      required: false
.github/ISSUE_TEMPLATE/2_rule_request.yaml: 10 changes (vendored)
@@ -1,10 +0,0 @@
name: Rule request
description: Anything related to lint rules (proposing new rules, changes to existing rules, auto-fixes, etc.)
body:
  - type: textarea
    attributes:
      label: Summary
      description: |
        A clear and concise description of the relevant request. If applicable, please describe the current behavior as well.
    validations:
      required: true
.github/ISSUE_TEMPLATE/3_question.yaml: 18 changes (vendored)
@@ -1,18 +0,0 @@
name: Question
description: Ask a question about Ruff
labels: ["question"]
body:
  - type: textarea
    attributes:
      label: Question
      description: Describe your question in detail.
    validations:
      required: true

  - type: input
    attributes:
      label: Version
      description: What version of ruff are you using? (see `ruff version`)
      placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
    validations:
      required: false
.github/ISSUE_TEMPLATE/config.yml: 8 changes (vendored)
@@ -1,8 +0,0 @@
blank_issues_enabled: true
contact_links:
  - name: Documentation
    url: https://docs.astral.sh/ruff
    about: Please consult the documentation before creating an issue.
  - name: Community
    url: https://discord.com/invite/astral-sh
    about: Join our Discord community to ask questions and collaborate.
.github/actionlint.yaml: 10 changes (vendored)
@@ -1,10 +0,0 @@
# Configuration for the actionlint tool, which we run via pre-commit
# to verify the correctness of the syntax in our GitHub Actions workflows.

self-hosted-runner:
  # Various runners we use that aren't recognized out-of-the-box by actionlint:
  labels:
    - depot-ubuntu-latest-8
    - depot-ubuntu-22.04-16
    - github-windows-2025-x86_64-8
    - github-windows-2025-x86_64-16
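The removed configuration above notes that actionlint is run via pre-commit. For reference only, a minimal sketch of what such a pre-commit hook entry can look like; the pinned revision below is an illustrative assumption, not taken from this diff:

```yaml
# .pre-commit-config.yaml (sketch): run actionlint on GitHub Actions workflow files.
repos:
  - repo: https://github.com/rhysd/actionlint
    rev: v1.7.7  # assumed pin; use the revision actually pinned in the repository
    hooks:
      - id: actionlint
```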
.github/renovate.json5: 55 changes (vendored)
@@ -8,55 +8,27 @@
  semanticCommits: "disabled",
  separateMajorMinor: false,
  prHourlyLimit: 10,
  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
  cargo: {
    // See https://docs.renovatebot.com/configuration-options/#rangestrategy
    rangeStrategy: "update-lockfile",
  },
  pep621: {
    // The default for this package manager is to only search for `pyproject.toml` files
    // found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
    fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
  },
  pip_requirements: {
    // The default for this package manager is to run on all requirements.txt files:
    // https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
    // `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
    // outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
    // which takes a regex string, `ignorePaths` takes a glob string, so we have to use
    // a "negative glob pattern".
    // See:
    // - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
    // - https://docs.renovatebot.com/configuration-options/#ignorepaths
    // - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
    ignorePaths: ["!docs/requirements*.txt"]
  },
  npm: {
    // The default for this package manager is to only search for `package.json` files
    // found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
    fileMatch: ["^playground/.*package\\.json$"],
  },
  "pre-commit": {
    enabled: true,
  },
  packageRules: [
    // Pin GitHub Actions to immutable SHAs.
    {
      matchDepTypes: ["action"],
      pinDigests: true,
    },
    // Annotate GitHub Actions SHAs with a SemVer version.
    {
      extends: ["helpers:pinGitHubActionDigests"],
      extractVersion: "^(?<version>v?\\d+\\.\\d+\\.\\d+)$",
      versioning: "regex:^v?(?<major>\\d+)(\\.(?<minor>\\d+)\\.(?<patch>\\d+))?$",
    },
    {
      // Group upload/download artifact updates, the versions are dependent
      groupName: "Artifact GitHub Actions dependencies",
      matchManagers: ["github-actions"],
      matchDatasources: ["gitea-tags", "github-tags"],
      matchPackageNames: ["actions/.*-artifact"],
      matchPackagePatterns: ["actions/.*-artifact"],
      description: "Weekly update of artifact-related GitHub Actions dependencies",
    },
    {
@@ -72,18 +44,10 @@
    {
      // Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
      // See: https://github.com/astral-sh/uv/issues/3642
      matchPackageNames: ["zip"],
      matchPackagePatterns: ["zip"],
      matchManagers: ["cargo"],
      enabled: false,
    },
    {
      // `mkdocs-material` requires a manual update to keep the version in sync
      // with `mkdocs-material-insider`.
      // See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
      matchManagers: ["pip_requirements"],
      matchPackageNames: ["mkdocs-material"],
      enabled: false,
    },
    {
      groupName: "pre-commit dependencies",
      matchManagers: ["pre-commit"],
@@ -98,15 +62,22 @@
    {
      groupName: "Monaco",
      matchManagers: ["npm"],
      matchPackageNames: ["monaco"],
      matchPackagePatterns: ["monaco"],
      description: "Weekly update of the Monaco editor",
    },
    {
      groupName: "strum",
      matchManagers: ["cargo"],
      matchPackageNames: ["strum"],
      matchPackagePatterns: ["strum"],
      description: "Weekly update of strum dependencies",
    }
    },
    {
      groupName: "ESLint",
      matchManagers: ["npm"],
      matchPackageNames: ["eslint"],
      allowedVersions: "<9",
      description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
    },
  ],
  vulnerabilityAlerts: {
    commitMessageSuffix: "",
.github/workflows/build-binaries.yml: 480 changes (vendored)
@@ -1,480 +0,0 @@
|
||||
# Build ruff on all platforms.
|
||||
#
|
||||
# Generates both wheels (for PyPI) and archived binaries (for GitHub releases).
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
|
||||
# artifacts job within `cargo-dist`.
|
||||
name: "Build binaries"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
pull_request:
|
||||
paths:
|
||||
# When we change pyproject.toml, we want to ensure that the maturin builds still work.
|
||||
- pyproject.toml
|
||||
# And when we change this workflow itself...
|
||||
- .github/workflows/build-binaries.yml
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
env:
|
||||
PACKAGE_NAME: ruff
|
||||
MODULE_NAME: ruff
|
||||
PYTHON_VERSION: "3.13"
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
sdist:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build sdist"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
command: sdist
|
||||
args: --out dist
|
||||
- name: "Test sdist"
|
||||
run: |
|
||||
pip install dist/"${PACKAGE_NAME}"-*.tar.gz --force-reinstall
|
||||
"${MODULE_NAME}" --help
|
||||
python -m "${MODULE_NAME}" --help
|
||||
- name: "Upload sdist"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-sdist
|
||||
path: dist
|
||||
|
||||
macos-x86_64:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: macos-14
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - x86_64"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: x86_64
|
||||
args: --release --locked --out dist
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-macos-x86_64
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
TARGET=x86_64-apple-darwin
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-macos-x86_64
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
macos-aarch64:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: macos-14
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: arm64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - aarch64"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: aarch64
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel - aarch64"
|
||||
run: |
|
||||
pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-aarch64-apple-darwin
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
TARGET=aarch64-apple-darwin
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-aarch64-apple-darwin
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
windows:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: windows-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: x86_64-pc-windows-msvc
|
||||
arch: x64
|
||||
- target: i686-pc-windows-msvc
|
||||
arch: x86
|
||||
- target: aarch64-pc-windows-msvc
|
||||
arch: x64
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: ${{ matrix.platform.arch }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
args: --release --locked --out dist
|
||||
env:
|
||||
# aarch64 build fails, see https://github.com/PyO3/maturin/issues/2110
|
||||
XWIN_VERSION: 16
|
||||
- name: "Test wheel"
|
||||
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
|
||||
"${MODULE_NAME}" --help
|
||||
python -m "${MODULE_NAME}" --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
|
||||
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
||||
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.zip
|
||||
*.sha256
|
||||
|
||||
linux:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-gnu
|
||||
- i686-unknown-linux-gnu
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: auto
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
||||
run: |
|
||||
pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
|
||||
"${MODULE_NAME}" --help
|
||||
python -m "${MODULE_NAME}" --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-${{ matrix.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TARGET=${{ matrix.target }}
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-${{ matrix.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
linux-cross:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-gnu
|
||||
arch: aarch64
|
||||
# see https://github.com/astral-sh/ruff/issues/3791
|
||||
# and https://github.com/gnzlbg/jemallocator/issues/170#issuecomment-1503228963
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: armv7-unknown-linux-gnueabihf
|
||||
arch: armv7
|
||||
- target: s390x-unknown-linux-gnu
|
||||
arch: s390x
|
||||
- target: powerpc64le-unknown-linux-gnu
|
||||
arch: ppc64le
|
||||
# see https://github.com/astral-sh/ruff/issues/10073
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: powerpc64-unknown-linux-gnu
|
||||
arch: ppc64
|
||||
# see https://github.com/astral-sh/ruff/issues/10073
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: arm-unknown-linux-musleabihf
|
||||
arch: arm
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: auto
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
args: --release --locked --out dist
|
||||
- uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
|
||||
if: ${{ matrix.platform.arch != 'ppc64' && matrix.platform.arch != 'ppc64le'}}
|
||||
name: Test wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }}
|
||||
distro: ${{ matrix.platform.arch == 'arm' && 'bullseye' || 'ubuntu20.04' }}
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apt-get update
|
||||
apt-get install -y --no-install-recommends python3 python3-pip
|
||||
pip3 install -U pip
|
||||
run: |
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TARGET=${{ matrix.platform.target }}
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
musllinux:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-musl
|
||||
- i686-unknown-linux-musl
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||
uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # v3
|
||||
with:
|
||||
image: alpine:latest
|
||||
options: -v ${{ github.workspace }}:/io -w /io
|
||||
run: |
|
||||
apk add python3
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/${{ env.MODULE_NAME }} --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-${{ matrix.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TARGET=${{ matrix.target }}
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-${{ matrix.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
musllinux-cross:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-musl
|
||||
arch: aarch64
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: armv7-unknown-linux-musleabihf
|
||||
arch: armv7
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
- uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
|
||||
name: Test wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
distro: alpine_latest
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apk add python3
|
||||
run: |
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/${{ env.MODULE_NAME }} --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TARGET=${{ matrix.platform.target }}
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
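The deleted build-binaries.yml above documents itself as a subworkflow of .github/workflows/release.yml, invoked via workflow_call with a `plan` input. As a hedged sketch only (the caller job names and the plan payload are illustrative assumptions, not taken from this diff), such a reusable workflow can be called like this:

```yaml
# Sketch of a caller in release.yml; the `plan` job and its output are hypothetical.
jobs:
  plan:
    runs-on: ubuntu-latest
    outputs:
      val: ${{ steps.emit.outputs.manifest }}
    steps:
      - id: emit
        run: echo 'manifest={}' >> "$GITHUB_OUTPUT"  # placeholder for the cargo-dist plan JSON
  build-binaries:
    needs: plan
    uses: ./.github/workflows/build-binaries.yml
    with:
      plan: ${{ needs.plan.outputs.val }}
```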
.github/workflows/build-docker.yml: 299 changes (vendored)
@@ -1,299 +0,0 @@
|
||||
# Build and publish a Docker image.
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
|
||||
# artifacts job within `cargo-dist`.
|
||||
#
|
||||
# TODO(charlie): Ideally, the publish step would happen as a publish job within `cargo-dist`, but
|
||||
# sharing the built image as an artifact between jobs is challenging.
|
||||
name: "[ruff] Build Docker image"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/build-docker.yml
|
||||
|
||||
env:
|
||||
RUFF_BASE_IMG: ghcr.io/${{ github.repository_owner }}/ruff
|
||||
|
||||
jobs:
|
||||
docker-build:
|
||||
name: Build Docker image (ghcr.io/astral-sh/ruff) for ${{ matrix.platform }}
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux/amd64
|
||||
- linux/arm64
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
|
||||
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
||||
|
||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Check tag consistency
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
env:
|
||||
TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
|
||||
run: |
|
||||
version=$(grep -m 1 "^version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||
if [ "${TAG}" != "${version}" ]; then
|
||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||
echo "${TAG}" >&2
|
||||
echo "${version}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo "Releasing ${version}"
|
||||
fi
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
with:
|
||||
images: ${{ env.RUFF_BASE_IMG }}
|
||||
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
|
||||
tags: |
|
||||
type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
|
||||
- name: Normalize Platform Pair (replace / with -)
|
||||
run: |
|
||||
platform=${{ matrix.platform }}
|
||||
echo "PLATFORM_TUPLE=${platform//\//-}" >> "$GITHUB_ENV"
|
||||
|
||||
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
|
||||
- name: Build and push by digest
|
||||
id: build
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6
|
||||
with:
|
||||
context: .
|
||||
platforms: ${{ matrix.platform }}
|
||||
cache-from: type=gha,scope=ruff-${{ env.PLATFORM_TUPLE }}
|
||||
cache-to: type=gha,mode=min,scope=ruff-${{ env.PLATFORM_TUPLE }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
outputs: type=image,name=${{ env.RUFF_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
|
||||
- name: Export digests
|
||||
env:
|
||||
digest: ${{ steps.build.outputs.digest }}
|
||||
run: |
|
||||
mkdir -p /tmp/digests
|
||||
touch "/tmp/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digests
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: digests-${{ env.PLATFORM_TUPLE }}
|
||||
path: /tmp/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
docker-publish:
|
||||
name: Publish Docker image (ghcr.io/astral-sh/ruff)
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
needs:
|
||||
- docker-build
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
with:
|
||||
images: ${{ env.RUFF_BASE_IMG }}
|
||||
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
|
||||
tags: |
|
||||
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||
|
||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
|
||||
- name: Create manifest list and push
|
||||
working-directory: /tmp/digests
|
||||
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
|
||||
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
|
||||
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
|
||||
run: |
|
||||
# shellcheck disable=SC2046
|
||||
docker buildx imagetools create \
|
||||
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf "${RUFF_BASE_IMG}@sha256:%s " *)
|
||||
|
||||
docker-publish-extra:
|
||||
name: Publish additional Docker image based on ${{ matrix.image-mapping }}
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
needs:
|
||||
- docker-publish
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# Mapping of base image followed by a comma followed by one or more base tags (comma separated)
|
||||
# Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
|
||||
image-mapping:
|
||||
- alpine:3.21,alpine3.21,alpine
|
||||
- debian:bookworm-slim,bookworm-slim,debian-slim
|
||||
- buildpack-deps:bookworm,bookworm,debian
|
||||
steps:
|
||||
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
||||
|
||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate Dynamic Dockerfile Tags
|
||||
shell: bash
|
||||
env:
|
||||
TAG_VALUE: ${{ fromJson(inputs.plan).announcement_tag }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the image and tags from the matrix variable
|
||||
IFS=',' read -r BASE_IMAGE BASE_TAGS <<< "${{ matrix.image-mapping }}"
|
||||
|
||||
# Generate Dockerfile content
|
||||
cat <<EOF > Dockerfile
|
||||
FROM ${BASE_IMAGE}
|
||||
COPY --from=${RUFF_BASE_IMG}:latest /ruff /usr/local/bin/ruff
|
||||
ENTRYPOINT []
|
||||
CMD ["/usr/local/bin/ruff"]
|
||||
EOF
|
||||
|
||||
# Initialize a variable to store all tag docker metadata patterns
|
||||
TAG_PATTERNS=""
|
||||
|
||||
# Loop through all base tags and append its docker metadata pattern to the list
|
||||
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
|
||||
IFS=','; for TAG in ${BASE_TAGS}; do
|
||||
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${TAG_VALUE}\n"
|
||||
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${TAG_VALUE}\n"
|
||||
TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
|
||||
done
|
||||
|
||||
# Remove the trailing newline from the pattern list
|
||||
TAG_PATTERNS="${TAG_PATTERNS%\\n}"
|
||||
|
||||
# Export image cache name
|
||||
echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> "$GITHUB_ENV"
|
||||
|
||||
# Export tag patterns using the multiline env var syntax
|
||||
{
|
||||
echo "TAG_PATTERNS<<EOF"
|
||||
echo -e "${TAG_PATTERNS}"
|
||||
echo EOF
|
||||
} >> "$GITHUB_ENV"
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
# ghcr.io prefers index level annotations
|
||||
env:
|
||||
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
|
||||
with:
|
||||
images: ${{ env.RUFF_BASE_IMG }}
|
||||
flavor: |
|
||||
latest=false
|
||||
tags: |
|
||||
${{ env.TAG_PATTERNS }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
# We do not really need to cache here as the Dockerfile is tiny
|
||||
#cache-from: type=gha,scope=ruff-${{ env.IMAGE_REF }}
|
||||
#cache-to: type=gha,mode=min,scope=ruff-${{ env.IMAGE_REF }}
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
annotations: ${{ steps.meta.outputs.annotations }}
|
||||
|
||||
# This is effectively a duplicate of `docker-publish` to make https://github.com/astral-sh/ruff/pkgs/container/ruff
|
||||
# show the ruff base image first since GitHub always shows the last updated image digests
|
||||
# This works by annotating the original digests (previously non-annotated) which triggers an update to ghcr.io
|
||||
docker-republish:
|
||||
name: Annotate Docker image (ghcr.io/astral-sh/ruff)
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
needs:
|
||||
- docker-publish-extra
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
env:
|
||||
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
|
||||
with:
|
||||
images: ${{ env.RUFF_BASE_IMG }}
|
||||
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
|
||||
tags: |
|
||||
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||
|
||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
|
||||
- name: Create manifest list and push
|
||||
working-directory: /tmp/digests
|
||||
# The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
|
||||
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
|
||||
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
|
||||
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
|
||||
run: |
|
||||
readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
|
||||
|
||||
# shellcheck disable=SC2046
|
||||
docker buildx imagetools create \
|
||||
"${annotations[@]}" \
|
||||
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf "${RUFF_BASE_IMG}@sha256:%s " *)
|
||||
.github/workflows/ci.yaml: 614 changes (vendored)
@@ -1,7 +1,5 @@
|
||||
name: CI
|
||||
|
||||
permissions: {}
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
@@ -18,7 +16,7 @@ env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
PACKAGE_NAME: ruff
|
||||
PYTHON_VERSION: "3.13"
|
||||
PYTHON_VERSION: "3.11"
|
||||
|
||||
jobs:
|
||||
determine_changes:
|
||||
@@ -26,179 +24,73 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
# Flag that is raised when any code that affects parser is changed
|
||||
parser: ${{ steps.check_parser.outputs.changed }}
|
||||
parser: ${{ steps.changed.outputs.parser_any_changed }}
|
||||
# Flag that is raised when any code that affects linter is changed
|
||||
linter: ${{ steps.check_linter.outputs.changed }}
|
||||
linter: ${{ steps.changed.outputs.linter_any_changed }}
|
||||
# Flag that is raised when any code that affects formatter is changed
|
||||
formatter: ${{ steps.check_formatter.outputs.changed }}
|
||||
formatter: ${{ steps.changed.outputs.formatter_any_changed }}
|
||||
# Flag that is raised when any code is changed
|
||||
# This is superset of the linter and formatter
|
||||
code: ${{ steps.check_code.outputs.changed }}
|
||||
# Flag that is raised when any code that affects the fuzzer is changed
|
||||
fuzz: ${{ steps.check_fuzzer.outputs.changed }}
|
||||
# Flag that is set to "true" when code related to red-knot changes.
|
||||
red_knot: ${{ steps.check_red_knot.outputs.changed }}
|
||||
|
||||
# Flag that is set to "true" when code related to the playground changes.
|
||||
playground: ${{ steps.check_playground.outputs.changed }}
|
||||
code: ${{ steps.changed.outputs.code_any_changed }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Determine merge base
|
||||
id: merge_base
|
||||
env:
|
||||
BASE_REF: ${{ github.event.pull_request.base.ref || 'main' }}
|
||||
run: |
|
||||
sha=$(git merge-base HEAD "origin/${BASE_REF}")
|
||||
echo "sha=${sha}" >> "$GITHUB_OUTPUT"
|
||||
- uses: tj-actions/changed-files@v44
|
||||
id: changed
|
||||
with:
|
||||
files_yaml: |
|
||||
parser:
|
||||
- Cargo.toml
|
||||
- Cargo.lock
|
||||
- crates/ruff_python_trivia/**
|
||||
- crates/ruff_source_file/**
|
||||
- crates/ruff_text_size/**
|
||||
- crates/ruff_python_ast/**
|
||||
- crates/ruff_python_parser/**
|
||||
- scripts/fuzz-parser/**
|
||||
- .github/workflows/ci.yaml
|
||||
|
||||
- name: Check if the parser code changed
|
||||
id: check_parser
|
||||
env:
|
||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
||||
run: |
|
||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
|
||||
':Cargo.toml' \
|
||||
':Cargo.lock' \
|
||||
':crates/ruff_python_trivia/**' \
|
||||
':crates/ruff_source_file/**' \
|
||||
':crates/ruff_text_size/**' \
|
||||
':crates/ruff_python_ast/**' \
|
||||
':crates/ruff_python_parser/**' \
|
||||
':python/py-fuzzer/**' \
|
||||
':.github/workflows/ci.yaml' \
|
||||
; then
|
||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
linter:
|
||||
- Cargo.toml
|
||||
- Cargo.lock
|
||||
- crates/**
|
||||
- "!crates/ruff_python_formatter/**"
|
||||
- "!crates/ruff_formatter/**"
|
||||
- "!crates/ruff_dev/**"
|
||||
- scripts/*
|
||||
- python/**
|
||||
- .github/workflows/ci.yaml
|
||||
|
||||
- name: Check if the linter code changed
|
||||
id: check_linter
|
||||
env:
|
||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
||||
run: |
|
||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':Cargo.toml' \
|
||||
':Cargo.lock' \
|
||||
':crates/**' \
|
||||
':!crates/red_knot*/**' \
|
||||
':!crates/ruff_python_formatter/**' \
|
||||
':!crates/ruff_formatter/**' \
|
||||
':!crates/ruff_dev/**' \
|
||||
':!crates/ruff_db/**' \
|
||||
':scripts/*' \
|
||||
':python/**' \
|
||||
':.github/workflows/ci.yaml' \
|
||||
; then
|
||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
formatter:
|
||||
- Cargo.toml
|
||||
- Cargo.lock
|
||||
- crates/ruff_python_formatter/**
|
||||
- crates/ruff_formatter/**
|
||||
- crates/ruff_python_trivia/**
|
||||
- crates/ruff_python_ast/**
|
||||
- crates/ruff_source_file/**
|
||||
- crates/ruff_python_index/**
|
||||
- crates/ruff_text_size/**
|
||||
- crates/ruff_python_parser/**
|
||||
- crates/ruff_dev/**
|
||||
- scripts/*
|
||||
- python/**
|
||||
- .github/workflows/ci.yaml
|
||||
|
||||
- name: Check if the formatter code changed
|
||||
id: check_formatter
|
||||
env:
|
||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
||||
run: |
|
||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':Cargo.toml' \
|
||||
':Cargo.lock' \
|
||||
':crates/ruff_python_formatter/**' \
|
||||
':crates/ruff_formatter/**' \
|
||||
':crates/ruff_python_trivia/**' \
|
||||
':crates/ruff_python_ast/**' \
|
||||
':crates/ruff_source_file/**' \
|
||||
':crates/ruff_python_index/**' \
|
||||
':crates/ruff_python_index/**' \
|
||||
':crates/ruff_text_size/**' \
|
||||
':crates/ruff_python_parser/**' \
|
||||
':scripts/*' \
|
||||
':python/**' \
|
||||
':.github/workflows/ci.yaml' \
|
||||
; then
|
||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Check if the fuzzer code changed
|
||||
id: check_fuzzer
|
||||
env:
|
||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
||||
run: |
|
||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':Cargo.toml' \
|
||||
':Cargo.lock' \
|
||||
':fuzz/fuzz_targets/**' \
|
||||
':.github/workflows/ci.yaml' \
|
||||
; then
|
||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Check if there was any code related change
|
||||
id: check_code
|
||||
env:
|
||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
||||
run: |
|
||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':**' \
|
||||
':!**/*.md' \
|
||||
':crates/red_knot_python_semantic/resources/mdtest/**/*.md' \
|
||||
':!docs/**' \
|
||||
':!assets/**' \
|
||||
':.github/workflows/ci.yaml' \
|
||||
; then
|
||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Check if there was any playground related change
|
||||
id: check_playground
|
||||
env:
|
||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
||||
run: |
|
||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
|
||||
':playground/**' \
|
||||
; then
|
||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Check if the red-knot code changed
|
||||
id: check_red_knot
|
||||
env:
|
||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
||||
run: |
|
||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
|
||||
':Cargo.toml' \
|
||||
':Cargo.lock' \
|
||||
':crates/red_knot*/**' \
|
||||
':crates/ruff_db/**' \
|
||||
':crates/ruff_annotate_snippets/**' \
|
||||
':crates/ruff_python_ast/**' \
|
||||
':crates/ruff_python_parser/**' \
|
||||
':crates/ruff_python_trivia/**' \
|
||||
':crates/ruff_source_file/**' \
|
||||
':crates/ruff_text_size/**' \
|
||||
':.github/workflows/ci.yaml' \
|
||||
; then
|
||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
code:
|
||||
- "**/*"
|
||||
- "!**/*.md"
|
||||
- "!docs/**"
|
||||
- "!assets/**"
|
||||
|
||||
cargo-fmt:
|
||||
name: "cargo fmt"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup component add rustfmt
|
||||
- run: cargo fmt --all --check
|
||||
@@ -210,50 +102,38 @@ jobs:
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: |
|
||||
rustup component add clippy
|
||||
rustup target add wasm32-unknown-unknown
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Clippy"
|
||||
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
|
||||
- name: "Clippy (wasm)"
|
||||
run: cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
|
||||
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
|
||||
|
||||
cargo-test-linux:
|
||||
name: "cargo test (linux)"
|
||||
runs-on: depot-ubuntu-22.04-16
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
|
||||
uses: rui314/setup-mold@v1
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
- name: "Install cargo insta"
|
||||
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-insta
|
||||
- name: Red-knot mdtests (GitHub annotations)
|
||||
if: ${{ needs.determine_changes.outputs.red_knot == 'true' }}
|
||||
env:
|
||||
NO_COLOR: 1
|
||||
MDTEST_GITHUB_ANNOTATIONS_FORMAT: 1
|
||||
# Ignore errors if this step fails; we want to continue to later steps in the workflow anyway.
|
||||
# This step is just to get nice GitHub annotations on the PR diff in the files-changed tab.
|
||||
run: cargo test -p red_knot_python_semantic --test mdtest || true
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
env:
|
||||
@@ -262,71 +142,32 @@ jobs:
|
||||
|
||||
# Check for broken links in the documentation.
|
||||
- run: cargo doc --all --no-deps
|
||||
env:
|
||||
RUSTDOCFLAGS: "-D warnings"
|
||||
# Use --document-private-items so that all our doc comments are kept in
|
||||
# sync, not just public items. Eventually we should do this for all
|
||||
# crates; for now add crates here as they are warning-clean to prevent
|
||||
# regression.
|
||||
- run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p red_knot_test -p ruff_db --document-private-items
|
||||
env:
|
||||
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
|
||||
RUSTDOCFLAGS: "-D warnings"
|
||||
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ruff
|
||||
path: target/debug/ruff
|
||||
|
||||
cargo-test-linux-release:
|
||||
name: "cargo test (linux, release)"
|
||||
runs-on: depot-ubuntu-22.04-16
|
||||
needs: determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
- name: "Install cargo insta"
|
||||
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
|
||||
with:
|
||||
tool: cargo-insta
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
env:
|
||||
NEXTEST_PROFILE: "ci"
|
||||
run: cargo insta test --release --all-features --unreferenced reject --test-runner nextest
|
||||
|
||||
cargo-test-windows:
|
||||
name: "cargo test (windows)"
|
||||
runs-on: github-windows-2025-x86_64-16
|
||||
runs-on: windows-latest
|
||||
needs: determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
env:
|
||||
NEXTEST_PROFILE: "ci"
|
||||
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
|
||||
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
|
||||
run: |
|
||||
@@ -337,103 +178,87 @@ jobs:
|
||||
name: "cargo test (wasm)"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
node-version: 18
|
||||
cache: "npm"
|
||||
cache-dependency-path: playground/package-lock.json
|
||||
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
|
||||
with:
|
||||
version: v0.13.1
|
||||
- name: "Test ruff_wasm"
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Run wasm-pack"
|
||||
run: |
|
||||
cd crates/ruff_wasm
|
||||
wasm-pack test --node
|
||||
- name: "Test red_knot_wasm"
|
||||
run: |
|
||||
cd crates/red_knot_wasm
|
||||
wasm-pack test --node
|
||||
|
||||
cargo-build-release:
|
||||
name: "cargo build (release)"
|
||||
runs-on: macos-latest
|
||||
if: ${{ github.ref == 'refs/heads/main' }}
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
|
||||
uses: rui314/setup-mold@v1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Build"
|
||||
run: cargo build --release --locked
|
||||
|
||||
cargo-build-msrv:
|
||||
name: "cargo build (msrv)"
|
||||
runs-on: depot-ubuntu-latest-8
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: SebRollen/toml-action@b1b3628f55fc3a28208d4203ada8b737e9687876 # v1.2.0
|
||||
- uses: actions/checkout@v4
|
||||
- uses: SebRollen/toml-action@v1.2.0
|
||||
id: msrv
|
||||
with:
|
||||
file: "Cargo.toml"
|
||||
field: "workspace.package.rust-version"
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- name: "Install Rust toolchain"
|
||||
env:
|
||||
MSRV: ${{ steps.msrv.outputs.value }}
|
||||
run: rustup default "${MSRV}"
|
||||
run: rustup default ${{ steps.msrv.outputs.value }}
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
|
||||
uses: rui314/setup-mold@v1
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
- name: "Install cargo insta"
|
||||
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-insta
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
env:
|
||||
NEXTEST_PROFILE: "ci"
|
||||
MSRV: ${{ steps.msrv.outputs.value }}
|
||||
run: cargo "+${MSRV}" insta test --all-features --unreferenced reject --test-runner nextest
|
||||
run: cargo +${{ steps.msrv.outputs.value }} insta test --all-features --unreferenced reject --test-runner nextest
|
||||
|
||||
cargo-fuzz-build:
|
||||
name: "cargo fuzz build"
|
||||
cargo-fuzz:
|
||||
name: "cargo fuzz"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ github.ref == 'refs/heads/main' || needs.determine_changes.outputs.fuzz == 'true' || needs.determine_changes.outputs.code == 'true' }}
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
with:
|
||||
workspaces: "fuzz -> target"
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: "fuzz -> target"
|
||||
- name: "Install cargo-binstall"
|
||||
uses: cargo-bins/cargo-binstall@63aaa5c1932cebabc34eceda9d92a70215dcead6 # v1.12.3
|
||||
uses: cargo-bins/cargo-binstall@main
|
||||
with:
|
||||
tool: cargo-fuzz@0.11.2
|
||||
- name: "Install cargo-fuzz"
|
||||
@@ -442,63 +267,49 @@ jobs:
|
||||
- run: cargo fuzz build -s none
|
||||
|
||||
fuzz-parser:
|
||||
name: "fuzz parser"
|
||||
name: "Fuzz the parser"
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- cargo-test-linux
|
||||
- determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && needs.determine_changes.outputs.parser == 'true' }}
|
||||
if: ${{ needs.determine_changes.outputs.parser == 'true' }}
|
||||
timeout-minutes: 20
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
|
||||
- uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: Install uv
|
||||
run: curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
- name: Install Python requirements
|
||||
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
|
||||
- uses: actions/download-artifact@v4
|
||||
name: Download Ruff binary to test
|
||||
id: download-cached-binary
|
||||
with:
|
||||
name: ruff
|
||||
path: ruff-to-test
|
||||
- name: Fuzz
|
||||
env:
|
||||
DOWNLOAD_PATH: ${{ steps.download-cached-binary.outputs.download-path }}
|
||||
run: |
|
||||
# Make executable, since artifact download doesn't preserve this
|
||||
chmod +x "${DOWNLOAD_PATH}/ruff"
|
||||
chmod +x ${{ steps.download-cached-binary.outputs.download-path }}/ruff
|
||||
|
||||
(
|
||||
uvx \
|
||||
--python="${PYTHON_VERSION}" \
|
||||
--from=./python/py-fuzzer \
|
||||
fuzz \
|
||||
--test-executable="${DOWNLOAD_PATH}/ruff" \
|
||||
--bin=ruff \
|
||||
0-500
|
||||
)
|
||||
python scripts/fuzz-parser/fuzz.py 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff
|
||||
|
||||
scripts:
|
||||
name: "test scripts"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup component add rustfmt
|
||||
# Run all code generation scripts, and verify that the current output is
|
||||
# already checked into git.
|
||||
- run: python crates/ruff_python_ast/generate.py
|
||||
- run: python crates/ruff_python_formatter/generate.py
|
||||
- run: test -z "$(git status --porcelain)"
|
||||
# Verify that adding a plugin or rule produces clean code.
|
||||
- run: ./scripts/add_rule.py --name DoTheThing --prefix F --code 999 --linter pyflakes
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- run: ./scripts/add_rule.py --name DoTheThing --prefix PL --code C0999 --linter pylint
|
||||
- run: cargo check
|
||||
- run: cargo fmt --all --check
|
||||
- run: |
|
||||
@@ -509,30 +320,28 @@ jobs:
|
||||
|
||||
ecosystem:
|
||||
name: "ecosystem"
|
||||
runs-on: depot-ubuntu-latest-8
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- cargo-test-linux
|
||||
- determine_changes
|
||||
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
|
||||
# Ecosystem check needs linter and/or formatter changes.
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
|
||||
if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
|
||||
- uses: actions/download-artifact@v4
|
||||
name: Download comparison Ruff binary
|
||||
id: ruff-target
|
||||
with:
|
||||
name: ruff
|
||||
path: target/debug
|
||||
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
- uses: dawidd6/action-download-artifact@v6
|
||||
name: Download baseline Ruff binary
|
||||
with:
|
||||
name: ruff
|
||||
@@ -546,72 +355,64 @@ jobs:
|
||||
|
||||
- name: Run `ruff check` stable ecosystem check
|
||||
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
|
||||
env:
|
||||
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
|
||||
run: |
|
||||
# Make executable, since artifact download doesn't preserve this
|
||||
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
|
||||
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
|
||||
|
||||
# Set pipefail to avoid hiding errors with tee
|
||||
set -eo pipefail
|
||||
|
||||
ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
|
||||
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
|
||||
|
||||
cat ecosystem-result-check-stable > "$GITHUB_STEP_SUMMARY"
|
||||
cat ecosystem-result-check-stable > $GITHUB_STEP_SUMMARY
|
||||
echo "### Linter (stable)" > ecosystem-result
|
||||
cat ecosystem-result-check-stable >> ecosystem-result
|
||||
echo "" >> ecosystem-result
|
||||
|
||||
- name: Run `ruff check` preview ecosystem check
|
||||
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
|
||||
env:
|
||||
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
|
||||
run: |
|
||||
# Make executable, since artifact download doesn't preserve this
|
||||
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
|
||||
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
|
||||
|
||||
# Set pipefail to avoid hiding errors with tee
|
||||
set -eo pipefail
|
||||
|
||||
ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
|
||||
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
|
||||
|
||||
cat ecosystem-result-check-preview > "$GITHUB_STEP_SUMMARY"
|
||||
cat ecosystem-result-check-preview > $GITHUB_STEP_SUMMARY
|
||||
echo "### Linter (preview)" >> ecosystem-result
|
||||
cat ecosystem-result-check-preview >> ecosystem-result
|
||||
echo "" >> ecosystem-result
|
||||
|
||||
- name: Run `ruff format` stable ecosystem check
|
||||
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
|
||||
env:
|
||||
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
|
||||
run: |
|
||||
# Make executable, since artifact download doesn't preserve this
|
||||
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
|
||||
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
|
||||
|
||||
# Set pipefail to avoid hiding errors with tee
|
||||
set -eo pipefail
|
||||
|
||||
ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
|
||||
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
|
||||
|
||||
cat ecosystem-result-format-stable > "$GITHUB_STEP_SUMMARY"
|
||||
cat ecosystem-result-format-stable > $GITHUB_STEP_SUMMARY
|
||||
echo "### Formatter (stable)" >> ecosystem-result
|
||||
cat ecosystem-result-format-stable >> ecosystem-result
|
||||
echo "" >> ecosystem-result
|
||||
|
||||
- name: Run `ruff format` preview ecosystem check
|
||||
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
|
||||
env:
|
||||
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
|
||||
run: |
|
||||
# Make executable, since artifact download doesn't preserve this
|
||||
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
|
||||
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
|
||||
|
||||
# Set pipefail to avoid hiding errors with tee
|
||||
set -eo pipefail
|
||||
|
||||
ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
|
||||
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
|
||||
|
||||
cat ecosystem-result-format-preview > "$GITHUB_STEP_SUMMARY"
|
||||
cat ecosystem-result-format-preview > $GITHUB_STEP_SUMMARY
|
||||
echo "### Formatter (preview)" >> ecosystem-result
|
||||
cat ecosystem-result-format-preview >> ecosystem-result
|
||||
echo "" >> ecosystem-result
|
||||
@@ -620,13 +421,13 @@ jobs:
|
||||
run: |
|
||||
echo ${{ github.event.number }} > pr-number
|
||||
|
||||
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
- uses: actions/upload-artifact@v4
|
||||
name: Upload PR Number
|
||||
with:
|
||||
name: pr-number
|
||||
path: pr-number
|
||||
|
||||
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
- uses: actions/upload-artifact@v4
|
||||
name: Upload Results
|
||||
with:
|
||||
name: ecosystem-result
|
||||
@@ -638,10 +439,8 @@ jobs:
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: cargo-bins/cargo-binstall@63aaa5c1932cebabc34eceda9d92a70215dcead6 # v1.12.3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: cargo-bins/cargo-binstall@main
|
||||
- run: cargo binstall --no-confirm cargo-shear
|
||||
- run: cargo shear
|
||||
|
||||
@@ -649,25 +448,22 @@ jobs:
|
||||
name: "python package"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
args: --out dist
|
||||
- name: "Test wheel"
|
||||
run: |
|
||||
pip install --force-reinstall --find-links dist "${PACKAGE_NAME}"
|
||||
pip install --force-reinstall --find-links dist ${{ env.PACKAGE_NAME }}
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Remove wheels from cache"
|
||||
@@ -675,28 +471,32 @@ jobs:
|
||||
|
||||
pre-commit:
|
||||
name: "pre-commit"
|
||||
runs-on: depot-ubuntu-22.04-16
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Install pre-commit"
|
||||
run: pip install pre-commit
|
||||
- name: "Cache pre-commit"
|
||||
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.cache/pre-commit
|
||||
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||
- name: "Run pre-commit"
|
||||
run: |
|
||||
echo '```console' > "$GITHUB_STEP_SUMMARY"
|
||||
echo '```console' > $GITHUB_STEP_SUMMARY
|
||||
# Enable color output for pre-commit and remove it for the summary
|
||||
# Use --hook-stage=manual to enable slower pre-commit hooks that are skipped by default
|
||||
SKIP=cargo-fmt,clippy,dev-generate-all uvx --python="${PYTHON_VERSION}" pre-commit run --all-files --show-diff-on-failure --color=always --hook-stage=manual | \
|
||||
tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> "$GITHUB_STEP_SUMMARY") >&1
|
||||
exit_code="${PIPESTATUS[0]}"
|
||||
echo '```' >> "$GITHUB_STEP_SUMMARY"
|
||||
exit "$exit_code"
|
||||
SKIP=cargo-fmt,clippy,dev-generate-all pre-commit run --all-files --show-diff-on-failure --color=always | \
|
||||
tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> $GITHUB_STEP_SUMMARY) >&1
|
||||
exit_code=${PIPESTATUS[0]}
|
||||
echo '```' >> $GITHUB_STEP_SUMMARY
|
||||
exit $exit_code
|
||||
|
||||
docs:
|
||||
name: "mkdocs"
|
||||
@@ -705,28 +505,22 @@ jobs:
|
||||
env:
|
||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
with:
|
||||
python-version: "3.13"
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
|
||||
uses: webfactory/ssh-agent@v0.9.0
|
||||
with:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: uv pip install -r docs/requirements-insiders.txt --system
|
||||
run: pip install -r docs/requirements-insiders.txt
|
||||
- name: "Install dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: uv pip install -r docs/requirements.txt --system
|
||||
run: pip install -r docs/requirements.txt
|
||||
- name: "Update README File"
|
||||
run: python scripts/transform_readme.py --target mkdocs
|
||||
- name: "Generate docs"
|
||||
@@ -744,21 +538,20 @@ jobs:
|
||||
name: "formatter instabilities and black similarity"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main') }}
|
||||
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Run checks"
|
||||
- name: "Cache rust"
|
||||
uses: Swatinem/rust-cache@v2
|
||||
- name: "Formatter progress"
|
||||
run: scripts/formatter_ecosystem_checks.sh
|
||||
- name: "Github step summary"
|
||||
run: cat target/formatter-ecosystem/stats.txt > "$GITHUB_STEP_SUMMARY"
|
||||
run: cat target/progress_projects_stats.txt > $GITHUB_STEP_SUMMARY
|
||||
- name: "Remove checkouts from cache"
|
||||
run: rm -r target/formatter-ecosystem
|
||||
run: rm -r target/progress_projects
|
||||
|
||||
check-ruff-lsp:
|
||||
name: "test ruff-lsp"
|
||||
@@ -767,24 +560,22 @@ jobs:
|
||||
needs:
|
||||
- cargo-test-linux
|
||||
- determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
steps:
|
||||
- uses: extractions/setup-just@dd310ad5a97d8e7b41793f8ef055398d51ad4de6 # v2
|
||||
- uses: extractions/setup-just@v2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
name: "Download ruff-lsp source"
|
||||
with:
|
||||
persist-credentials: false
|
||||
repository: "astral-sh/ruff-lsp"
|
||||
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
# installation fails on 3.13 and newer
|
||||
python-version: "3.12"
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
|
||||
- uses: actions/download-artifact@v4
|
||||
name: Download development ruff binary
|
||||
id: ruff-target
|
||||
with:
|
||||
@@ -796,76 +587,39 @@ jobs:
|
||||
just install
|
||||
|
||||
- name: Run ruff-lsp tests
|
||||
env:
|
||||
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
|
||||
run: |
|
||||
# Setup development binary
|
||||
pip uninstall --yes ruff
|
||||
chmod +x "${DOWNLOAD_PATH}/ruff"
|
||||
export PATH="${DOWNLOAD_PATH}:${PATH}"
|
||||
chmod +x ${{ steps.ruff-target.outputs.download-path }}/ruff
|
||||
export PATH=${{ steps.ruff-target.outputs.download-path }}:$PATH
|
||||
ruff version
|
||||
|
||||
just test
|
||||
|
||||
check-playground:
|
||||
name: "check playground"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
needs:
|
||||
- determine_changes
|
||||
if: ${{ (needs.determine_changes.outputs.playground == 'true') }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
|
||||
with:
|
||||
node-version: 22
|
||||
cache: "npm"
|
||||
cache-dependency-path: playground/package-lock.json
|
||||
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
|
||||
- name: "Install Node dependencies"
|
||||
run: npm ci
|
||||
working-directory: playground
|
||||
- name: "Build playgrounds"
|
||||
run: npm run dev:wasm
|
||||
working-directory: playground
|
||||
- name: "Run TypeScript checks"
|
||||
run: npm run check
|
||||
working-directory: playground
|
||||
- name: "Check formatting"
|
||||
run: npm run fmt:check
|
||||
working-directory: playground
|
||||
|
||||
benchmarks:
|
||||
runs-on: ubuntu-24.04
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ github.repository == 'astral-sh/ruff' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- name: "Checkout Branch"
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
|
||||
- name: "Install codspeed"
|
||||
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-codspeed
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- name: "Build benchmarks"
|
||||
run: cargo codspeed build --features codspeed -p ruff_benchmark
|
||||
|
||||
- name: "Run benchmarks"
|
||||
uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
|
||||
uses: CodSpeedHQ/action@v2
|
||||
with:
|
||||
run: cargo codspeed run
|
||||
token: ${{ secrets.CODSPEED_TOKEN }}
|
||||
|
||||
30
.github/workflows/daily_fuzz.yaml
vendored
@@ -31,31 +31,25 @@ jobs:
# Don't run the cron job on forks:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
persist-credentials: false
- uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
python-version: "3.12"
- name: Install uv
run: curl -LsSf https://astral.sh/uv/install.sh | sh
- name: Install Python requirements
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: Build ruff
# A debug build means the script runs slower once it gets started,
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
run: cargo build --locked
- name: Fuzz
run: |
# shellcheck disable=SC2046
(
uvx \
--python=3.12 \
--from=./python/py-fuzzer \
fuzz \
--test-executable=target/debug/ruff \
--bin=ruff \
$(shuf -i 0-9999999999999999999 -n 1000)
)
run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff

create-issue-on-failure:
name: Create an issue if the daily fuzz surfaced any bugs
@@ -65,7 +59,7 @@ jobs:
permissions:
issues: write
steps:
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
- uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
@@ -73,6 +67,6 @@ jobs:
owner: "astral-sh",
repo: "ruff",
title: `Daily parser fuzz failed on ${new Date().toDateString()}`,
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
body: "Runs listed here: https://github.com/astral-sh/ruff/actions/workflows/daily_fuzz.yml",
labels: ["bug", "parser", "fuzzer"],
})

72
.github/workflows/daily_property_tests.yaml
vendored
@@ -1,72 +0,0 @@
|
||||
name: Daily property test run
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 12 * * *"
|
||||
pull_request:
|
||||
paths:
|
||||
- ".github/workflows/daily_property_tests.yaml"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
FORCE_COLOR: 1
|
||||
|
||||
jobs:
|
||||
property_tests:
|
||||
name: Property tests
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
# Don't run the cron job on forks:
|
||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- name: Build Red Knot
|
||||
# A release build takes longer (2 min vs 1 min), but the property tests run much faster in release
|
||||
# mode (1.5 min vs 14 min), so the overall time is shorter with a release build.
|
||||
run: cargo build --locked --release --package red_knot_python_semantic --tests
|
||||
- name: Run property tests
|
||||
shell: bash
|
||||
run: |
|
||||
export QUICKCHECK_TESTS=100000
|
||||
for _ in {1..5}; do
|
||||
cargo test --locked --release --package red_knot_python_semantic -- --ignored list::property_tests
|
||||
cargo test --locked --release --package red_knot_python_semantic -- --ignored types::property_tests::stable
|
||||
done
|
||||
|
||||
create-issue-on-failure:
|
||||
name: Create an issue if the daily property test run surfaced any bugs
|
||||
runs-on: ubuntu-latest
|
||||
needs: property_tests
|
||||
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.property_tests.result == 'failure' }}
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
await github.rest.issues.create({
|
||||
owner: "astral-sh",
|
||||
repo: "ruff",
|
||||
title: `Daily property test run failed on ${new Date().toDateString()}`,
|
||||
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
|
||||
labels: ["bug", "red-knot", "testing"],
|
||||
})
|
||||
55
.github/workflows/docs.yaml
vendored
Normal file
@@ -0,0 +1,55 @@
name: mkdocs

on:
workflow_dispatch:
inputs:
ref:
description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
default: ""
type: string
release:
types: [published]

jobs:
mkdocs:
runs-on: ubuntu-latest
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
- uses: actions/setup-python@v5
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: pip install -r docs/requirements-insiders.txt
- name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: pip install -r docs/requirements.txt
- name: "Copy README File"
run: |
python scripts/transform_readme.py --target mkdocs
python scripts/generate_mkdocs.py
- name: "Build Insiders docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.6.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
command: pages deploy site --project-name=astral-docs --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
96
.github/workflows/mypy_primer.yaml
vendored
@@ -1,96 +0,0 @@
|
||||
name: Run mypy_primer
|
||||
|
||||
permissions: {}
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "crates/red_knot*/**"
|
||||
- "crates/ruff_db"
|
||||
- "crates/ruff_python_ast"
|
||||
- "crates/ruff_python_parser"
|
||||
- ".github/workflows/mypy_primer.yaml"
|
||||
- ".github/workflows/mypy_primer_comment.yaml"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
mypy_primer:
|
||||
name: Run mypy_primer
|
||||
runs-on: depot-ubuntu-22.04-16
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
path: ruff
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
|
||||
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
with:
|
||||
workspaces: "ruff"
|
||||
- name: Install Rust toolchain
|
||||
run: rustup show
|
||||
|
||||
- name: Install mypy_primer
|
||||
run: |
|
||||
uv tool install "git+https://github.com/astral-sh/mypy_primer.git@add-red-knot-support-v5"
|
||||
|
||||
- name: Run mypy_primer
|
||||
shell: bash
|
||||
run: |
|
||||
cd ruff
|
||||
|
||||
PRIMER_SELECTOR="$(paste -s -d'|' crates/red_knot_python_semantic/resources/primer/good.txt)"
|
||||
|
||||
echo "new commit"
|
||||
git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
|
||||
|
||||
MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
|
||||
git checkout -b base_commit "$MERGE_BASE"
|
||||
echo "base commit"
|
||||
git rev-list --format=%s --max-count=1 base_commit
|
||||
|
||||
cd ..
|
||||
|
||||
echo "Project selector: $PRIMER_SELECTOR"
|
||||
# Allow the exit code to be 0 or 1, only fail for actual mypy_primer crashes/bugs
|
||||
uvx mypy_primer \
|
||||
--repo ruff \
|
||||
--type-checker knot \
|
||||
--old base_commit \
|
||||
--new "$GITHUB_SHA" \
|
||||
--project-selector "/($PRIMER_SELECTOR)\$" \
|
||||
--output concise \
|
||||
--debug > mypy_primer.diff || [ $? -eq 1 ]
|
||||
|
||||
# Output diff with ANSI color codes
|
||||
cat mypy_primer.diff
|
||||
|
||||
# Remove ANSI color codes before uploading
|
||||
sed -ie 's/\x1b\[[0-9;]*m//g' mypy_primer.diff
|
||||
|
||||
echo ${{ github.event.number }} > pr-number
|
||||
|
||||
- name: Upload diff
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: mypy_primer_diff
|
||||
path: mypy_primer.diff
|
||||
|
||||
- name: Upload pr-number
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: pr-number
|
||||
path: pr-number
|
||||
97
.github/workflows/mypy_primer_comment.yaml
vendored
@@ -1,97 +0,0 @@
|
||||
name: PR comment (mypy_primer)
|
||||
|
||||
on: # zizmor: ignore[dangerous-triggers]
|
||||
workflow_run:
|
||||
workflows: [Run mypy_primer]
|
||||
types: [completed]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
workflow_run_id:
|
||||
description: The mypy_primer workflow that triggers the workflow run
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: Download PR number
|
||||
with:
|
||||
name: pr-number
|
||||
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Parse pull request number
|
||||
id: pr-number
|
||||
run: |
|
||||
if [[ -f pr-number ]]
|
||||
then
|
||||
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: "Download mypy_primer results"
|
||||
id: download-mypy_primer_diff
|
||||
if: steps.pr-number.outputs.pr-number
|
||||
with:
|
||||
name: mypy_primer_diff
|
||||
workflow: mypy_primer.yaml
|
||||
pr: ${{ steps.pr-number.outputs.pr-number }}
|
||||
path: pr/mypy_primer_diff
|
||||
workflow_conclusion: completed
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Generate comment content
|
||||
id: generate-comment
|
||||
if: steps.download-mypy_primer_diff.outputs.found_artifact == 'true'
|
||||
run: |
|
||||
# Guard against malicious mypy_primer results that symlink to a secret
|
||||
# file on this runner
|
||||
if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]]
|
||||
then
|
||||
echo "Error: mypy_primer.diff cannot be a symlink"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Note this identifier is used to find the comment to update on
|
||||
# subsequent runs
|
||||
echo '<!-- generated-comment mypy_primer -->' >> comment.txt
|
||||
|
||||
echo '## `mypy_primer` results' >> comment.txt
|
||||
if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then
|
||||
echo '<details>' >> comment.txt
|
||||
echo '<summary>Changes were detected when running on open source projects</summary>' >> comment.txt
|
||||
echo '' >> comment.txt
|
||||
echo '```diff' >> comment.txt
|
||||
cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt
|
||||
echo '```' >> comment.txt
|
||||
echo '</details>' >> comment.txt
|
||||
else
|
||||
echo 'No ecosystem changes detected ✅' >> comment.txt
|
||||
fi
|
||||
|
||||
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
|
||||
cat comment.txt >> "$GITHUB_OUTPUT"
|
||||
echo 'EOF' >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Find existing comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3
|
||||
if: steps.generate-comment.outcome == 'success'
|
||||
id: find-comment
|
||||
with:
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
comment-author: "github-actions[bot]"
|
||||
body-includes: "<!-- generated-comment mypy_primer -->"
|
||||
|
||||
- name: Create or update comment
|
||||
if: steps.find-comment.outcome == 'success'
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
|
||||
with:
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
body-path: comment.txt
|
||||
edit-mode: replace
|
||||
29
.github/workflows/notify-dependents.yml
vendored
@@ -1,29 +0,0 @@
# Notify downstream repositories of a new release.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
# job within `cargo-dist`.
name: "[ruff] Notify dependents"

on:
workflow_call:
inputs:
plan:
required: true
type: string

jobs:
update-dependents:
name: Notify dependents
runs-on: ubuntu-latest
steps:
- name: "Update pre-commit mirror"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
script: |
github.rest.actions.createWorkflowDispatch({
owner: 'astral-sh',
repo: 'ruff-pre-commit',
workflow_id: 'main.yml',
ref: 'main',
})
48
.github/workflows/playground.yaml
vendored
Normal file
@@ -0,0 +1,48 @@
name: "[Playground] Release"

on:
workflow_dispatch:
release:
types: [published]

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10

jobs:
publish:
runs-on: ubuntu-latest
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
with:
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-bindgen-action@v0.2.0
- name: "Run wasm-pack"
run: wasm-pack build --target web --out-dir ../../playground/src/pkg crates/ruff_wasm
- name: "Install Node dependencies"
run: npm ci
working-directory: playground
- name: "Run TypeScript checks"
run: npm run check
working-directory: playground
- name: "Build JavaScript bundle"
run: npm run build
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.6.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
command: pages deploy playground/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
23
.github/workflows/pr-comment.yaml
vendored
@@ -10,29 +10,29 @@ on:
|
||||
description: The ecosystem workflow that triggers the workflow run
|
||||
required: true
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
- uses: dawidd6/action-download-artifact@v6
|
||||
name: Download pull request number
|
||||
with:
|
||||
name: pr-number
|
||||
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Parse pull request number
|
||||
id: pr-number
|
||||
run: |
|
||||
if [[ -f pr-number ]]
|
||||
then
|
||||
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
|
||||
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
- uses: dawidd6/action-download-artifact@v6
|
||||
name: "Download ecosystem results"
|
||||
id: download-ecosystem-result
|
||||
if: steps.pr-number.outputs.pr-number
|
||||
@@ -43,7 +43,6 @@ jobs:
|
||||
path: pr/ecosystem
|
||||
workflow_conclusion: completed
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Generate comment content
|
||||
id: generate-comment
|
||||
@@ -65,12 +64,12 @@ jobs:
|
||||
cat pr/ecosystem/ecosystem-result >> comment.txt
|
||||
echo "" >> comment.txt
|
||||
|
||||
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
|
||||
cat comment.txt >> "$GITHUB_OUTPUT"
|
||||
echo 'EOF' >> "$GITHUB_OUTPUT"
|
||||
echo 'comment<<EOF' >> $GITHUB_OUTPUT
|
||||
cat comment.txt >> $GITHUB_OUTPUT
|
||||
echo 'EOF' >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Find existing comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3
|
||||
uses: peter-evans/find-comment@v3
|
||||
if: steps.generate-comment.outcome == 'success'
|
||||
id: find-comment
|
||||
with:
|
||||
@@ -80,7 +79,7 @@ jobs:
|
||||
|
||||
- name: Create or update comment
|
||||
if: steps.find-comment.outcome == 'success'
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
with:
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
|
||||
144
.github/workflows/publish-docs.yml
vendored
@@ -1,144 +0,0 @@
|
||||
# Publish the Ruff documentation.
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
|
||||
# job within `cargo-dist`.
|
||||
name: mkdocs
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
|
||||
default: ""
|
||||
type: string
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
mkdocs:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
persist-credentials: true
|
||||
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
||||
with:
|
||||
python-version: 3.12
|
||||
|
||||
- name: "Set docs version"
|
||||
env:
|
||||
version: ${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}
|
||||
run: |
|
||||
# if version is missing, use 'latest'
|
||||
if [ -z "$version" ]; then
|
||||
echo "Using 'latest' as version"
|
||||
version="latest"
|
||||
fi
|
||||
|
||||
# Use version as display name for now
|
||||
display_name="$version"
|
||||
|
||||
echo "version=$version" >> "$GITHUB_ENV"
|
||||
echo "display_name=$display_name" >> "$GITHUB_ENV"
|
||||
|
||||
- name: "Set branch name"
|
||||
run: |
|
||||
timestamp="$(date +%s)"
|
||||
|
||||
# create branch_display_name from display_name by replacing all
|
||||
# characters disallowed in git branch names with hyphens
|
||||
branch_display_name="$(echo "${display_name}" | tr -c '[:alnum:]._' '-' | tr -s '-')"
|
||||
|
||||
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV"
|
||||
echo "timestamp=$timestamp" >> "$GITHUB_ENV"
|
||||
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
|
||||
with:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: pip install -r docs/requirements-insiders.txt
|
||||
|
||||
- name: "Install dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: pip install -r docs/requirements.txt
|
||||
|
||||
- name: "Copy README File"
|
||||
run: |
|
||||
python scripts/transform_readme.py --target mkdocs
|
||||
python scripts/generate_mkdocs.py
|
||||
|
||||
- name: "Build Insiders docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||
|
||||
- name: "Build docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.public.yml
|
||||
|
||||
- name: "Clone docs repo"
|
||||
run: git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
|
||||
|
||||
- name: "Copy docs"
|
||||
run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/
|
||||
|
||||
- name: "Commit docs"
|
||||
working-directory: astral-docs
|
||||
run: |
|
||||
git config user.name "astral-docs-bot"
|
||||
git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
|
||||
|
||||
git checkout -b "${branch_name}"
|
||||
git add site/ruff
|
||||
git commit -m "Update ruff documentation for $version"
|
||||
|
||||
- name: "Create Pull Request"
|
||||
working-directory: astral-docs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
run: |
|
||||
# set the PR title
|
||||
pull_request_title="Update ruff documentation for ${display_name}"
|
||||
|
||||
# Delete any existing pull requests that are open for this version
|
||||
# by checking against pull_request_title because the new PR will
|
||||
# supersede the old one.
|
||||
gh pr list --state open --json title --jq '.[] | select(.title == "$pull_request_title") | .number' | \
|
||||
xargs -I {} gh pr close {}
|
||||
|
||||
# push the branch to GitHub
|
||||
git push origin "${branch_name}"
|
||||
|
||||
# create the PR
|
||||
gh pr create \
|
||||
--base=main \
|
||||
--head="${branch_name}" \
|
||||
--title="${pull_request_title}" \
|
||||
--body="Automated documentation update for ${display_name}" \
|
||||
--label="documentation"
|
||||
|
||||
- name: "Merge Pull Request"
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
working-directory: astral-docs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
run: |
|
||||
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
|
||||
# give the PR a few seconds to be created before trying to auto-merge it
|
||||
sleep 10
|
||||
gh pr merge --squash "${branch_name}"
|
||||
58
.github/workflows/publish-knot-playground.yml
vendored
@@ -1,58 +0,0 @@
|
||||
# Publish the Red Knot playground.
|
||||
name: "[Knot Playground] Release"
|
||||
|
||||
permissions: {}
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "crates/red_knot*/**"
|
||||
- "crates/ruff_db/**"
|
||||
- "crates/ruff_python_ast/**"
|
||||
- "crates/ruff_python_parser/**"
|
||||
- "playground/**"
|
||||
- ".github/workflows/publish-knot-playground.yml"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
|
||||
with:
|
||||
node-version: 22
|
||||
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
|
||||
- name: "Install Node dependencies"
|
||||
run: npm ci
|
||||
working-directory: playground
|
||||
- name: "Run TypeScript checks"
|
||||
run: npm run check
|
||||
working-directory: playground
|
||||
- name: "Build Knot playground"
|
||||
run: npm run build --workspace knot-playground
|
||||
working-directory: playground
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
||||
command: pages deploy playground/knot/dist --project-name=knot-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
||||
54
.github/workflows/publish-playground.yml
vendored
@@ -1,54 +0,0 @@
|
||||
# Publish the Ruff playground.
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
|
||||
# job within `cargo-dist`.
|
||||
name: "[Playground] Release"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
|
||||
with:
|
||||
node-version: 22
|
||||
cache: "npm"
|
||||
cache-dependency-path: playground/package-lock.json
|
||||
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
|
||||
- name: "Install Node dependencies"
|
||||
run: npm ci
|
||||
working-directory: playground
|
||||
- name: "Run TypeScript checks"
|
||||
run: npm run check
|
||||
working-directory: playground
|
||||
- name: "Build Ruff playground"
|
||||
run: npm run build --workspace ruff-playground
|
||||
working-directory: playground
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
||||
command: pages deploy playground/ruff/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
||||
32
.github/workflows/publish-pypi.yml
vendored
32
.github/workflows/publish-pypi.yml
vendored
@@ -1,32 +0,0 @@
# Publish a release to PyPI.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job
# within `cargo-dist`.
name: "[ruff] Publish to PyPI"

on:
  workflow_call:
    inputs:
      plan:
        required: true
        type: string

jobs:
  pypi-publish:
    name: Upload to PyPI
    runs-on: ubuntu-latest
    environment:
      name: release
    permissions:
      # For PyPI's trusted publishing.
      id-token: write
    steps:
      - name: "Install uv"
        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
      - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
        with:
          pattern: wheels-*
          path: wheels
          merge-multiple: true
      - name: Publish to PyPi
        run: uv publish -v wheels/*
59
.github/workflows/publish-wasm.yml
vendored
59
.github/workflows/publish-wasm.yml
vendored
@@ -1,59 +0,0 @@
|
||||
# Build and publish ruff-api for wasm.
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish
|
||||
# job within `cargo-dist`.
|
||||
name: "Build and publish wasm"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
ruff_wasm:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
strategy:
|
||||
matrix:
|
||||
target: [web, bundler, nodejs]
|
||||
fail-fast: false
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
|
||||
with:
|
||||
version: v0.13.1
|
||||
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
|
||||
- name: "Run wasm-pack build"
|
||||
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
|
||||
- name: "Rename generated package"
|
||||
run: | # Replace the package name w/ jq
|
||||
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
|
||||
mv /tmp/package.json crates/ruff_wasm/pkg
|
||||
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
|
||||
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
|
||||
with:
|
||||
node-version: 20
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
- name: "Publish (dry-run)"
|
||||
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
run: npm publish --dry-run crates/ruff_wasm/pkg
|
||||
- name: "Publish"
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
run: npm publish --provenance --access public crates/ruff_wasm/pkg
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
603
.github/workflows/release.yaml
vendored
Normal file
603
.github/workflows/release.yaml
vendored
Normal file
@@ -0,0 +1,603 @@
|
||||
name: "[ruff] Release"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: "The version to tag, without the leading 'v'. If omitted, will initiate a dry run (no uploads)."
|
||||
type: string
|
||||
sha:
|
||||
description: "The full sha of the commit to be released. If omitted, the latest commit on the default branch will be used."
|
||||
default: ""
|
||||
type: string
|
||||
pull_request:
|
||||
paths:
|
||||
# When we change pyproject.toml, we want to ensure that the maturin builds still work
|
||||
- pyproject.toml
|
||||
# And when we change this workflow itself...
|
||||
- .github/workflows/release.yaml
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
PACKAGE_NAME: ruff
|
||||
PYTHON_VERSION: "3.11"
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
sdist:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build sdist"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
command: sdist
|
||||
args: --out dist
|
||||
- name: "Test sdist"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload sdist"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-sdist
|
||||
path: dist
|
||||
|
||||
macos-x86_64:
|
||||
runs-on: macos-12
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - x86_64"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: x86_64
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel - x86_64"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-macos-x86_64
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-macos-x86_64
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
macos-aarch64:
|
||||
runs-on: macos-14
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: arm64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - aarch64"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: aarch64
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel - aarch64"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-aarch64-apple-darwin
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-aarch64-apple-darwin
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
windows:
|
||||
runs-on: windows-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: x86_64-pc-windows-msvc
|
||||
arch: x64
|
||||
- target: i686-pc-windows-msvc
|
||||
arch: x86
|
||||
- target: aarch64-pc-windows-msvc
|
||||
arch: x64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: ${{ matrix.platform.arch }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
args: --release --locked --out dist
|
||||
env:
|
||||
# aarch64 build fails, see https://github.com/PyO3/maturin/issues/2110
|
||||
XWIN_VERSION: 16
|
||||
- name: "Test wheel"
|
||||
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip
|
||||
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
||||
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.zip
|
||||
*.sha256
|
||||
|
||||
linux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-gnu
|
||||
- i686-unknown-linux-gnu
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: auto
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-${{ matrix.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
linux-cross:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-gnu
|
||||
arch: aarch64
|
||||
# see https://github.com/astral-sh/ruff/issues/3791
|
||||
# and https://github.com/gnzlbg/jemallocator/issues/170#issuecomment-1503228963
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: armv7-unknown-linux-gnueabihf
|
||||
arch: armv7
|
||||
- target: s390x-unknown-linux-gnu
|
||||
arch: s390x
|
||||
- target: powerpc64le-unknown-linux-gnu
|
||||
arch: ppc64le
|
||||
# see https://github.com/astral-sh/ruff/issues/10073
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: powerpc64-unknown-linux-gnu
|
||||
arch: ppc64
|
||||
# see https://github.com/astral-sh/ruff/issues/10073
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: auto
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
args: --release --locked --out dist
|
||||
- uses: uraimo/run-on-arch-action@v2
|
||||
if: matrix.platform.arch != 'ppc64'
|
||||
name: Test wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
distro: ubuntu20.04
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apt-get update
|
||||
apt-get install -y --no-install-recommends python3 python3-pip
|
||||
pip3 install -U pip
|
||||
run: |
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
musllinux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-musl
|
||||
- i686-unknown-linux-musl
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||
uses: addnab/docker-run-action@v3
|
||||
with:
|
||||
image: alpine:latest
|
||||
options: -v ${{ github.workspace }}:/io -w /io
|
||||
run: |
|
||||
apk add python3
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/ruff check --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-${{ matrix.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
musllinux-cross:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-musl
|
||||
arch: aarch64
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: armv7-unknown-linux-musleabihf
|
||||
arch: armv7
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
- uses: uraimo/run-on-arch-action@v2
|
||||
name: Test wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
distro: alpine_latest
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apk add python3
|
||||
run: |
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/ruff check --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
validate-tag:
|
||||
name: Validate tag
|
||||
runs-on: ubuntu-latest
|
||||
# If you don't set an input tag, it's a dry run (no uploads).
|
||||
if: ${{ inputs.tag }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: main # We checkout the main branch to check for the commit
|
||||
- name: Check main branch
|
||||
if: ${{ inputs.sha }}
|
||||
run: |
|
||||
# Fetch the main branch since a shallow checkout is used by default
|
||||
git fetch origin main --unshallow
|
||||
if ! git branch --contains ${{ inputs.sha }} | grep -E '(^|\s)main$'; then
|
||||
echo "The specified sha is not on the main branch" >&2
|
||||
exit 1
|
||||
fi
|
||||
- name: Check tag consistency
|
||||
run: |
|
||||
# Switch to the commit we want to release
|
||||
git checkout ${{ inputs.sha }}
|
||||
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||
if [ "${{ inputs.tag }}" != "${version}" ]; then
|
||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||
echo "${{ inputs.tag }}" >&2
|
||||
echo "${version}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo "Releasing ${version}"
|
||||
fi
|
||||
|
||||
upload-release:
|
||||
name: Upload to PyPI
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- macos-aarch64
|
||||
- macos-x86_64
|
||||
- windows
|
||||
- linux
|
||||
- linux-cross
|
||||
- musllinux
|
||||
- musllinux-cross
|
||||
- validate-tag
|
||||
# If you don't set an input tag, it's a dry run (no uploads).
|
||||
if: ${{ inputs.tag }}
|
||||
environment:
|
||||
name: release
|
||||
permissions:
|
||||
# For pypi trusted publishing
|
||||
id-token: write
|
||||
steps:
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: wheels-*
|
||||
path: wheels
|
||||
merge-multiple: true
|
||||
- name: Publish to PyPi
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
skip-existing: true
|
||||
packages-dir: wheels
|
||||
verbose: true
|
||||
|
||||
tag-release:
|
||||
name: Tag release
|
||||
runs-on: ubuntu-latest
|
||||
needs: upload-release
|
||||
# If you don't set an input tag, it's a dry run (no uploads).
|
||||
if: ${{ inputs.tag }}
|
||||
permissions:
|
||||
# For git tag
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- name: git tag
|
||||
run: |
|
||||
git config user.email "hey@astral.sh"
|
||||
git config user.name "Ruff Release CI"
|
||||
git tag -m "v${{ inputs.tag }}" "v${{ inputs.tag }}"
|
||||
# If there is duplicate tag, this will fail. The publish to pypi action will have been a noop (due to skip
|
||||
# existing), so we make a non-destructive exit here
|
||||
git push --tags
|
||||
|
||||
publish-release:
|
||||
name: Publish to GitHub
|
||||
runs-on: ubuntu-latest
|
||||
needs: tag-release
|
||||
# If you don't set an input tag, it's a dry run (no uploads).
|
||||
if: ${{ inputs.tag }}
|
||||
permissions:
|
||||
# For GitHub release publishing
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: binaries-*
|
||||
path: binaries
|
||||
merge-multiple: true
|
||||
- name: "Publish to GitHub"
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
draft: true
|
||||
files: binaries/*
|
||||
tag_name: v${{ inputs.tag }}
|
||||
|
||||
docker-publish:
|
||||
# This action doesn't need to wait on any other task, it's easy to re-tag if something failed and we're validating
|
||||
# the tag here also
|
||||
name: Push Docker image ghcr.io/astral-sh/ruff
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
permissions:
|
||||
# For the docker push
|
||||
packages: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ghcr.io/astral-sh/ruff
|
||||
|
||||
- name: Check tag consistency
|
||||
# Unlike validate-tag we don't check if the commit is on the main branch, but it seems good enough since we can
|
||||
# change docker tags
|
||||
if: ${{ inputs.tag }}
|
||||
run: |
|
||||
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||
if [ "${{ inputs.tag }}" != "${version}" ]; then
|
||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||
echo "${{ inputs.tag }}" >&2
|
||||
echo "${version}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo "Releasing ${version}"
|
||||
fi
|
||||
|
||||
- name: "Build and push Docker image"
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
# Reuse the builder
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
push: ${{ inputs.tag != '' }}
|
||||
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ inputs.tag || 'dry-run' }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
|
||||
# After the release has been published, we update downstream repositories
|
||||
# This is separate because if this fails the release is still fine, we just need to do some manual workflow triggers
|
||||
update-dependents:
|
||||
name: Update dependents
|
||||
runs-on: ubuntu-latest
|
||||
needs: publish-release
|
||||
steps:
|
||||
- name: "Update pre-commit mirror"
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
|
||||
script: |
|
||||
github.rest.actions.createWorkflowDispatch({
|
||||
owner: 'astral-sh',
|
||||
repo: 'ruff-pre-commit',
|
||||
workflow_id: 'main.yml',
|
||||
ref: 'main',
|
||||
})
|
||||
305
.github/workflows/release.yml
vendored
305
.github/workflows/release.yml
vendored
@@ -1,305 +0,0 @@
|
||||
# This file was autogenerated by dist: https://github.com/astral-sh/cargo-dist
|
||||
#
|
||||
# Copyright 2022-2024, axodotdev
|
||||
# Copyright 2025 Astral Software Inc.
|
||||
# SPDX-License-Identifier: MIT or Apache-2.0
|
||||
#
|
||||
# CI that:
|
||||
#
|
||||
# * checks for a Git Tag that looks like a release
|
||||
# * builds artifacts with dist (archives, installers, hashes)
|
||||
# * uploads those artifacts to temporary workflow zip
|
||||
# * on success, uploads the artifacts to a GitHub Release
|
||||
#
|
||||
# Note that the GitHub Release will be created with a generated
|
||||
# title/body based on your changelogs.
|
||||
|
||||
name: Release
|
||||
permissions:
|
||||
"contents": "write"
|
||||
|
||||
# This task will run whenever you workflow_dispatch with a tag that looks like a version
|
||||
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
|
||||
# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
|
||||
# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
|
||||
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
|
||||
#
|
||||
# If PACKAGE_NAME is specified, then the announcement will be for that
|
||||
# package (erroring out if it doesn't have the given version or isn't dist-able).
|
||||
#
|
||||
# If PACKAGE_NAME isn't specified, then the announcement will be for all
|
||||
# (dist-able) packages in the workspace with that version (this mode is
|
||||
# intended for workspaces with only one dist-able package, or with all dist-able
|
||||
# packages versioned/released in lockstep).
|
||||
#
|
||||
# If you push multiple tags at once, separate instances of this workflow will
|
||||
# spin up, creating an independent announcement for each one. However, GitHub
|
||||
# will hard limit this to 3 tags per commit, as it will assume more tags is a
|
||||
# mistake.
|
||||
#
|
||||
# If there's a prerelease-style suffix to the version, then the release(s)
|
||||
# will be marked as a prerelease.
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: Release Tag
|
||||
required: true
|
||||
default: dry-run
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
# Run 'dist plan' (or host) to determine what tasks we need to do
|
||||
plan:
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
outputs:
|
||||
val: ${{ steps.plan.outputs.manifest }}
|
||||
tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
|
||||
tag-flag: ${{ inputs.tag && inputs.tag != 'dry-run' && format('--tag={0}', inputs.tag) || '' }}
|
||||
publishing: ${{ inputs.tag && inputs.tag != 'dry-run' }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Install dist
|
||||
# we specify bash to get pipefail; it guards against the `curl` command
|
||||
# failing. otherwise `sh` won't catch that `curl` returned non-0
|
||||
shell: bash
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.4-prerelease.1/cargo-dist-installer.sh | sh"
|
||||
- name: Cache dist
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/dist
|
||||
# sure would be cool if github gave us proper conditionals...
|
||||
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
|
||||
# functionality based on whether this is a pull_request, and whether it's from a fork.
|
||||
# (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
|
||||
# but also really annoying to build CI around when it needs secrets to work right.)
|
||||
- id: plan
|
||||
run: |
|
||||
dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
|
||||
echo "dist ran successfully"
|
||||
cat plan-dist-manifest.json
|
||||
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
|
||||
with:
|
||||
name: artifacts-plan-dist-manifest
|
||||
path: plan-dist-manifest.json
|
||||
|
||||
custom-build-binaries:
|
||||
needs:
|
||||
- plan
|
||||
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
|
||||
uses: ./.github/workflows/build-binaries.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-build-docker:
|
||||
needs:
|
||||
- plan
|
||||
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
|
||||
uses: ./.github/workflows/build-docker.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
permissions:
|
||||
"contents": "read"
|
||||
"packages": "write"
|
||||
|
||||
# Build and package all the platform-agnostic(ish) things
|
||||
build-global-artifacts:
|
||||
needs:
|
||||
- plan
|
||||
- custom-build-binaries
|
||||
- custom-build-docker
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Install cached dist
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/dist
|
||||
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
|
||||
- name: Fetch local artifacts
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
- id: cargo-dist
|
||||
shell: bash
|
||||
run: |
|
||||
dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
|
||||
echo "dist ran successfully"
|
||||
|
||||
# Parse out what we just built and upload it to scratch storage
|
||||
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
|
||||
jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
|
||||
echo "EOF" >> "$GITHUB_OUTPUT"
|
||||
|
||||
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
|
||||
- name: "Upload artifacts"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
|
||||
with:
|
||||
name: artifacts-build-global
|
||||
path: |
|
||||
${{ steps.cargo-dist.outputs.paths }}
|
||||
${{ env.BUILD_MANIFEST_NAME }}
|
||||
# Determines if we should publish/announce
|
||||
host:
|
||||
needs:
|
||||
- plan
|
||||
- custom-build-binaries
|
||||
- custom-build-docker
|
||||
- build-global-artifacts
|
||||
# Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
|
||||
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
outputs:
|
||||
val: ${{ steps.host.outputs.manifest }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Install cached dist
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/dist
|
||||
# Fetch artifacts from scratch-storage
|
||||
- name: Fetch artifacts
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
# This is a harmless no-op for GitHub Releases, hosting for that happens in "announce"
|
||||
- id: host
|
||||
shell: bash
|
||||
run: |
|
||||
dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
|
||||
echo "artifacts uploaded and released successfully"
|
||||
cat dist-manifest.json
|
||||
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
|
||||
with:
|
||||
# Overwrite the previous copy
|
||||
name: artifacts-dist-manifest
|
||||
path: dist-manifest.json
|
||||
|
||||
custom-publish-pypi:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
|
||||
uses: ./.github/workflows/publish-pypi.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
# publish jobs get escalated permissions
|
||||
permissions:
|
||||
"id-token": "write"
|
||||
"packages": "write"
|
||||
|
||||
custom-publish-wasm:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
|
||||
uses: ./.github/workflows/publish-wasm.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
# publish jobs get escalated permissions
|
||||
permissions:
|
||||
"contents": "read"
|
||||
"id-token": "write"
|
||||
"packages": "write"
|
||||
|
||||
# Create a GitHub Release while uploading all files to it
|
||||
announce:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
- custom-publish-pypi
|
||||
- custom-publish-wasm
|
||||
# use "always() && ..." to allow us to wait for all publish jobs while
|
||||
# still allowing individual publish jobs to skip themselves (for prereleases).
|
||||
# "host" however must run to completion, no skipping allowed!
|
||||
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
# Create a GitHub Release while uploading all files to it
|
||||
- name: "Download GitHub Artifacts"
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: artifacts
|
||||
merge-multiple: true
|
||||
- name: Cleanup
|
||||
run: |
|
||||
# Remove the granular manifests
|
||||
rm -f artifacts/*-dist-manifest.json
|
||||
- name: Create GitHub Release
|
||||
env:
|
||||
PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"
|
||||
ANNOUNCEMENT_TITLE: "${{ fromJson(needs.host.outputs.val).announcement_title }}"
|
||||
ANNOUNCEMENT_BODY: "${{ fromJson(needs.host.outputs.val).announcement_github_body }}"
|
||||
RELEASE_COMMIT: "${{ github.sha }}"
|
||||
run: |
|
||||
# Write and read notes from a file to avoid quoting breaking things
|
||||
echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt
|
||||
|
||||
gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*
|
||||
|
||||
custom-notify-dependents:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/notify-dependents.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-publish-docs:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/publish-docs.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-publish-playground:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/publish-playground.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
27
.github/workflows/sync_typeshed.yaml
vendored
27
.github/workflows/sync_typeshed.yaml
vendored
@@ -21,17 +21,15 @@ jobs:
      contents: write
      pull-requests: write
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - uses: actions/checkout@v4
        name: Checkout Ruff
        with:
          path: ruff
          persist-credentials: true
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - uses: actions/checkout@v4
        name: Checkout typeshed
        with:
          repository: python/typeshed
          path: typeshed
          persist-credentials: false
      - name: Setup git
        run: |
          git config --global user.name typeshedbot
@@ -39,13 +37,13 @@ jobs:
      - name: Sync typeshed
        id: sync
        run: |
          rm -rf ruff/crates/red_knot_vendored/vendor/typeshed
          mkdir ruff/crates/red_knot_vendored/vendor/typeshed
          cp typeshed/README.md ruff/crates/red_knot_vendored/vendor/typeshed
          cp typeshed/LICENSE ruff/crates/red_knot_vendored/vendor/typeshed
          cp -r typeshed/stdlib ruff/crates/red_knot_vendored/vendor/typeshed/stdlib
          rm -rf ruff/crates/red_knot_vendored/vendor/typeshed/stdlib/@tests
          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_vendored/vendor/typeshed/source_commit.txt
          rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed
          mkdir ruff/crates/red_knot_module_resolver/vendor/typeshed
          cp typeshed/README.md ruff/crates/red_knot_module_resolver/vendor/typeshed
          cp typeshed/LICENSE ruff/crates/red_knot_module_resolver/vendor/typeshed
          cp -r typeshed/stdlib ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib
          rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib/@tests
          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
      - name: Commit the changes
        id: commit
        if: ${{ steps.sync.outcome == 'success' }}
@@ -59,7 +57,7 @@ jobs:
        run: |
          cd ruff
          git push --force origin typeshedbot/sync-typeshed
          gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
          gh pr list --repo $GITHUB_REPOSITORY --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
          gh pr create --title "Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "internal"

  create-issue-on-failure:
@@ -70,7 +68,7 @@ jobs:
    permissions:
      issues: write
    steps:
      - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
      - uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
@@ -78,6 +76,5 @@ jobs:
              owner: "astral-sh",
              repo: "ruff",
              title: `Automated typeshed sync failed on ${new Date().toDateString()}`,
              body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
              labels: ["bug", "red-knot"],
              body: "Runs are listed here: https://github.com/astral-sh/ruff/actions/workflows/sync_typeshed.yaml",
            })
19
.github/zizmor.yml
vendored
19
.github/zizmor.yml
vendored
@@ -1,19 +0,0 @@
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
# https://woodruffw.github.io/zizmor/configuration/
#
# TODO: can we remove the ignores here so that our workflows are more secure?
rules:
  dangerous-triggers:
    ignore:
      - pr-comment.yaml
  cache-poisoning:
    ignore:
      - build-docker.yml
      - publish-playground.yml
  excessive-permissions:
    # it's hard to test what the impact of removing these ignores would be
    # without actually running the release workflow...
    ignore:
      - build-docker.yml
      - publish-playground.yml
      - publish-docs.yml
12
.gitignore
vendored
12
.gitignore
vendored
@@ -21,18 +21,6 @@ flamegraph.svg
# `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
/target*

# samply profiles
profile.json

# tracing-flame traces
tracing.folded
tracing-flamechart.svg
tracing-flamegraph.svg

# insta
*.rs.pending-snap


###
# Rust.gitignore
###

@@ -14,18 +14,7 @@ MD041: false
# MD013/line-length
MD013: false

# MD014/commands-show-output
MD014: false

# MD024/no-duplicate-heading
MD024:
  # Allow when nested under different parents e.g. CHANGELOG.md
  siblings_only: true

# MD046/code-block-style
#
# Ignore this because it conflicts with the code block style used in content
# tabs of mkdocs-material which is to add a blank line after the content title.
#
# Ref: https://github.com/astral-sh/ruff/pull/15011#issuecomment-2544790854
MD046: false

@@ -1,15 +1,10 @@
|
||||
fail_fast: false
|
||||
fail_fast: true
|
||||
|
||||
exclude: |
|
||||
(?x)^(
|
||||
.github/workflows/release.yml|
|
||||
crates/red_knot_vendored/vendor/.*|
|
||||
crates/red_knot_project/resources/.*|
|
||||
crates/ruff_benchmark/resources/.*|
|
||||
crates/red_knot_module_resolver/vendor/.*|
|
||||
crates/ruff_linter/resources/.*|
|
||||
crates/ruff_linter/src/rules/.*/snapshots/.*|
|
||||
crates/ruff_notebook/resources/.*|
|
||||
crates/ruff_server/resources/.*|
|
||||
crates/ruff/resources/.*|
|
||||
crates/ruff_python_formatter/resources/.*|
|
||||
crates/ruff_python_formatter/tests/snapshots/.*|
|
||||
@@ -18,23 +13,18 @@ exclude: |
|
||||
)$
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
- id: check-merge-conflict
|
||||
|
||||
- repo: https://github.com/abravalheri/validate-pyproject
|
||||
rev: v0.24.1
|
||||
rev: v0.18
|
||||
hooks:
|
||||
- id: validate-pyproject
|
||||
|
||||
- repo: https://github.com/executablebooks/mdformat
|
||||
rev: 0.7.22
|
||||
rev: 0.7.17
|
||||
hooks:
|
||||
- id: mdformat
|
||||
additional_dependencies:
|
||||
- mdformat-mkdocs==4.0.0
|
||||
- mdformat-footnote==0.1.1
|
||||
- mdformat-mkdocs
|
||||
- mdformat-admon
|
||||
exclude: |
|
||||
(?x)^(
|
||||
docs/formatter/black\.md
|
||||
@@ -42,7 +32,7 @@ repos:
|
||||
)$
|
||||
|
||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||
rev: v0.44.0
|
||||
rev: v0.41.0
|
||||
hooks:
|
||||
- id: markdownlint-fix
|
||||
exclude: |
|
||||
@@ -51,21 +41,8 @@ repos:
|
||||
| docs/\w+\.md
|
||||
)$
|
||||
|
||||
- repo: https://github.com/adamchainz/blacken-docs
|
||||
rev: 1.19.1
|
||||
hooks:
|
||||
- id: blacken-docs
|
||||
args: ["--pyi", "--line-length", "130"]
|
||||
files: '^crates/.*/resources/mdtest/.*\.md'
|
||||
exclude: |
|
||||
(?x)^(
|
||||
.*?invalid(_.+)*_syntax\.md
|
||||
)$
|
||||
additional_dependencies:
|
||||
- black==25.1.0
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.31.1
|
||||
rev: v1.22.9
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
@@ -79,52 +56,25 @@ repos:
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.11.6
|
||||
rev: v0.4.10
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
types_or: [python, pyi]
|
||||
require_serial: true
|
||||
exclude: |
|
||||
(?x)^(
|
||||
crates/ruff_linter/resources/.*|
|
||||
crates/ruff_python_formatter/resources/.*
|
||||
)$
|
||||
|
||||
# Prettier
|
||||
- repo: https://github.com/rbubley/mirrors-prettier
|
||||
rev: v3.5.3
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v3.1.0
|
||||
hooks:
|
||||
- id: prettier
|
||||
types: [yaml]
|
||||
|
||||
# zizmor detects security vulnerabilities in GitHub Actions workflows.
|
||||
# Additional configuration for the tool is found in `.github/zizmor.yml`
|
||||
- repo: https://github.com/woodruffw/zizmor-pre-commit
|
||||
rev: v1.6.0
|
||||
hooks:
|
||||
- id: zizmor
|
||||
|
||||
- repo: https://github.com/python-jsonschema/check-jsonschema
|
||||
rev: 0.33.0
|
||||
hooks:
|
||||
- id: check-github-workflows
|
||||
|
||||
# `actionlint` hook, for verifying correct syntax in GitHub Actions workflows.
|
||||
# Some additional configuration for `actionlint` can be found in `.github/actionlint.yaml`.
|
||||
- repo: https://github.com/rhysd/actionlint
|
||||
rev: v1.7.7
|
||||
hooks:
|
||||
- id: actionlint
|
||||
stages:
|
||||
# This hook is disabled by default, since it's quite slow.
|
||||
# To run all hooks *including* this hook, use `uvx pre-commit run -a --hook-stage=manual`.
|
||||
# To run *just* this hook, use `uvx pre-commit run -a actionlint --hook-stage=manual`.
|
||||
- manual
|
||||
args:
|
||||
- "-ignore=SC2129" # ignorable stylistic lint from shellcheck
|
||||
- "-ignore=SC2016" # another shellcheck lint: seems to have false positives?
|
||||
additional_dependencies:
|
||||
# actionlint has a shellcheck integration which extracts shell scripts in `run:` steps from GitHub Actions
|
||||
# and checks these with shellcheck. This is arguably its most useful feature,
|
||||
# but the integration only works if shellcheck is installed
|
||||
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"
|
||||
|
||||
ci:
|
||||
skip: [cargo-fmt, dev-generate-all]
|
||||
|
||||
@@ -1,2 +0,0 @@
# Auto-generated by `cargo-dist`.
.github/workflows/release.yml
@@ -1,142 +1,5 @@
# Breaking Changes

## 0.11.0

This is a follow-up to release 0.10.0. Because of a mistake in the release process, the `requires-python` inference changes were not included in that release. Ruff 0.11.0 now includes this change as well as the stabilization of the preview behavior for `PGH004`.

- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))

  In previous versions of Ruff, you could specify your Python version with:

  - The `target-version` option in a `ruff.toml` file or the `[tool.ruff]` section of a `pyproject.toml` file.
  - The `project.requires-python` field in a `pyproject.toml` file with a `[tool.ruff]` section.

  These options worked well in most cases, and are still recommended for fine control of the Python version. However, because of the way Ruff discovers config files, `pyproject.toml` files without a `[tool.ruff]` section would be ignored, including the `requires-python` setting. Ruff would then use the default Python version (3.9 as of this writing) instead, which is surprising when you've attempted to request another version.

  In v0.10, config discovery has been updated to address this issue:

  - If Ruff finds a `ruff.toml` file without a `target-version`, it will check for a `pyproject.toml` file in the same directory and respect its `requires-python` version, even if it does not contain a `[tool.ruff]` section.
  - If Ruff finds a user-level configuration, the `requires-python` field of the closest `pyproject.toml` in a parent directory will take precedence.
  - If there is no config file (`ruff.toml` or `pyproject.toml` with a `[tool.ruff]` section) in the directory of the file being checked, Ruff will search for the closest `pyproject.toml` in the parent directories and use its `requires-python` setting.

## 0.10.0

- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))

  Because of a mistake in the release process, the `requires-python` inference changes are not included in this release and instead shipped as part of 0.11.0.
  You can find a description of this change in the 0.11.0 section.

- **Updated `TYPE_CHECKING` behavior** ([#16669](https://github.com/astral-sh/ruff/pull/16669))

  Previously, Ruff only recognized typechecking blocks that tested the `typing.TYPE_CHECKING` symbol. Now, Ruff recognizes any local variable named `TYPE_CHECKING`. This release also removes support for the legacy `if 0:` and `if False:` typechecking checks. Use a local `TYPE_CHECKING` variable instead.
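
  As an illustration (this snippet is not from the original notes), a minimal sketch of a block that Ruff now treats as type-checking-only:

  ```python
  # A local constant named TYPE_CHECKING is now recognized, not just typing.TYPE_CHECKING.
  TYPE_CHECKING = False

  if TYPE_CHECKING:
      # Imports here are treated as typing-only and are never executed at runtime.
      from collections.abc import Sequence


  def head(items: "Sequence[int]") -> int:
      return items[0]
  ```
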
- **More robust noqa parsing** ([#16483](https://github.com/astral-sh/ruff/pull/16483))

  The syntax for both file-level and in-line suppression comments has been unified and made more robust to certain errors. In most cases, this will result in more suppression comments being read by Ruff, but there are a few instances where previously read comments will now log an error to the user instead. Please refer to the documentation on [_Error suppression_](https://docs.astral.sh/ruff/linter/#error-suppression) for the full specification.
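
  For orientation, a small sketch of the two comment forms this refers to (the rule codes here are only examples):

  ```python
  # ruff: noqa: E501
  # ^ a file-level suppression comment: disables line-too-long for the whole file.

  import os  # noqa: F401
  # ^ an in-line suppression comment: applies only to the line it trails.
  ```
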
- **Avoid unnecessary parentheses around with statements with a single context manager and a trailing comment** ([#14005](https://github.com/astral-sh/ruff/pull/14005))

  This change fixes a bug in the formatter where it introduced unnecessary parentheses around with statements with a single context manager and a trailing comment. This change may result in a change in formatting for some users.

- **Bump alpine default tag to 3.21 for derived Docker images** ([#16456](https://github.com/astral-sh/ruff/pull/16456))

  Alpine 3.21 was released in Dec 2024 and is used in the official Alpine-based Python images. Now the ruff:alpine image will use 3.21 instead of 3.20 and ruff:alpine3.20 will no longer be updated.

- **\[`unsafe-markup-use`\]: `RUF035` has been recoded to `S704`** ([#15957](https://github.com/astral-sh/ruff/pull/15957))

## 0.9.0

Ruff now formats your code according to the 2025 style guide. As a result, your code might now get formatted differently. See the [changelog](./CHANGELOG.md#090) for a detailed list of changes.

## 0.8.0

- **Default to Python 3.9**

  Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))

- **Changed location of `pydoclint` diagnostics**

  [`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first line of the problematic docstring. Previously, this was not the case.

  If you've opted into these preview rules but have them suppressed using [`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in some places, this change may mean that you need to move the `noqa` suppression comments. Most users should be unaffected by this change.

- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**

  Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).

  This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.

- **Changes to the line width calculation**

  Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).

## 0.7.0

- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
  ([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
  This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
  See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
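
  A small sketch of the decorator form these rules now prefer (the fixture below is hypothetical, for illustration only):

  ```python
  import pytest


  # Preferred by PT001 when the decorator takes no arguments:
  @pytest.fixture
  def sample_rows():
      return [1, 2, 3]


  # The empty-parentheses form is now flagged instead:
  # @pytest.fixture()
  # def sample_rows(): ...
  ```
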
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
  `TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
  the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
  [`lint.pyflakes.allow-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
  instead.

## 0.6.0

- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))

- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).

- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).

  You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):

  ```toml
  [tool.ruff.lint.per-file-ignores]
  "*.ipynb" = ["E501"] # disable line-too-long in notebooks
  ```

  If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
  section-specific `exclude` option to do so. For example, the following would only lint Jupyter
  Notebook files and not format them:

  ```toml
  [tool.ruff.format]
  exclude = ["*.ipynb"]
  ```

  And, conversely, the following would only format Jupyter Notebook files and not lint them:

  ```toml
  [tool.ruff.lint]
  exclude = ["*.ipynb"]
  ```

  You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:

  ```toml
  [tool.ruff]
  extend-exclude = ["*.ipynb"]
  ```

## 0.5.0

- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
- Selecting `ALL` now excludes deprecated rules
- The released archives now include an extra level of nesting, which can be removed with `--strip-components=1` when untarring.
- The release artifact's file name no longer includes the version tag. This enables users to install via `/latest` URLs on GitHub.

## 0.3.0

### Ruff 2024.2 style
@@ -246,7 +109,7 @@ flag or `unsafe-fixes` configuration option can be used to enable unsafe fixes.

See the [docs](https://docs.astral.sh/ruff/configuration/#fix-safety) for details.

### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))

Previously, Ruff enabled all implemented rules in Pycodestyle (`E`) by default. Ruff now only includes the
Pycodestyle prefixes `E4`, `E7`, and `E9` to exclude rules that conflict with automatic formatters. Consequently,
@@ -259,8 +122,8 @@ This change only affects those using Ruff under its default rule set. Users that

### Remove support for emoji identifiers ([#7212](https://github.com/astral-sh/ruff/pull/7212))

Previously, Ruff supported non-standards-compliant emoji identifiers such as `📦 = 1`.
We decided to remove this non-standard language extension. Ruff now reports syntax errors for invalid emoji identifiers in your code, the same as CPython.
Previously, Ruff supported the non-standard compliant emoji identifiers e.g. `📦 = 1`.
We decided to remove this non-standard language extension, and Ruff now reports syntax errors for emoji identifiers in your code, the same as CPython.

### Improved GitLab fingerprints ([#7203](https://github.com/astral-sh/ruff/pull/7203))

CHANGELOG.md (1958): file diff suppressed because it is too large.

CONTRIBUTING.md (138)
@@ -2,6 +2,35 @@
|
||||
|
||||
Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.
|
||||
|
||||
- [The Basics](#the-basics)
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [Development](#development)
|
||||
- [Project Structure](#project-structure)
|
||||
- [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
|
||||
- [Rule naming convention](#rule-naming-convention)
|
||||
- [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
|
||||
- [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
|
||||
- [MkDocs](#mkdocs)
|
||||
- [Release Process](#release-process)
|
||||
- [Creating a new release](#creating-a-new-release)
|
||||
- [Ecosystem CI](#ecosystem-ci)
|
||||
- [Benchmarking and Profiling](#benchmarking-and-profiling)
|
||||
- [CPython Benchmark](#cpython-benchmark)
|
||||
- [Microbenchmarks](#microbenchmarks)
|
||||
- [Benchmark-driven Development](#benchmark-driven-development)
|
||||
- [PR Summary](#pr-summary)
|
||||
- [Tips](#tips)
|
||||
- [Profiling Projects](#profiling-projects)
|
||||
- [Linux](#linux)
|
||||
- [Mac](#mac)
|
||||
- [`cargo dev`](#cargo-dev)
|
||||
- [Subsystems](#subsystems)
|
||||
- [Compilation Pipeline](#compilation-pipeline)
|
||||
- [Import Categorization](#import-categorization)
|
||||
- [Project root](#project-root)
|
||||
- [Package root](#package-root)
|
||||
- [Import categorization](#import-categorization-1)
|
||||
|
||||
## The Basics
|
||||
|
||||
Ruff welcomes contributions in the form of pull requests.
|
||||
@@ -29,14 +58,16 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
|
||||
cargo install cargo-insta
|
||||
```
|
||||
|
||||
You'll need [uv](https://docs.astral.sh/uv/getting-started/installation/) (or `pipx` and `pip`) to
|
||||
run Python utility commands.
|
||||
And you'll need pre-commit to run some validation checks:
|
||||
|
||||
```shell
|
||||
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
|
||||
```
|
||||
|
||||
You can optionally install pre-commit hooks to automatically run the validation checks
|
||||
when making a commit:
|
||||
|
||||
```shell
|
||||
uv tool install pre-commit
|
||||
pre-commit install
|
||||
```
|
||||
|
||||
@@ -64,7 +95,7 @@ and that it passes both the lint and test validation checks:
|
||||
```shell
|
||||
cargo clippy --workspace --all-targets --all-features -- -D warnings # Rust linting
|
||||
RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
|
||||
uvx pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
|
||||
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
|
||||
```
|
||||
|
||||
These checks will run on GitHub Actions when you open your pull request, but running them locally
|
||||
@@ -139,7 +170,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
|
||||
1. Create a file for your rule (e.g., `crates/ruff_linter/src/rules/flake8_bugbear/rules/assert_false.rs`).
|
||||
|
||||
1. In that file, define a violation struct (e.g., `pub struct AssertFalse`). You can grep for
|
||||
`#[derive(ViolationMetadata)]` to see examples.
|
||||
`#[violation]` to see examples.
|
||||
|
||||
1. In that file, define a function that adds the violation to the diagnostic list as appropriate
|
||||
(e.g., `pub(crate) fn assert_false`) based on whatever inputs are required for the rule (e.g.,
|
||||
@@ -249,7 +280,7 @@ These represent, respectively: the schema used to parse the `pyproject.toml` fil
|
||||
intermediate representation; and the final, internal representation used to power Ruff.
|
||||
|
||||
To add a new configuration option, you'll likely want to modify these latter few files (along with
|
||||
`args.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
|
||||
`arg.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
|
||||
`dummy_variable_rgx`, which defines a regular expression to match against acceptable unused
|
||||
variables (e.g., `_`).
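For instance, a minimal sketch of overriding that existing option in `pyproject.toml` (the pattern below is illustrative, not the default):

```toml
[tool.ruff.lint]
# Treat any underscore-prefixed name as an acceptable unused variable.
dummy-variable-rgx = "^_.*$"
```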
|
||||
|
||||
@@ -265,20 +296,26 @@ To preview any changes to the documentation locally:
|
||||
|
||||
1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
|
||||
|
||||
1. Install MkDocs and Material for MkDocs with:
|
||||
|
||||
```shell
|
||||
pip install -r docs/requirements.txt
|
||||
```
|
||||
|
||||
1. Generate the MkDocs site with:
|
||||
|
||||
```shell
|
||||
uv run --no-project --isolated --with-requirements docs/requirements.txt scripts/generate_mkdocs.py
|
||||
python scripts/generate_mkdocs.py
|
||||
```
|
||||
|
||||
1. Run the development server with:
|
||||
|
||||
```shell
|
||||
# For contributors.
|
||||
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
|
||||
mkdocs serve -f mkdocs.public.yml
|
||||
|
||||
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
|
||||
uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
|
||||
mkdocs serve -f mkdocs.insiders.yml
|
||||
```
|
||||
|
||||
The documentation should then be available locally at
|
||||
@@ -296,34 +333,23 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
||||
### Creating a new release
|
||||
|
||||
1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
|
||||
|
||||
1. Run `./scripts/release.sh`; this command will:
|
||||
|
||||
1. Run `./scripts/release/bump.sh`; this command will:
|
||||
- Generate a temporary virtual environment with `rooster`
|
||||
- Generate a changelog entry in `CHANGELOG.md`
|
||||
- Update versions in `pyproject.toml` and `Cargo.toml`
|
||||
- Update references to versions in the `README.md` and documentation
|
||||
- Display contributors for the release
|
||||
|
||||
1. The changelog should then be editorialized for consistency
|
||||
|
||||
- Often, labels will be missing from pull requests, so they will need to be manually organized into the proper section
|
||||
- Changes should be edited to be user-facing descriptions, avoiding internal details
|
||||
|
||||
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
|
||||
|
||||
1. Run `cargo check`. This should update the lock file with new versions.
|
||||
|
||||
1. Create a pull request with the changelog and version updates
|
||||
|
||||
1. Merge the PR
|
||||
|
||||
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:
|
||||
|
||||
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yaml) with:
|
||||
- The new version number (without starting `v`)
|
||||
|
||||
- The commit hash of the merged release pull request on `main`
|
||||
1. The release workflow will do the following:
|
||||
|
||||
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
|
||||
uploaded anything, so you can restart after pushing a fix. If you just need to rerun the build,
|
||||
make sure you're [re-running all the failed
|
||||
@@ -334,25 +360,16 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
||||
1. Attach artifacts to draft GitHub release
|
||||
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
|
||||
downstream jobs manually if needed.
|
||||
|
||||
1. Verify the GitHub release:
|
||||
|
||||
1. The Changelog should match the content of `CHANGELOG.md`
|
||||
1. Append the contributors from the `scripts/release.sh` script
|
||||
|
||||
1. Publish the GitHub release
|
||||
1. Open the draft release in the GitHub release section
|
||||
1. Copy the changelog for the release into the GitHub release
|
||||
- See previous releases for formatting of section headers
|
||||
1. Append the contributors from the `bump.sh` script
|
||||
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
|
||||
|
||||
1. One can determine if an update is needed when
|
||||
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
|
||||
1. Once run successfully, you should follow the link in the output to create a PR.
|
||||
|
||||
1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
|
||||
[`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
|
||||
the release instructions in those repositories. `ruff-lsp` should always be updated
|
||||
before `ruff-vscode`.
|
||||
|
||||
This step is generally not required for a patch release, but should always be done
|
||||
for a minor release.
|
||||
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
|
||||
|
||||
## Ecosystem CI
|
||||
|
||||
@@ -360,8 +377,9 @@ GitHub Actions will run your changes against a number of real-world projects fro
|
||||
report on any linter or formatter differences. You can also run those checks locally via:
|
||||
|
||||
```shell
|
||||
uvx --from ./python/ruff-ecosystem ruff-ecosystem check ruff "./target/debug/ruff"
|
||||
uvx --from ./python/ruff-ecosystem ruff-ecosystem format ruff "./target/debug/ruff"
|
||||
pip install -e ./python/ruff-ecosystem
|
||||
ruff-ecosystem check ruff "./target/debug/ruff"
|
||||
ruff-ecosystem format ruff "./target/debug/ruff"
|
||||
```
|
||||
|
||||
See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
|
||||
@@ -374,7 +392,7 @@ We have several ways of benchmarking and profiling Ruff:
|
||||
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
|
||||
- Profiling the linter on either the microbenchmarks or entire projects
|
||||
|
||||
> **Note**
|
||||
> \[!NOTE\]
|
||||
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
|
||||
> applications, like web browsers). You may also want to switch your CPU to a "performance"
|
||||
> mode, if it exists, especially when benchmarking short-lived processes.
|
||||
@@ -388,18 +406,12 @@ which makes it a good target for benchmarking.
|
||||
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
|
||||
```
|
||||
|
||||
Install `hyperfine`:
|
||||
|
||||
```shell
|
||||
cargo install hyperfine
|
||||
```
|
||||
|
||||
To benchmark the release build:
|
||||
|
||||
```shell
|
||||
cargo build --release && hyperfine --warmup 10 \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"
|
||||
|
||||
Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
|
||||
Time (mean ± σ): 293.8 ms ± 3.2 ms [User: 2384.6 ms, System: 90.3 ms]
|
||||
@@ -418,7 +430,7 @@ To benchmark against the ecosystem's existing tools:
|
||||
|
||||
```shell
|
||||
hyperfine --ignore-failure --warmup 5 \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
|
||||
"pyflakes crates/ruff_linter/resources/test/cpython" \
|
||||
"autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
|
||||
"pycodestyle crates/ruff_linter/resources/test/cpython" \
|
||||
@@ -464,10 +476,10 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:
|
||||
|
||||
```shell
|
||||
cargo build --release && hyperfine --warmup 10 \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
|
||||
```
|
||||
|
||||
You can run `uv venv --project ./scripts/benchmarks`, activate the venv and then run `uv sync --project ./scripts/benchmarks` to create a working environment for the
|
||||
You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
|
||||
above. All reported benchmarks were computed using the versions specified by
|
||||
`./scripts/benchmarks/pyproject.toml` on Python 3.11.
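A sketch of those steps in order; the activation path assumes `uv` places the virtual environment at `.venv` inside the project directory:

```shell
uv venv --project ./scripts/benchmarks
source ./scripts/benchmarks/.venv/bin/activate
uv sync --project ./scripts/benchmarks
```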
|
||||
|
||||
@@ -521,12 +533,10 @@ You can run the benchmarks with
|
||||
cargo benchmark
|
||||
```
|
||||
|
||||
`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`
|
||||
|
||||
#### Benchmark-driven Development
|
||||
|
||||
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
|
||||
`--save-baseline=<name>` to store an initial baseline benchmark (e.g., on `main`) and then use
|
||||
`--save-baseline=<name>` to store an initial baseline benchmark (e.g. on `main`) and then use
|
||||
`--benchmark=<name>` to compare against that benchmark. Criterion will print a message telling you
|
||||
if the benchmark improved/regressed compared to that baseline.
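As a sketch of that workflow (branch and baseline names are placeholders; `--baseline=<name>` is Criterion's flag for comparing against a saved baseline):

```shell
git checkout main
cargo benchmark --save-baseline=main      # record a baseline on main

git checkout my-feature-branch
cargo benchmark --baseline=main           # compare this branch against the saved baseline
```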
|
||||
|
||||
@@ -561,7 +571,7 @@ cargo install critcmp
|
||||
|
||||
#### Tips
|
||||
|
||||
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
|
||||
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
|
||||
to only run the lexer benchmarks.
|
||||
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
|
||||
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
|
||||
@@ -678,9 +688,9 @@ utils with it:
|
||||
23 Newline 24
|
||||
```
|
||||
|
||||
- `cargo dev print-cst <file>`: Print the CST of a Python file using
|
||||
- `cargo dev print-cst <file>`: Print the CST of a python file using
|
||||
[LibCST](https://github.com/Instagram/LibCST), which is used in addition to the RustPython parser
|
||||
in Ruff. For example, for `if True: pass # comment`, everything, including the whitespace, is represented:
|
||||
in Ruff. E.g. for `if True: pass # comment` everything including the whitespace is represented:
|
||||
|
||||
```text
|
||||
Module {
|
||||
@@ -863,7 +873,7 @@ each configuration file.
|
||||
|
||||
The package root is used to determine a file's "module path". Consider, again, `baz.py`. In that
|
||||
case, `./my_project/src/foo` was identified as the package root, so the module path for `baz.py`
|
||||
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
|
||||
(inclusive of the root itself). The module path can be thought of as "the path you would use to
|
||||
import the module" (e.g., `import foo.bar.baz`).
|
||||
|
||||
@@ -898,11 +908,15 @@ There are three ways in which an import can be categorized as "first-party":
|
||||
package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
|
||||
automatically. This check is as simple as comparing the first segment of the current file's
|
||||
module path to the first segment of the import.
|
||||
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
|
||||
1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
|
||||
sets the directories to scan when identifying first-party imports. The algorithm is
|
||||
straightforward: given an import, like `import foo`, iterate over the directories enumerated in
|
||||
the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
|
||||
file `foo.py`.
|
||||
|
||||
By default, `src` is set to the project root, along with `"src"` subdirectory in the project root.
|
||||
This ensures that Ruff supports both flat and "src" layouts out of the box.
|
||||
By default, `src` is set to the project root. In the above example, we'd want to set
|
||||
`src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
|
||||
as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
|
||||
unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
|
||||
the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
|
||||
explicitly.
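A minimal sketch of that configuration for the example layout discussed above:

```toml
[tool.ruff]
# Locate ./my_project/src/foo so that `import foo` is categorized as first-party.
src = ["./src"]
```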
|
||||
|
||||
Cargo.lock (2879, generated): file diff suppressed because it is too large.

Cargo.toml (197)
@@ -4,7 +4,7 @@ resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
edition = "2021"
|
||||
rust-version = "1.84"
|
||||
rust-version = "1.75"
|
||||
homepage = "https://docs.astral.sh/ruff"
|
||||
documentation = "https://docs.astral.sh/ruff"
|
||||
repository = "https://github.com/astral-sh/ruff"
|
||||
@@ -13,12 +13,10 @@ license = "MIT"
|
||||
|
||||
[workspace.dependencies]
|
||||
ruff = { path = "crates/ruff" }
|
||||
ruff_annotate_snippets = { path = "crates/ruff_annotate_snippets" }
|
||||
ruff_cache = { path = "crates/ruff_cache" }
|
||||
ruff_db = { path = "crates/ruff_db", default-features = false }
|
||||
ruff_db = { path = "crates/ruff_db" }
|
||||
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
|
||||
ruff_formatter = { path = "crates/ruff_formatter" }
|
||||
ruff_graph = { path = "crates/ruff_graph" }
|
||||
ruff_index = { path = "crates/ruff_index" }
|
||||
ruff_linter = { path = "crates/ruff_linter" }
|
||||
ruff_macros = { path = "crates/ruff_macros" }
|
||||
@@ -35,66 +33,50 @@ ruff_python_trivia = { path = "crates/ruff_python_trivia" }
|
||||
ruff_server = { path = "crates/ruff_server" }
|
||||
ruff_source_file = { path = "crates/ruff_source_file" }
|
||||
ruff_text_size = { path = "crates/ruff_text_size" }
|
||||
red_knot_vendored = { path = "crates/red_knot_vendored" }
|
||||
ruff_workspace = { path = "crates/ruff_workspace" }
|
||||
|
||||
red_knot_ide = { path = "crates/red_knot_ide" }
|
||||
red_knot_project = { path = "crates/red_knot_project", default-features = false }
|
||||
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
|
||||
red_knot_server = { path = "crates/red_knot_server" }
|
||||
red_knot_test = { path = "crates/red_knot_test" }
|
||||
red_knot_module_resolver = { path = "crates/red_knot_module_resolver" }
|
||||
|
||||
aho-corasick = { version = "1.1.3" }
|
||||
anstream = { version = "0.6.18" }
|
||||
anstyle = { version = "1.0.10" }
|
||||
annotate-snippets = { version = "0.9.2", features = ["color"] }
|
||||
anyhow = { version = "1.0.80" }
|
||||
assert_fs = { version = "1.1.0" }
|
||||
argfile = { version = "0.2.0" }
|
||||
bincode = { version = "1.3.3" }
|
||||
bitflags = { version = "2.5.0" }
|
||||
bstr = { version = "1.9.1" }
|
||||
cachedir = { version = "0.3.1" }
|
||||
camino = { version = "1.1.7" }
|
||||
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
|
||||
clap = { version = "4.5.3", features = ["derive"] }
|
||||
clap_complete_command = { version = "0.6.0" }
|
||||
clearscreen = { version = "4.0.0" }
|
||||
clap_complete_command = { version = "0.5.1" }
|
||||
clearscreen = { version = "3.0.0" }
|
||||
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
|
||||
colored = { version = "3.0.0" }
|
||||
colored = { version = "2.1.0" }
|
||||
console_error_panic_hook = { version = "0.1.7" }
|
||||
console_log = { version = "1.0.0" }
|
||||
countme = { version = "3.0.1" }
|
||||
compact_str = "0.9.0"
|
||||
criterion = { version = "0.5.1", default-features = false }
|
||||
crossbeam = { version = "0.8.4" }
|
||||
dashmap = { version = "6.0.1" }
|
||||
dir-test = { version = "0.4.0" }
|
||||
dunce = { version = "1.0.5" }
|
||||
dashmap = { version = "5.5.3" }
|
||||
dirs = { version = "5.0.0" }
|
||||
drop_bomb = { version = "0.1.5" }
|
||||
env_logger = { version = "0.11.0" }
|
||||
etcetera = { version = "0.10.0" }
|
||||
fern = { version = "0.7.0" }
|
||||
fern = { version = "0.6.1" }
|
||||
filetime = { version = "0.2.23" }
|
||||
getrandom = { version = "0.3.1" }
|
||||
glob = { version = "0.3.1" }
|
||||
globset = { version = "0.4.14" }
|
||||
globwalk = { version = "0.9.1" }
|
||||
hashbrown = { version = "0.15.0", default-features = false, features = [
|
||||
"raw-entry",
|
||||
"equivalent",
|
||||
"inline-more",
|
||||
] }
|
||||
hashbrown = "0.14.3"
|
||||
ignore = { version = "0.4.22" }
|
||||
imara-diff = { version = "0.1.5" }
|
||||
imperative = { version = "1.0.4" }
|
||||
indexmap = { version = "2.6.0" }
|
||||
indexmap = { version = "2.2.6" }
|
||||
indicatif = { version = "0.17.8" }
|
||||
indoc = { version = "2.0.4" }
|
||||
insta = { version = "1.35.1" }
|
||||
insta-cmd = { version = "0.6.0" }
|
||||
is-macro = { version = "0.3.5" }
|
||||
is-wsl = { version = "0.4.0" }
|
||||
itertools = { version = "0.14.0" }
|
||||
jiff = { version = "0.2.0" }
|
||||
itertools = { version = "0.13.0" }
|
||||
js-sys = { version = "0.3.69" }
|
||||
jod-thread = { version = "0.1.2" }
|
||||
libc = { version = "0.2.153" }
|
||||
@@ -102,29 +84,29 @@ libcst = { version = "1.1.0", default-features = false }
|
||||
log = { version = "0.4.17" }
|
||||
lsp-server = { version = "0.7.6" }
|
||||
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
|
||||
"proposed",
|
||||
"proposed",
|
||||
] }
|
||||
matchit = { version = "0.8.1" }
|
||||
memchr = { version = "2.7.1" }
|
||||
mimalloc = { version = "0.1.39" }
|
||||
natord = { version = "1.0.9" }
|
||||
notify = { version = "8.0.0" }
|
||||
ordermap = { version = "0.5.0" }
|
||||
notify = { version = "6.1.1" }
|
||||
once_cell = { version = "1.19.0" }
|
||||
path-absolutize = { version = "3.1.1" }
|
||||
path-slash = { version = "0.2.1" }
|
||||
pathdiff = { version = "0.2.1" }
|
||||
pep440_rs = { version = "0.7.1" }
|
||||
parking_lot = "0.12.1"
|
||||
pep440_rs = { version = "0.6.0", features = ["serde"] }
|
||||
pretty_assertions = "1.3.0"
|
||||
proc-macro2 = { version = "1.0.79" }
|
||||
pyproject-toml = { version = "0.13.4" }
|
||||
quick-junit = { version = "0.5.0" }
|
||||
pyproject-toml = { version = "0.9.0" }
|
||||
quick-junit = { version = "0.4.0" }
|
||||
quote = { version = "1.0.23" }
|
||||
rand = { version = "0.9.0" }
|
||||
rand = { version = "0.8.5" }
|
||||
rayon = { version = "1.10.0" }
|
||||
regex = { version = "1.10.2" }
|
||||
rustc-hash = { version = "2.0.0" }
|
||||
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
|
||||
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "87bf6b6c2d5f6479741271da73bd9d30c2580c26" }
|
||||
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f706aa2d32d473ee633a77c1af01d180c85da308" }
|
||||
schemars = { version = "0.8.16" }
|
||||
seahash = { version = "4.1.0" }
|
||||
serde = { version = "1.0.197", features = ["derive"] }
|
||||
@@ -132,66 +114,48 @@ serde-wasm-bindgen = { version = "0.6.4" }
|
||||
serde_json = { version = "1.0.113" }
|
||||
serde_test = { version = "1.0.152" }
|
||||
serde_with = { version = "3.6.0", default-features = false, features = [
|
||||
"macros",
|
||||
"macros",
|
||||
] }
|
||||
shellexpand = { version = "3.0.0" }
|
||||
similar = { version = "2.4.0", features = ["inline"] }
|
||||
smallvec = { version = "1.13.2" }
|
||||
snapbox = { version = "0.6.0", features = [
|
||||
"diff",
|
||||
"term-svg",
|
||||
"cmd",
|
||||
"examples",
|
||||
] }
|
||||
smol_str = { version = "0.2.2" }
|
||||
static_assertions = "1.1.0"
|
||||
strum = { version = "0.27.0", features = ["strum_macros"] }
|
||||
strum_macros = { version = "0.27.0" }
|
||||
strum = { version = "0.26.0", features = ["strum_macros"] }
|
||||
strum_macros = { version = "0.26.0" }
|
||||
syn = { version = "2.0.55" }
|
||||
tempfile = { version = "3.9.0" }
|
||||
test-case = { version = "3.3.1" }
|
||||
thiserror = { version = "2.0.0" }
|
||||
tikv-jemallocator = { version = "0.6.0" }
|
||||
thiserror = { version = "1.0.58" }
|
||||
tikv-jemallocator = { version = "0.5.0" }
|
||||
toml = { version = "0.8.11" }
|
||||
tracing = { version = "0.1.40" }
|
||||
tracing-flame = { version = "0.2.0" }
|
||||
tracing-indicatif = { version = "0.3.6" }
|
||||
tracing-log = { version = "0.2.0" }
|
||||
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
|
||||
"env-filter",
|
||||
"fmt",
|
||||
] }
|
||||
tracing-tree = { version = "0.4.0" }
|
||||
tryfn = { version = "0.2.1" }
|
||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
||||
tracing-tree = { version = "0.3.0" }
|
||||
typed-arena = { version = "2.0.2" }
|
||||
unic-ucd-category = { version = "0.9" }
|
||||
unicode-ident = { version = "1.0.12" }
|
||||
unicode-width = { version = "0.2.0" }
|
||||
unicode-width = { version = "0.1.11" }
|
||||
unicode_names2 = { version = "1.2.2" }
|
||||
unicode-normalization = { version = "0.1.23" }
|
||||
ureq = { version = "2.9.6" }
|
||||
url = { version = "2.5.0" }
|
||||
uuid = { version = "1.6.1", features = [
|
||||
"v4",
|
||||
"fast-rng",
|
||||
"macro-diagnostics",
|
||||
"js",
|
||||
"v4",
|
||||
"fast-rng",
|
||||
"macro-diagnostics",
|
||||
"js",
|
||||
] }
|
||||
walkdir = { version = "2.3.2" }
|
||||
wasm-bindgen = { version = "0.2.92" }
|
||||
wasm-bindgen-test = { version = "0.3.42" }
|
||||
wild = { version = "2" }
|
||||
zip = { version = "0.6.6", default-features = false }
|
||||
|
||||
[workspace.metadata.cargo-shear]
|
||||
ignored = ["getrandom"]
|
||||
|
||||
zip = { version = "0.6.6", default-features = false, features = ["zstd"] }
|
||||
|
||||
[workspace.lints.rust]
|
||||
unsafe_code = "warn"
|
||||
unreachable_pub = "warn"
|
||||
unexpected_cfgs = { level = "warn", check-cfg = [
|
||||
"cfg(fuzzing)",
|
||||
"cfg(codspeed)",
|
||||
] }
|
||||
|
||||
[workspace.lints.clippy]
|
||||
pedantic = { level = "warn", priority = -2 }
|
||||
@@ -207,10 +171,8 @@ missing_panics_doc = "allow"
|
||||
module_name_repetitions = "allow"
|
||||
must_use_candidate = "allow"
|
||||
similar_names = "allow"
|
||||
single_match_else = "allow"
|
||||
too_many_lines = "allow"
|
||||
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
|
||||
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
|
||||
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
|
||||
needless_raw_string_hashes = "allow"
|
||||
# Disallowed restriction lints
|
||||
print_stdout = "warn"
|
||||
@@ -223,13 +185,6 @@ get_unwrap = "warn"
|
||||
rc_buffer = "warn"
|
||||
rc_mutex = "warn"
|
||||
rest_pat_in_fully_bound_structs = "warn"
|
||||
# nursery rules
|
||||
redundant_clone = "warn"
|
||||
debug_assert_with_mut_call = "warn"
|
||||
unused_peekable = "warn"
|
||||
|
||||
# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
|
||||
large_stack_arrays = "allow"
|
||||
|
||||
[profile.release]
|
||||
# Note that we set these explicitly, and these values
|
||||
@@ -264,77 +219,3 @@ opt-level = 1
|
||||
[profile.profiling]
|
||||
inherits = "release"
|
||||
debug = 1
|
||||
|
||||
# The profile that 'cargo dist' will build with.
|
||||
[profile.dist]
|
||||
inherits = "release"
|
||||
|
||||
# Config for 'dist'
|
||||
[workspace.metadata.dist]
|
||||
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
|
||||
cargo-dist-version = "0.28.4-prerelease.1"
|
||||
# CI backends to support
|
||||
ci = "github"
|
||||
# The installers to generate for each app
|
||||
installers = ["shell", "powershell"]
|
||||
# The archive format to use for windows builds (defaults .zip)
|
||||
windows-archive = ".zip"
|
||||
# The archive format to use for non-windows builds (defaults .tar.xz)
|
||||
unix-archive = ".tar.gz"
|
||||
# Target platforms to build apps for (Rust target-triple syntax)
|
||||
targets = [
|
||||
"aarch64-apple-darwin",
|
||||
"aarch64-pc-windows-msvc",
|
||||
"aarch64-unknown-linux-gnu",
|
||||
"aarch64-unknown-linux-musl",
|
||||
"arm-unknown-linux-musleabihf",
|
||||
"armv7-unknown-linux-gnueabihf",
|
||||
"armv7-unknown-linux-musleabihf",
|
||||
"i686-pc-windows-msvc",
|
||||
"i686-unknown-linux-gnu",
|
||||
"i686-unknown-linux-musl",
|
||||
"powerpc64-unknown-linux-gnu",
|
||||
"powerpc64le-unknown-linux-gnu",
|
||||
"s390x-unknown-linux-gnu",
|
||||
"x86_64-apple-darwin",
|
||||
"x86_64-pc-windows-msvc",
|
||||
"x86_64-unknown-linux-gnu",
|
||||
"x86_64-unknown-linux-musl",
|
||||
]
|
||||
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
|
||||
auto-includes = false
|
||||
# Whether dist should create a Github Release or use an existing draft
|
||||
create-release = true
|
||||
# Which actions to run on pull requests
|
||||
pr-run-mode = "skip"
|
||||
# Whether CI should trigger releases with dispatches instead of tag pushes
|
||||
dispatch-releases = true
|
||||
# Which phase dist should use to create the GitHub release
|
||||
github-release = "announce"
|
||||
# Whether CI should include auto-generated code to build local artifacts
|
||||
build-local-artifacts = false
|
||||
# Local artifacts jobs to run in CI
|
||||
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
|
||||
# Publish jobs to run in CI
|
||||
publish-jobs = ["./publish-pypi", "./publish-wasm"]
|
||||
# Post-announce jobs to run in CI
|
||||
post-announce-jobs = [
|
||||
"./notify-dependents",
|
||||
"./publish-docs",
|
||||
"./publish-playground",
|
||||
]
|
||||
# Custom permissions for GitHub Jobs
|
||||
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
|
||||
# Whether to install an updater program
|
||||
install-updater = false
|
||||
# Path that installers should place binaries in
|
||||
install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]
|
||||
|
||||
[workspace.metadata.dist.github-custom-runners]
|
||||
global = "depot-ubuntu-latest-4"
|
||||
|
||||
[workspace.metadata.dist.github-action-commits]
|
||||
"actions/checkout" = "11bd71901bbe5b1630ceea73d27597364c9af683" # v4
|
||||
"actions/upload-artifact" = "ea165f8d65b6e75b540449e92b4886f43607fa02" # v4.6.2
|
||||
"actions/download-artifact" = "95815c38cf2ff2164869cbab79da8d1f422bc89e" # v4.2.1
|
||||
"actions/attest-build-provenance" = "c074443f1aee8d4aeeae555aebba3282517141b2" #v2.2.3
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM --platform=$BUILDPLATFORM ubuntu AS build
|
||||
FROM --platform=$BUILDPLATFORM ubuntu as build
|
||||
ENV HOME="/root"
|
||||
WORKDIR $HOME
|
||||
|
||||
|
||||
LICENSE (25)
@@ -1371,28 +1371,3 @@ are:
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
"""
|
||||
|
||||
- pydoclint, licensed as follows:
|
||||
"""
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2023 jsh9
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
"""
|
||||
|
||||
README.md (73)
@@ -29,14 +29,14 @@ An extremely fast Python linter and code formatter, written in Rust.
|
||||
- 🐍 Installable via `pip`
|
||||
- 🛠️ `pyproject.toml` support
|
||||
- 🤝 Python 3.13 compatibility
|
||||
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
|
||||
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
|
||||
- 📦 Built-in caching, to avoid re-analyzing unchanged files
|
||||
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
|
||||
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
|
||||
of popular Flake8 plugins, like flake8-bugbear
|
||||
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
|
||||
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
|
||||
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)
|
||||
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
|
||||
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)
|
||||
|
||||
Ruff aims to be orders of magnitude faster than alternative tools while integrating more
|
||||
functionality behind a single, common interface.
|
||||
@@ -110,47 +110,16 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
|
||||
1. [Who's Using Ruff?](#whos-using-ruff)
|
||||
1. [License](#license)
|
||||
|
||||
## Getting Started<a id="getting-started"></a>
|
||||
## Getting Started
|
||||
|
||||
For more, see the [documentation](https://docs.astral.sh/ruff/).
|
||||
|
||||
### Installation
|
||||
|
||||
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI.
|
||||
|
||||
Invoke Ruff directly with [`uvx`](https://docs.astral.sh/uv/):
|
||||
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
|
||||
|
||||
```shell
|
||||
uvx ruff check # Lint all files in the current directory.
|
||||
uvx ruff format # Format all files in the current directory.
|
||||
```
|
||||
|
||||
Or install Ruff with `uv` (recommended), `pip`, or `pipx`:
|
||||
|
||||
```shell
|
||||
# With uv.
|
||||
uv tool install ruff@latest # Install Ruff globally.
|
||||
uv add --dev ruff # Or add Ruff to your project.
|
||||
|
||||
# With pip.
|
||||
pip install ruff
|
||||
|
||||
# With pipx.
|
||||
pipx install ruff
|
||||
```
|
||||
|
||||
Starting with version `0.5.0`, Ruff can be installed with our standalone installers:
|
||||
|
||||
```shell
|
||||
# On macOS and Linux.
|
||||
curl -LsSf https://astral.sh/ruff/install.sh | sh
|
||||
|
||||
# On Windows.
|
||||
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
|
||||
|
||||
# For a specific version.
|
||||
curl -LsSf https://astral.sh/ruff/0.11.6/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.11.6/install.ps1 | iex"
|
||||
```
|
||||
|
||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||
@@ -183,7 +152,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.11.6
|
||||
rev: v0.4.10
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
@@ -192,10 +161,11 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
- id: ruff-format
|
||||
```
|
||||
|
||||
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).
|
||||
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
|
||||
alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).
|
||||
|
||||
Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
|
||||
[`ruff-action`](https://github.com/astral-sh/ruff-action):
|
||||
[`ruff-action`](https://github.com/chartboost/ruff-action):
|
||||
|
||||
```yaml
|
||||
name: Ruff
|
||||
@@ -205,10 +175,10 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: astral-sh/ruff-action@v3
|
||||
- uses: chartboost/ruff-action@v1
|
||||
```
|
||||
|
||||
### Configuration<a id="configuration"></a>
|
||||
### Configuration
|
||||
|
||||
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
|
||||
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
|
||||
@@ -251,8 +221,8 @@ exclude = [
|
||||
line-length = 88
|
||||
indent-width = 4
|
||||
|
||||
# Assume Python 3.9
|
||||
target-version = "py39"
|
||||
# Assume Python 3.8
|
||||
target-version = "py38"
|
||||
|
||||
[lint]
|
||||
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
||||
@@ -304,7 +274,7 @@ features that may change prior to stabilization.
|
||||
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
|
||||
for more on the linting and formatting commands, respectively.
|
||||
|
||||
## Rules<a id="rules"></a>
|
||||
## Rules
|
||||
|
||||
<!-- Begin section: Rules -->
|
||||
|
||||
@@ -364,6 +334,7 @@ quality tools, including:
|
||||
- [flake8-super](https://pypi.org/project/flake8-super/)
|
||||
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
|
||||
- [flake8-todos](https://pypi.org/project/flake8-todos/)
|
||||
- [flake8-trio](https://pypi.org/project/flake8-trio/)
|
||||
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
|
||||
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
|
||||
- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/astral-sh/ruff/issues/2102))
|
||||
@@ -380,21 +351,21 @@ quality tools, including:
|
||||
|
||||
For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).
|
||||
|
||||
## Contributing<a id="contributing"></a>
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome and highly appreciated. To get started, check out the
|
||||
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
|
||||
|
||||
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
|
||||
|
||||
## Support<a id="support"></a>
|
||||
## Support
|
||||
|
||||
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
|
||||
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
|
||||
|
||||
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
|
||||
|
||||
## Acknowledgements<a id="acknowledgements"></a>
|
||||
## Acknowledgements
|
||||
|
||||
Ruff's linter draws on both the APIs and implementation details of many other
|
||||
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
|
||||
@@ -418,7 +389,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a
|
||||
|
||||
Ruff is released under the MIT license.
|
||||
|
||||
## Who's Using Ruff?<a id="whos-using-ruff"></a>
|
||||
## Who's Using Ruff?
|
||||
|
||||
Ruff is used by a number of major open-source projects and companies, including:
|
||||
|
||||
@@ -430,14 +401,12 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- [Babel](https://github.com/python-babel/babel)
|
||||
- Benchling ([Refac](https://github.com/benchling/refac))
|
||||
- [Bokeh](https://github.com/bokeh/bokeh)
|
||||
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
|
||||
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
||||
- CERN ([Indico](https://getindico.io/))
|
||||
- [DVC](https://github.com/iterative/dvc)
|
||||
- [Dagger](https://github.com/dagger/dagger)
|
||||
- [Dagster](https://github.com/dagster-io/dagster)
|
||||
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
|
||||
- [Dify](https://github.com/langgenius/dify)
|
||||
- [FastAPI](https://github.com/tiangolo/fastapi)
|
||||
- [Godot](https://github.com/godotengine/godot)
|
||||
- [Gradio](https://github.com/gradio-app/gradio)
|
||||
@@ -448,11 +417,9 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
|
||||
[Datasets](https://github.com/huggingface/datasets),
|
||||
[Diffusers](https://github.com/huggingface/diffusers))
|
||||
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
|
||||
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
||||
- [Ibis](https://github.com/ibis-project/ibis)
|
||||
- [ivy](https://github.com/unifyai/ivy)
|
||||
- [JAX](https://github.com/jax-ml/jax)
|
||||
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||
- [Kraken Tech](https://kraken.tech/)
|
||||
- [LangChain](https://github.com/hwchase17/langchain)
|
||||
@@ -539,7 +506,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
|
||||
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
|
||||
```
|
||||
|
||||
## License<a id="license"></a>
|
||||
## License
|
||||
|
||||
This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
|
||||
|
||||
|
||||
SECURITY.md (15)
@@ -1,15 +0,0 @@
|
||||
# Security policy
|
||||
|
||||
## Reporting a vulnerability
|
||||
|
||||
If you have found a possible vulnerability, please email `security at astral dot sh`.
|
||||
|
||||
## Bug bounties
|
||||
|
||||
While we sincerely appreciate and encourage reports of suspected security problems, please note that
|
||||
Astral does not currently run any bug bounty programs.
|
||||
|
||||
## Vulnerability disclosures
|
||||
|
||||
Critical vulnerabilities will be disclosed via GitHub's
|
||||
[security advisory](https://github.com/astral-sh/ruff/security) system.
|
||||
_typos.toml (21)
@@ -1,10 +1,6 @@
|
||||
[files]
|
||||
# https://github.com/crate-ci/typos/issues/868
|
||||
extend-exclude = [
|
||||
"crates/red_knot_vendored/vendor/**/*",
|
||||
"**/resources/**/*",
|
||||
"**/snapshots/**/*",
|
||||
]
|
||||
extend-exclude = ["crates/red_knot_module_resolver/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
|
||||
|
||||
[default.extend-words]
|
||||
"arange" = "arange" # e.g. `numpy.arange`
|
||||
@@ -12,22 +8,13 @@ hel = "hel"
|
||||
whos = "whos"
|
||||
spawnve = "spawnve"
|
||||
ned = "ned"
|
||||
pn = "pn" # `import panel as pn` is a thing
|
||||
pn = "pn" # `import panel as pd` is a thing
|
||||
poit = "poit"
|
||||
BA = "BA" # acronym for "Bad Allowed", used in testing.
|
||||
jod = "jod" # e.g., `jod-thread`
|
||||
Numer = "Numer" # Library name 'NumerBlox' in "Who's Using Ruff?"
|
||||
|
||||
[default]
|
||||
extend-ignore-re = [
|
||||
# Line ignore with trailing "spellchecker:disable-line"
|
||||
"(?Rm)^.*#\\s*spellchecker:disable-line$",
|
||||
"LICENSEs",
|
||||
# Various third party dependencies uses `typ` as struct field names (e.g., lsp_types::LogMessageParams)
|
||||
"typ",
|
||||
# TODO: Remove this once the `TYP` redirects are removed from `rule_redirects.rs`
|
||||
"TYP",
|
||||
# Line ignore with trailing "spellchecker:disable-line"
|
||||
"(?Rm)^.*#\\s*spellchecker:disable-line$"
|
||||
]
|
||||
|
||||
[default.extend-identifiers]
|
||||
"FrIeNdLy" = "FrIeNdLy"
|
||||
|
||||
clippy.toml (34)
@@ -1,25 +1,13 @@
|
||||
doc-valid-idents = [
|
||||
"..",
|
||||
"CodeQL",
|
||||
"FastAPI",
|
||||
"IPython",
|
||||
"LangChain",
|
||||
"LibCST",
|
||||
"McCabe",
|
||||
"NumPy",
|
||||
"SCREAMING_SNAKE_CASE",
|
||||
"SQLAlchemy",
|
||||
"StackOverflow",
|
||||
"PyCharm",
|
||||
"SNMPv1",
|
||||
"SNMPv2",
|
||||
"SNMPv3",
|
||||
"PyFlakes"
|
||||
]
|
||||
|
||||
ignore-interior-mutability = [
|
||||
# Interned is read-only. The wrapped `Rc` never gets updated.
|
||||
"ruff_formatter::format_element::Interned",
|
||||
# The expression is read-only.
|
||||
"ruff_python_ast::hashable::HashableExpr",
|
||||
"..",
|
||||
"CodeQL",
|
||||
"FastAPI",
|
||||
"IPython",
|
||||
"LangChain",
|
||||
"LibCST",
|
||||
"McCabe",
|
||||
"NumPy",
|
||||
"SCREAMING_SNAKE_CASE",
|
||||
"SQLAlchemy",
|
||||
"StackOverflow",
|
||||
]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "red_knot"
|
||||
version = "0.0.0"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
homepage.workspace = true
|
||||
@@ -12,38 +12,33 @@ license.workspace = true
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
red_knot_python_semantic = { workspace = true }
|
||||
red_knot_project = { workspace = true, features = ["zstd"] }
|
||||
red_knot_server = { workspace = true }
|
||||
ruff_db = { workspace = true, features = ["os", "cache"] }
|
||||
red_knot_module_resolver = { workspace = true }
|
||||
|
||||
ruff_python_parser = { workspace = true }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_text_size = { workspace = true }
|
||||
ruff_index = { workspace = true }
|
||||
ruff_notebook = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
argfile = { workspace = true }
|
||||
clap = { workspace = true, features = ["wrap_help"] }
|
||||
colored = { workspace = true }
|
||||
countme = { workspace = true, features = ["enable"] }
|
||||
bitflags = { workspace = true }
|
||||
crossbeam = { workspace = true }
|
||||
ctrlc = { version = "3.4.4" }
|
||||
jiff = { workspace = true }
|
||||
dashmap = { workspace = true }
|
||||
hashbrown = { workspace = true }
|
||||
indexmap = { workspace = true }
|
||||
is-macro = { workspace = true }
|
||||
notify = { workspace = true }
|
||||
parking_lot = { workspace = true }
|
||||
rayon = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
tracing = { workspace = true, features = ["release_max_level_debug"] }
|
||||
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
|
||||
tracing-flame = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
smol_str = { version = "0.2.1" }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
tracing-tree = { workspace = true }
|
||||
wild = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
ruff_db = { workspace = true, features = ["testing"] }
|
||||
ruff_python_trivia = { workspace = true }
|
||||
|
||||
insta = { workspace = true, features = ["filters"] }
|
||||
insta-cmd = { workspace = true }
|
||||
filetime = { workspace = true }
|
||||
regex = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
# Red Knot
|
||||
|
||||
Red Knot is an extremely fast type checker.
|
||||
Currently, it is a work-in-progress and not ready for user testing.
|
||||
|
||||
Red Knot is designed to prioritize good type inference, even in unannotated code,
|
||||
and aims to avoid false positives.
|
||||
|
||||
While Red Knot will produce similar results to mypy and pyright on many codebases,
|
||||
100% compatibility with these tools is a non-goal.
|
||||
On some codebases, Red Knot's design decisions lead to different outcomes
|
||||
than you would get from running one of these more established tools.
|
||||
|
||||
## Contributing
|
||||
|
||||
Core type checking tests are written as Markdown code blocks.
|
||||
They can be found in [`red_knot_python_semantic/resources/mdtest`][resources-mdtest].
|
||||
See [`red_knot_test/README.md`][mdtest-readme] for more information
|
||||
on the test framework itself.
|
||||
|
||||
The list of open issues can be found [here][open-issues].
|
||||
|
||||
[mdtest-readme]: ../red_knot_test/README.md
|
||||
[open-issues]: https://github.com/astral-sh/ruff/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20label%3Ared-knot
|
||||
[resources-mdtest]: ../red_knot_python_semantic/resources/mdtest
|
||||
@@ -1,104 +0,0 @@
|
||||
use std::{
|
||||
fs,
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
};
|
||||
|
||||
fn main() {
|
||||
// The workspace root directory is not available without walking up the tree
|
||||
// https://github.com/rust-lang/cargo/issues/3946
|
||||
let workspace_root = Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap())
|
||||
.join("..")
|
||||
.join("..");
|
||||
|
||||
commit_info(&workspace_root);
|
||||
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let target = std::env::var("TARGET").unwrap();
|
||||
println!("cargo::rustc-env=RUST_HOST_TARGET={target}");
|
||||
}
|
||||
|
||||
fn commit_info(workspace_root: &Path) {
|
||||
// If not in a git repository, do not attempt to retrieve commit information
|
||||
let git_dir = workspace_root.join(".git");
|
||||
if !git_dir.exists() {
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(git_head_path) = git_head(&git_dir) {
|
||||
println!("cargo:rerun-if-changed={}", git_head_path.display());
|
||||
|
||||
let git_head_contents = fs::read_to_string(git_head_path);
|
||||
if let Ok(git_head_contents) = git_head_contents {
|
||||
// The contents are either a commit or a reference in the following formats
|
||||
// - "<commit>" when the head is detached
|
||||
// - "ref <ref>" when working on a branch
|
||||
// If a commit, checking if the HEAD file has changed is sufficient
|
||||
// If a ref, we need to add the head file for that ref to rebuild on commit
|
||||
let mut git_ref_parts = git_head_contents.split_whitespace();
|
||||
git_ref_parts.next();
|
||||
if let Some(git_ref) = git_ref_parts.next() {
|
||||
let git_ref_path = git_dir.join(git_ref);
|
||||
println!("cargo:rerun-if-changed={}", git_ref_path.display());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let output = match Command::new("git")
|
||||
.arg("log")
|
||||
.arg("-1")
|
||||
.arg("--date=short")
|
||||
.arg("--abbrev=9")
|
||||
.arg("--format=%H %h %cd %(describe)")
|
||||
.output()
|
||||
{
|
||||
Ok(output) if output.status.success() => output,
|
||||
_ => return,
|
||||
};
|
||||
let stdout = String::from_utf8(output.stdout).unwrap();
|
||||
let mut parts = stdout.split_whitespace();
|
||||
let mut next = || parts.next().unwrap();
|
||||
let _commit_hash = next();
|
||||
println!("cargo::rustc-env=RED_KNOT_COMMIT_SHORT_HASH={}", next());
|
||||
println!("cargo::rustc-env=RED_KNOT_COMMIT_DATE={}", next());
|
||||
|
||||
// Describe can fail for some commits
|
||||
// https://git-scm.com/docs/pretty-formats#Documentation/pretty-formats.txt-emdescribeoptionsem
|
||||
if let Some(describe) = parts.next() {
|
||||
let mut describe_parts = describe.split('-');
|
||||
let _last_tag = describe_parts.next().unwrap();
|
||||
|
||||
// If this is the tagged commit, this component will be missing
|
||||
println!(
|
||||
"cargo::rustc-env=RED_KNOT_LAST_TAG_DISTANCE={}",
|
||||
describe_parts.next().unwrap_or("0")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn git_head(git_dir: &Path) -> Option<PathBuf> {
|
||||
// The typical case is a standard git repository.
|
||||
let git_head_path = git_dir.join("HEAD");
|
||||
if git_head_path.exists() {
|
||||
return Some(git_head_path);
|
||||
}
|
||||
if !git_dir.is_file() {
|
||||
return None;
|
||||
}
|
||||
// If `.git/HEAD` doesn't exist and `.git` is actually a file,
|
||||
// then let's try to attempt to read it as a worktree. If it's
|
||||
// a worktree, then its contents will look like this, e.g.:
|
||||
//
|
||||
// gitdir: /home/andrew/astral/uv/main/.git/worktrees/pr2
|
||||
//
|
||||
// And the HEAD file we want to watch will be at:
|
||||
//
|
||||
// /home/andrew/astral/uv/main/.git/worktrees/pr2/HEAD
|
||||
let contents = fs::read_to_string(git_dir).ok()?;
|
||||
let (label, worktree_path) = contents.split_once(':')?;
|
||||
if label != "gitdir" {
|
||||
return None;
|
||||
}
|
||||
let worktree_path = worktree_path.trim();
|
||||
Some(PathBuf::from(worktree_path))
|
||||
}
|
||||
@@ -1,61 +0,0 @@
|
||||
# Running `mypy_primer`
|
||||
|
||||
## Basics
|
||||
|
||||
For now, we use our own [fork of mypy primer]. It can be run using `uvx --from "…" mypy_primer`. For example, to see the help message, run:
|
||||
|
||||
```sh
|
||||
uvx --from "git+https://github.com/astral-sh/mypy_primer.git@add-red-knot-support" mypy_primer -h
|
||||
```
|
||||
|
||||
Alternatively, you can install the forked version of `mypy_primer` using:
|
||||
|
||||
```sh
|
||||
uv tool install "git+https://github.com/astral-sh/mypy_primer.git@add-red-knot-support"
|
||||
```
|
||||
|
||||
and then run it using `uvx mypy_primer` or just `mypy_primer`, if your `PATH` is set up accordingly (see: [Tool executables]).
|
||||
|
||||
## Showing the diagnostics diff between two Git revisions
|
||||
|
||||
To show the diagnostics diff between two Git revisions (e.g. your feature branch and `main`), run:
|
||||
|
||||
```sh
|
||||
mypy_primer \
|
||||
--type-checker knot \
|
||||
--old origin/main \
|
||||
--new my/feature \
|
||||
--debug \
|
||||
--output concise \
|
||||
--project-selector '/black$'
|
||||
```
|
||||
|
||||
This will show the diagnostics diff for the `black` project between the `main` branch and your `my/feature` branch. To run the
|
||||
diff for all projects we currently enable in CI, use `--project-selector "/($(paste -s -d'|' crates/red_knot_python_semantic/resources/primer/good.txt))\$"`.
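Putting those pieces together, a run against all CI-enabled projects might look like this (the feature-branch name is a placeholder):

```sh
mypy_primer \
    --type-checker knot \
    --old origin/main \
    --new my/feature \
    --output concise \
    --project-selector "/($(paste -s -d'|' crates/red_knot_python_semantic/resources/primer/good.txt))\$"
```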
|
||||
|
||||
You can also take a look at the [full list of ecosystem projects]. Note that some of them might still need a `knot_paths` configuration
|
||||
option to work correctly.
|
||||
|
||||
## Avoiding recompilation
|
||||
|
||||
If you want to run `mypy_primer` repeatedly, e.g. for different projects, but for the same combination of `--old` and `--new`, you
|
||||
can set the `MYPY_PRIMER_NO_REBUILD` environment variable to avoid recompilation of Red Knot:
|
||||
|
||||
```sh
|
||||
MYPY_PRIMER_NO_REBUILD=1 mypy_primer …
|
||||
```
|
||||
|
||||
## Running from a local copy of the repository
|
||||
|
||||
If you are working on a local branch, you can use `mypy_primer`'s `--repo` option to specify the path to your local copy of the `ruff` repository.
|
||||
This allows `mypy_primer` to check out local branches:
|
||||
|
||||
```sh
|
||||
mypy_primer --repo /path/to/ruff --old origin/main --new my/local-branch …
|
||||
```
|
||||
|
||||
Note that you might need to clean up `/tmp/mypy_primer` in order for this to work correctly.
|
||||
|
||||
[fork of mypy primer]: https://github.com/astral-sh/mypy_primer/tree/add-red-knot-support
|
||||
[full list of ecosystem projects]: https://github.com/astral-sh/mypy_primer/blob/add-red-knot-support/mypy_primer/projects.py
|
||||
[tool executables]: https://docs.astral.sh/uv/concepts/tools/#tool-executables
|
||||
Binary image file not shown (before: 40 KiB).
@@ -1,128 +0,0 @@
|
||||
# Tracing
|
||||
|
||||
Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
|
||||
Note that tracing messages with severity `debug` or greater are user-facing; they should be phrased accordingly.
|
||||
Tracing spans are only shown when using `-vvv`.
|
||||
|
||||
## Verbosity levels
|
||||
|
||||
The CLI supports different verbosity levels.
|
||||
|
||||
- default: Only show errors and warnings.
- `-v` activates `info!`: Shows generally useful information such as paths of configuration files and the detected platform. The output stays small; this is the level you would typically enable in CI by default (similar to how `cargo build` shows which packages are fresh).
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When processing many packages or files you will get pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level everything is logged. Most of it is noise, it is really slow, and we dump e.g. the entire resolution graph. It is only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.
|
||||
|
||||
## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`
|
||||
|
||||
By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
|
||||
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
|
||||
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).
|
||||
|
||||
The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
|
||||
By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
|
||||
but this can result in confusing log output where messages from different threads are interleaved.
|
||||
To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.
|
||||
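For example, to get single-threaded, debug-level output (the `red_knot` invocation mirrors the profiling example below and is only illustrative):

```bash
RAYON_NUM_THREADS=1 RED_KNOT_LOG=debug red_knot -- --current-directory=../test -vv
```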
|
||||
### Examples
|
||||
|
||||
#### Show all debug messages
|
||||
|
||||
Shows debug messages from all crates.
|
||||
|
||||
```bash
|
||||
RED_KNOT_LOG=debug
|
||||
```
|
||||
|
||||
#### Show salsa query execution messages
|
||||
|
||||
Show the salsa `execute: my_query` messages in addition to all red knot messages.
|
||||
|
||||
```bash
|
||||
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
|
||||
```
|
||||
|
||||
#### Show typing traces
|
||||
|
||||
Only show traces for the `red_knot_python_semantic::types` module.
|
||||
|
||||
```bash
|
||||
RED_KNOT_LOG="red_knot_python_semantic::types"
|
||||
```
|
||||
|
||||
Note: Ensure that you use `-vvv` to see tracing spans.
|
||||
|
||||
#### Show messages for a single file
|
||||
|
||||
Shows all messages that are inside of a span for a specific file.
|
||||
|
||||
```bash
|
||||
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
|
||||
```
|
||||
|
||||
**Note**: Tracing still shows all spans because tracing can't know at the time of entering the span
|
||||
whether one of its children has the file `x.py`.
|
||||
|
||||
**Note**: Salsa currently logs entire memoized values (in our case, the source text and the parsed AST), which very quickly leads to extremely long output.
|
||||
|
||||
## Tracing and Salsa
|
||||
|
||||
Be mindful about using `tracing` in Salsa queries, especially when using `warn` or `error` because it isn't guaranteed
|
||||
that the query will execute after restoring from a persistent cache, in which case the user won't see the message.
|
||||
|
||||
For example, don't use `tracing` to show the user a message when generating a lint violation fails
|
||||
because the message would only be shown when linting the file the first time, but not on subsequent analysis
|
||||
runs or when restoring from a persistent cache. This can be confusing for users because they
|
||||
don't understand why a specific lint violation isn't raised. Instead, change your
|
||||
query to return the failure as part of the query's result or use a Salsa accumulator.
|
||||
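A minimal sketch of the first approach, with illustrative (not actual) Red Knot types:

```rust
/// Hypothetical result type for a lint query. Failures become part of the
/// memoized value, so they are reported on every (cached) run instead of
/// appearing only in a one-off log message.
#[derive(Debug, Clone, PartialEq, Eq)]
struct LintResult {
    diagnostics: Vec<String>,
    /// Rules whose violations could not be generated.
    failed_rules: Vec<String>,
}
```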
|
||||
## Tracing in tests
|
||||
|
||||
You can use `ruff_db::testing::setup_logging` or `ruff_db::testing::setup_logging_with_filter` to set up logging in tests.
|
||||
|
||||
```rust
|
||||
use ruff_db::testing::setup_logging;
|
||||
|
||||
#[test]
|
||||
fn test() {
|
||||
let _logging = setup_logging();
|
||||
|
||||
tracing::info!("This message will be printed to stderr");
|
||||
}
|
||||
```
|
||||
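A variant using `setup_logging_with_filter` might look as follows (a sketch only; the argument is assumed to be an `EnvFilter`-style directive string):

```rust
use ruff_db::testing::setup_logging_with_filter;

#[test]
fn test_with_filter() {
    // Assumption: the filter uses the same directive syntax as `RED_KNOT_LOG`.
    let _logging = setup_logging_with_filter("red_knot_python_semantic::types=trace");

    // ... run the code under test; only events matching the filter are printed.
}
```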
|
||||
Note: Most test runners capture stderr and only show its output when a test fails.
|
||||
|
||||
Note also that `setup_logging` only sets up logging for the current thread because [`set_global_default`](https://docs.rs/tracing/latest/tracing/subscriber/fn.set_global_default.html) can only be
|
||||
called **once**.
|
||||
|
||||
## Release builds
|
||||
|
||||
`trace!` events are removed in release builds.
|
||||
|
||||
## Profiling
|
||||
|
||||
Red Knot writes a folded stack trace named `tracing.folded` to the current directory when the environment variable `RED_KNOT_LOG_PROFILE` is set to `1` or `true`.
|
||||
|
||||
```bash
|
||||
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
|
||||
```
|
||||
|
||||
You can convert the textual representation into a visual one using `inferno`.
|
||||
|
||||
```shell
|
||||
cargo install inferno
|
||||
```
|
||||
|
||||
```shell
|
||||
# flamegraph
|
||||
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg
|
||||
|
||||
# flamechart
|
||||
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
|
||||
```
|
||||
|
||||

|
||||
|
||||
See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.
|
||||
@@ -1,280 +0,0 @@
|
||||
use crate::logging::Verbosity;
|
||||
use crate::python_version::PythonVersion;
|
||||
use clap::{ArgAction, ArgMatches, Error, Parser};
|
||||
use red_knot_project::metadata::options::{EnvironmentOptions, Options, TerminalOptions};
|
||||
use red_knot_project::metadata::value::{RangedValue, RelativePathBuf};
|
||||
use red_knot_python_semantic::lint;
|
||||
use ruff_db::system::SystemPathBuf;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[command(
|
||||
author,
|
||||
name = "red-knot",
|
||||
about = "An extremely fast Python type checker."
|
||||
)]
|
||||
#[command(version)]
|
||||
pub(crate) struct Args {
|
||||
#[command(subcommand)]
|
||||
pub(crate) command: Command,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
pub(crate) enum Command {
|
||||
/// Check a project for type errors.
|
||||
Check(CheckCommand),
|
||||
|
||||
/// Start the language server
|
||||
Server,
|
||||
|
||||
/// Display Red Knot's version
|
||||
Version,
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
pub(crate) struct CheckCommand {
|
||||
/// List of files or directories to check.
|
||||
#[clap(
|
||||
help = "List of files or directories to check [default: the project root]",
|
||||
value_name = "PATH"
|
||||
)]
|
||||
pub paths: Vec<SystemPathBuf>,
|
||||
|
||||
/// Run the command within the given project directory.
|
||||
///
|
||||
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
|
||||
/// as will the project's virtual environment (`.venv`) unless the `venv-path` option is set.
|
||||
///
|
||||
/// Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
|
||||
#[arg(long, value_name = "PROJECT")]
|
||||
pub(crate) project: Option<SystemPathBuf>,
|
||||
|
||||
/// Path to the Python installation from which Red Knot resolves type information and third-party dependencies.
|
||||
///
|
||||
/// If not specified, Red Knot will look at the `VIRTUAL_ENV` environment variable.
|
||||
///
|
||||
/// Red Knot will search in the path's `site-packages` directories for type information and
|
||||
/// third-party imports.
|
||||
///
|
||||
/// This option is commonly used to specify the path to a virtual environment.
|
||||
#[arg(long, value_name = "PATH")]
|
||||
pub(crate) python: Option<SystemPathBuf>,
|
||||
|
||||
/// Custom directory to use for stdlib typeshed stubs.
|
||||
#[arg(long, value_name = "PATH", alias = "custom-typeshed-dir")]
|
||||
pub(crate) typeshed: Option<SystemPathBuf>,
|
||||
|
||||
/// Additional path to use as a module-resolution source (can be passed multiple times).
|
||||
#[arg(long, value_name = "PATH")]
|
||||
pub(crate) extra_search_path: Option<Vec<SystemPathBuf>>,
|
||||
|
||||
/// Python version to assume when resolving types.
|
||||
#[arg(long, value_name = "VERSION", alias = "target-version")]
|
||||
pub(crate) python_version: Option<PythonVersion>,
|
||||
|
||||
/// Target platform to assume when resolving types.
|
||||
///
|
||||
/// This is used to specialize the type of `sys.platform` and will affect the visibility
|
||||
/// of platform-specific functions and attributes. If the value is set to `all`, no
|
||||
/// assumptions are made about the target platform. If unspecified, the current system's
|
||||
/// platform will be used.
|
||||
#[arg(long, value_name = "PLATFORM", alias = "platform")]
|
||||
pub(crate) python_platform: Option<String>,
|
||||
|
||||
#[clap(flatten)]
|
||||
pub(crate) verbosity: Verbosity,
|
||||
|
||||
#[clap(flatten)]
|
||||
pub(crate) rules: RulesArg,
|
||||
|
||||
/// The format to use for printing diagnostic messages.
|
||||
#[arg(long)]
|
||||
pub(crate) output_format: Option<OutputFormat>,
|
||||
|
||||
/// Control when colored output is used.
|
||||
#[arg(long, value_name = "WHEN")]
|
||||
pub(crate) color: Option<TerminalColor>,
|
||||
|
||||
/// Use exit code 1 if there are any warning-level diagnostics.
|
||||
#[arg(long, conflicts_with = "exit_zero", default_missing_value = "true", num_args=0..1)]
|
||||
pub(crate) error_on_warning: Option<bool>,
|
||||
|
||||
/// Always use exit code 0, even when there are error-level diagnostics.
|
||||
#[arg(long)]
|
||||
pub(crate) exit_zero: bool,
|
||||
|
||||
/// Watch files for changes and recheck files related to the changed files.
|
||||
#[arg(long, short = 'W')]
|
||||
pub(crate) watch: bool,
|
||||
}
|
||||
|
||||
impl CheckCommand {
|
||||
pub(crate) fn into_options(self) -> Options {
|
||||
let rules = if self.rules.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(
|
||||
self.rules
|
||||
.into_iter()
|
||||
.map(|(rule, level)| (RangedValue::cli(rule), RangedValue::cli(level)))
|
||||
.collect(),
|
||||
)
|
||||
};
|
||||
|
||||
Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
python_version: self
|
||||
.python_version
|
||||
.map(|version| RangedValue::cli(version.into())),
|
||||
python_platform: self
|
||||
.python_platform
|
||||
.map(|platform| RangedValue::cli(platform.into())),
|
||||
python: self.python.map(RelativePathBuf::cli),
|
||||
typeshed: self.typeshed.map(RelativePathBuf::cli),
|
||||
extra_paths: self.extra_search_path.map(|extra_search_paths| {
|
||||
extra_search_paths
|
||||
.into_iter()
|
||||
.map(RelativePathBuf::cli)
|
||||
.collect()
|
||||
}),
|
||||
}),
|
||||
terminal: Some(TerminalOptions {
|
||||
output_format: self
|
||||
.output_format
|
||||
.map(|output_format| RangedValue::cli(output_format.into())),
|
||||
error_on_warning: self.error_on_warning,
|
||||
}),
|
||||
rules,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A list of rules to enable or disable with a given severity.
|
||||
///
|
||||
/// This type is used to parse the `--error`, `--warn`, and `--ignore` arguments
|
||||
/// while preserving the order in which they were specified (arguments last override previous severities).
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct RulesArg(Vec<(String, lint::Level)>);
|
||||
|
||||
impl RulesArg {
|
||||
fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
}
|
||||
|
||||
fn into_iter(self) -> impl Iterator<Item = (String, lint::Level)> {
|
||||
self.0.into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl clap::FromArgMatches for RulesArg {
|
||||
fn from_arg_matches(matches: &ArgMatches) -> Result<Self, Error> {
|
||||
let mut rules = Vec::new();
|
||||
|
||||
for (level, arg_id) in [
|
||||
(lint::Level::Ignore, "ignore"),
|
||||
(lint::Level::Warn, "warn"),
|
||||
(lint::Level::Error, "error"),
|
||||
] {
|
||||
let indices = matches.indices_of(arg_id).into_iter().flatten();
|
||||
let levels = matches.get_many::<String>(arg_id).into_iter().flatten();
|
||||
rules.extend(
|
||||
indices
|
||||
.zip(levels)
|
||||
.map(|(index, rule)| (index, rule, level)),
|
||||
);
|
||||
}
|
||||
|
||||
// Sort by their index so that values specified later override earlier ones.
|
||||
rules.sort_by_key(|(index, _, _)| *index);
|
||||
|
||||
Ok(Self(
|
||||
rules
|
||||
.into_iter()
|
||||
.map(|(_, rule, level)| (rule.to_owned(), level))
|
||||
.collect(),
|
||||
))
|
||||
}
|
||||
|
||||
fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> Result<(), Error> {
|
||||
self.0 = Self::from_arg_matches(matches)?.0;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl clap::Args for RulesArg {
|
||||
fn augment_args(cmd: clap::Command) -> clap::Command {
|
||||
const HELP_HEADING: &str = "Enabling / disabling rules";
|
||||
|
||||
cmd.arg(
|
||||
clap::Arg::new("error")
|
||||
.long("error")
|
||||
.action(ArgAction::Append)
|
||||
.help("Treat the given rule as having severity 'error'. Can be specified multiple times.")
|
||||
.value_name("RULE")
|
||||
.help_heading(HELP_HEADING),
|
||||
)
|
||||
.arg(
|
||||
clap::Arg::new("warn")
|
||||
.long("warn")
|
||||
.action(ArgAction::Append)
|
||||
.help("Treat the given rule as having severity 'warn'. Can be specified multiple times.")
|
||||
.value_name("RULE")
|
||||
.help_heading(HELP_HEADING),
|
||||
)
|
||||
.arg(
|
||||
clap::Arg::new("ignore")
|
||||
.long("ignore")
|
||||
.action(ArgAction::Append)
|
||||
.help("Disables the rule. Can be specified multiple times.")
|
||||
.value_name("RULE")
|
||||
.help_heading(HELP_HEADING),
|
||||
)
|
||||
}
|
||||
|
||||
fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
|
||||
Self::augment_args(cmd)
|
||||
}
|
||||
}
|
||||
|
||||
/// The diagnostic output format.
|
||||
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
|
||||
pub enum OutputFormat {
|
||||
/// Print diagnostics verbosely, with context and helpful hints.
|
||||
///
|
||||
/// Diagnostic messages may include additional context and
|
||||
/// annotations on the input to help understand the message.
|
||||
#[default]
|
||||
#[value(name = "full")]
|
||||
Full,
|
||||
/// Print diagnostics concisely, one per line.
|
||||
///
|
||||
/// This will guarantee that each diagnostic is printed on
|
||||
/// a single line. Only the most important or primary aspects
|
||||
/// of the diagnostic are included. Contextual information is
|
||||
/// dropped.
|
||||
#[value(name = "concise")]
|
||||
Concise,
|
||||
}
|
||||
|
||||
impl From<OutputFormat> for ruff_db::diagnostic::DiagnosticFormat {
|
||||
fn from(format: OutputFormat) -> ruff_db::diagnostic::DiagnosticFormat {
|
||||
match format {
|
||||
OutputFormat::Full => Self::Full,
|
||||
OutputFormat::Concise => Self::Concise,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Control when colored output is used.
|
||||
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
|
||||
pub(crate) enum TerminalColor {
|
||||
/// Display colors if the output goes to an interactive terminal.
|
||||
#[default]
|
||||
Auto,
|
||||
|
||||
/// Always display colors.
|
||||
Always,
|
||||
|
||||
/// Never display colors.
|
||||
Never,
|
||||
}
|
||||
418
crates/red_knot/src/ast_ids.rs
Normal file
@@ -0,0 +1,418 @@
|
||||
use std::any::type_name;
|
||||
use std::fmt::{Debug, Formatter};
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::marker::PhantomData;
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_index::{Idx, IndexVec};
|
||||
use ruff_python_ast::visitor::source_order;
|
||||
use ruff_python_ast::visitor::source_order::{SourceOrderVisitor, TraversalSignal};
|
||||
use ruff_python_ast::{
|
||||
AnyNodeRef, AstNode, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule,
|
||||
NodeKind, Parameter, Stmt, StmtAnnAssign, StmtAssign, StmtAugAssign, StmtClassDef,
|
||||
StmtFunctionDef, StmtGlobal, StmtImport, StmtImportFrom, StmtNonlocal, StmtTypeAlias,
|
||||
TypeParam, TypeParamParamSpec, TypeParamTypeVar, TypeParamTypeVarTuple, WithItem,
|
||||
};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
/// A type agnostic ID that uniquely identifies an AST node in a file.
|
||||
#[ruff_index::newtype_index]
|
||||
pub struct AstId;
|
||||
|
||||
/// A typed ID that uniquely identifies an AST node in a file.
|
||||
///
|
||||
/// This is different from [`AstId`] in that it is a combination of ID and the type of the node the ID identifies.
|
||||
/// Typing the ID prevents mixing IDs of different node types and allows restricting the API to only accept
|
||||
/// nodes for which an ID has been created (not all AST nodes get an ID).
|
||||
pub struct TypedAstId<N: HasAstId> {
|
||||
erased: AstId,
|
||||
_marker: PhantomData<fn() -> N>,
|
||||
}
|
||||
|
||||
impl<N: HasAstId> TypedAstId<N> {
|
||||
/// Upcasts this ID from a more specific node type to a more general node type.
|
||||
pub fn upcast<M: HasAstId>(self) -> TypedAstId<M>
|
||||
where
|
||||
N: Into<M>,
|
||||
{
|
||||
TypedAstId {
|
||||
erased: self.erased,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Copy for TypedAstId<N> {}
|
||||
impl<N: HasAstId> Clone for TypedAstId<N> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> PartialEq for TypedAstId<N> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.erased == other.erased
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Eq for TypedAstId<N> {}
|
||||
impl<N: HasAstId> Hash for TypedAstId<N> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.erased.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Debug for TypedAstId<N> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_tuple("TypedAstId")
|
||||
.field(&self.erased)
|
||||
.field(&type_name::<N>())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AstIds {
|
||||
ids: IndexVec<AstId, NodeKey>,
|
||||
reverse: FxHashMap<NodeKey, AstId>,
|
||||
}
|
||||
|
||||
impl AstIds {
|
||||
// TODO rust analyzer doesn't allocate an ID for every node. It only allocates ids for
|
||||
// nodes with a corresponding HIR element, that is nodes that are definitions.
|
||||
pub fn from_module(module: &ModModule) -> Self {
|
||||
let mut visitor = AstIdsVisitor::default();
|
||||
|
||||
// TODO: visit_module?
|
||||
// Make sure we visit the root
|
||||
visitor.create_id(module);
|
||||
visitor.visit_body(&module.body);
|
||||
|
||||
while let Some(deferred) = visitor.deferred.pop() {
|
||||
match deferred {
|
||||
DeferredNode::FunctionDefinition(def) => {
|
||||
def.visit_source_order(&mut visitor);
|
||||
}
|
||||
DeferredNode::ClassDefinition(def) => def.visit_source_order(&mut visitor),
|
||||
}
|
||||
}
|
||||
|
||||
AstIds {
|
||||
ids: visitor.ids,
|
||||
reverse: visitor.reverse,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the ID to the root node.
|
||||
pub fn root(&self) -> NodeKey {
|
||||
self.ids[AstId::new(0)]
|
||||
}
|
||||
|
||||
/// Returns the [`TypedAstId`] for a node.
|
||||
pub fn ast_id<N: HasAstId>(&self, node: &N) -> TypedAstId<N> {
|
||||
let key = node.syntax_node_key();
|
||||
TypedAstId {
|
||||
erased: self.reverse.get(&key).copied().unwrap(),
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the [`TypedAstId`] for the node identified with the given [`TypedNodeKey`].
|
||||
pub fn ast_id_for_key<N: HasAstId>(&self, node: &TypedNodeKey<N>) -> TypedAstId<N> {
|
||||
let ast_id = self.ast_id_for_node_key(node.inner);
|
||||
|
||||
TypedAstId {
|
||||
erased: ast_id,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the untyped [`AstId`] for the node identified by the given `node` key.
|
||||
pub fn ast_id_for_node_key(&self, node: NodeKey) -> AstId {
|
||||
self.reverse
|
||||
.get(&node)
|
||||
.copied()
|
||||
.expect("Can't find node in AstIds map.")
|
||||
}
|
||||
|
||||
/// Returns the [`TypedNodeKey`] for the node identified by the given [`TypedAstId`].
|
||||
pub fn key<N: HasAstId>(&self, id: TypedAstId<N>) -> TypedNodeKey<N> {
|
||||
let syntax_key = self.ids[id.erased];
|
||||
|
||||
TypedNodeKey::new(syntax_key).unwrap()
|
||||
}
|
||||
|
||||
pub fn node_key<H: HasAstId>(&self, id: TypedAstId<H>) -> NodeKey {
|
||||
self.ids[id.erased]
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for AstIds {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let mut map = f.debug_map();
|
||||
for (key, value) in self.ids.iter_enumerated() {
|
||||
map.entry(&key, &value);
|
||||
}
|
||||
|
||||
map.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for AstIds {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.ids == other.ids
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for AstIds {}
|
||||
|
||||
#[derive(Default)]
|
||||
struct AstIdsVisitor<'a> {
|
||||
ids: IndexVec<AstId, NodeKey>,
|
||||
reverse: FxHashMap<NodeKey, AstId>,
|
||||
deferred: Vec<DeferredNode<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> AstIdsVisitor<'a> {
|
||||
fn create_id<A: HasAstId>(&mut self, node: &A) {
|
||||
let node_key = node.syntax_node_key();
|
||||
|
||||
let id = self.ids.push(node_key);
|
||||
self.reverse.insert(node_key, id);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> SourceOrderVisitor<'a> for AstIdsVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
match stmt {
|
||||
Stmt::FunctionDef(def) => {
|
||||
self.create_id(def);
|
||||
self.deferred.push(DeferredNode::FunctionDefinition(def));
|
||||
return;
|
||||
}
|
||||
// TODO defer visiting the assignment body, type alias parameters etc?
|
||||
Stmt::ClassDef(def) => {
|
||||
self.create_id(def);
|
||||
self.deferred.push(DeferredNode::ClassDefinition(def));
|
||||
return;
|
||||
}
|
||||
Stmt::Expr(_) => {
|
||||
// Skip
|
||||
return;
|
||||
}
|
||||
Stmt::Return(_) => {}
|
||||
Stmt::Delete(_) => {}
|
||||
Stmt::Assign(assignment) => self.create_id(assignment),
|
||||
Stmt::AugAssign(assignment) => {
|
||||
self.create_id(assignment);
|
||||
}
|
||||
Stmt::AnnAssign(assignment) => self.create_id(assignment),
|
||||
Stmt::TypeAlias(assignment) => self.create_id(assignment),
|
||||
Stmt::For(_) => {}
|
||||
Stmt::While(_) => {}
|
||||
Stmt::If(_) => {}
|
||||
Stmt::With(_) => {}
|
||||
Stmt::Match(_) => {}
|
||||
Stmt::Raise(_) => {}
|
||||
Stmt::Try(_) => {}
|
||||
Stmt::Assert(_) => {}
|
||||
Stmt::Import(import) => self.create_id(import),
|
||||
Stmt::ImportFrom(import_from) => self.create_id(import_from),
|
||||
Stmt::Global(global) => self.create_id(global),
|
||||
Stmt::Nonlocal(non_local) => self.create_id(non_local),
|
||||
Stmt::Pass(_) => {}
|
||||
Stmt::Break(_) => {}
|
||||
Stmt::Continue(_) => {}
|
||||
Stmt::IpyEscapeCommand(_) => {}
|
||||
}
|
||||
|
||||
source_order::walk_stmt(self, stmt);
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, _expr: &'a Expr) {}
|
||||
|
||||
fn visit_parameter(&mut self, parameter: &'a Parameter) {
|
||||
self.create_id(parameter);
|
||||
source_order::walk_parameter(self, parameter);
|
||||
}
|
||||
|
||||
fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler) {
|
||||
match except_handler {
|
||||
ExceptHandler::ExceptHandler(except_handler) => {
|
||||
self.create_id(except_handler);
|
||||
}
|
||||
}
|
||||
|
||||
source_order::walk_except_handler(self, except_handler);
|
||||
}
|
||||
|
||||
fn visit_with_item(&mut self, with_item: &'a WithItem) {
|
||||
self.create_id(with_item);
|
||||
source_order::walk_with_item(self, with_item);
|
||||
}
|
||||
|
||||
fn visit_match_case(&mut self, match_case: &'a MatchCase) {
|
||||
self.create_id(match_case);
|
||||
source_order::walk_match_case(self, match_case);
|
||||
}
|
||||
|
||||
fn visit_type_param(&mut self, type_param: &'a TypeParam) {
|
||||
self.create_id(type_param);
|
||||
}
|
||||
}
|
||||
|
||||
enum DeferredNode<'a> {
|
||||
FunctionDefinition(&'a StmtFunctionDef),
|
||||
ClassDefinition(&'a StmtClassDef),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub struct TypedNodeKey<N: AstNode> {
|
||||
/// The type erased node key.
|
||||
inner: NodeKey,
|
||||
_marker: PhantomData<fn() -> N>,
|
||||
}
|
||||
|
||||
impl<N: AstNode> TypedNodeKey<N> {
|
||||
pub fn from_node(node: &N) -> Self {
|
||||
let inner = NodeKey::from_node(node.as_any_node_ref());
|
||||
Self {
|
||||
inner,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new(node_key: NodeKey) -> Option<Self> {
|
||||
N::can_cast(node_key.kind).then_some(TypedNodeKey {
|
||||
inner: node_key,
|
||||
_marker: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<N::Ref<'a>> {
|
||||
let node_ref = self.inner.resolve(root)?;
|
||||
|
||||
Some(N::cast_ref(node_ref).unwrap())
|
||||
}
|
||||
|
||||
pub fn resolve_unwrap<'a>(&self, root: AnyNodeRef<'a>) -> N::Ref<'a> {
|
||||
self.resolve(root).expect("node should resolve")
|
||||
}
|
||||
|
||||
pub fn erased(&self) -> &NodeKey {
|
||||
&self.inner
|
||||
}
|
||||
}
|
||||
|
||||
struct FindNodeKeyVisitor<'a> {
|
||||
key: NodeKey,
|
||||
result: Option<AnyNodeRef<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> SourceOrderVisitor<'a> for FindNodeKeyVisitor<'a> {
|
||||
fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal {
|
||||
if self.result.is_some() {
|
||||
return TraversalSignal::Skip;
|
||||
}
|
||||
|
||||
if node.range() == self.key.range && node.kind() == self.key.kind {
|
||||
self.result = Some(node);
|
||||
TraversalSignal::Skip
|
||||
} else if node.range().contains_range(self.key.range) {
|
||||
TraversalSignal::Traverse
|
||||
} else {
|
||||
TraversalSignal::Skip
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_body(&mut self, body: &'a [Stmt]) {
|
||||
// TODO it would be more efficient to use binary search instead of linear
|
||||
for stmt in body {
|
||||
if stmt.range().start() > self.key.range.end() {
|
||||
break;
|
||||
}
|
||||
|
||||
self.visit_stmt(stmt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO an alternative to this is to have a `NodeId` on each node (in increasing order depending on the position).
|
||||
// This would allow reducing the size of this to a u32.
|
||||
// What would be nice if we could use an `Arc::weak_ref` here but that only works if we use
|
||||
// `Arc` internally
|
||||
// TODO: Implement the logic to resolve a node, given a db (and the correct file).
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub struct NodeKey {
|
||||
kind: NodeKind,
|
||||
range: TextRange,
|
||||
}
|
||||
|
||||
impl NodeKey {
|
||||
pub fn from_node(node: AnyNodeRef) -> Self {
|
||||
NodeKey {
|
||||
kind: node.kind(),
|
||||
range: node.range(),
|
||||
}
|
||||
}
|
||||
pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<AnyNodeRef<'a>> {
|
||||
// We need to do a binary search here. Only traverse into a node if the range is within the node
|
||||
let mut visitor = FindNodeKeyVisitor {
|
||||
key: *self,
|
||||
result: None,
|
||||
};
|
||||
|
||||
if visitor.enter_node(root) == TraversalSignal::Traverse {
|
||||
root.visit_preorder(&mut visitor);
|
||||
}
|
||||
|
||||
visitor.result
|
||||
}
|
||||
}
|
||||
|
||||
/// Marker trait implemented by AST nodes for which we extract the `AstId`.
|
||||
pub trait HasAstId: AstNode {
|
||||
fn node_key(&self) -> TypedNodeKey<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
TypedNodeKey {
|
||||
inner: self.syntax_node_key(),
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
fn syntax_node_key(&self) -> NodeKey {
|
||||
NodeKey {
|
||||
kind: self.as_any_node_ref().kind(),
|
||||
range: self.range(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasAstId for StmtFunctionDef {}
|
||||
impl HasAstId for StmtClassDef {}
|
||||
impl HasAstId for StmtAnnAssign {}
|
||||
impl HasAstId for StmtAugAssign {}
|
||||
impl HasAstId for StmtAssign {}
|
||||
impl HasAstId for StmtTypeAlias {}
|
||||
|
||||
impl HasAstId for ModModule {}
|
||||
|
||||
impl HasAstId for StmtImport {}
|
||||
|
||||
impl HasAstId for StmtImportFrom {}
|
||||
|
||||
impl HasAstId for Parameter {}
|
||||
|
||||
impl HasAstId for TypeParam {}
|
||||
impl HasAstId for Stmt {}
|
||||
impl HasAstId for TypeParamTypeVar {}
|
||||
impl HasAstId for TypeParamTypeVarTuple {}
|
||||
impl HasAstId for TypeParamParamSpec {}
|
||||
impl HasAstId for StmtGlobal {}
|
||||
impl HasAstId for StmtNonlocal {}
|
||||
|
||||
impl HasAstId for ExceptHandlerExceptHandler {}
|
||||
impl HasAstId for WithItem {}
|
||||
impl HasAstId for MatchCase {}
|
||||
165
crates/red_knot/src/cache.rs
Normal file
@@ -0,0 +1,165 @@
|
||||
use std::fmt::Formatter;
|
||||
use std::hash::Hash;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
|
||||
use crate::db::QueryResult;
|
||||
use dashmap::mapref::entry::Entry;
|
||||
|
||||
use crate::FxDashMap;
|
||||
|
||||
/// Simple key-value cache that locks at the per-key level.
|
||||
pub struct KeyValueCache<K, V> {
|
||||
map: FxDashMap<K, V>,
|
||||
statistics: CacheStatistics,
|
||||
}
|
||||
|
||||
impl<K, V> KeyValueCache<K, V>
|
||||
where
|
||||
K: Eq + Hash + Clone,
|
||||
V: Clone,
|
||||
{
|
||||
pub fn try_get(&self, key: &K) -> Option<V> {
|
||||
if let Some(existing) = self.map.get(key) {
|
||||
self.statistics.hit();
|
||||
Some(existing.clone())
|
||||
} else {
|
||||
self.statistics.miss();
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get<F>(&self, key: &K, compute: F) -> QueryResult<V>
|
||||
where
|
||||
F: FnOnce(&K) -> QueryResult<V>,
|
||||
{
|
||||
Ok(match self.map.entry(key.clone()) {
|
||||
Entry::Occupied(cached) => {
|
||||
self.statistics.hit();
|
||||
|
||||
cached.get().clone()
|
||||
}
|
||||
Entry::Vacant(vacant) => {
|
||||
self.statistics.miss();
|
||||
|
||||
let value = compute(key)?;
|
||||
vacant.insert(value.clone());
|
||||
value
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn set(&mut self, key: K, value: V) {
|
||||
self.map.insert(key, value);
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, key: &K) -> Option<V> {
|
||||
self.map.remove(key).map(|(_, value)| value)
|
||||
}
|
||||
|
||||
pub fn clear(&mut self) {
|
||||
self.map.clear();
|
||||
self.map.shrink_to_fit();
|
||||
}
|
||||
|
||||
pub fn statistics(&self) -> Option<Statistics> {
|
||||
self.statistics.to_statistics()
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> Default for KeyValueCache<K, V>
|
||||
where
|
||||
K: Eq + Hash,
|
||||
V: Clone,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
map: FxDashMap::default(),
|
||||
statistics: CacheStatistics::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> std::fmt::Debug for KeyValueCache<K, V>
|
||||
where
|
||||
K: std::fmt::Debug + Eq + Hash,
|
||||
V: std::fmt::Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let mut debug = f.debug_map();
|
||||
|
||||
for entry in &self.map {
|
||||
debug.entry(&entry.value(), &entry.key());
|
||||
}
|
||||
|
||||
debug.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct Statistics {
|
||||
pub hits: usize,
|
||||
pub misses: usize,
|
||||
}
|
||||
|
||||
impl Statistics {
|
||||
#[allow(clippy::cast_precision_loss)]
|
||||
pub fn hit_rate(&self) -> Option<f64> {
|
||||
if self.hits + self.misses == 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some((self.hits as f64) / (self.hits + self.misses) as f64)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
pub type CacheStatistics = DebugStatistics;
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
pub type CacheStatistics = ReleaseStatistics;
|
||||
|
||||
pub trait StatisticsRecorder {
|
||||
fn hit(&self);
|
||||
fn miss(&self);
|
||||
fn to_statistics(&self) -> Option<Statistics>;
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct DebugStatistics {
|
||||
hits: AtomicUsize,
|
||||
misses: AtomicUsize,
|
||||
}
|
||||
|
||||
impl StatisticsRecorder for DebugStatistics {
|
||||
// TODO figure out appropriate Ordering
|
||||
fn hit(&self) {
|
||||
self.hits.fetch_add(1, Ordering::SeqCst);
|
||||
}
|
||||
|
||||
fn miss(&self) {
|
||||
self.misses.fetch_add(1, Ordering::SeqCst);
|
||||
}
|
||||
|
||||
fn to_statistics(&self) -> Option<Statistics> {
|
||||
let hits = self.hits.load(Ordering::SeqCst);
|
||||
let misses = self.misses.load(Ordering::SeqCst);
|
||||
|
||||
Some(Statistics { hits, misses })
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct ReleaseStatistics;
|
||||
|
||||
impl StatisticsRecorder for ReleaseStatistics {
|
||||
#[inline]
|
||||
fn hit(&self) {}
|
||||
|
||||
#[inline]
|
||||
fn miss(&self) {}
|
||||
|
||||
#[inline]
|
||||
fn to_statistics(&self) -> Option<Statistics> {
|
||||
None
|
||||
}
|
||||
}
|
||||
42
crates/red_knot/src/cancellation.rs
Normal file
@@ -0,0 +1,42 @@
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct CancellationTokenSource {
|
||||
signal: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl CancellationTokenSource {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
signal: Arc::new(AtomicBool::new(false)),
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "trace", skip_all)]
|
||||
pub fn cancel(&self) {
|
||||
self.signal.store(true, std::sync::atomic::Ordering::SeqCst);
|
||||
}
|
||||
|
||||
pub fn is_cancelled(&self) -> bool {
|
||||
self.signal.load(std::sync::atomic::Ordering::SeqCst)
|
||||
}
|
||||
|
||||
pub fn token(&self) -> CancellationToken {
|
||||
CancellationToken {
|
||||
signal: self.signal.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct CancellationToken {
|
||||
signal: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl CancellationToken {
|
||||
/// Returns `true` if cancellation has been requested.
|
||||
pub fn is_cancelled(&self) -> bool {
|
||||
self.signal.load(std::sync::atomic::Ordering::SeqCst)
|
||||
}
|
||||
}
|
||||
248
crates/red_knot/src/db.rs
Normal file
@@ -0,0 +1,248 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use jars::{HasJar, HasJars};
|
||||
pub use query::{QueryError, QueryResult};
|
||||
pub use runtime::DbRuntime;
|
||||
pub use storage::JarsStorage;
|
||||
|
||||
use crate::files::FileId;
|
||||
use crate::lint::{LintSemanticStorage, LintSyntaxStorage};
|
||||
use crate::module::ModuleResolver;
|
||||
use crate::parse::ParsedStorage;
|
||||
use crate::semantic::SemanticIndexStorage;
|
||||
use crate::semantic::TypeStore;
|
||||
use crate::source::SourceStorage;
|
||||
|
||||
mod jars;
|
||||
mod query;
|
||||
mod runtime;
|
||||
mod storage;
|
||||
|
||||
pub trait Database {
|
||||
/// Returns a reference to the runtime of the current worker.
|
||||
fn runtime(&self) -> &DbRuntime;
|
||||
|
||||
/// Returns a mutable reference to the runtime. Only one worker can hold a mutable reference to the runtime.
|
||||
fn runtime_mut(&mut self) -> &mut DbRuntime;
|
||||
|
||||
/// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
|
||||
fn cancelled(&self) -> QueryResult<()> {
|
||||
self.runtime().cancelled()
|
||||
}
|
||||
|
||||
/// Returns `true` if the queries have been cancelled.
|
||||
fn is_cancelled(&self) -> bool {
|
||||
self.runtime().is_cancelled()
|
||||
}
|
||||
}
|
||||
|
||||
/// Database that supports running queries from multiple threads.
|
||||
pub trait ParallelDatabase: Database + Send {
|
||||
/// Creates a snapshot of the database state that can be used to query the database in another thread.
|
||||
///
|
||||
/// The snapshot is a read-only view of the database but query results are shared between threads.
|
||||
/// All queries will be automatically cancelled when applying any mutations (calling [`HasJars::jars_mut`])
|
||||
/// to the database (not the snapshot, because they're readonly).
|
||||
///
|
||||
/// ## Creating a snapshot
|
||||
///
|
||||
/// Creating a snapshot of the database's jars is cheap but creating a snapshot of
|
||||
/// other state stored on the database might require deep-cloning data. That's why you should
|
||||
/// avoid creating snapshots in a hot function (e.g. don't create a snapshot for each file, instead
|
||||
/// create a snapshot when scheduling the check of an entire program).
|
||||
///
|
||||
/// ## Salsa compatibility
|
||||
/// Salsa prohibits creating a snapshot while running a local query (it's fine if other workers run a query) [[source](https://github.com/salsa-rs/salsa/issues/80)].
|
||||
/// We should avoid creating snapshots while running a query because we might want to adopt Salsa in the future (if we can figure out persistent caching).
|
||||
/// Unfortunately, the infrastructure doesn't provide an automated way of knowing when a query is run, which is
|
||||
/// why we have to "enforce" this constraint manually.
|
||||
#[must_use]
|
||||
fn snapshot(&self) -> Snapshot<Self>;
|
||||
}
|
||||
|
||||
pub trait DbWithJar<Jar>: Database + HasJar<Jar> {}
|
||||
|
||||
/// Readonly snapshot of a database.
|
||||
///
|
||||
/// ## Deadlocks
|
||||
/// A snapshot should always be dropped as soon as it is no longer necessary to run queries.
|
||||
/// Storing the snapshot without running a query or periodically checking if cancellation was requested
|
||||
/// can lead to deadlocks because mutating the [`Database`] requires cancelling all pending queries
|
||||
/// and waiting for all [`Snapshot`]s to be dropped.
|
||||
#[derive(Debug)]
|
||||
pub struct Snapshot<DB: ?Sized>
|
||||
where
|
||||
DB: ParallelDatabase,
|
||||
{
|
||||
db: DB,
|
||||
}
|
||||
|
||||
impl<DB> Snapshot<DB>
|
||||
where
|
||||
DB: ParallelDatabase,
|
||||
{
|
||||
pub fn new(db: DB) -> Self {
|
||||
Snapshot { db }
|
||||
}
|
||||
}
|
||||
|
||||
impl<DB> std::ops::Deref for Snapshot<DB>
|
||||
where
|
||||
DB: ParallelDatabase,
|
||||
{
|
||||
type Target = DB;
|
||||
|
||||
fn deref(&self) -> &DB {
|
||||
&self.db
|
||||
}
|
||||
}
|
||||
|
||||
pub trait Upcast<T: ?Sized> {
|
||||
fn upcast(&self) -> &T;
|
||||
}
|
||||
|
||||
// Red knot specific databases code.
|
||||
|
||||
pub trait SourceDb: DbWithJar<SourceJar> {
|
||||
// queries
|
||||
fn file_id(&self, path: &std::path::Path) -> FileId;
|
||||
|
||||
fn file_path(&self, file_id: FileId) -> Arc<std::path::Path>;
|
||||
}
|
||||
|
||||
pub trait SemanticDb: SourceDb + DbWithJar<SemanticJar> + Upcast<dyn SourceDb> {}
|
||||
|
||||
pub trait LintDb: SemanticDb + DbWithJar<LintJar> + Upcast<dyn SemanticDb> {}
|
||||
|
||||
pub trait Db: LintDb + Upcast<dyn LintDb> {}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SourceJar {
|
||||
pub sources: SourceStorage,
|
||||
pub parsed: ParsedStorage,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SemanticJar {
|
||||
pub module_resolver: ModuleResolver,
|
||||
pub semantic_indices: SemanticIndexStorage,
|
||||
pub type_store: TypeStore,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct LintJar {
|
||||
pub lint_syntax: LintSyntaxStorage,
|
||||
pub lint_semantic: LintSemanticStorage,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::db::{
|
||||
Database, DbRuntime, DbWithJar, HasJar, HasJars, JarsStorage, LintDb, LintJar, QueryResult,
|
||||
SourceDb, SourceJar, Upcast,
|
||||
};
|
||||
use crate::files::{FileId, Files};
|
||||
|
||||
use super::{SemanticDb, SemanticJar};
|
||||
|
||||
// This can be a partial database used in a single crate for testing.
|
||||
// It would hold less data than the full database.
|
||||
#[derive(Debug, Default)]
|
||||
pub(crate) struct TestDb {
|
||||
files: Files,
|
||||
jars: JarsStorage<Self>,
|
||||
}
|
||||
|
||||
impl HasJar<SourceJar> for TestDb {
|
||||
fn jar(&self) -> QueryResult<&SourceJar> {
|
||||
Ok(&self.jars()?.0)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut SourceJar {
|
||||
&mut self.jars_mut().0
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<SemanticJar> for TestDb {
|
||||
fn jar(&self) -> QueryResult<&SemanticJar> {
|
||||
Ok(&self.jars()?.1)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut SemanticJar {
|
||||
&mut self.jars_mut().1
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<LintJar> for TestDb {
|
||||
fn jar(&self) -> QueryResult<&LintJar> {
|
||||
Ok(&self.jars()?.2)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut LintJar {
|
||||
&mut self.jars_mut().2
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceDb for TestDb {
|
||||
fn file_id(&self, path: &Path) -> FileId {
|
||||
self.files.intern(path)
|
||||
}
|
||||
|
||||
fn file_path(&self, file_id: FileId) -> Arc<Path> {
|
||||
self.files.path(file_id)
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithJar<SourceJar> for TestDb {}
|
||||
|
||||
impl Upcast<dyn SourceDb> for TestDb {
|
||||
fn upcast(&self) -> &(dyn SourceDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl SemanticDb for TestDb {}
|
||||
|
||||
impl DbWithJar<SemanticJar> for TestDb {}
|
||||
|
||||
impl Upcast<dyn SemanticDb> for TestDb {
|
||||
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl LintDb for TestDb {}
|
||||
|
||||
impl Upcast<dyn LintDb> for TestDb {
|
||||
fn upcast(&self) -> &(dyn LintDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithJar<LintJar> for TestDb {}
|
||||
|
||||
impl HasJars for TestDb {
|
||||
type Jars = (SourceJar, SemanticJar, LintJar);
|
||||
|
||||
fn jars(&self) -> QueryResult<&Self::Jars> {
|
||||
self.jars.jars()
|
||||
}
|
||||
|
||||
fn jars_mut(&mut self) -> &mut Self::Jars {
|
||||
self.jars.jars_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl Database for TestDb {
|
||||
fn runtime(&self) -> &DbRuntime {
|
||||
self.jars.runtime()
|
||||
}
|
||||
|
||||
fn runtime_mut(&mut self) -> &mut DbRuntime {
|
||||
self.jars.runtime_mut()
|
||||
}
|
||||
}
|
||||
}
|
||||
37
crates/red_knot/src/db/jars.rs
Normal file
@@ -0,0 +1,37 @@
|
||||
use crate::db::query::QueryResult;
|
||||
|
||||
/// Gives access to a specific jar in the database.
|
||||
///
|
||||
/// Nope, the terminology isn't borrowed from Java but from Salsa <https://salsa-rs.github.io/salsa/>,
|
||||
/// which is an analogy to storing the salsa in different jars.
|
||||
///
|
||||
/// The basic idea is that each crate can define its own jar and the jars can be combined to a single
|
||||
/// database in the top level crate. Each crate also defines its own `Database` trait. The combination of
|
||||
/// `Database` trait and the jar allows writing queries in isolation without having to know how they get composed at the upper levels.
|
||||
///
|
||||
/// Salsa further defines a `HasIngredient` trait which slices the jar to a specific storage (e.g. a specific cache).
|
||||
/// We don't need this just yet because we write our queries by hand. We may want a similar trait if we decide
|
||||
/// to use a macro to generate the queries.
|
||||
pub trait HasJar<T> {
|
||||
/// Gives a read-only reference to the jar.
|
||||
fn jar(&self) -> QueryResult<&T>;
|
||||
|
||||
/// Gives a mutable reference to the jar.
|
||||
fn jar_mut(&mut self) -> &mut T;
|
||||
}
|
||||
|
||||
/// Gives access to the jars in a database.
|
||||
pub trait HasJars {
|
||||
/// A type storing the jars.
|
||||
///
|
||||
/// Most commonly, this is a tuple where each jar is a tuple element.
|
||||
type Jars: Default;
|
||||
|
||||
/// Gives access to the underlying jars but tests if the queries have been cancelled.
|
||||
///
|
||||
/// Returns `Err(QueryError::Cancelled)` if the queries have been cancelled.
|
||||
fn jars(&self) -> QueryResult<&Self::Jars>;
|
||||
|
||||
/// Gives mutable access to the underlying jars.
|
||||
fn jars_mut(&mut self) -> &mut Self::Jars;
|
||||
}
|
||||
20
crates/red_knot/src/db/query.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
/// Reason why a db query operation failed.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum QueryError {
|
||||
/// The query was cancelled because the DB was mutated or the query was cancelled by the host (e.g. on a file change or when pressing CTRL+C).
|
||||
Cancelled,
|
||||
}
|
||||
|
||||
impl Display for QueryError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
QueryError::Cancelled => f.write_str("query was cancelled"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for QueryError {}
|
||||
|
||||
pub type QueryResult<T> = Result<T, QueryError>;
|
||||
41
crates/red_knot/src/db/runtime.rs
Normal file
@@ -0,0 +1,41 @@
|
||||
use crate::cancellation::CancellationTokenSource;
|
||||
use crate::db::{QueryError, QueryResult};
|
||||
|
||||
/// Holds the jar agnostic state of the database.
|
||||
#[derive(Debug, Default)]
|
||||
pub struct DbRuntime {
|
||||
/// The cancellation token source used to signal other workers that the queries should be aborted and
|
||||
/// exit at the next possible point.
|
||||
cancellation_token: CancellationTokenSource,
|
||||
}
|
||||
|
||||
impl DbRuntime {
|
||||
pub(super) fn snapshot(&self) -> Self {
|
||||
Self {
|
||||
cancellation_token: self.cancellation_token.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Cancels the pending queries of other workers. The current worker cannot have any pending
|
||||
/// queries because we're holding a mutable reference to the runtime.
|
||||
pub(super) fn cancel_other_workers(&mut self) {
|
||||
self.cancellation_token.cancel();
|
||||
// Set a new cancellation token so that we're in a non-cancelled state again when running the next
|
||||
// query.
|
||||
self.cancellation_token = CancellationTokenSource::default();
|
||||
}
|
||||
|
||||
/// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
|
||||
pub(super) fn cancelled(&self) -> QueryResult<()> {
|
||||
if self.cancellation_token.is_cancelled() {
|
||||
Err(QueryError::Cancelled)
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the queries have been cancelled.
|
||||
pub(super) fn is_cancelled(&self) -> bool {
|
||||
self.cancellation_token.is_cancelled()
|
||||
}
|
||||
}
|
||||
117
crates/red_knot/src/db/storage.rs
Normal file
@@ -0,0 +1,117 @@
|
||||
use std::fmt::Formatter;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crossbeam::sync::WaitGroup;
|
||||
|
||||
use crate::db::query::QueryResult;
|
||||
use crate::db::runtime::DbRuntime;
|
||||
use crate::db::{HasJars, ParallelDatabase};
|
||||
|
||||
/// Stores the jars of a database and the state for each worker.
|
||||
///
|
||||
/// Today, all state is shared across all workers, but it may be desired to store data per worker in the future.
|
||||
pub struct JarsStorage<T>
|
||||
where
|
||||
T: HasJars + Sized,
|
||||
{
|
||||
// It's important that `jars_wait_group` is declared after `jars` to ensure that `jars` is dropped first.
|
||||
// See https://doc.rust-lang.org/reference/destructors.html
|
||||
/// Stores the jars of the database.
|
||||
jars: Arc<T::Jars>,
|
||||
|
||||
/// Used to count the references to `jars`. Allows implementing `jars_mut` without requiring to clone `jars`.
|
||||
jars_wait_group: WaitGroup,
|
||||
|
||||
/// The data agnostic state.
|
||||
runtime: DbRuntime,
|
||||
}
|
||||
|
||||
impl<Db> JarsStorage<Db>
|
||||
where
|
||||
Db: HasJars,
|
||||
{
|
||||
pub(super) fn new() -> Self {
|
||||
Self {
|
||||
jars: Arc::new(Db::Jars::default()),
|
||||
jars_wait_group: WaitGroup::default(),
|
||||
runtime: DbRuntime::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a snapshot of the jars.
|
||||
///
|
||||
/// Creating the snapshot is cheap because it doesn't clone the jars, it only increments a ref counter.
|
||||
#[must_use]
|
||||
pub fn snapshot(&self) -> JarsStorage<Db>
|
||||
where
|
||||
Db: ParallelDatabase,
|
||||
{
|
||||
Self {
|
||||
jars: self.jars.clone(),
|
||||
jars_wait_group: self.jars_wait_group.clone(),
|
||||
runtime: self.runtime.snapshot(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn jars(&self) -> QueryResult<&Db::Jars> {
|
||||
self.runtime.cancelled()?;
|
||||
Ok(&self.jars)
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the jars without cloning their content.
|
||||
///
|
||||
/// The method cancels any pending queries of other workers and waits for them to complete so that
|
||||
/// this instance is the only instance holding a reference to the jars.
|
||||
pub(crate) fn jars_mut(&mut self) -> &mut Db::Jars {
|
||||
// We have a mutable ref here, so no more workers can be spawned between calling this function and taking the mut ref below.
|
||||
self.cancel_other_workers();
|
||||
|
||||
// Now all other references to `self.jars` should have been released. We can now safely return a mutable reference
|
||||
// to the Arc's content.
|
||||
let jars =
|
||||
Arc::get_mut(&mut self.jars).expect("All references to jars should have been released");
|
||||
|
||||
jars
|
||||
}
|
||||
|
||||
pub(crate) fn runtime(&self) -> &DbRuntime {
|
||||
&self.runtime
|
||||
}
|
||||
|
||||
pub(crate) fn runtime_mut(&mut self) -> &mut DbRuntime {
|
||||
// Note: This method may need to use a similar trick to `jars_mut` if `DbRuntime` is ever to store data that is shared between workers.
|
||||
&mut self.runtime
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "trace", skip(self))]
|
||||
fn cancel_other_workers(&mut self) {
|
||||
self.runtime.cancel_other_workers();
|
||||
|
||||
// Wait for all other workers to complete.
|
||||
let existing_wait = std::mem::take(&mut self.jars_wait_group);
|
||||
existing_wait.wait();
|
||||
}
|
||||
}
|
||||
|
||||
impl<Db> Default for JarsStorage<Db>
|
||||
where
|
||||
Db: HasJars,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> std::fmt::Debug for JarsStorage<T>
|
||||
where
|
||||
T: HasJars,
|
||||
<T as HasJars>::Jars: std::fmt::Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("SharedStorage")
|
||||
.field("jars", &self.jars)
|
||||
.field("jars_wait_group", &self.jars_wait_group)
|
||||
.field("runtime", &self.runtime)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
180
crates/red_knot/src/files.rs
Normal file
@@ -0,0 +1,180 @@
|
||||
use std::fmt::{Debug, Formatter};
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use hashbrown::hash_map::RawEntryMut;
|
||||
use parking_lot::RwLock;
|
||||
use rustc_hash::FxHasher;
|
||||
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
|
||||
type Map<K, V> = hashbrown::HashMap<K, V, ()>;
|
||||
|
||||
#[newtype_index]
|
||||
pub struct FileId;
|
||||
|
||||
// TODO we'll need a higher level virtual file system abstraction that allows testing if a file exists
|
||||
// or retrieving its content (ideally lazily and in a way that the memory can be retained later)
|
||||
// I suspect that we'll end up with a FileSystem trait and our own Path abstraction.
|
||||
#[derive(Default)]
|
||||
pub struct Files {
|
||||
inner: Arc<RwLock<FilesInner>>,
|
||||
}
|
||||
|
||||
impl Files {
|
||||
#[tracing::instrument(level = "debug", skip(self))]
|
||||
pub fn intern(&self, path: &Path) -> FileId {
|
||||
self.inner.write().intern(path)
|
||||
}
|
||||
|
||||
pub fn try_get(&self, path: &Path) -> Option<FileId> {
|
||||
self.inner.read().try_get(path)
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(self))]
|
||||
pub fn path(&self, id: FileId) -> Arc<Path> {
|
||||
self.inner.read().path(id)
|
||||
}
|
||||
|
||||
/// Snapshots files for a new database snapshot.
|
||||
///
|
||||
/// This method should not be used outside a database snapshot.
|
||||
#[must_use]
|
||||
pub fn snapshot(&self) -> Files {
|
||||
Files {
|
||||
inner: self.inner.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Files {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let files = self.inner.read();
|
||||
let mut debug = f.debug_map();
|
||||
for item in files.iter() {
|
||||
debug.entry(&item.0, &item.1);
|
||||
}
|
||||
|
||||
debug.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Files {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.inner.read().eq(&other.inner.read())
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Files {}
|
||||
|
||||
#[derive(Default)]
|
||||
struct FilesInner {
|
||||
by_path: Map<FileId, ()>,
|
||||
// TODO should we use a map here to reclaim the space for removed files?
|
||||
// TODO I think we should use our own path abstraction here to avoid having to normalize paths
|
||||
// and deal with non-UTF-8 paths everywhere.
|
||||
by_id: IndexVec<FileId, Arc<Path>>,
|
||||
}
|
||||
|
||||
impl FilesInner {
|
||||
/// Inserts the path and returns a new id for it or returns the id if it is an existing path.
|
||||
// TODO should this accept Path or PathBuf?
|
||||
pub(crate) fn intern(&mut self, path: &Path) -> FileId {
|
||||
let hash = FilesInner::hash_path(path);
|
||||
|
||||
let entry = self
|
||||
.by_path
|
||||
.raw_entry_mut()
|
||||
.from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path);
|
||||
|
||||
match entry {
|
||||
RawEntryMut::Occupied(entry) => *entry.key(),
|
||||
RawEntryMut::Vacant(entry) => {
|
||||
let id = self.by_id.push(Arc::from(path));
|
||||
entry.insert_with_hasher(hash, id, (), |file| {
|
||||
FilesInner::hash_path(&self.by_id[*file])
|
||||
});
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn hash_path(path: &Path) -> u64 {
|
||||
let mut hasher = FxHasher::default();
|
||||
path.hash(&mut hasher);
|
||||
hasher.finish()
|
||||
}
|
||||
|
||||
pub(crate) fn try_get(&self, path: &Path) -> Option<FileId> {
|
||||
let mut hasher = FxHasher::default();
|
||||
path.hash(&mut hasher);
|
||||
let hash = hasher.finish();
|
||||
|
||||
Some(
|
||||
*self
|
||||
.by_path
|
||||
.raw_entry()
|
||||
.from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path)?
|
||||
.0,
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns the path for the file with the given id.
|
||||
pub(crate) fn path(&self, id: FileId) -> Arc<Path> {
|
||||
self.by_id[id].clone()
|
||||
}
|
||||
|
||||
pub(crate) fn iter(&self) -> impl Iterator<Item = (FileId, Arc<Path>)> + '_ {
|
||||
self.by_path.keys().map(|id| (*id, self.by_id[*id].clone()))
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for FilesInner {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.by_id == other.by_id
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for FilesInner {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[test]
|
||||
fn insert_path_twice_same_id() {
|
||||
let files = Files::default();
|
||||
let path = PathBuf::from("foo/bar");
|
||||
let id1 = files.intern(&path);
|
||||
let id2 = files.intern(&path);
|
||||
assert_eq!(id1, id2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_different_paths_different_ids() {
|
||||
let files = Files::default();
|
||||
let path1 = PathBuf::from("foo/bar");
|
||||
let path2 = PathBuf::from("foo/bar/baz");
|
||||
let id1 = files.intern(&path1);
|
||||
let id2 = files.intern(&path2);
|
||||
assert_ne!(id1, id2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn four_files() {
|
||||
let files = Files::default();
|
||||
let foo_path = PathBuf::from("foo");
|
||||
let foo_id = files.intern(&foo_path);
|
||||
let bar_path = PathBuf::from("bar");
|
||||
files.intern(&bar_path);
|
||||
let baz_path = PathBuf::from("baz");
|
||||
files.intern(&baz_path);
|
||||
let qux_path = PathBuf::from("qux");
|
||||
files.intern(&qux_path);
|
||||
|
||||
let foo_id_2 = files.try_get(&foo_path).expect("foo_path to be found");
|
||||
assert_eq!(foo_id_2, foo_id);
|
||||
}
|
||||
}
|
||||
67
crates/red_knot/src/hir.rs
Normal file
@@ -0,0 +1,67 @@
|
||||
//! Key observations
|
||||
//!
|
||||
//! The HIR (High-Level Intermediate Representation) avoids allocations to a large extent by:
|
||||
//! * Using an arena per node type
|
||||
//! * using ids and id ranges to reference items.
|
||||
//!
|
||||
//! Using separate arena per node type has the advantage that the IDs are relatively stable, because
|
||||
//! they only change when a node of the same kind has been added or removed. (What's unclear is if that matters or if
|
||||
//! it still triggers a re-compute because the AST-id in the node has changed).
|
||||
//!
|
||||
//! The HIR does not store all details. It mainly stores the *public* interface. There's a reference
|
||||
//! back to the AST node to get more details.
|
||||
//!
|
||||
//!
|
||||
|
||||
use crate::ast_ids::{HasAstId, TypedAstId};
|
||||
use crate::files::FileId;
|
||||
use std::fmt::Formatter;
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
pub struct HirAstId<N: HasAstId> {
|
||||
file_id: FileId,
|
||||
node_id: TypedAstId<N>,
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Copy for HirAstId<N> {}
|
||||
impl<N: HasAstId> Clone for HirAstId<N> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> PartialEq for HirAstId<N> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.file_id == other.file_id && self.node_id == other.node_id
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Eq for HirAstId<N> {}
|
||||
|
||||
impl<N: HasAstId> std::fmt::Debug for HirAstId<N> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("HirAstId")
|
||||
.field("file_id", &self.file_id)
|
||||
.field("node_id", &self.node_id)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Hash for HirAstId<N> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.file_id.hash(state);
|
||||
self.node_id.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> HirAstId<N> {
|
||||
pub fn upcast<M: HasAstId>(self) -> HirAstId<M>
|
||||
where
|
||||
N: Into<M>,
|
||||
{
|
||||
HirAstId {
|
||||
file_id: self.file_id,
|
||||
node_id: self.node_id.upcast(),
|
||||
}
|
||||
}
|
||||
}
|
||||
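One note on the API above: `upcast` widens a node-specific id to a broader node category without losing the file association. A hedged sketch, assuming `StmtFunctionDef: Into<Stmt>` and that both types implement `HasAstId` (as other code in this diff suggests):

// Hypothetical helper: widen a function-definition id to a generic statement id.
fn as_stmt_id(id: HirAstId<StmtFunctionDef>) -> HirAstId<Stmt> {
    id.upcast()
}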
556
crates/red_knot/src/hir/definition.rs
Normal file
@@ -0,0 +1,556 @@
|
||||
use std::ops::{Index, Range};
|
||||
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
use ruff_python_ast::visitor::preorder;
|
||||
use ruff_python_ast::visitor::preorder::PreorderVisitor;
|
||||
use ruff_python_ast::{
|
||||
Decorator, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule, Stmt,
|
||||
StmtAnnAssign, StmtAssign, StmtClassDef, StmtFunctionDef, StmtGlobal, StmtImport,
|
||||
StmtImportFrom, StmtNonlocal, StmtTypeAlias, TypeParam, TypeParamParamSpec, TypeParamTypeVar,
|
||||
TypeParamTypeVarTuple, WithItem,
|
||||
};
|
||||
|
||||
use crate::ast_ids::{AstIds, HasAstId};
|
||||
use crate::files::FileId;
|
||||
use crate::hir::HirAstId;
|
||||
use crate::Name;
|
||||
|
||||
#[newtype_index]
|
||||
pub struct FunctionId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Function {
|
||||
ast_id: HirAstId<StmtFunctionDef>,
|
||||
name: Name,
|
||||
parameters: Range<ParameterId>,
|
||||
type_parameters: Range<TypeParameterId>, // TODO: return expression, decorators
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct ParameterId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Parameter {
|
||||
kind: ParameterKind,
|
||||
name: Name,
|
||||
default: Option<()>, // TODO use expression HIR
|
||||
ast_id: HirAstId<ruff_python_ast::Parameter>,
|
||||
}
|
||||
|
||||
// TODO or should `Parameter` be an enum?
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub enum ParameterKind {
|
||||
PositionalOnly,
|
||||
Arguments,
|
||||
Vararg,
|
||||
KeywordOnly,
|
||||
Kwarg,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct ClassId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Class {
|
||||
name: Name,
|
||||
ast_id: HirAstId<StmtClassDef>,
|
||||
// TODO type parameters, inheritance, decorators, members
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct AssignmentId;
|
||||
|
||||
// This can have more than one name...
|
||||
// but that means we can't implement `name()` on `ModuleItem`.
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Assignment {
|
||||
// TODO: Handle multiple names / targets
|
||||
name: Name,
|
||||
ast_id: HirAstId<StmtAssign>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct AnnotatedAssignment {
|
||||
name: Name,
|
||||
ast_id: HirAstId<StmtAnnAssign>,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct AnnotatedAssignmentId;
|
||||
|
||||
#[newtype_index]
|
||||
pub struct TypeAliasId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TypeAlias {
|
||||
name: Name,
|
||||
ast_id: HirAstId<StmtTypeAlias>,
|
||||
parameters: Range<TypeParameterId>,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct TypeParameterId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub enum TypeParameter {
|
||||
TypeVar(TypeParameterTypeVar),
|
||||
ParamSpec(TypeParameterParamSpec),
|
||||
TypeVarTuple(TypeParameterTypeVarTuple),
|
||||
}
|
||||
|
||||
impl TypeParameter {
|
||||
pub fn ast_id(&self) -> HirAstId<TypeParam> {
|
||||
match self {
|
||||
TypeParameter::TypeVar(type_var) => type_var.ast_id.upcast(),
|
||||
TypeParameter::ParamSpec(param_spec) => param_spec.ast_id.upcast(),
|
||||
TypeParameter::TypeVarTuple(type_var_tuple) => type_var_tuple.ast_id.upcast(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TypeParameterTypeVar {
|
||||
name: Name,
|
||||
ast_id: HirAstId<TypeParamTypeVar>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TypeParameterParamSpec {
|
||||
name: Name,
|
||||
ast_id: HirAstId<TypeParamParamSpec>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TypeParameterTypeVarTuple {
|
||||
name: Name,
|
||||
ast_id: HirAstId<TypeParamTypeVarTuple>,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct GlobalId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Global {
|
||||
// TODO track names
|
||||
ast_id: HirAstId<StmtGlobal>,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct NonLocalId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct NonLocal {
|
||||
// TODO track names
|
||||
ast_id: HirAstId<StmtNonlocal>,
|
||||
}
|
||||
|
||||
pub enum DefinitionId {
|
||||
Function(FunctionId),
|
||||
Parameter(ParameterId),
|
||||
Class(ClassId),
|
||||
Assignment(AssignmentId),
|
||||
AnnotatedAssignment(AnnotatedAssignmentId),
|
||||
Global(GlobalId),
|
||||
NonLocal(NonLocalId),
|
||||
TypeParameter(TypeParameterId),
|
||||
TypeAlias(TypeAlias),
|
||||
}
|
||||
|
||||
pub enum DefinitionItem {
|
||||
Function(Function),
|
||||
Parameter(Parameter),
|
||||
Class(Class),
|
||||
Assignment(Assignment),
|
||||
AnnotatedAssignment(AnnotatedAssignment),
|
||||
Global(Global),
|
||||
NonLocal(NonLocal),
|
||||
TypeParameter(TypeParameter),
|
||||
TypeAlias(TypeAlias),
|
||||
}
|
||||
|
||||
// The closest analogue is rust-analyzer's item tree. It only represents "items", which make up the public interface of a module
// (it excludes any other statements or expressions). rust-analyzer uses it as the main input to the name resolution
// algorithm:
|
||||
// > It is the input to the name resolution algorithm, as well as to the queries defined in `adt.rs`,
|
||||
// > `data.rs`, and most things in `attr.rs`.
|
||||
//
|
||||
// > One important purpose of this layer is to provide an "invalidation barrier" for incremental
|
||||
// > computations: when typing inside an item body, the `ItemTree` of the modified file is typically
|
||||
// > unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`).
|
||||
//
|
||||
// I haven't fully figured this out but I think that this composes the "public" interface of a module?
|
||||
// But maybe that's too optimistic.
|
||||
//
|
||||
//
|
||||
#[derive(Debug, Clone, Default, Eq, PartialEq)]
|
||||
pub struct Definitions {
|
||||
functions: IndexVec<FunctionId, Function>,
|
||||
parameters: IndexVec<ParameterId, Parameter>,
|
||||
classes: IndexVec<ClassId, Class>,
|
||||
assignments: IndexVec<AssignmentId, Assignment>,
|
||||
annotated_assignments: IndexVec<AnnotatedAssignmentId, AnnotatedAssignment>,
|
||||
type_aliases: IndexVec<TypeAliasId, TypeAlias>,
|
||||
type_parameters: IndexVec<TypeParameterId, TypeParameter>,
|
||||
globals: IndexVec<GlobalId, Global>,
|
||||
non_locals: IndexVec<NonLocalId, NonLocal>,
|
||||
}
|
||||
|
||||
impl Definitions {
|
||||
pub fn from_module(module: &ModModule, ast_ids: &AstIds, file_id: FileId) -> Self {
|
||||
let mut visitor = DefinitionsVisitor {
|
||||
definitions: Definitions::default(),
|
||||
ast_ids,
|
||||
file_id,
|
||||
};
|
||||
|
||||
visitor.visit_body(&module.body);
|
||||
|
||||
visitor.definitions
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<FunctionId> for Definitions {
|
||||
type Output = Function;
|
||||
|
||||
fn index(&self, index: FunctionId) -> &Self::Output {
|
||||
&self.functions[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<ParameterId> for Definitions {
|
||||
type Output = Parameter;
|
||||
|
||||
fn index(&self, index: ParameterId) -> &Self::Output {
|
||||
&self.parameters[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<ClassId> for Definitions {
|
||||
type Output = Class;
|
||||
|
||||
fn index(&self, index: ClassId) -> &Self::Output {
|
||||
&self.classes[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<AssignmentId> for Definitions {
|
||||
type Output = Assignment;
|
||||
|
||||
fn index(&self, index: AssignmentId) -> &Self::Output {
|
||||
&self.assignments[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<AnnotatedAssignmentId> for Definitions {
|
||||
type Output = AnnotatedAssignment;
|
||||
|
||||
fn index(&self, index: AnnotatedAssignmentId) -> &Self::Output {
|
||||
&self.annotated_assignments[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<TypeAliasId> for Definitions {
|
||||
type Output = TypeAlias;
|
||||
|
||||
fn index(&self, index: TypeAliasId) -> &Self::Output {
|
||||
&self.type_aliases[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<GlobalId> for Definitions {
|
||||
type Output = Global;
|
||||
|
||||
fn index(&self, index: GlobalId) -> &Self::Output {
|
||||
&self.globals[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<NonLocalId> for Definitions {
|
||||
type Output = NonLocal;
|
||||
|
||||
fn index(&self, index: NonLocalId) -> &Self::Output {
|
||||
&self.non_locals[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<TypeParameterId> for Definitions {
|
||||
type Output = TypeParameter;
|
||||
|
||||
fn index(&self, index: TypeParameterId) -> &Self::Output {
|
||||
&self.type_parameters[index]
|
||||
}
|
||||
}
|
||||
|
||||
struct DefinitionsVisitor<'a> {
|
||||
definitions: Definitions,
|
||||
ast_ids: &'a AstIds,
|
||||
file_id: FileId,
|
||||
}
|
||||
|
||||
impl DefinitionsVisitor<'_> {
|
||||
fn ast_id<N: HasAstId>(&self, node: &N) -> HirAstId<N> {
|
||||
HirAstId {
|
||||
file_id: self.file_id,
|
||||
node_id: self.ast_ids.ast_id(node),
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_function_def(&mut self, function: &StmtFunctionDef) -> FunctionId {
|
||||
let name = Name::new(&function.name);
|
||||
|
||||
let first_type_parameter_id = self.definitions.type_parameters.next_index();
|
||||
let mut last_type_parameter_id = first_type_parameter_id;
|
||||
|
||||
if let Some(type_params) = &function.type_params {
|
||||
for parameter in &type_params.type_params {
|
||||
let id = self.lower_type_parameter(parameter);
|
||||
last_type_parameter_id = id;
|
||||
}
|
||||
}
|
||||
|
||||
let parameters = self.lower_parameters(&function.parameters);
|
||||
|
||||
self.definitions.functions.push(Function {
|
||||
name,
|
||||
ast_id: self.ast_id(function),
|
||||
parameters,
|
||||
type_parameters: first_type_parameter_id..last_type_parameter_id,
|
||||
})
|
||||
}
|
||||
|
||||
fn lower_parameters(&mut self, parameters: &ruff_python_ast::Parameters) -> Range<ParameterId> {
|
||||
let first_parameter_id = self.definitions.parameters.next_index();
|
||||
let mut last_parameter_id = first_parameter_id;
|
||||
|
||||
for parameter in &parameters.posonlyargs {
|
||||
last_parameter_id = self.definitions.parameters.push(Parameter {
|
||||
kind: ParameterKind::PositionalOnly,
|
||||
name: Name::new(&parameter.parameter.name),
|
||||
default: None,
|
||||
ast_id: self.ast_id(&parameter.parameter),
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(vararg) = &parameters.vararg {
|
||||
last_parameter_id = self.definitions.parameters.push(Parameter {
|
||||
kind: ParameterKind::Vararg,
|
||||
name: Name::new(&vararg.name),
|
||||
default: None,
|
||||
ast_id: self.ast_id(vararg),
|
||||
});
|
||||
}
|
||||
|
||||
for parameter in &parameters.kwonlyargs {
|
||||
last_parameter_id = self.definitions.parameters.push(Parameter {
|
||||
kind: ParameterKind::KeywordOnly,
|
||||
name: Name::new(&parameter.parameter.name),
|
||||
default: None,
|
||||
ast_id: self.ast_id(&parameter.parameter),
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(kwarg) = &parameters.kwarg {
last_parameter_id = self.definitions.parameters.push(Parameter {
kind: ParameterKind::Kwarg,
|
||||
name: Name::new(&kwarg.name),
|
||||
default: None,
|
||||
ast_id: self.ast_id(kwarg),
|
||||
});
|
||||
}
|
||||
|
||||
first_parameter_id..last_parameter_id
|
||||
}
|
||||
|
||||
fn lower_class_def(&mut self, class: &StmtClassDef) -> ClassId {
|
||||
let name = Name::new(&class.name);
|
||||
|
||||
self.definitions.classes.push(Class {
|
||||
name,
|
||||
ast_id: self.ast_id(class),
|
||||
})
|
||||
}
|
||||
|
||||
fn lower_assignment(&mut self, assignment: &StmtAssign) {
|
||||
// FIXME handle multiple names
|
||||
if let Some(Expr::Name(name)) = assignment.targets.first() {
|
||||
self.definitions.assignments.push(Assignment {
|
||||
name: Name::new(&name.id),
|
||||
ast_id: self.ast_id(assignment),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_annotated_assignment(&mut self, annotated_assignment: &StmtAnnAssign) {
|
||||
if let Expr::Name(name) = &*annotated_assignment.target {
|
||||
self.definitions
|
||||
.annotated_assignments
|
||||
.push(AnnotatedAssignment {
|
||||
name: Name::new(&name.id),
|
||||
ast_id: self.ast_id(annotated_assignment),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_type_alias(&mut self, type_alias: &StmtTypeAlias) {
|
||||
if let Expr::Name(name) = &*type_alias.name {
|
||||
let name = Name::new(&name.id);
|
||||
|
||||
let lower_parameters_id = self.definitions.type_parameters.next_index();
|
||||
let mut last_parameter_id = lower_parameters_id;
|
||||
|
||||
if let Some(type_params) = &type_alias.type_params {
|
||||
for type_parameter in &type_params.type_params {
|
||||
let id = self.lower_type_parameter(type_parameter);
|
||||
last_parameter_id = id;
|
||||
}
|
||||
}
|
||||
|
||||
self.definitions.type_aliases.push(TypeAlias {
|
||||
name,
|
||||
ast_id: self.ast_id(type_alias),
|
||||
parameters: lower_parameters_id..last_parameter_id,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_type_parameter(&mut self, type_parameter: &TypeParam) -> TypeParameterId {
|
||||
match type_parameter {
|
||||
TypeParam::TypeVar(type_var) => {
|
||||
self.definitions
|
||||
.type_parameters
|
||||
.push(TypeParameter::TypeVar(TypeParameterTypeVar {
|
||||
name: Name::new(&type_var.name),
|
||||
ast_id: self.ast_id(type_var),
|
||||
}))
|
||||
}
|
||||
TypeParam::ParamSpec(param_spec) => {
|
||||
self.definitions
|
||||
.type_parameters
|
||||
.push(TypeParameter::ParamSpec(TypeParameterParamSpec {
|
||||
name: Name::new(&param_spec.name),
|
||||
ast_id: self.ast_id(param_spec),
|
||||
}))
|
||||
}
|
||||
TypeParam::TypeVarTuple(type_var_tuple) => {
|
||||
self.definitions
|
||||
.type_parameters
|
||||
.push(TypeParameter::TypeVarTuple(TypeParameterTypeVarTuple {
|
||||
name: Name::new(&type_var_tuple.name),
|
||||
ast_id: self.ast_id(type_var_tuple),
|
||||
}))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_import(&mut self, _import: &StmtImport) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fn lower_import_from(&mut self, _import_from: &StmtImportFrom) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fn lower_global(&mut self, global: &StmtGlobal) -> GlobalId {
|
||||
self.definitions.globals.push(Global {
|
||||
ast_id: self.ast_id(global),
|
||||
})
|
||||
}
|
||||
|
||||
fn lower_non_local(&mut self, non_local: &StmtNonlocal) -> NonLocalId {
|
||||
self.definitions.non_locals.push(NonLocal {
|
||||
ast_id: self.ast_id(non_local),
|
||||
})
|
||||
}
|
||||
|
||||
fn lower_except_handler(&mut self, _except_handler: &ExceptHandlerExceptHandler) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fn lower_with_item(&mut self, _with_item: &WithItem) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fn lower_match_case(&mut self, _match_case: &MatchCase) {
|
||||
// TODO
|
||||
}
|
||||
}
|
||||
|
||||
impl PreorderVisitor<'_> for DefinitionsVisitor<'_> {
|
||||
fn visit_stmt(&mut self, stmt: &Stmt) {
|
||||
match stmt {
|
||||
// Definition statements
|
||||
Stmt::FunctionDef(definition) => {
|
||||
self.lower_function_def(definition);
|
||||
self.visit_body(&definition.body);
|
||||
}
|
||||
Stmt::ClassDef(definition) => {
|
||||
self.lower_class_def(definition);
|
||||
self.visit_body(&definition.body);
|
||||
}
|
||||
Stmt::Assign(assignment) => {
|
||||
self.lower_assignment(assignment);
|
||||
}
|
||||
Stmt::AnnAssign(annotated_assignment) => {
|
||||
self.lower_annotated_assignment(annotated_assignment);
|
||||
}
|
||||
Stmt::TypeAlias(type_alias) => {
|
||||
self.lower_type_alias(type_alias);
|
||||
}
|
||||
|
||||
Stmt::Import(import) => self.lower_import(import),
|
||||
Stmt::ImportFrom(import_from) => self.lower_import_from(import_from),
|
||||
Stmt::Global(global) => {
|
||||
self.lower_global(global);
|
||||
}
|
||||
Stmt::Nonlocal(non_local) => {
|
||||
self.lower_non_local(non_local);
|
||||
}
|
||||
|
||||
// Visit the compound statement bodies because they can contain other definitions.
|
||||
Stmt::For(_)
|
||||
| Stmt::While(_)
|
||||
| Stmt::If(_)
|
||||
| Stmt::With(_)
|
||||
| Stmt::Match(_)
|
||||
| Stmt::Try(_) => {
|
||||
preorder::walk_stmt(self, stmt);
|
||||
}
|
||||
|
||||
// Skip over simple statements because they can't contain any other definitions.
|
||||
Stmt::Return(_)
|
||||
| Stmt::Delete(_)
|
||||
| Stmt::AugAssign(_)
|
||||
| Stmt::Raise(_)
|
||||
| Stmt::Assert(_)
|
||||
| Stmt::Expr(_)
|
||||
| Stmt::Pass(_)
|
||||
| Stmt::Break(_)
|
||||
| Stmt::Continue(_)
|
||||
| Stmt::IpyEscapeCommand(_) => {
|
||||
// No op
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, _: &'_ Expr) {}
|
||||
|
||||
fn visit_decorator(&mut self, _decorator: &'_ Decorator) {}
|
||||
|
||||
fn visit_except_handler(&mut self, except_handler: &'_ ExceptHandler) {
|
||||
match except_handler {
|
||||
ExceptHandler::ExceptHandler(except_handler) => {
|
||||
self.lower_except_handler(except_handler);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_with_item(&mut self, with_item: &'_ WithItem) {
|
||||
self.lower_with_item(with_item);
|
||||
}
|
||||
|
||||
fn visit_match_case(&mut self, match_case: &'_ MatchCase) {
|
||||
self.lower_match_case(match_case);
|
||||
self.visit_body(&match_case.body);
|
||||
}
|
||||
}
|
||||
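For orientation, a minimal sketch of how the lowering above would be driven and queried; the `module`, `ast_ids`, and `file_id` values here are assumed inputs, not part of this diff:

// Lower a parsed module into its `Definitions` arenas, then index by id.
let definitions = Definitions::from_module(&module, &ast_ids, file_id);
// Ranges stored on items (e.g. `Function::parameters`) index back into the arenas:
// `&definitions[parameter_id]` yields a `&Parameter`, via the `Index` impls above.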
108
crates/red_knot/src/lib.rs
Normal file
@@ -0,0 +1,108 @@
|
||||
use std::fmt::Formatter;
|
||||
use std::hash::BuildHasherDefault;
|
||||
use std::ops::Deref;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use rustc_hash::{FxHashSet, FxHasher};
|
||||
|
||||
use crate::files::FileId;
|
||||
|
||||
pub mod ast_ids;
|
||||
pub mod cache;
|
||||
pub mod cancellation;
|
||||
pub mod db;
|
||||
pub mod files;
|
||||
pub mod hir;
|
||||
pub mod lint;
|
||||
pub mod module;
|
||||
mod parse;
|
||||
pub mod program;
|
||||
mod semantic;
|
||||
pub mod source;
|
||||
pub mod watch;
|
||||
|
||||
pub(crate) type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
|
||||
#[allow(unused)]
|
||||
pub(crate) type FxDashSet<V> = dashmap::DashSet<V, BuildHasherDefault<FxHasher>>;
|
||||
pub(crate) type FxIndexSet<V> = indexmap::set::IndexSet<V, BuildHasherDefault<FxHasher>>;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Workspace {
|
||||
/// TODO this should be a resolved path. We should probably use a newtype wrapper that guarantees that
/// the path is a UTF-8 path and is normalized.
|
||||
root: PathBuf,
|
||||
/// The files that are open in the workspace.
|
||||
///
|
||||
/// * Editor: The files that are actively being edited in the editor (the user has a tab open with the file).
|
||||
/// * CLI: The resolved files passed as arguments to the CLI.
|
||||
open_files: FxHashSet<FileId>,
|
||||
}
|
||||
|
||||
impl Workspace {
|
||||
pub fn new(root: PathBuf) -> Self {
|
||||
Self {
|
||||
root,
|
||||
open_files: FxHashSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn root(&self) -> &Path {
|
||||
self.root.as_path()
|
||||
}
|
||||
|
||||
// TODO having the content in workspace feels wrong.
|
||||
pub fn open_file(&mut self, file_id: FileId) {
|
||||
self.open_files.insert(file_id);
|
||||
}
|
||||
|
||||
pub fn close_file(&mut self, file_id: FileId) {
|
||||
self.open_files.remove(&file_id);
|
||||
}
|
||||
|
||||
// TODO introduce an `OpenFile` type instead of using an anonymous tuple.
|
||||
pub fn open_files(&self) -> impl Iterator<Item = FileId> + '_ {
|
||||
self.open_files.iter().copied()
|
||||
}
|
||||
|
||||
pub fn is_file_open(&self, file_id: FileId) -> bool {
|
||||
self.open_files.contains(&file_id)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
|
||||
pub struct Name(smol_str::SmolStr);
|
||||
|
||||
impl Name {
|
||||
#[inline]
|
||||
pub fn new(name: &str) -> Self {
|
||||
Self(smol_str::SmolStr::new(name))
|
||||
}
|
||||
|
||||
pub fn as_str(&self) -> &str {
|
||||
self.0.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Name {
|
||||
type Target = str;
|
||||
|
||||
#[inline]
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> From<T> for Name
|
||||
where
|
||||
T: Into<smol_str::SmolStr>,
|
||||
{
|
||||
fn from(value: T) -> Self {
|
||||
Self(value.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Name {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
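A small illustration of the `Name` type above; this is a sketch that relies only on the `Deref`, `From`, and `Display` impls shown in this file, with illustrative string values:

let init = Name::new("__init__");
assert_eq!(&*init, "__init__");          // Deref<Target = str>
assert!(init.starts_with("__"));         // &str methods come for free
let pi: Name = "pi".into();              // From<T: Into<SmolStr>>
println!("{pi}");                        // Display writes the underlying str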
332
crates/red_knot/src/lint.rs
Normal file
@@ -0,0 +1,332 @@
|
||||
use std::cell::RefCell;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use ruff_python_ast::visitor::Visitor;
|
||||
use ruff_python_ast::{ModModule, StringLiteral};
|
||||
use ruff_python_parser::Parsed;
|
||||
|
||||
use crate::cache::KeyValueCache;
|
||||
use crate::db::{LintDb, LintJar, QueryResult};
|
||||
use crate::files::FileId;
|
||||
use crate::module::{resolve_module, ModuleName};
|
||||
use crate::parse::parse;
|
||||
use crate::semantic::{infer_definition_type, infer_symbol_public_type, Type};
|
||||
use crate::semantic::{
|
||||
resolve_global_symbol, semantic_index, Definition, GlobalSymbolId, SemanticIndex, SymbolId,
|
||||
};
|
||||
use crate::source::{source_text, Source};
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn lint_syntax(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
|
||||
let lint_jar: &LintJar = db.jar()?;
|
||||
let storage = &lint_jar.lint_syntax;
|
||||
|
||||
#[allow(clippy::print_stdout)]
|
||||
if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
|
||||
for i in 0..10 {
|
||||
db.cancelled()?;
|
||||
println!("RED_KNOT_SLOW_LINT is set, sleeping for {i}/10 seconds");
|
||||
std::thread::sleep(Duration::from_secs(1));
|
||||
}
|
||||
}
|
||||
|
||||
storage.get(&file_id, |file_id| {
|
||||
let mut diagnostics = Vec::new();
|
||||
|
||||
let source = source_text(db.upcast(), *file_id)?;
|
||||
lint_lines(source.text(), &mut diagnostics);
|
||||
|
||||
let parsed = parse(db.upcast(), *file_id)?;
|
||||
|
||||
if parsed.errors().is_empty() {
|
||||
let ast = parsed.syntax();
|
||||
|
||||
let mut visitor = SyntaxLintVisitor {
|
||||
diagnostics,
|
||||
source: source.text(),
|
||||
};
|
||||
visitor.visit_body(&ast.body);
|
||||
diagnostics = visitor.diagnostics;
|
||||
} else {
|
||||
diagnostics.extend(parsed.errors().iter().map(std::string::ToString::to_string));
|
||||
}
|
||||
|
||||
Ok(Diagnostics::from(diagnostics))
|
||||
})
|
||||
}
|
||||
|
||||
fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
|
||||
for (line_number, line) in source.lines().enumerate() {
|
||||
if line.len() < 88 {
|
||||
continue;
|
||||
}
|
||||
|
||||
let char_count = line.chars().count();
|
||||
if char_count > 88 {
|
||||
diagnostics.push(format!(
|
||||
"Line {} is too long ({} characters)",
|
||||
line_number + 1,
|
||||
char_count
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn lint_semantic(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
|
||||
let lint_jar: &LintJar = db.jar()?;
|
||||
let storage = &lint_jar.lint_semantic;
|
||||
|
||||
storage.get(&file_id, |file_id| {
|
||||
let source = source_text(db.upcast(), *file_id)?;
|
||||
let parsed = parse(db.upcast(), *file_id)?;
|
||||
let semantic_index = semantic_index(db.upcast(), *file_id)?;
|
||||
|
||||
let context = SemanticLintContext {
|
||||
file_id: *file_id,
|
||||
source,
|
||||
parsed: &parsed,
|
||||
semantic_index,
|
||||
db,
|
||||
diagnostics: RefCell::new(Vec::new()),
|
||||
};
|
||||
|
||||
lint_unresolved_imports(&context)?;
|
||||
lint_bad_overrides(&context)?;
|
||||
|
||||
Ok(Diagnostics::from(context.diagnostics.take()))
|
||||
})
|
||||
}
|
||||
|
||||
fn lint_unresolved_imports(context: &SemanticLintContext) -> QueryResult<()> {
|
||||
// TODO: Consider iterating over the dependencies (imports) only instead of all definitions.
|
||||
for (symbol, definition) in context.semantic_index().symbol_table().all_definitions() {
|
||||
match definition {
|
||||
Definition::Import(import) => {
|
||||
let ty = context.infer_symbol_public_type(symbol)?;
|
||||
|
||||
if ty.is_unknown() {
|
||||
context.push_diagnostic(format!("Unresolved module {}", import.module));
|
||||
}
|
||||
}
|
||||
Definition::ImportFrom(import) => {
|
||||
let ty = context.infer_symbol_public_type(symbol)?;
|
||||
|
||||
if ty.is_unknown() {
|
||||
let module_name = import.module().map(Deref::deref).unwrap_or_default();
|
||||
let message = if import.level() > 0 {
|
||||
format!(
|
||||
"Unresolved relative import '{}' from {}{}",
|
||||
import.name(),
|
||||
".".repeat(import.level() as usize),
|
||||
module_name
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"Unresolved import '{}' from '{}'",
|
||||
import.name(),
|
||||
module_name
|
||||
)
|
||||
};
|
||||
|
||||
context.push_diagnostic(message);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn lint_bad_overrides(context: &SemanticLintContext) -> QueryResult<()> {
|
||||
// TODO we should have a special marker on the real typing module (from typeshed) so if you
|
||||
// have your own "typing" module in your project, we don't consider it THE typing module (and
|
||||
// same for other stdlib modules that our lint rules care about)
|
||||
let Some(typing_override) = context.resolve_global_symbol("typing", "override")? else {
|
||||
// TODO once we bundle typeshed, this should be unreachable!()
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
// TODO we should maybe index definitions by type instead of iterating all, or else iterate all
|
||||
// just once, match, and branch to all lint rules that care about a type of definition
|
||||
for (symbol, definition) in context.semantic_index().symbol_table().all_definitions() {
|
||||
if !matches!(definition, Definition::FunctionDef(_)) {
|
||||
continue;
|
||||
}
|
||||
let ty = infer_definition_type(
|
||||
context.db.upcast(),
|
||||
GlobalSymbolId {
|
||||
file_id: context.file_id,
|
||||
symbol_id: symbol,
|
||||
},
|
||||
definition.clone(),
|
||||
)?;
|
||||
let Type::Function(func) = ty else {
|
||||
unreachable!("type of a FunctionDef should always be a Function");
|
||||
};
|
||||
let Some(class) = func.get_containing_class(context.db.upcast())? else {
|
||||
// not a method of a class
|
||||
continue;
|
||||
};
|
||||
if func.has_decorator(context.db.upcast(), typing_override)? {
|
||||
let method_name = func.name(context.db.upcast())?;
|
||||
if class
|
||||
.get_super_class_member(context.db.upcast(), &method_name)?
|
||||
.is_none()
|
||||
{
|
||||
// TODO should have a qualname() method to support nested classes
|
||||
context.push_diagnostic(
|
||||
format!(
|
||||
"Method {}.{} is decorated with `typing.override` but does not override any base class method",
|
||||
class.name(context.db.upcast())?,
|
||||
method_name,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub struct SemanticLintContext<'a> {
|
||||
file_id: FileId,
|
||||
source: Source,
|
||||
parsed: &'a Parsed<ModModule>,
|
||||
semantic_index: Arc<SemanticIndex>,
|
||||
db: &'a dyn LintDb,
|
||||
diagnostics: RefCell<Vec<String>>,
|
||||
}
|
||||
|
||||
impl<'a> SemanticLintContext<'a> {
|
||||
pub fn source_text(&self) -> &str {
|
||||
self.source.text()
|
||||
}
|
||||
|
||||
pub fn file_id(&self) -> FileId {
|
||||
self.file_id
|
||||
}
|
||||
|
||||
pub fn ast(&self) -> &'a ModModule {
|
||||
self.parsed.syntax()
|
||||
}
|
||||
|
||||
pub fn semantic_index(&self) -> &SemanticIndex {
|
||||
&self.semantic_index
|
||||
}
|
||||
|
||||
pub fn infer_symbol_public_type(&self, symbol_id: SymbolId) -> QueryResult<Type> {
|
||||
infer_symbol_public_type(
|
||||
self.db.upcast(),
|
||||
GlobalSymbolId {
|
||||
file_id: self.file_id,
|
||||
symbol_id,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
pub fn push_diagnostic(&self, diagnostic: String) {
|
||||
self.diagnostics.borrow_mut().push(diagnostic);
|
||||
}
|
||||
|
||||
pub fn extend_diagnostics(&mut self, diagnostics: impl IntoIterator<Item = String>) {
|
||||
self.diagnostics.get_mut().extend(diagnostics);
|
||||
}
|
||||
|
||||
pub fn resolve_global_symbol(
|
||||
&self,
|
||||
module: &str,
|
||||
symbol_name: &str,
|
||||
) -> QueryResult<Option<GlobalSymbolId>> {
|
||||
let Some(module) = resolve_module(self.db.upcast(), ModuleName::new(module))? else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
resolve_global_symbol(self.db.upcast(), module, symbol_name)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct SyntaxLintVisitor<'a> {
|
||||
diagnostics: Vec<String>,
|
||||
source: &'a str,
|
||||
}
|
||||
|
||||
impl Visitor<'_> for SyntaxLintVisitor<'_> {
|
||||
fn visit_string_literal(&mut self, string_literal: &'_ StringLiteral) {
|
||||
// A very naive implementation of a "use double quotes" rule
|
||||
let text = &self.source[string_literal.range];
|
||||
|
||||
if text.starts_with('\'') {
|
||||
self.diagnostics
|
||||
.push("Use double quotes for strings".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Diagnostics {
|
||||
Empty,
|
||||
List(Arc<Vec<String>>),
|
||||
}
|
||||
|
||||
impl Diagnostics {
|
||||
pub fn as_slice(&self) -> &[String] {
|
||||
match self {
|
||||
Diagnostics::Empty => &[],
|
||||
Diagnostics::List(list) => list.as_slice(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Diagnostics {
|
||||
type Target = [String];
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.as_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Vec<String>> for Diagnostics {
|
||||
fn from(value: Vec<String>) -> Self {
|
||||
if value.is_empty() {
|
||||
Diagnostics::Empty
|
||||
} else {
|
||||
Diagnostics::List(Arc::new(value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct LintSyntaxStorage(KeyValueCache<FileId, Diagnostics>);
|
||||
|
||||
impl Deref for LintSyntaxStorage {
|
||||
type Target = KeyValueCache<FileId, Diagnostics>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for LintSyntaxStorage {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct LintSemanticStorage(KeyValueCache<FileId, Diagnostics>);
|
||||
|
||||
impl Deref for LintSemanticStorage {
|
||||
type Target = KeyValueCache<FileId, Diagnostics>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for LintSemanticStorage {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
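One detail worth calling out in the storage types above: converting a `Vec<String>` into `Diagnostics` collapses the empty case, so cached "no findings" results carry no allocation, and non-empty results are shared behind an `Arc`. A minimal sketch based only on the impls in this file (the message text is illustrative):

let none: Diagnostics = Vec::<String>::new().into();
assert!(matches!(none, Diagnostics::Empty));
let some: Diagnostics = vec!["Line 3 is too long (95 characters)".to_string()].into();
assert_eq!(some.len(), 1);               // Deref<Target = [String]> exposes slice methods
assert_eq!(some.as_slice()[0], "Line 3 is too long (95 characters)");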
@@ -1,254 +0,0 @@
|
||||
//! Sets up logging for Red Knot
|
||||
|
||||
use anyhow::Context;
|
||||
use colored::Colorize;
|
||||
use std::fmt;
|
||||
use std::fs::File;
|
||||
use std::io::BufWriter;
|
||||
use tracing::{Event, Subscriber};
|
||||
use tracing_subscriber::filter::LevelFilter;
|
||||
use tracing_subscriber::fmt::format::Writer;
|
||||
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
|
||||
use tracing_subscriber::registry::LookupSpan;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
/// Logging flags to `#[command(flatten)]` into your CLI
|
||||
#[derive(clap::Args, Debug, Clone, Default)]
|
||||
#[command(about = None, long_about = None)]
|
||||
pub(crate) struct Verbosity {
|
||||
#[arg(
|
||||
long,
|
||||
short = 'v',
|
||||
help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
|
||||
action = clap::ArgAction::Count,
|
||||
global = true,
|
||||
)]
|
||||
verbose: u8,
|
||||
}
|
||||
|
||||
impl Verbosity {
|
||||
/// Returns the verbosity level based on the number of `-v` flags.
|
||||
///
|
||||
/// Returns `None` if the user did not specify any verbosity flags.
|
||||
pub(crate) fn level(&self) -> VerbosityLevel {
|
||||
match self.verbose {
|
||||
0 => VerbosityLevel::Default,
|
||||
1 => VerbosityLevel::Verbose,
|
||||
2 => VerbosityLevel::ExtraVerbose,
|
||||
_ => VerbosityLevel::Trace,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
||||
pub(crate) enum VerbosityLevel {
|
||||
/// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN).
|
||||
Default,
|
||||
|
||||
/// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO).
|
||||
/// Corresponds to `-v`.
|
||||
Verbose,
|
||||
|
||||
/// Enables a more verbose tracing format and emits Ruff and Red Knot events up to [`DEBUG`](tracing::Level::DEBUG).
|
||||
/// Corresponds to `-vv`
|
||||
ExtraVerbose,
|
||||
|
||||
/// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
|
||||
Trace,
|
||||
}
|
||||
|
||||
impl VerbosityLevel {
|
||||
const fn level_filter(self) -> LevelFilter {
|
||||
match self {
|
||||
VerbosityLevel::Default => LevelFilter::WARN,
|
||||
VerbosityLevel::Verbose => LevelFilter::INFO,
|
||||
VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
|
||||
VerbosityLevel::Trace => LevelFilter::TRACE,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) const fn is_trace(self) -> bool {
|
||||
matches!(self, VerbosityLevel::Trace)
|
||||
}
|
||||
|
||||
pub(crate) const fn is_extra_verbose(self) -> bool {
|
||||
matches!(self, VerbosityLevel::ExtraVerbose)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
|
||||
use tracing_subscriber::prelude::*;
|
||||
|
||||
// The `RED_KNOT_LOG` environment variable overrides the default log level.
|
||||
let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
|
||||
EnvFilter::builder()
|
||||
.parse(log_env_variable)
|
||||
.context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
|
||||
} else {
|
||||
match level {
|
||||
VerbosityLevel::Default => {
|
||||
// Show warning traces
|
||||
EnvFilter::default().add_directive(LevelFilter::WARN.into())
|
||||
}
|
||||
level => {
|
||||
let level_filter = level.level_filter();
|
||||
|
||||
// Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
|
||||
let filter = EnvFilter::default().add_directive(
|
||||
format!("red_knot={level_filter}")
|
||||
.parse()
|
||||
.expect("Hardcoded directive to be valid"),
|
||||
);
|
||||
|
||||
filter.add_directive(
|
||||
format!("ruff={level_filter}")
|
||||
.parse()
|
||||
.expect("Hardcoded directive to be valid"),
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let (profiling_layer, guard) = setup_profile();
|
||||
|
||||
let registry = tracing_subscriber::registry()
|
||||
.with(filter)
|
||||
.with(profiling_layer);
|
||||
|
||||
if level.is_trace() {
|
||||
let subscriber = registry.with(
|
||||
tracing_tree::HierarchicalLayer::default()
|
||||
.with_indent_lines(true)
|
||||
.with_indent_amount(2)
|
||||
.with_bracketed_fields(true)
|
||||
.with_thread_ids(true)
|
||||
.with_targets(true)
|
||||
.with_writer(std::io::stderr)
|
||||
.with_timer(tracing_tree::time::Uptime::default()),
|
||||
);
|
||||
|
||||
subscriber.init();
|
||||
} else {
|
||||
let subscriber = registry.with(
|
||||
tracing_subscriber::fmt::layer()
|
||||
.event_format(RedKnotFormat {
|
||||
display_level: true,
|
||||
display_timestamp: level.is_extra_verbose(),
|
||||
show_spans: false,
|
||||
})
|
||||
.with_writer(std::io::stderr),
|
||||
);
|
||||
|
||||
subscriber.init();
|
||||
}
|
||||
|
||||
Ok(TracingGuard {
|
||||
_flame_guard: guard,
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn setup_profile<S>() -> (
|
||||
Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
|
||||
Option<tracing_flame::FlushGuard<BufWriter<File>>>,
|
||||
)
|
||||
where
|
||||
S: Subscriber + for<'span> LookupSpan<'span>,
|
||||
{
|
||||
if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
|
||||
let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
|
||||
.expect("Flame layer to be created");
|
||||
(Some(layer), Some(guard))
|
||||
} else {
|
||||
(None, None)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct TracingGuard {
|
||||
_flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
|
||||
}
|
||||
|
||||
struct RedKnotFormat {
|
||||
display_timestamp: bool,
|
||||
display_level: bool,
|
||||
show_spans: bool,
|
||||
}
|
||||
|
||||
/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
|
||||
impl<S, N> FormatEvent<S, N> for RedKnotFormat
|
||||
where
|
||||
S: Subscriber + for<'a> LookupSpan<'a>,
|
||||
N: for<'a> FormatFields<'a> + 'static,
|
||||
{
|
||||
fn format_event(
|
||||
&self,
|
||||
ctx: &FmtContext<'_, S, N>,
|
||||
mut writer: Writer<'_>,
|
||||
event: &Event<'_>,
|
||||
) -> fmt::Result {
|
||||
let meta = event.metadata();
|
||||
let ansi = writer.has_ansi_escapes();
|
||||
|
||||
if self.display_timestamp {
|
||||
let timestamp = jiff::Zoned::now()
|
||||
.strftime("%Y-%m-%d %H:%M:%S.%f")
|
||||
.to_string();
|
||||
if ansi {
|
||||
write!(writer, "{} ", timestamp.dimmed())?;
|
||||
} else {
|
||||
write!(
|
||||
writer,
|
||||
"{} ",
|
||||
jiff::Zoned::now().strftime("%Y-%m-%d %H:%M:%S.%f")
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
if self.display_level {
|
||||
let level = meta.level();
|
||||
// Same colors as tracing
|
||||
if ansi {
|
||||
let formatted_level = level.to_string();
|
||||
match *level {
|
||||
tracing::Level::TRACE => {
|
||||
write!(writer, "{} ", formatted_level.purple().bold())?;
|
||||
}
|
||||
tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
|
||||
tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
|
||||
tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
|
||||
tracing::Level::ERROR => write!(writer, "{} ", level.to_string().red().bold())?,
|
||||
}
|
||||
} else {
|
||||
write!(writer, "{level} ")?;
|
||||
}
|
||||
}
|
||||
|
||||
if self.show_spans {
|
||||
let span = event.parent();
|
||||
let mut seen = false;
|
||||
|
||||
let span = span
|
||||
.and_then(|id| ctx.span(id))
|
||||
.or_else(|| ctx.lookup_current());
|
||||
|
||||
let scope = span.into_iter().flat_map(|span| span.scope().from_root());
|
||||
|
||||
for span in scope {
|
||||
seen = true;
|
||||
if ansi {
|
||||
write!(writer, "{}:", span.metadata().name().bold())?;
|
||||
} else {
|
||||
write!(writer, "{}:", span.metadata().name())?;
|
||||
}
|
||||
}
|
||||
|
||||
if seen {
|
||||
writer.write_char(' ')?;
|
||||
}
|
||||
}
|
||||
|
||||
ctx.field_format().format_fields(writer.by_ref(), event)?;
|
||||
|
||||
writeln!(writer)
|
||||
}
|
||||
}
|
||||
@@ -1,138 +1,65 @@
|
||||
use std::io::{self, stdout, BufWriter, Write};
|
||||
use std::process::{ExitCode, Termination};
|
||||
#![allow(clippy::dbg_macro)]
|
||||
|
||||
use anyhow::Result;
|
||||
use std::path::Path;
|
||||
use std::sync::Mutex;
|
||||
|
||||
use crate::args::{Args, CheckCommand, Command, TerminalColor};
|
||||
use crate::logging::setup_tracing;
|
||||
use anyhow::{anyhow, Context};
|
||||
use clap::Parser;
|
||||
use colored::Colorize;
|
||||
use crossbeam::channel as crossbeam_channel;
|
||||
use red_knot_project::metadata::options::Options;
|
||||
use red_knot_project::watch::ProjectWatcher;
|
||||
use red_knot_project::{watch, Db};
|
||||
use red_knot_project::{ProjectDatabase, ProjectMetadata};
|
||||
use red_knot_server::run_server;
|
||||
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, Severity};
|
||||
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
|
||||
use salsa::plumbing::ZalsaDatabase;
|
||||
use tracing::subscriber::Interest;
|
||||
use tracing::{Level, Metadata};
|
||||
use tracing_subscriber::filter::LevelFilter;
|
||||
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
|
||||
use tracing_subscriber::{Layer, Registry};
|
||||
use tracing_tree::time::Uptime;
|
||||
|
||||
mod args;
|
||||
mod logging;
|
||||
mod python_version;
|
||||
mod version;
|
||||
use red_knot::db::{HasJar, ParallelDatabase, QueryError, SourceDb, SourceJar};
|
||||
use red_knot::module::{set_module_search_paths, ModuleResolutionInputs};
|
||||
use red_knot::program::check::ExecutionMode;
|
||||
use red_knot::program::{FileWatcherChange, Program};
|
||||
use red_knot::watch::FileWatcher;
|
||||
use red_knot::Workspace;
|
||||
|
||||
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
|
||||
pub fn main() -> ExitStatus {
|
||||
run().unwrap_or_else(|error| {
|
||||
use std::io::Write;
|
||||
fn main() -> anyhow::Result<()> {
|
||||
setup_tracing();
|
||||
|
||||
// Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
|
||||
let mut stderr = std::io::stderr().lock();
|
||||
let arguments: Vec<_> = std::env::args().collect();
|
||||
|
||||
// This communicates that this isn't a linter error but Red Knot itself hard-errored for
|
||||
// some reason (e.g. failed to resolve the configuration)
|
||||
writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
|
||||
// Currently we generally only see one error, but e.g. with io errors when resolving
|
||||
// the configuration it is helpful to chain errors ("resolving configuration failed" ->
|
||||
// "failed to read file: subdir/pyproject.toml")
|
||||
for cause in error.chain() {
|
||||
// Exit "gracefully" on broken pipe errors.
|
||||
//
|
||||
// See: https://github.com/BurntSushi/ripgrep/blob/bf63fe8f258afc09bae6caa48f0ae35eaf115005/crates/core/main.rs#L47C1-L61C14
|
||||
if let Some(ioerr) = cause.downcast_ref::<io::Error>() {
|
||||
if ioerr.kind() == io::ErrorKind::BrokenPipe {
|
||||
return ExitStatus::Success;
|
||||
}
|
||||
}
|
||||
|
||||
writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
|
||||
}
|
||||
|
||||
ExitStatus::Error
|
||||
})
|
||||
}
|
||||
|
||||
fn run() -> anyhow::Result<ExitStatus> {
|
||||
let args = wild::args_os();
|
||||
let args = argfile::expand_args_from(args, argfile::parse_fromfile, argfile::PREFIX)
|
||||
.context("Failed to read CLI arguments from file")?;
|
||||
let args = Args::parse_from(args);
|
||||
|
||||
match args.command {
|
||||
Command::Server => run_server().map(|()| ExitStatus::Success),
|
||||
Command::Check(check_args) => run_check(check_args),
|
||||
Command::Version => version().map(|()| ExitStatus::Success),
|
||||
if arguments.len() < 2 {
|
||||
eprintln!("Usage: red_knot <path>");
|
||||
return Err(anyhow::anyhow!("Invalid arguments"));
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn version() -> Result<()> {
|
||||
let mut stdout = BufWriter::new(io::stdout().lock());
|
||||
let version_info = crate::version::version();
|
||||
writeln!(stdout, "red knot {}", &version_info)?;
|
||||
Ok(())
|
||||
}
|
||||
let entry_point = Path::new(&arguments[1]);
|
||||
|
||||
fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
|
||||
set_colored_override(args.color);
|
||||
if !entry_point.exists() {
|
||||
eprintln!("The entry point does not exist.");
|
||||
return Err(anyhow::anyhow!("Invalid arguments"));
|
||||
}
|
||||
|
||||
let verbosity = args.verbosity.level();
|
||||
countme::enable(verbosity.is_trace());
|
||||
let _guard = setup_tracing(verbosity)?;
|
||||
if !entry_point.is_file() {
|
||||
eprintln!("The entry point is not a file.");
|
||||
return Err(anyhow::anyhow!("Invalid arguments"));
|
||||
}
|
||||
|
||||
tracing::debug!("Version: {}", version::version());
|
||||
let workspace_folder = entry_point.parent().unwrap();
|
||||
let workspace = Workspace::new(workspace_folder.to_path_buf());
|
||||
|
||||
// The base path to which all CLI arguments are relative to.
|
||||
let cwd = {
|
||||
let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
|
||||
SystemPathBuf::from_path_buf(cwd)
|
||||
.map_err(|path| {
|
||||
anyhow!(
|
||||
"The current working directory `{}` contains non-Unicode characters. Red Knot only supports Unicode paths.",
|
||||
path.display()
|
||||
)
|
||||
})?
|
||||
let workspace_search_path = workspace.root().to_path_buf();
|
||||
|
||||
let search_paths = ModuleResolutionInputs {
|
||||
extra_paths: vec![],
|
||||
workspace_root: workspace_search_path,
|
||||
site_packages: None,
|
||||
custom_typeshed: None,
|
||||
};
|
||||
|
||||
let project_path = args
|
||||
.project
|
||||
.as_ref()
|
||||
.map(|project| {
|
||||
if project.as_std_path().is_dir() {
|
||||
Ok(SystemPath::absolute(project, &cwd))
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"Provided project path `{project}` is not a directory"
|
||||
))
|
||||
}
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_else(|| cwd.clone());
|
||||
let mut program = Program::new(workspace);
|
||||
set_module_search_paths(&mut program, search_paths);
|
||||
|
||||
let check_paths: Vec<_> = args
|
||||
.paths
|
||||
.iter()
|
||||
.map(|path| SystemPath::absolute(path, &cwd))
|
||||
.collect();
|
||||
let entry_id = program.file_id(entry_point);
|
||||
program.workspace_mut().open_file(entry_id);
|
||||
|
||||
let system = OsSystem::new(cwd);
|
||||
let watch = args.watch;
|
||||
let exit_zero = args.exit_zero;
|
||||
|
||||
let cli_options = args.into_options();
|
||||
let mut project_metadata = ProjectMetadata::discover(&project_path, &system)?;
|
||||
project_metadata.apply_cli_options(cli_options.clone());
|
||||
project_metadata.apply_configuration_files(&system)?;
|
||||
|
||||
let mut db = ProjectDatabase::new(project_metadata, system)?;
|
||||
|
||||
if !check_paths.is_empty() {
|
||||
db.project().set_included_paths(&mut db, check_paths);
|
||||
}
|
||||
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_options);
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new();
|
||||
|
||||
// Listen to Ctrl+C and abort the watch mode.
|
||||
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
|
||||
@@ -144,201 +71,122 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
|
||||
}
|
||||
})?;
|
||||
|
||||
let exit_status = if watch {
|
||||
main_loop.watch(&mut db)?
|
||||
} else {
|
||||
main_loop.run(&mut db)?
|
||||
};
|
||||
let file_changes_notifier = main_loop.file_changes_notifier();
|
||||
|
||||
tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
|
||||
// Watch for file changes and re-trigger the analysis.
|
||||
let mut file_watcher = FileWatcher::new(move |changes| {
|
||||
file_changes_notifier.notify(changes);
|
||||
})?;
|
||||
|
||||
std::mem::forget(db);
|
||||
file_watcher.watch_folder(workspace_folder)?;
|
||||
|
||||
if exit_zero {
|
||||
Ok(ExitStatus::Success)
|
||||
} else {
|
||||
Ok(exit_status)
|
||||
}
|
||||
}
|
||||
main_loop.run(&mut program);
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum ExitStatus {
|
||||
/// Checking was successful and there were no errors.
|
||||
Success = 0,
|
||||
let source_jar: &SourceJar = program.jar().unwrap();
|
||||
|
||||
/// Checking was successful but there were errors.
|
||||
Failure = 1,
|
||||
dbg!(source_jar.parsed.statistics());
|
||||
dbg!(source_jar.sources.statistics());
|
||||
|
||||
/// Checking failed.
|
||||
Error = 2,
|
||||
}
|
||||
|
||||
impl Termination for ExitStatus {
|
||||
fn report(self) -> ExitCode {
|
||||
ExitCode::from(self as u8)
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct MainLoop {
|
||||
/// Sender that can be used to send messages to the main loop.
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
|
||||
/// Receiver for the messages sent **to** the main loop.
|
||||
receiver: crossbeam_channel::Receiver<MainLoopMessage>,
|
||||
|
||||
/// The file system watcher, if running in watch mode.
|
||||
watcher: Option<ProjectWatcher>,
|
||||
|
||||
cli_options: Options,
|
||||
orchestrator_sender: crossbeam_channel::Sender<OrchestratorMessage>,
|
||||
main_loop_receiver: crossbeam_channel::Receiver<MainLoopMessage>,
|
||||
}
|
||||
|
||||
impl MainLoop {
|
||||
fn new(cli_options: Options) -> (Self, MainLoopCancellationToken) {
|
||||
let (sender, receiver) = crossbeam_channel::bounded(10);
|
||||
fn new() -> (Self, MainLoopCancellationToken) {
|
||||
let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
|
||||
let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);
|
||||
|
||||
let mut orchestrator = Orchestrator {
|
||||
receiver: orchestrator_receiver,
|
||||
sender: main_loop_sender.clone(),
|
||||
revision: 0,
|
||||
};
|
||||
|
||||
std::thread::spawn(move || {
|
||||
orchestrator.run();
|
||||
});
|
||||
|
||||
(
|
||||
Self {
|
||||
sender: sender.clone(),
|
||||
receiver,
|
||||
watcher: None,
|
||||
cli_options,
|
||||
orchestrator_sender,
|
||||
main_loop_receiver,
|
||||
},
|
||||
MainLoopCancellationToken {
|
||||
sender: main_loop_sender,
|
||||
},
|
||||
MainLoopCancellationToken { sender },
|
||||
)
|
||||
}
|
||||
|
||||
fn watch(mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
|
||||
tracing::debug!("Starting watch mode");
|
||||
let sender = self.sender.clone();
|
||||
let watcher = watch::directory_watcher(move |event| {
|
||||
sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
|
||||
})?;
|
||||
|
||||
self.watcher = Some(ProjectWatcher::new(watcher, db));
|
||||
|
||||
self.run(db)?;
|
||||
|
||||
Ok(ExitStatus::Success)
|
||||
fn file_changes_notifier(&self) -> FileChangesNotifier {
|
||||
FileChangesNotifier {
|
||||
sender: self.orchestrator_sender.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn run(mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
|
||||
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
||||
fn run(self, program: &mut Program) {
|
||||
self.orchestrator_sender
|
||||
.send(OrchestratorMessage::Run)
|
||||
.unwrap();
|
||||
|
||||
let result = self.main_loop(db);
|
||||
for message in &self.main_loop_receiver {
|
||||
tracing::trace!("Main Loop: Tick");
|
||||
|
||||
tracing::debug!("Exiting main loop");
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
fn main_loop(&mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
|
||||
// Schedule the first check.
|
||||
tracing::debug!("Starting main loop");
|
||||
|
||||
let mut revision = 0u64;
|
||||
|
||||
while let Ok(message) = self.receiver.recv() {
|
||||
match message {
|
||||
MainLoopMessage::CheckWorkspace => {
|
||||
let db = db.clone();
|
||||
let sender = self.sender.clone();
|
||||
MainLoopMessage::CheckProgram { revision } => {
|
||||
let program = program.snapshot();
|
||||
let sender = self.orchestrator_sender.clone();
|
||||
|
||||
// Spawn a new task that checks the project. This needs to be done in a separate thread
|
||||
// Spawn a new task that checks the program. This needs to be done in a separate thread
|
||||
// to prevent blocking the main loop here.
|
||||
rayon::spawn(move || {
|
||||
if let Ok(result) = db.check() {
|
||||
// Send the result back to the main loop for printing.
|
||||
rayon::spawn(move || match program.check(ExecutionMode::ThreadPool) {
|
||||
Ok(result) => {
|
||||
sender
|
||||
.send(MainLoopMessage::CheckCompleted { result, revision })
|
||||
.send(OrchestratorMessage::CheckProgramCompleted {
|
||||
diagnostics: result,
|
||||
revision,
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
Err(QueryError::Cancelled) => {}
|
||||
});
|
||||
}
|
||||
|
||||
MainLoopMessage::CheckCompleted {
|
||||
result,
|
||||
revision: check_revision,
|
||||
} => {
|
||||
let terminal_settings = db.project().settings(db).terminal();
|
||||
let display_config = DisplayDiagnosticConfig::default()
|
||||
.format(terminal_settings.output_format)
|
||||
.color(colored::control::SHOULD_COLORIZE.should_colorize());
|
||||
|
||||
let min_error_severity = if terminal_settings.error_on_warning {
|
||||
Severity::Warning
|
||||
} else {
|
||||
Severity::Error
|
||||
};
|
||||
|
||||
if check_revision == revision {
|
||||
if db.project().files(db).is_empty() {
|
||||
tracing::warn!("No python files found under the given path(s)");
|
||||
}
|
||||
|
||||
let mut stdout = stdout().lock();
|
||||
|
||||
if result.is_empty() {
|
||||
writeln!(stdout, "All checks passed!")?;
|
||||
|
||||
if self.watcher.is_none() {
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
} else {
|
||||
let mut failed = false;
|
||||
let diagnostics_count = result.len();
|
||||
|
||||
for diagnostic in result {
|
||||
write!(stdout, "{}", diagnostic.display(db, &display_config))?;
|
||||
|
||||
failed |= diagnostic.severity() >= min_error_severity;
|
||||
}
|
||||
|
||||
writeln!(
|
||||
stdout,
|
||||
"Found {} diagnostic{}",
|
||||
diagnostics_count,
|
||||
if diagnostics_count > 1 { "s" } else { "" }
|
||||
)?;
|
||||
|
||||
if self.watcher.is_none() {
|
||||
return Ok(if failed {
|
||||
ExitStatus::Failure
|
||||
} else {
|
||||
ExitStatus::Success
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
tracing::debug!(
|
||||
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
|
||||
);
|
||||
}
|
||||
|
||||
tracing::trace!("Counts after last check:\n{}", countme::get_all());
|
||||
}
|
||||
|
||||
MainLoopMessage::ApplyChanges(changes) => {
|
||||
revision += 1;
|
||||
// Automatically cancels any pending queries and waits for them to complete.
|
||||
db.apply_changes(changes, Some(&self.cli_options));
|
||||
if let Some(watcher) = self.watcher.as_mut() {
|
||||
watcher.update(db);
|
||||
}
|
||||
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
||||
program.apply_changes(changes);
|
||||
}
|
||||
MainLoopMessage::CheckCompleted(diagnostics) => {
|
||||
dbg!(diagnostics);
|
||||
}
|
||||
MainLoopMessage::Exit => {
|
||||
// Cancel any pending queries and wait for them to complete.
|
||||
// TODO: Don't use Salsa internal APIs
|
||||
// [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
|
||||
let _ = db.zalsa_mut();
|
||||
return Ok(ExitStatus::Success);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
tracing::debug!("Waiting for next main loop message.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(ExitStatus::Success)
|
||||
impl Drop for MainLoop {
|
||||
fn drop(&mut self) {
|
||||
self.orchestrator_sender
|
||||
.send(OrchestratorMessage::Shutdown)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct FileChangesNotifier {
|
||||
sender: crossbeam_channel::Sender<OrchestratorMessage>,
|
||||
}
|
||||
|
||||
impl FileChangesNotifier {
|
||||
fn notify(&self, changes: Vec<FileWatcherChange>) {
|
||||
self.sender
|
||||
.send(OrchestratorMessage::FileChanges(changes))
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
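// Illustrative aside (not part of this change): `FileChangesNotifier` above is just a
// cloneable handle around the orchestrator's channel sender. A minimal sketch of how a
// watcher thread could use such a handle; `PathChange` and `forward_watch_events` are
// hypothetical names, and the `crossbeam-channel` crate (already used in this diff) is assumed.
#[derive(Clone, Debug)]
struct PathChange(std::path::PathBuf);

fn forward_watch_events(
    notifier: crossbeam_channel::Sender<Vec<PathChange>>,
    os_events: crossbeam_channel::Receiver<PathChange>,
) {
    // Forward every event reported by the OS watcher to the orchestrator; sending only
    // fails once the orchestrator has shut down, in which case we simply stop.
    while let Ok(change) = os_events.recv() {
        if notifier.send(vec![change]).is_err() {
            break;
        }
    }
}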
@@ -353,33 +201,164 @@ impl MainLoopCancellationToken {
|
||||
}
|
||||
}
|
||||
|
||||
struct Orchestrator {
|
||||
/// Sends messages to the main loop.
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
/// Receives messages from the main loop.
|
||||
receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
|
||||
revision: usize,
|
||||
}
|
||||
|
||||
impl Orchestrator {
|
||||
fn run(&mut self) {
|
||||
while let Ok(message) = self.receiver.recv() {
|
||||
match message {
|
||||
OrchestratorMessage::Run => {
|
||||
self.sender
|
||||
.send(MainLoopMessage::CheckProgram {
|
||||
revision: self.revision,
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
OrchestratorMessage::CheckProgramCompleted {
|
||||
diagnostics,
|
||||
revision,
|
||||
} => {
|
||||
// Only take the diagnostics if they are for the latest revision.
|
||||
if self.revision == revision {
|
||||
self.sender
|
||||
.send(MainLoopMessage::CheckCompleted(diagnostics))
|
||||
.unwrap();
|
||||
} else {
|
||||
tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
|
||||
}
|
||||
}
|
||||
|
||||
OrchestratorMessage::FileChanges(changes) => {
|
||||
// Request cancellation, but wait until all analysis tasks have completed to
|
||||
// avoid stale messages in the next main loop.
|
||||
|
||||
self.revision += 1;
|
||||
self.debounce_changes(changes);
|
||||
}
|
||||
OrchestratorMessage::Shutdown => {
|
||||
return self.shutdown();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
|
||||
loop {
|
||||
// Consume any further incoming file change messages before running a new analysis, but stop as soon as no new change has arrived for 10 ms (see the `default` arm below).
|
||||
crossbeam_channel::select! {
|
||||
recv(self.receiver) -> message => {
|
||||
match message {
|
||||
Ok(OrchestratorMessage::Shutdown) => {
|
||||
return self.shutdown();
|
||||
}
|
||||
Ok(OrchestratorMessage::FileChanges(file_changes)) => {
|
||||
changes.extend(file_changes);
|
||||
}
|
||||
|
||||
Ok(OrchestratorMessage::CheckProgramCompleted { .. }) => {
|
||||
// disregard any outdated completion message.
|
||||
}
|
||||
Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),
|
||||
|
||||
Err(_) => {
|
||||
// There are no more senders, no point in waiting for more messages
|
||||
return;
|
||||
}
|
||||
}
|
||||
},
|
||||
default(std::time::Duration::from_millis(10)) => {
|
||||
// No more file changes after 10 ms, send the changes and schedule a new analysis
|
||||
self.sender.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
|
||||
self.sender.send(MainLoopMessage::CheckProgram { revision: self.revision}).unwrap();
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::unused_self)]
|
||||
fn shutdown(&self) {
|
||||
tracing::trace!("Shutting down orchestrator.");
|
||||
}
|
||||
}
|
||||
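// Illustrative aside (not part of this change): `debounce_changes` above is an instance
// of a common crossbeam pattern -- keep draining the channel until it has been quiet for
// a short window, then act on the accumulated batch. A standalone sketch of that pattern
// (the `debounce` helper and the 10 ms window are illustrative):
fn debounce<T>(
    receiver: &crossbeam_channel::Receiver<T>,
    quiet: std::time::Duration,
) -> Vec<T> {
    let mut batch = Vec::new();
    loop {
        crossbeam_channel::select! {
            recv(receiver) -> message => {
                match message {
                    // Another change arrived before the quiet window elapsed; keep collecting.
                    Ok(value) => batch.push(value),
                    // All senders are gone; return whatever we have.
                    Err(_) => return batch,
                }
            },
            // No message for the whole quiet window: the batch is complete.
            default(quiet) => return batch,
        }
    }
}

fn debounce_demo() {
    let (sender, receiver) = crossbeam_channel::unbounded();
    sender.send("a.py").unwrap();
    sender.send("b.py").unwrap();
    drop(sender);
    assert_eq!(
        debounce(&receiver, std::time::Duration::from_millis(10)),
        vec!["a.py", "b.py"]
    );
}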
|
||||
/// Message sent from the orchestrator to the main loop.
|
||||
#[derive(Debug)]
|
||||
enum MainLoopMessage {
|
||||
CheckWorkspace,
|
||||
CheckCompleted {
|
||||
/// The diagnostics that were found during the check.
|
||||
result: Vec<Diagnostic>,
|
||||
revision: u64,
|
||||
},
|
||||
ApplyChanges(Vec<watch::ChangeEvent>),
|
||||
CheckProgram { revision: usize },
|
||||
CheckCompleted(Vec<String>),
|
||||
ApplyChanges(Vec<FileWatcherChange>),
|
||||
Exit,
|
||||
}
|
||||
|
||||
fn set_colored_override(color: Option<TerminalColor>) {
|
||||
let Some(color) = color else {
|
||||
return;
|
||||
};
|
||||
#[derive(Debug)]
|
||||
enum OrchestratorMessage {
|
||||
Run,
|
||||
Shutdown,
|
||||
|
||||
match color {
|
||||
TerminalColor::Auto => {
|
||||
colored::control::unset_override();
|
||||
}
|
||||
TerminalColor::Always => {
|
||||
colored::control::set_override(true);
|
||||
}
|
||||
TerminalColor::Never => {
|
||||
colored::control::set_override(false);
|
||||
}
|
||||
CheckProgramCompleted {
|
||||
diagnostics: Vec<String>,
|
||||
revision: usize,
|
||||
},
|
||||
|
||||
FileChanges(Vec<FileWatcherChange>),
|
||||
}
|
||||
|
||||
fn setup_tracing() {
|
||||
let subscriber = Registry::default().with(
|
||||
tracing_tree::HierarchicalLayer::default()
|
||||
.with_indent_lines(true)
|
||||
.with_indent_amount(2)
|
||||
.with_bracketed_fields(true)
|
||||
.with_thread_ids(true)
|
||||
.with_targets(true)
|
||||
.with_writer(|| Box::new(std::io::stderr()))
|
||||
.with_timer(Uptime::default())
|
||||
.with_filter(LoggingFilter {
|
||||
trace_level: Level::TRACE,
|
||||
}),
|
||||
);
|
||||
|
||||
tracing::subscriber::set_global_default(subscriber).unwrap();
|
||||
}
|
||||
|
||||
struct LoggingFilter {
|
||||
trace_level: Level,
|
||||
}
|
||||
|
||||
impl LoggingFilter {
|
||||
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
|
||||
let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
|
||||
self.trace_level
|
||||
} else {
|
||||
Level::INFO
|
||||
};
|
||||
|
||||
meta.level() <= &filter
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> Filter<S> for LoggingFilter {
|
||||
fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
|
||||
self.is_enabled(meta)
|
||||
}
|
||||
|
||||
fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
|
||||
if self.is_enabled(meta) {
|
||||
Interest::always()
|
||||
} else {
|
||||
Interest::never()
|
||||
}
|
||||
}
|
||||
|
||||
fn max_level_hint(&self) -> Option<LevelFilter> {
|
||||
Some(LevelFilter::from_level(self.trace_level))
|
||||
}
|
||||
}
|
||||
|
||||
1282 crates/red_knot/src/module.rs Normal file
File diff suppressed because it is too large
41 crates/red_knot/src/parse.rs Normal file
@@ -0,0 +1,41 @@
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::Arc;
|
||||
|
||||
use ruff_python_ast::ModModule;
|
||||
use ruff_python_parser::Parsed;
|
||||
|
||||
use crate::cache::KeyValueCache;
|
||||
use crate::db::{QueryResult, SourceDb};
|
||||
use crate::files::FileId;
|
||||
use crate::source::source_text;
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn parse(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Arc<Parsed<ModModule>>> {
|
||||
let jar = db.jar()?;
|
||||
|
||||
jar.parsed.get(&file_id, |file_id| {
|
||||
let source = source_text(db, *file_id)?;
|
||||
|
||||
Ok(Arc::new(ruff_python_parser::parse_unchecked_source(
|
||||
source.text(),
|
||||
source.kind().into(),
|
||||
)))
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct ParsedStorage(KeyValueCache<FileId, Arc<Parsed<ModModule>>>);
|
||||
|
||||
impl Deref for ParsedStorage {
|
||||
type Target = KeyValueCache<FileId, Arc<Parsed<ModModule>>>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for ParsedStorage {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
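// Illustrative aside (not part of this change): `ParsedStorage` wraps a `KeyValueCache`,
// so the `parse` query above only re-parses a file when there is no cached entry for its
// `FileId`. A minimal, standalone sketch of that compute-if-absent shape (this `Cache`
// type is illustrative, not the crate's `KeyValueCache`):
struct Cache<K, V>(std::sync::Mutex<std::collections::HashMap<K, std::sync::Arc<V>>>);

impl<K: Eq + std::hash::Hash + Clone, V> Cache<K, V> {
    fn new() -> Self {
        Self(std::sync::Mutex::new(std::collections::HashMap::new()))
    }

    /// Return the cached value for `key`, computing and storing it on a miss.
    fn get(&self, key: &K, compute: impl FnOnce(&K) -> V) -> std::sync::Arc<V> {
        let mut map = self.0.lock().unwrap();
        map.entry(key.clone())
            .or_insert_with(|| std::sync::Arc::new(compute(key)))
            .clone()
    }
}

fn parse_cache_demo() {
    let cache: Cache<u32, String> = Cache::new();
    // The first access "parses"; the second reuses the same Arc without recomputing.
    let first = cache.get(&1, |_| "parsed module".to_string());
    let second = cache.get(&1, |_| unreachable!("cache hit expected"));
    assert!(std::sync::Arc::ptr_eq(&first, &second));
}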
413 crates/red_knot/src/program/check.rs Normal file
@@ -0,0 +1,413 @@
|
||||
use rayon::{current_num_threads, yield_local};
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use crate::db::{Database, QueryError, QueryResult};
|
||||
use crate::files::FileId;
|
||||
use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
|
||||
use crate::module::{file_to_module, resolve_module};
|
||||
use crate::program::Program;
|
||||
use crate::semantic::{semantic_index, Dependency};
|
||||
|
||||
impl Program {
|
||||
/// Checks all open files in the workspace and its dependencies.
|
||||
#[tracing::instrument(level = "debug", skip_all)]
|
||||
pub fn check(&self, mode: ExecutionMode) -> QueryResult<Vec<String>> {
|
||||
self.cancelled()?;
|
||||
|
||||
let mut context = CheckContext::new(self);
|
||||
|
||||
match mode {
|
||||
ExecutionMode::SingleThreaded => SingleThreadedExecutor.run(&mut context)?,
|
||||
ExecutionMode::ThreadPool => ThreadPoolExecutor.run(&mut context)?,
|
||||
};
|
||||
|
||||
Ok(context.finish())
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(self, context))]
|
||||
fn check_file(&self, file: FileId, context: &CheckFileContext) -> QueryResult<Diagnostics> {
|
||||
self.cancelled()?;
|
||||
|
||||
let index = semantic_index(self, file)?;
|
||||
let dependencies = index.symbol_table().dependencies();
|
||||
|
||||
if !dependencies.is_empty() {
|
||||
let module = file_to_module(self, file)?;
|
||||
|
||||
// TODO scheduling all dependencies here is wasteful if we don't infer any types on them
|
||||
// but I think that's unlikely, so it is okay?
|
||||
// Anyway, we need to figure out a way to retrieve the dependencies of a module
|
||||
// from the persistent cache. So maybe it should be a separate query after all.
|
||||
for dependency in dependencies {
|
||||
let dependency_name = match dependency {
|
||||
Dependency::Module(name) => Some(name.clone()),
|
||||
Dependency::Relative { .. } => match &module {
|
||||
Some(module) => module.resolve_dependency(self, dependency)?,
|
||||
None => None,
|
||||
},
|
||||
};
|
||||
|
||||
if let Some(dependency_name) = dependency_name {
|
||||
// TODO We may want to have different check functions for non-first-party
|
||||
// files because we only need to index them and not check them.
|
||||
// Supporting non-first-party code also requires supporting typing stubs.
|
||||
if let Some(dependency) = resolve_module(self, dependency_name)? {
|
||||
if dependency.path(self)?.root().kind().is_first_party() {
|
||||
context.schedule_dependency(dependency.path(self)?.file());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut diagnostics = Vec::new();
|
||||
|
||||
if self.workspace().is_file_open(file) {
|
||||
diagnostics.extend_from_slice(&lint_syntax(self, file)?);
|
||||
diagnostics.extend_from_slice(&lint_semantic(self, file)?);
|
||||
}
|
||||
|
||||
Ok(Diagnostics::from(diagnostics))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum ExecutionMode {
|
||||
SingleThreaded,
|
||||
ThreadPool,
|
||||
}
|
||||
|
||||
/// Context that stores state information about the entire check operation.
|
||||
struct CheckContext<'a> {
|
||||
/// IDs of the files that have been queued for checking.
|
||||
///
|
||||
/// Used to avoid queuing the same file twice.
|
||||
scheduled_files: FxHashSet<FileId>,
|
||||
|
||||
/// Reference to the program that is checked.
|
||||
program: &'a Program,
|
||||
|
||||
/// The aggregated diagnostics
|
||||
diagnostics: Vec<String>,
|
||||
}
|
||||
|
||||
impl<'a> CheckContext<'a> {
|
||||
fn new(program: &'a Program) -> Self {
|
||||
Self {
|
||||
scheduled_files: FxHashSet::default(),
|
||||
program,
|
||||
diagnostics: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the tasks to check all open files in the workspace.
|
||||
fn check_open_files(&mut self) -> Vec<CheckOpenFileTask> {
|
||||
self.scheduled_files
|
||||
.extend(self.program.workspace().open_files());
|
||||
|
||||
self.program
|
||||
.workspace()
|
||||
.open_files()
|
||||
.map(|file_id| CheckOpenFileTask { file_id })
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Returns the task to check a dependency.
|
||||
fn check_dependency(&mut self, file_id: FileId) -> Option<CheckDependencyTask> {
|
||||
if self.scheduled_files.insert(file_id) {
|
||||
Some(CheckDependencyTask { file_id })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Pushes the result for a single file check operation
|
||||
fn push_diagnostics(&mut self, diagnostics: &Diagnostics) {
|
||||
self.diagnostics.extend_from_slice(diagnostics);
|
||||
}
|
||||
|
||||
/// Returns a reference to the program that is being checked.
|
||||
fn program(&self) -> &'a Program {
|
||||
self.program
|
||||
}
|
||||
|
||||
/// Creates a task context that is used to check a single file.
|
||||
fn task_context<'b, S>(&self, dependency_scheduler: &'b S) -> CheckTaskContext<'a, 'b, S>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
CheckTaskContext {
|
||||
program: self.program,
|
||||
dependency_scheduler,
|
||||
}
|
||||
}
|
||||
|
||||
fn finish(self) -> Vec<String> {
|
||||
self.diagnostics
|
||||
}
|
||||
}
|
||||
|
||||
/// Trait that abstracts away how a dependency of a file gets scheduled for checking.
|
||||
trait ScheduleDependency {
|
||||
/// Schedules the file with the given ID for checking.
|
||||
fn schedule(&self, file_id: FileId);
|
||||
}
|
||||
|
||||
impl<T> ScheduleDependency for T
|
||||
where
|
||||
T: Fn(FileId),
|
||||
{
|
||||
fn schedule(&self, file_id: FileId) {
|
||||
let f = self;
|
||||
f(file_id);
|
||||
}
|
||||
}
|
||||
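// Illustrative aside (not part of this change): the blanket impl above is what lets the
// executors pass a plain closure wherever a `ScheduleDependency` is expected. A
// standalone sketch of the same trick with a simplified, hypothetical `Schedule` trait:
trait Schedule {
    fn schedule(&self, file_id: u32);
}

// Every `Fn(u32)` closure gets the trait for free.
impl<T: Fn(u32)> Schedule for T {
    fn schedule(&self, file_id: u32) {
        self(file_id);
    }
}

fn schedule_demo() {
    // A no-op scheduler, like the single-threaded executor's `|_| {}` ...
    let noop: &dyn Schedule = &(|_: u32| {});
    noop.schedule(42);

    // ... and a collecting scheduler, like the thread-pool executor's channel-sending closure.
    let scheduled = std::cell::RefCell::new(Vec::new());
    let collector: &dyn Schedule = &(|file_id: u32| scheduled.borrow_mut().push(file_id));
    collector.schedule(42);
    assert_eq!(*scheduled.borrow(), vec![42]);
}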
|
||||
/// Context that is used to run a single file check task.
|
||||
///
|
||||
/// The task is generic over `S` because it is passed across thread boundaries and
|
||||
/// we don't want to add the requirement that [`ScheduleDependency`] must be [`Send`].
|
||||
struct CheckTaskContext<'a, 'scheduler, S>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
dependency_scheduler: &'scheduler S,
|
||||
program: &'a Program,
|
||||
}
|
||||
|
||||
impl<'a, 'scheduler, S> CheckTaskContext<'a, 'scheduler, S>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
fn as_file_context(&self) -> CheckFileContext<'scheduler> {
|
||||
CheckFileContext {
|
||||
dependency_scheduler: self.dependency_scheduler,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Context passed when checking a single file.
|
||||
///
|
||||
/// This is a trimmed down version of [`CheckTaskContext`] with the type parameter `S` erased
|
||||
/// to avoid monomorphization of [`Program::check_file`].
|
||||
struct CheckFileContext<'a> {
|
||||
dependency_scheduler: &'a dyn ScheduleDependency,
|
||||
}
|
||||
|
||||
impl<'a> CheckFileContext<'a> {
|
||||
fn schedule_dependency(&self, file_id: FileId) {
|
||||
self.dependency_scheduler.schedule(file_id);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum CheckFileTask {
|
||||
OpenFile(CheckOpenFileTask),
|
||||
Dependency(CheckDependencyTask),
|
||||
}
|
||||
|
||||
impl CheckFileTask {
|
||||
/// Runs the task and returns the results for checking this file.
|
||||
fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
match self {
|
||||
Self::OpenFile(task) => task.run(context),
|
||||
Self::Dependency(task) => task.run(context),
|
||||
}
|
||||
}
|
||||
|
||||
fn file_id(&self) -> FileId {
|
||||
match self {
|
||||
CheckFileTask::OpenFile(task) => task.file_id,
|
||||
CheckFileTask::Dependency(task) => task.file_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Task to check an open file.
|
||||
|
||||
#[derive(Debug)]
|
||||
struct CheckOpenFileTask {
|
||||
file_id: FileId,
|
||||
}
|
||||
|
||||
impl CheckOpenFileTask {
|
||||
fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
context
|
||||
.program
|
||||
.check_file(self.file_id, &context.as_file_context())
|
||||
}
|
||||
}
|
||||
|
||||
/// Task to check a dependency file.
|
||||
#[derive(Debug)]
|
||||
struct CheckDependencyTask {
|
||||
file_id: FileId,
|
||||
}
|
||||
|
||||
impl CheckDependencyTask {
|
||||
fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
context
|
||||
.program
|
||||
.check_file(self.file_id, &context.as_file_context())
|
||||
}
|
||||
}
|
||||
|
||||
/// Executor that schedules the checking of individual program files.
|
||||
trait CheckExecutor {
|
||||
fn run(self, context: &mut CheckContext) -> QueryResult<()>;
|
||||
}
|
||||
|
||||
/// Executor that runs all check operations on the current thread.
|
||||
///
|
||||
/// The executor does not schedule dependencies for checking.
|
||||
/// The main motivation for scheduling dependencies
|
||||
/// in a multithreaded environment is to parse and index the dependencies concurrently.
|
||||
/// However, that doesn't make sense in a single threaded environment, because checking the dependencies then competes
/// with checking the open files. Checking dependencies in a single threaded environment is more likely
|
||||
/// to hurt performance because we end up analyzing files in their entirety, even if we only need to type check parts of them.
|
||||
#[derive(Debug, Default)]
|
||||
struct SingleThreadedExecutor;
|
||||
|
||||
impl CheckExecutor for SingleThreadedExecutor {
|
||||
fn run(self, context: &mut CheckContext) -> QueryResult<()> {
|
||||
let mut queue = context.check_open_files();
|
||||
|
||||
let noop_schedule_dependency = |_| {};
|
||||
|
||||
while let Some(file) = queue.pop() {
|
||||
context.program().cancelled()?;
|
||||
|
||||
let task_context = context.task_context(&noop_schedule_dependency);
|
||||
context.push_diagnostics(&file.run(&task_context)?);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Executor that runs the check operations on a thread pool.
|
||||
///
|
||||
/// The executor runs each check operation as its own task using a thread pool.
|
||||
///
|
||||
/// Unlike [`SingleThreadedExecutor`], this executor schedules dependencies for checking. It
|
||||
/// even schedules dependencies for checking when the thread pool size is 1 for a better debugging experience.
|
||||
#[derive(Debug, Default)]
|
||||
struct ThreadPoolExecutor;
|
||||
|
||||
impl CheckExecutor for ThreadPoolExecutor {
|
||||
fn run(self, context: &mut CheckContext) -> QueryResult<()> {
|
||||
let num_threads = current_num_threads();
|
||||
let single_threaded = num_threads == 1;
|
||||
let span = tracing::trace_span!("ThreadPoolExecutor::run", num_threads);
|
||||
let _ = span.enter();
|
||||
|
||||
let mut queue: Vec<_> = context
|
||||
.check_open_files()
|
||||
.into_iter()
|
||||
.map(CheckFileTask::OpenFile)
|
||||
.collect();
|
||||
|
||||
let (sender, receiver) = if single_threaded {
|
||||
// Use an unbounded queue for single threaded execution to prevent deadlocks
|
||||
// when a single file schedules multiple dependencies.
|
||||
crossbeam::channel::unbounded()
|
||||
} else {
|
||||
// Use a bounded queue to apply backpressure when the orchestration thread isn't able to keep
|
||||
// up processing messages from the worker threads.
|
||||
crossbeam::channel::bounded(num_threads)
|
||||
};
|
||||
|
||||
let schedule_sender = sender.clone();
|
||||
let schedule_dependency = move |file_id| {
|
||||
schedule_sender
|
||||
.send(ThreadPoolMessage::ScheduleDependency(file_id))
|
||||
.unwrap();
|
||||
};
|
||||
|
||||
let result = rayon::in_place_scope(|scope| {
|
||||
let mut pending = 0usize;
|
||||
|
||||
loop {
|
||||
context.program().cancelled()?;
|
||||
|
||||
// 1. Try to get a queued message first, to ensure there's always remaining space in the channel and the worker threads never block on send.
|
||||
// 2. Try to process a queued file
|
||||
// 3. If there's no queued file wait for the next incoming message.
|
||||
// 4. Exit if there are no more messages and no senders.
|
||||
let message = if let Ok(message) = receiver.try_recv() {
|
||||
message
|
||||
} else if let Some(task) = queue.pop() {
|
||||
pending += 1;
|
||||
|
||||
let task_context = context.task_context(&schedule_dependency);
|
||||
let sender = sender.clone();
|
||||
let task_span = tracing::trace_span!(
|
||||
parent: &span,
|
||||
"CheckFileTask::run",
|
||||
file_id = task.file_id().as_u32(),
|
||||
);
|
||||
|
||||
scope.spawn(move |_| {
|
||||
task_span.in_scope(|| match task.run(&task_context) {
|
||||
Ok(result) => {
|
||||
sender.send(ThreadPoolMessage::Completed(result)).unwrap();
|
||||
}
|
||||
Err(err) => sender.send(ThreadPoolMessage::Errored(err)).unwrap(),
|
||||
});
|
||||
});
|
||||
|
||||
// If this is a single threaded rayon thread pool, yield the current thread
|
||||
// or we never start processing the work items.
|
||||
if single_threaded {
|
||||
yield_local();
|
||||
}
|
||||
|
||||
continue;
|
||||
} else if let Ok(message) = receiver.recv() {
|
||||
message
|
||||
} else {
|
||||
break;
|
||||
};
|
||||
|
||||
match message {
|
||||
ThreadPoolMessage::ScheduleDependency(dependency) => {
|
||||
if let Some(task) = context.check_dependency(dependency) {
|
||||
queue.push(CheckFileTask::Dependency(task));
|
||||
}
|
||||
}
|
||||
ThreadPoolMessage::Completed(diagnostics) => {
|
||||
context.push_diagnostics(&diagnostics);
|
||||
pending -= 1;
|
||||
|
||||
if pending == 0 && queue.is_empty() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
ThreadPoolMessage::Errored(err) => {
|
||||
return Err(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ThreadPoolMessage {
|
||||
ScheduleDependency(FileId),
|
||||
Completed(Diagnostics),
|
||||
Errored(QueryError),
|
||||
}
|
||||
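// Illustrative aside (not part of this change): stripped of queries and dependency
// scheduling, the `ThreadPoolExecutor` loop above is "spawn one task per work item into a
// rayon scope, drain results from a bounded channel until every spawned task has reported
// back". A standalone sketch of that shape (assumes the `rayon` and `crossbeam-channel`
// crates; `check_in_parallel` is a hypothetical name):
fn check_in_parallel(files: Vec<u32>) -> Vec<String> {
    // The bounded capacity applies backpressure: workers pause once the collector falls behind.
    let (sender, receiver) = crossbeam_channel::bounded(rayon::current_num_threads());
    let mut diagnostics = Vec::new();

    rayon::in_place_scope(|scope| {
        let mut pending = 0usize;
        for file in files {
            pending += 1;
            let sender = sender.clone();
            scope.spawn(move |_| {
                // Stand-in for `Program::check_file`.
                sender.send(format!("checked file {file}")).unwrap();
            });
        }
        // Drain exactly one result per spawned task.
        while pending > 0 {
            diagnostics.push(receiver.recv().unwrap());
            pending -= 1;
        }
    });

    diagnostics
}

fn parallel_check_demo() {
    assert_eq!(check_in_parallel(vec![1, 2, 3]).len(), 3);
}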
275 crates/red_knot/src/program/mod.rs Normal file
@@ -0,0 +1,275 @@
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::db::{
|
||||
Database, Db, DbRuntime, DbWithJar, HasJar, HasJars, JarsStorage, LintDb, LintJar,
|
||||
ParallelDatabase, QueryResult, SemanticDb, SemanticJar, Snapshot, SourceDb, SourceJar, Upcast,
|
||||
};
|
||||
use crate::files::{FileId, Files};
|
||||
use crate::Workspace;
|
||||
|
||||
pub mod check;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Program {
|
||||
jars: JarsStorage<Program>,
|
||||
files: Files,
|
||||
workspace: Workspace,
|
||||
}
|
||||
|
||||
impl Program {
|
||||
pub fn new(workspace: Workspace) -> Self {
|
||||
Self {
|
||||
jars: JarsStorage::default(),
|
||||
files: Files::default(),
|
||||
workspace,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn apply_changes<I>(&mut self, changes: I)
|
||||
where
|
||||
I: IntoIterator<Item = FileWatcherChange>,
|
||||
{
|
||||
let mut aggregated_changes = AggregatedChanges::default();
|
||||
|
||||
aggregated_changes.extend(changes.into_iter().map(|change| FileChange {
|
||||
id: self.files.intern(&change.path),
|
||||
kind: change.kind,
|
||||
}));
|
||||
|
||||
let (source, semantic, lint) = self.jars_mut();
|
||||
for change in aggregated_changes.iter() {
|
||||
semantic.module_resolver.remove_module_by_file(change.id);
|
||||
semantic.semantic_indices.remove(&change.id);
|
||||
source.sources.remove(&change.id);
|
||||
source.parsed.remove(&change.id);
|
||||
// TODO: remove all dependent modules as well
|
||||
semantic.type_store.remove_module(change.id);
|
||||
lint.lint_syntax.remove(&change.id);
|
||||
lint.lint_semantic.remove(&change.id);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn files(&self) -> &Files {
|
||||
&self.files
|
||||
}
|
||||
|
||||
pub fn workspace(&self) -> &Workspace {
|
||||
&self.workspace
|
||||
}
|
||||
|
||||
pub fn workspace_mut(&mut self) -> &mut Workspace {
|
||||
&mut self.workspace
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceDb for Program {
|
||||
fn file_id(&self, path: &Path) -> FileId {
|
||||
self.files.intern(path)
|
||||
}
|
||||
|
||||
fn file_path(&self, file_id: FileId) -> Arc<Path> {
|
||||
self.files.path(file_id)
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithJar<SourceJar> for Program {}
|
||||
|
||||
impl SemanticDb for Program {}
|
||||
|
||||
impl DbWithJar<SemanticJar> for Program {}
|
||||
|
||||
impl LintDb for Program {}
|
||||
|
||||
impl DbWithJar<LintJar> for Program {}
|
||||
|
||||
impl Upcast<dyn SemanticDb> for Program {
|
||||
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn SourceDb> for Program {
|
||||
fn upcast(&self) -> &(dyn SourceDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn LintDb> for Program {
|
||||
fn upcast(&self) -> &(dyn LintDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Db for Program {}
|
||||
|
||||
impl Database for Program {
|
||||
fn runtime(&self) -> &DbRuntime {
|
||||
self.jars.runtime()
|
||||
}
|
||||
|
||||
fn runtime_mut(&mut self) -> &mut DbRuntime {
|
||||
self.jars.runtime_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl ParallelDatabase for Program {
|
||||
fn snapshot(&self) -> Snapshot<Self> {
|
||||
Snapshot::new(Self {
|
||||
jars: self.jars.snapshot(),
|
||||
files: self.files.snapshot(),
|
||||
workspace: self.workspace.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJars for Program {
|
||||
type Jars = (SourceJar, SemanticJar, LintJar);
|
||||
|
||||
fn jars(&self) -> QueryResult<&Self::Jars> {
|
||||
self.jars.jars()
|
||||
}
|
||||
|
||||
fn jars_mut(&mut self) -> &mut Self::Jars {
|
||||
self.jars.jars_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<SourceJar> for Program {
|
||||
fn jar(&self) -> QueryResult<&SourceJar> {
|
||||
Ok(&self.jars()?.0)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut SourceJar {
|
||||
&mut self.jars_mut().0
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<SemanticJar> for Program {
|
||||
fn jar(&self) -> QueryResult<&SemanticJar> {
|
||||
Ok(&self.jars()?.1)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut SemanticJar {
|
||||
&mut self.jars_mut().1
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<LintJar> for Program {
|
||||
fn jar(&self) -> QueryResult<&LintJar> {
|
||||
Ok(&self.jars()?.2)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut LintJar {
|
||||
&mut self.jars_mut().2
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FileWatcherChange {
|
||||
path: PathBuf,
|
||||
kind: FileChangeKind,
|
||||
}
|
||||
|
||||
impl FileWatcherChange {
|
||||
pub fn new(path: PathBuf, kind: FileChangeKind) -> Self {
|
||||
Self { path, kind }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
struct FileChange {
|
||||
id: FileId,
|
||||
kind: FileChangeKind,
|
||||
}
|
||||
|
||||
impl FileChange {
|
||||
fn file_id(self) -> FileId {
|
||||
self.id
|
||||
}
|
||||
|
||||
fn kind(self) -> FileChangeKind {
|
||||
self.kind
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
||||
pub enum FileChangeKind {
|
||||
Created,
|
||||
Modified,
|
||||
Deleted,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
struct AggregatedChanges {
|
||||
changes: FxHashMap<FileId, FileChangeKind>,
|
||||
}
|
||||
|
||||
impl AggregatedChanges {
|
||||
fn add(&mut self, change: FileChange) {
|
||||
match self.changes.entry(change.file_id()) {
|
||||
Entry::Occupied(mut entry) => {
|
||||
let merged = entry.get_mut();
|
||||
|
||||
match (merged, change.kind()) {
|
||||
(FileChangeKind::Created, FileChangeKind::Deleted) => {
|
||||
// Deletion after creation means that ruff never saw the file.
|
||||
entry.remove();
|
||||
}
|
||||
(FileChangeKind::Created, FileChangeKind::Modified) => {
|
||||
// No-op: for ruff, modifying a file that it doesn't yet know exists is still considered a creation.
|
||||
}
|
||||
|
||||
(FileChangeKind::Modified, FileChangeKind::Created) => {
|
||||
// Uhh, that should probably not happen. Continue considering it a modification.
|
||||
}
|
||||
|
||||
(FileChangeKind::Modified, FileChangeKind::Deleted) => {
|
||||
*entry.get_mut() = FileChangeKind::Deleted;
|
||||
}
|
||||
|
||||
(FileChangeKind::Deleted, FileChangeKind::Created) => {
|
||||
*entry.get_mut() = FileChangeKind::Modified;
|
||||
}
|
||||
|
||||
(FileChangeKind::Deleted, FileChangeKind::Modified) => {
|
||||
// That's weird, but let's consider it a modification.
|
||||
*entry.get_mut() = FileChangeKind::Modified;
|
||||
}
|
||||
|
||||
(FileChangeKind::Created, FileChangeKind::Created)
|
||||
| (FileChangeKind::Modified, FileChangeKind::Modified)
|
||||
| (FileChangeKind::Deleted, FileChangeKind::Deleted) => {
|
||||
// No-op transitions. Some of them should be impossible but we handle them anyway.
|
||||
}
|
||||
}
|
||||
}
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(change.kind());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn extend<I>(&mut self, changes: I)
|
||||
where
|
||||
I: IntoIterator<Item = FileChange>,
|
||||
{
|
||||
let iter = changes.into_iter();
|
||||
let (lower, _) = iter.size_hint();
|
||||
self.changes.reserve(lower);
|
||||
|
||||
for change in iter {
|
||||
self.add(change);
|
||||
}
|
||||
}
|
||||
|
||||
fn iter(&self) -> impl Iterator<Item = FileChange> + '_ {
|
||||
self.changes.iter().map(|(id, kind)| FileChange {
|
||||
id: *id,
|
||||
kind: *kind,
|
||||
})
|
||||
}
|
||||
}
|
||||
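// Illustrative aside (not part of this change): the match above is a small merge table
// over consecutive events for the same file. The same table as a standalone pure function,
// where `None` means the file can be dropped from the change set entirely (`merge_events`
// is a hypothetical name):
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EventKind { Created, Modified, Deleted }

fn merge_events(existing: EventKind, incoming: EventKind) -> Option<EventKind> {
    use EventKind::*;
    match (existing, incoming) {
        // Created then deleted: ruff never saw the file, forget it.
        (Created, Deleted) => None,
        // Modifying a file we only just learned about is still a creation.
        (Created, Modified) => Some(Created),
        // Deleted then re-created (or "modified") is indistinguishable from an edit.
        (Deleted, Created) | (Deleted, Modified) => Some(Modified),
        // A modified file that is then deleted ends up deleted.
        (Modified, Deleted) => Some(Deleted),
        // Everything else (including the odd Modified-then-Created case) keeps the existing kind.
        (existing, _) => Some(existing),
    }
}

fn merge_events_demo() {
    assert_eq!(merge_events(EventKind::Created, EventKind::Deleted), None);
    assert_eq!(merge_events(EventKind::Deleted, EventKind::Created), Some(EventKind::Modified));
    assert_eq!(merge_events(EventKind::Modified, EventKind::Created), Some(EventKind::Modified));
}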
@@ -1,68 +0,0 @@
|
||||
/// Enumeration of all supported Python versions
|
||||
///
|
||||
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
|
||||
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
|
||||
pub enum PythonVersion {
|
||||
#[value(name = "3.7")]
|
||||
Py37,
|
||||
#[value(name = "3.8")]
|
||||
Py38,
|
||||
#[default]
|
||||
#[value(name = "3.9")]
|
||||
Py39,
|
||||
#[value(name = "3.10")]
|
||||
Py310,
|
||||
#[value(name = "3.11")]
|
||||
Py311,
|
||||
#[value(name = "3.12")]
|
||||
Py312,
|
||||
#[value(name = "3.13")]
|
||||
Py313,
|
||||
}
|
||||
|
||||
impl PythonVersion {
|
||||
const fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Self::Py37 => "3.7",
|
||||
Self::Py38 => "3.8",
|
||||
Self::Py39 => "3.9",
|
||||
Self::Py310 => "3.10",
|
||||
Self::Py311 => "3.11",
|
||||
Self::Py312 => "3.12",
|
||||
Self::Py313 => "3.13",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for PythonVersion {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PythonVersion> for ruff_python_ast::PythonVersion {
|
||||
fn from(value: PythonVersion) -> Self {
|
||||
match value {
|
||||
PythonVersion::Py37 => Self::PY37,
|
||||
PythonVersion::Py38 => Self::PY38,
|
||||
PythonVersion::Py39 => Self::PY39,
|
||||
PythonVersion::Py310 => Self::PY310,
|
||||
PythonVersion::Py311 => Self::PY311,
|
||||
PythonVersion::Py312 => Self::PY312,
|
||||
PythonVersion::Py313 => Self::PY313,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::python_version::PythonVersion;
|
||||
|
||||
#[test]
|
||||
fn same_default_as_python_version() {
|
||||
assert_eq!(
|
||||
ruff_python_ast::PythonVersion::from(PythonVersion::default()),
|
||||
ruff_python_ast::PythonVersion::default()
|
||||
);
|
||||
}
|
||||
}
|
||||
882 crates/red_knot/src/semantic.rs Normal file
@@ -0,0 +1,882 @@
|
||||
use std::num::NonZeroU32;
|
||||
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::visitor::source_order::SourceOrderVisitor;
|
||||
use ruff_python_ast::AstNode;
|
||||
|
||||
use crate::ast_ids::{NodeKey, TypedNodeKey};
|
||||
use crate::cache::KeyValueCache;
|
||||
use crate::db::{QueryResult, SemanticDb, SemanticJar};
|
||||
use crate::files::FileId;
|
||||
use crate::module::Module;
|
||||
use crate::module::ModuleName;
|
||||
use crate::parse::parse;
|
||||
use crate::Name;
|
||||
pub(crate) use definitions::Definition;
|
||||
use definitions::{ImportDefinition, ImportFromDefinition};
|
||||
pub(crate) use flow_graph::ConstrainedDefinition;
|
||||
use flow_graph::{FlowGraph, FlowGraphBuilder, FlowNodeId, ReachableDefinitionsIterator};
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
use rustc_hash::FxHashMap;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::Arc;
|
||||
pub(crate) use symbol_table::{Dependency, SymbolId};
|
||||
use symbol_table::{ScopeId, ScopeKind, SymbolFlags, SymbolTable, SymbolTableBuilder};
|
||||
pub(crate) use types::{infer_definition_type, infer_symbol_public_type, Type, TypeStore};
|
||||
|
||||
mod definitions;
|
||||
mod flow_graph;
|
||||
mod symbol_table;
|
||||
mod types;
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub fn semantic_index(db: &dyn SemanticDb, file_id: FileId) -> QueryResult<Arc<SemanticIndex>> {
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
|
||||
jar.semantic_indices.get(&file_id, |_| {
|
||||
let parsed = parse(db.upcast(), file_id)?;
|
||||
Ok(Arc::from(SemanticIndex::from_ast(parsed.syntax())))
|
||||
})
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub fn resolve_global_symbol(
|
||||
db: &dyn SemanticDb,
|
||||
module: Module,
|
||||
name: &str,
|
||||
) -> QueryResult<Option<GlobalSymbolId>> {
|
||||
let file_id = module.path(db)?.file();
|
||||
let symbol_table = &semantic_index(db, file_id)?.symbol_table;
|
||||
let Some(symbol_id) = symbol_table.root_symbol_id_by_name(name) else {
|
||||
return Ok(None);
|
||||
};
|
||||
Ok(Some(GlobalSymbolId { file_id, symbol_id }))
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct ExpressionId;
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub struct GlobalSymbolId {
|
||||
pub(crate) file_id: FileId,
|
||||
pub(crate) symbol_id: SymbolId,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct SemanticIndex {
|
||||
symbol_table: SymbolTable,
|
||||
flow_graph: FlowGraph,
|
||||
expressions: FxHashMap<NodeKey, ExpressionId>,
|
||||
expressions_by_id: IndexVec<ExpressionId, NodeKey>,
|
||||
}
|
||||
|
||||
impl SemanticIndex {
|
||||
pub fn from_ast(module: &ast::ModModule) -> Self {
|
||||
let root_scope_id = SymbolTable::root_scope_id();
|
||||
let mut indexer = SemanticIndexer {
|
||||
symbol_table_builder: SymbolTableBuilder::new(),
|
||||
flow_graph_builder: FlowGraphBuilder::new(),
|
||||
scopes: vec![ScopeState {
|
||||
scope_id: root_scope_id,
|
||||
current_flow_node_id: FlowGraph::start(),
|
||||
}],
|
||||
expressions: FxHashMap::default(),
|
||||
expressions_by_id: IndexVec::default(),
|
||||
current_definition: None,
|
||||
};
|
||||
indexer.visit_body(&module.body);
|
||||
indexer.finish()
|
||||
}
|
||||
|
||||
fn resolve_expression_id<'a>(
|
||||
&self,
|
||||
ast: &'a ast::ModModule,
|
||||
expression_id: ExpressionId,
|
||||
) -> ast::AnyNodeRef<'a> {
|
||||
let node_key = self.expressions_by_id[expression_id];
|
||||
node_key
|
||||
.resolve(ast.as_any_node_ref())
|
||||
.expect("node to resolve")
|
||||
}
|
||||
|
||||
/// Return an iterator over all definitions of `symbol_id` reachable from `use_expr`. The value
|
||||
/// of `symbol_id` in `use_expr` must originate from one of the iterated definitions (or from
|
||||
/// an external reassignment of the name outside of this scope).
|
||||
pub fn reachable_definitions(
|
||||
&self,
|
||||
symbol_id: SymbolId,
|
||||
use_expr: &ast::Expr,
|
||||
) -> ReachableDefinitionsIterator {
|
||||
let expression_id = self.expression_id(use_expr);
|
||||
ReachableDefinitionsIterator::new(
|
||||
&self.flow_graph,
|
||||
symbol_id,
|
||||
self.flow_graph.for_expr(expression_id),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn expression_id(&self, expression: &ast::Expr) -> ExpressionId {
|
||||
self.expressions[&NodeKey::from_node(expression.into())]
|
||||
}
|
||||
|
||||
pub fn symbol_table(&self) -> &SymbolTable {
|
||||
&self.symbol_table
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ScopeState {
|
||||
scope_id: ScopeId,
|
||||
current_flow_node_id: FlowNodeId,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct SemanticIndexer {
|
||||
symbol_table_builder: SymbolTableBuilder,
|
||||
flow_graph_builder: FlowGraphBuilder,
|
||||
scopes: Vec<ScopeState>,
|
||||
/// the definition whose target(s) we are currently walking
|
||||
current_definition: Option<Definition>,
|
||||
expressions: FxHashMap<NodeKey, ExpressionId>,
|
||||
expressions_by_id: IndexVec<ExpressionId, NodeKey>,
|
||||
}
|
||||
|
||||
impl SemanticIndexer {
|
||||
pub(crate) fn finish(mut self) -> SemanticIndex {
|
||||
let SemanticIndexer {
|
||||
flow_graph_builder,
|
||||
symbol_table_builder,
|
||||
..
|
||||
} = self;
|
||||
self.expressions.shrink_to_fit();
|
||||
self.expressions_by_id.shrink_to_fit();
|
||||
SemanticIndex {
|
||||
flow_graph: flow_graph_builder.finish(),
|
||||
symbol_table: symbol_table_builder.finish(),
|
||||
expressions: self.expressions,
|
||||
expressions_by_id: self.expressions_by_id,
|
||||
}
|
||||
}
|
||||
|
||||
fn set_current_flow_node(&mut self, new_flow_node_id: FlowNodeId) {
|
||||
let scope_state = self.scopes.last_mut().expect("scope stack is never empty");
|
||||
scope_state.current_flow_node_id = new_flow_node_id;
|
||||
}
|
||||
|
||||
fn current_flow_node(&self) -> FlowNodeId {
|
||||
self.scopes
|
||||
.last()
|
||||
.expect("scope stack is never empty")
|
||||
.current_flow_node_id
|
||||
}
|
||||
|
||||
fn add_or_update_symbol(&mut self, identifier: &str, flags: SymbolFlags) -> SymbolId {
|
||||
self.symbol_table_builder
|
||||
.add_or_update_symbol(self.cur_scope(), identifier, flags)
|
||||
}
|
||||
|
||||
fn add_or_update_symbol_with_def(
|
||||
&mut self,
|
||||
identifier: &str,
|
||||
definition: Definition,
|
||||
) -> SymbolId {
|
||||
let symbol_id = self.add_or_update_symbol(identifier, SymbolFlags::IS_DEFINED);
|
||||
self.symbol_table_builder
|
||||
.add_definition(symbol_id, definition.clone());
|
||||
let new_flow_node_id =
|
||||
self.flow_graph_builder
|
||||
.add_definition(symbol_id, definition, self.current_flow_node());
|
||||
self.set_current_flow_node(new_flow_node_id);
|
||||
symbol_id
|
||||
}
|
||||
|
||||
fn push_scope(
|
||||
&mut self,
|
||||
name: &str,
|
||||
kind: ScopeKind,
|
||||
definition: Option<Definition>,
|
||||
defining_symbol: Option<SymbolId>,
|
||||
) -> ScopeId {
|
||||
let scope_id = self.symbol_table_builder.add_child_scope(
|
||||
self.cur_scope(),
|
||||
name,
|
||||
kind,
|
||||
definition,
|
||||
defining_symbol,
|
||||
);
|
||||
self.scopes.push(ScopeState {
|
||||
scope_id,
|
||||
current_flow_node_id: FlowGraph::start(),
|
||||
});
|
||||
scope_id
|
||||
}
|
||||
|
||||
fn pop_scope(&mut self) -> ScopeId {
|
||||
self.scopes
|
||||
.pop()
|
||||
.expect("Scope stack should never be empty")
|
||||
.scope_id
|
||||
}
|
||||
|
||||
fn cur_scope(&self) -> ScopeId {
|
||||
self.scopes
|
||||
.last()
|
||||
.expect("Scope stack should never be empty")
|
||||
.scope_id
|
||||
}
|
||||
|
||||
fn record_scope_for_node(&mut self, node_key: NodeKey, scope_id: ScopeId) {
|
||||
self.symbol_table_builder
|
||||
.record_scope_for_node(node_key, scope_id);
|
||||
}
|
||||
|
||||
fn insert_constraint(&mut self, expr: &ast::Expr) {
|
||||
let node_key = NodeKey::from_node(expr.into());
|
||||
let expression_id = self.expressions[&node_key];
|
||||
let constraint = self
|
||||
.flow_graph_builder
|
||||
.add_constraint(self.current_flow_node(), expression_id);
|
||||
self.set_current_flow_node(constraint);
|
||||
}
|
||||
|
||||
fn with_type_params(
|
||||
&mut self,
|
||||
name: &str,
|
||||
params: &Option<Box<ast::TypeParams>>,
|
||||
definition: Option<Definition>,
|
||||
defining_symbol: Option<SymbolId>,
|
||||
nested: impl FnOnce(&mut Self) -> ScopeId,
|
||||
) -> ScopeId {
|
||||
if let Some(type_params) = params {
|
||||
self.push_scope(name, ScopeKind::Annotation, definition, defining_symbol);
|
||||
for type_param in &type_params.type_params {
|
||||
let name = match type_param {
|
||||
ast::TypeParam::TypeVar(ast::TypeParamTypeVar { name, .. }) => name,
|
||||
ast::TypeParam::ParamSpec(ast::TypeParamParamSpec { name, .. }) => name,
|
||||
ast::TypeParam::TypeVarTuple(ast::TypeParamTypeVarTuple { name, .. }) => name,
|
||||
};
|
||||
self.add_or_update_symbol(name, SymbolFlags::IS_DEFINED);
|
||||
}
|
||||
}
|
||||
let scope_id = nested(self);
|
||||
if params.is_some() {
|
||||
self.pop_scope();
|
||||
}
|
||||
scope_id
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceOrderVisitor<'_> for SemanticIndexer {
|
||||
fn visit_expr(&mut self, expr: &ast::Expr) {
|
||||
let node_key = NodeKey::from_node(expr.into());
|
||||
let expression_id = self.expressions_by_id.push(node_key);
|
||||
|
||||
let flow_expression_id = self
|
||||
.flow_graph_builder
|
||||
.record_expr(self.current_flow_node());
|
||||
debug_assert_eq!(expression_id, flow_expression_id);
|
||||
|
||||
let symbol_expression_id = self
|
||||
.symbol_table_builder
|
||||
.record_expression(self.cur_scope());
|
||||
|
||||
debug_assert_eq!(expression_id, symbol_expression_id);
|
||||
|
||||
self.expressions.insert(node_key, expression_id);
|
||||
|
||||
match expr {
|
||||
ast::Expr::Name(ast::ExprName { id, ctx, .. }) => {
|
||||
let flags = match ctx {
|
||||
ast::ExprContext::Load => SymbolFlags::IS_USED,
|
||||
ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
|
||||
ast::ExprContext::Del => SymbolFlags::IS_DEFINED,
|
||||
ast::ExprContext::Invalid => SymbolFlags::empty(),
|
||||
};
|
||||
self.add_or_update_symbol(id, flags);
|
||||
if flags.contains(SymbolFlags::IS_DEFINED) {
|
||||
if let Some(curdef) = self.current_definition.clone() {
|
||||
self.add_or_update_symbol_with_def(id, curdef);
|
||||
}
|
||||
}
|
||||
ast::visitor::source_order::walk_expr(self, expr);
|
||||
}
|
||||
ast::Expr::Named(node) => {
|
||||
debug_assert!(self.current_definition.is_none());
|
||||
self.current_definition =
|
||||
Some(Definition::NamedExpr(TypedNodeKey::from_node(node)));
|
||||
// TODO walrus in comprehensions is implicitly nonlocal
|
||||
self.visit_expr(&node.target);
|
||||
self.current_definition = None;
|
||||
self.visit_expr(&node.value);
|
||||
}
|
||||
ast::Expr::If(ast::ExprIf {
|
||||
body, test, orelse, ..
|
||||
}) => {
|
||||
// TODO detect statically known truthy or falsy test (via type inference, not naive
|
||||
// AST inspection, so we can't simplify here, need to record test expression in CFG
|
||||
// for later checking)
|
||||
|
||||
self.visit_expr(test);
|
||||
|
||||
let if_branch = self.flow_graph_builder.add_branch(self.current_flow_node());
|
||||
|
||||
self.set_current_flow_node(if_branch);
|
||||
self.insert_constraint(test);
|
||||
self.visit_expr(body);
|
||||
|
||||
let post_body = self.current_flow_node();
|
||||
|
||||
self.set_current_flow_node(if_branch);
|
||||
self.visit_expr(orelse);
|
||||
|
||||
let post_else = self
|
||||
.flow_graph_builder
|
||||
.add_phi(self.current_flow_node(), post_body);
|
||||
|
||||
self.set_current_flow_node(post_else);
|
||||
}
|
||||
_ => {
|
||||
ast::visitor::source_order::walk_expr(self, expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_stmt(&mut self, stmt: &ast::Stmt) {
|
||||
// TODO need to capture more definition statements here
|
||||
match stmt {
|
||||
ast::Stmt::ClassDef(node) => {
|
||||
let node_key = TypedNodeKey::from_node(node);
|
||||
let def = Definition::ClassDef(node_key.clone());
|
||||
let symbol_id = self.add_or_update_symbol_with_def(&node.name, def.clone());
|
||||
for decorator in &node.decorator_list {
|
||||
self.visit_decorator(decorator);
|
||||
}
|
||||
let scope_id = self.with_type_params(
|
||||
&node.name,
|
||||
&node.type_params,
|
||||
Some(def.clone()),
|
||||
Some(symbol_id),
|
||||
|indexer| {
|
||||
if let Some(arguments) = &node.arguments {
|
||||
indexer.visit_arguments(arguments);
|
||||
}
|
||||
let scope_id = indexer.push_scope(
|
||||
&node.name,
|
||||
ScopeKind::Class,
|
||||
Some(def.clone()),
|
||||
Some(symbol_id),
|
||||
);
|
||||
indexer.visit_body(&node.body);
|
||||
indexer.pop_scope();
|
||||
scope_id
|
||||
},
|
||||
);
|
||||
self.record_scope_for_node(*node_key.erased(), scope_id);
|
||||
}
|
||||
ast::Stmt::FunctionDef(node) => {
|
||||
let node_key = TypedNodeKey::from_node(node);
|
||||
let def = Definition::FunctionDef(node_key.clone());
|
||||
let symbol_id = self.add_or_update_symbol_with_def(&node.name, def.clone());
|
||||
for decorator in &node.decorator_list {
|
||||
self.visit_decorator(decorator);
|
||||
}
|
||||
let scope_id = self.with_type_params(
|
||||
&node.name,
|
||||
&node.type_params,
|
||||
Some(def.clone()),
|
||||
Some(symbol_id),
|
||||
|indexer| {
|
||||
indexer.visit_parameters(&node.parameters);
|
||||
for expr in &node.returns {
|
||||
indexer.visit_annotation(expr);
|
||||
}
|
||||
let scope_id = indexer.push_scope(
|
||||
&node.name,
|
||||
ScopeKind::Function,
|
||||
Some(def.clone()),
|
||||
Some(symbol_id),
|
||||
);
|
||||
indexer.visit_body(&node.body);
|
||||
indexer.pop_scope();
|
||||
scope_id
|
||||
},
|
||||
);
|
||||
self.record_scope_for_node(*node_key.erased(), scope_id);
|
||||
}
|
||||
ast::Stmt::Import(ast::StmtImport { names, .. }) => {
|
||||
for alias in names {
|
||||
let symbol_name = if let Some(asname) = &alias.asname {
|
||||
asname.id.as_str()
|
||||
} else {
|
||||
alias.name.id.split('.').next().unwrap()
|
||||
};
|
||||
|
||||
let module = ModuleName::new(&alias.name.id);
|
||||
|
||||
let def = Definition::Import(ImportDefinition {
|
||||
module: module.clone(),
|
||||
});
|
||||
self.add_or_update_symbol_with_def(symbol_name, def);
|
||||
self.symbol_table_builder
|
||||
.add_dependency(Dependency::Module(module));
|
||||
}
|
||||
}
|
||||
ast::Stmt::ImportFrom(ast::StmtImportFrom {
|
||||
module,
|
||||
names,
|
||||
level,
|
||||
..
|
||||
}) => {
|
||||
let module = module.as_ref().map(|m| ModuleName::new(&m.id));
|
||||
|
||||
for alias in names {
|
||||
let symbol_name = if let Some(asname) = &alias.asname {
|
||||
asname.id.as_str()
|
||||
} else {
|
||||
alias.name.id.as_str()
|
||||
};
|
||||
let def = Definition::ImportFrom(ImportFromDefinition {
|
||||
module: module.clone(),
|
||||
name: Name::new(&alias.name.id),
|
||||
level: *level,
|
||||
});
|
||||
self.add_or_update_symbol_with_def(symbol_name, def);
|
||||
}
|
||||
|
||||
let dependency = if let Some(module) = module {
|
||||
match NonZeroU32::new(*level) {
|
||||
Some(level) => Dependency::Relative {
|
||||
level,
|
||||
module: Some(module),
|
||||
},
|
||||
None => Dependency::Module(module),
|
||||
}
|
||||
} else {
|
||||
Dependency::Relative {
|
||||
level: NonZeroU32::new(*level)
|
||||
.expect("Import without a module to have a level > 0"),
|
||||
module,
|
||||
}
|
||||
};
|
||||
|
||||
self.symbol_table_builder.add_dependency(dependency);
|
||||
}
|
||||
ast::Stmt::Assign(node) => {
|
||||
debug_assert!(self.current_definition.is_none());
|
||||
self.visit_expr(&node.value);
|
||||
self.current_definition =
|
||||
Some(Definition::Assignment(TypedNodeKey::from_node(node)));
|
||||
for expr in &node.targets {
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
|
||||
self.current_definition = None;
|
||||
}
|
||||
ast::Stmt::If(node) => {
|
||||
// TODO detect statically known truthy or falsy test (via type inference, not naive
|
||||
// AST inspection, so we can't simplify here, need to record test expression in CFG
|
||||
// for later checking)
|
||||
|
||||
// we visit the if "test" condition first regardless
|
||||
self.visit_expr(&node.test);
|
||||
|
||||
// create branch node: does the if test pass or not?
|
||||
let if_branch = self.flow_graph_builder.add_branch(self.current_flow_node());
|
||||
|
||||
// visit the body of the `if` clause
|
||||
self.set_current_flow_node(if_branch);
|
||||
self.insert_constraint(&node.test);
|
||||
self.visit_body(&node.body);
|
||||
|
||||
// Flow node for the last if/elif condition branch; represents the "no branch
|
||||
// taken yet" possibility (where "taking a branch" means that the condition in an
|
||||
// if or elif evaluated to true and control flow went into that clause).
|
||||
let mut prior_branch = if_branch;
|
||||
|
||||
// Flow node for the state after the prior if/elif/else clause; represents "we have
|
||||
// taken one of the branches up to this point." Initially set to the post-if-clause
|
||||
// state, later will be set to the phi node joining that possible path with the
|
||||
// possibility that we took a later if/elif/else clause instead.
|
||||
let mut post_prior_clause = self.current_flow_node();
|
||||
|
||||
// Flag to mark if the final clause is an "else" -- if so, that means the "match no
|
||||
// clauses" path is not possible, we have to go through one of the clauses.
|
||||
let mut last_branch_is_else = false;
|
||||
|
||||
for clause in &node.elif_else_clauses {
|
||||
if let Some(test) = &clause.test {
|
||||
self.visit_expr(test);
|
||||
// This is an elif clause. Create a new branch node. Its predecessor is the
|
||||
// previous branch node, because we can only take one branch in an entire
|
||||
// if/elif/else chain, so if we take this branch, it can only be because we
|
||||
// didn't take the previous one.
|
||||
prior_branch = self.flow_graph_builder.add_branch(prior_branch);
|
||||
self.set_current_flow_node(prior_branch);
|
||||
self.insert_constraint(test);
|
||||
} else {
|
||||
// This is an else clause. No need to create a branch node; there's no
|
||||
// branch here, if we haven't taken any previous branch, we definitely go
|
||||
// into the "else" clause.
|
||||
self.set_current_flow_node(prior_branch);
|
||||
last_branch_is_else = true;
|
||||
}
|
||||
self.visit_elif_else_clause(clause);
|
||||
// Update `post_prior_clause` to a new phi node joining the possibility that we
|
||||
// took any of the previous branches with the possibility that we took the one
|
||||
// just visited.
|
||||
post_prior_clause = self
|
||||
.flow_graph_builder
|
||||
.add_phi(self.current_flow_node(), post_prior_clause);
|
||||
}
|
||||
|
||||
if !last_branch_is_else {
|
||||
// Final branch was not an "else", which means it's possible we took zero
|
||||
// branches in the entire if/elif chain, so we need one more phi node to join
|
||||
// the "no branches taken" possibility.
|
||||
post_prior_clause = self
|
||||
.flow_graph_builder
|
||||
.add_phi(post_prior_clause, prior_branch);
|
||||
}
|
||||
|
||||
// Onward, with current flow node set to our final Phi node.
|
||||
self.set_current_flow_node(post_prior_clause);
|
||||
}
|
||||
_ => {
|
||||
ast::visitor::source_order::walk_stmt(self, stmt);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
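// Illustrative aside (not part of this change): the `Stmt::If` handling above creates one
// branch node per tested condition and a chain of phi nodes joining the paths back
// together. For `if a: x = 1` / `elif b: x = 2` (no `else`), the joins look roughly like
// the toy arena below -- this is not the crate's `FlowGraphBuilder` API, just the shape
// of the graph it produces:
#[derive(Debug)]
enum ToyFlowNode {
    Start,
    /// Control flow may or may not enter the clause guarded by this condition.
    Branch { predecessor: usize },
    /// A definition of a symbol, reachable from `predecessor`.
    Definition { name: &'static str, predecessor: usize },
    /// A join of two possible predecessors.
    Phi { first: usize, second: usize },
}

fn push(nodes: &mut Vec<ToyFlowNode>, node: ToyFlowNode) -> usize {
    nodes.push(node);
    nodes.len() - 1
}

fn toy_if_elif_graph() -> Vec<ToyFlowNode> {
    let mut nodes = vec![ToyFlowNode::Start];

    // if a: x = 1
    let if_branch = push(&mut nodes, ToyFlowNode::Branch { predecessor: 0 });
    let post_if_body = push(&mut nodes, ToyFlowNode::Definition { name: "x", predecessor: if_branch });

    // elif b: x = 2  -- the elif's branch hangs off the *prior branch*, not off the if body.
    let elif_branch = push(&mut nodes, ToyFlowNode::Branch { predecessor: if_branch });
    let post_elif_body = push(&mut nodes, ToyFlowNode::Definition { name: "x", predecessor: elif_branch });

    // Join "took the if" with "took the elif" ...
    let post_clauses = push(&mut nodes, ToyFlowNode::Phi { first: post_elif_body, second: post_if_body });
    // ... and, because there is no `else`, also join with "took neither branch".
    push(&mut nodes, ToyFlowNode::Phi { first: post_clauses, second: elif_branch });

    nodes
}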
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SemanticIndexStorage(KeyValueCache<FileId, Arc<SemanticIndex>>);
|
||||
|
||||
impl Deref for SemanticIndexStorage {
|
||||
type Target = KeyValueCache<FileId, Arc<SemanticIndex>>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for SemanticIndexStorage {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::semantic::symbol_table::{Symbol, SymbolIterator};
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::ModModule;
|
||||
use ruff_python_parser::{Mode, Parsed};
|
||||
|
||||
use super::{Definition, ScopeKind, SemanticIndex, SymbolId};
|
||||
|
||||
fn parse(code: &str) -> Parsed<ModModule> {
|
||||
ruff_python_parser::parse_unchecked(code, Mode::Module)
|
||||
.try_into_module()
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
fn names<I>(it: SymbolIterator<I>) -> Vec<&str>
|
||||
where
|
||||
I: Iterator<Item = SymbolId>,
|
||||
{
|
||||
let mut symbols: Vec<_> = it.map(Symbol::name).collect();
|
||||
symbols.sort_unstable();
|
||||
symbols
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty() {
|
||||
let parsed = parse("");
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()).len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple() {
|
||||
let parsed = parse("x");
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["x"]);
|
||||
assert_eq!(
|
||||
table
|
||||
.definitions(table.root_symbol_id_by_name("x").unwrap())
|
||||
.len(),
|
||||
0
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn annotation_only() {
|
||||
let parsed = parse("x: int");
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["int", "x"]);
|
||||
// TODO record definition
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn import() {
|
||||
let parsed = parse("import foo");
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["foo"]);
|
||||
assert_eq!(
|
||||
table
|
||||
.definitions(table.root_symbol_id_by_name("foo").unwrap())
|
||||
.len(),
|
||||
1
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn import_sub() {
|
||||
let parsed = parse("import foo.bar");
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["foo"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn import_as() {
|
||||
let parsed = parse("import foo.bar as baz");
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["baz"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn import_from() {
|
||||
let parsed = parse("from bar import foo");
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["foo"]);
|
||||
assert_eq!(
|
||||
table
|
||||
.definitions(table.root_symbol_id_by_name("foo").unwrap())
|
||||
.len(),
|
||||
1
|
||||
);
|
||||
assert!(
|
||||
table.root_symbol_id_by_name("foo").is_some_and(|sid| {
|
||||
let s = sid.symbol(&table);
|
||||
s.is_defined() || !s.is_used()
|
||||
}),
|
||||
"symbols that are defined get the defined flag"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn assign() {
|
||||
let parsed = parse("x = foo");
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["foo", "x"]);
|
||||
assert_eq!(
|
||||
table
|
||||
.definitions(table.root_symbol_id_by_name("x").unwrap())
|
||||
.len(),
|
||||
1
|
||||
);
|
||||
assert!(
|
||||
table.root_symbol_id_by_name("foo").is_some_and(|sid| {
|
||||
let s = sid.symbol(&table);
|
||||
!s.is_defined() && s.is_used()
|
||||
}),
|
||||
"a symbol used but not defined in a scope should have only the used flag"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn class_scope() {
|
||||
let parsed = parse(
|
||||
"
|
||||
class C:
|
||||
x = 1
|
||||
y = 2
|
||||
",
|
||||
);
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["C", "y"]);
|
||||
let scopes = table.root_child_scope_ids();
|
||||
assert_eq!(scopes.len(), 1);
|
||||
let c_scope = scopes[0].scope(&table);
|
||||
assert_eq!(c_scope.kind(), ScopeKind::Class);
|
||||
assert_eq!(c_scope.name(), "C");
|
||||
assert_eq!(names(table.symbols_for_scope(scopes[0])), vec!["x"]);
|
||||
assert_eq!(
|
||||
table
|
||||
.definitions(table.root_symbol_id_by_name("C").unwrap())
|
||||
.len(),
|
||||
1
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn func_scope() {
|
||||
let parsed = parse(
|
||||
"
|
||||
def func():
|
||||
x = 1
|
||||
y = 2
|
||||
",
|
||||
);
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["func", "y"]);
|
||||
let scopes = table.root_child_scope_ids();
|
||||
assert_eq!(scopes.len(), 1);
|
||||
let func_scope = scopes[0].scope(&table);
|
||||
assert_eq!(func_scope.kind(), ScopeKind::Function);
|
||||
assert_eq!(func_scope.name(), "func");
|
||||
assert_eq!(names(table.symbols_for_scope(scopes[0])), vec!["x"]);
|
||||
assert_eq!(
|
||||
table
|
||||
.definitions(table.root_symbol_id_by_name("func").unwrap())
|
||||
.len(),
|
||||
1
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dupes() {
|
||||
let parsed = parse(
|
||||
"
|
||||
def func():
|
||||
x = 1
|
||||
def func():
|
||||
y = 2
|
||||
",
|
||||
);
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["func"]);
|
||||
let scopes = table.root_child_scope_ids();
|
||||
assert_eq!(scopes.len(), 2);
|
||||
let func_scope_1 = scopes[0].scope(&table);
|
||||
let func_scope_2 = scopes[1].scope(&table);
|
||||
assert_eq!(func_scope_1.kind(), ScopeKind::Function);
|
||||
assert_eq!(func_scope_1.name(), "func");
|
||||
assert_eq!(func_scope_2.kind(), ScopeKind::Function);
|
||||
assert_eq!(func_scope_2.name(), "func");
|
||||
assert_eq!(names(table.symbols_for_scope(scopes[0])), vec!["x"]);
|
||||
assert_eq!(names(table.symbols_for_scope(scopes[1])), vec!["y"]);
|
||||
assert_eq!(
|
||||
table
|
||||
.definitions(table.root_symbol_id_by_name("func").unwrap())
|
||||
.len(),
|
||||
2
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generic_func() {
|
||||
let parsed = parse(
|
||||
"
|
||||
def func[T]():
|
||||
x = 1
|
||||
",
|
||||
);
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["func"]);
|
||||
let scopes = table.root_child_scope_ids();
|
||||
assert_eq!(scopes.len(), 1);
|
||||
let ann_scope_id = scopes[0];
|
||||
let ann_scope = ann_scope_id.scope(&table);
|
||||
assert_eq!(ann_scope.kind(), ScopeKind::Annotation);
|
||||
assert_eq!(ann_scope.name(), "func");
|
||||
assert_eq!(names(table.symbols_for_scope(ann_scope_id)), vec!["T"]);
|
||||
let scopes = table.child_scope_ids_of(ann_scope_id);
|
||||
assert_eq!(scopes.len(), 1);
|
||||
let func_scope_id = scopes[0];
|
||||
let func_scope = func_scope_id.scope(&table);
|
||||
assert_eq!(func_scope.kind(), ScopeKind::Function);
|
||||
assert_eq!(func_scope.name(), "func");
|
||||
assert_eq!(names(table.symbols_for_scope(func_scope_id)), vec!["x"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generic_class() {
|
||||
let parsed = parse(
|
||||
"
|
||||
class C[T]:
|
||||
x = 1
|
||||
",
|
||||
);
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["C"]);
|
||||
let scopes = table.root_child_scope_ids();
|
||||
assert_eq!(scopes.len(), 1);
|
||||
let ann_scope_id = scopes[0];
|
||||
let ann_scope = ann_scope_id.scope(&table);
|
||||
assert_eq!(ann_scope.kind(), ScopeKind::Annotation);
|
||||
assert_eq!(ann_scope.name(), "C");
|
||||
assert_eq!(names(table.symbols_for_scope(ann_scope_id)), vec!["T"]);
|
||||
assert!(
|
||||
table
|
||||
.symbol_by_name(ann_scope_id, "T")
|
||||
.is_some_and(|s| s.is_defined() && !s.is_used()),
|
||||
"type parameters are defined by the scope that introduces them"
|
||||
);
|
||||
let scopes = table.child_scope_ids_of(ann_scope_id);
|
||||
assert_eq!(scopes.len(), 1);
|
||||
let func_scope_id = scopes[0];
|
||||
let func_scope = func_scope_id.scope(&table);
|
||||
assert_eq!(func_scope.kind(), ScopeKind::Class);
|
||||
assert_eq!(func_scope.name(), "C");
|
||||
assert_eq!(names(table.symbols_for_scope(func_scope_id)), vec!["x"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reachability_trivial() {
|
||||
let parsed = parse("x = 1; x");
|
||||
let ast = parsed.syntax();
|
||||
let index = SemanticIndex::from_ast(ast);
|
||||
let table = &index.symbol_table;
|
||||
let x_sym = table
|
||||
.root_symbol_id_by_name("x")
|
||||
.expect("x symbol should exist");
|
||||
let ast::Stmt::Expr(ast::StmtExpr { value: x_use, .. }) = &ast.body[1] else {
|
||||
panic!("should be an expr")
|
||||
};
|
||||
let x_defs: Vec<_> = index
|
||||
.reachable_definitions(x_sym, x_use)
|
||||
.map(|constrained_definition| constrained_definition.definition)
|
||||
.collect();
|
||||
assert_eq!(x_defs.len(), 1);
|
||||
let Definition::Assignment(node_key) = &x_defs[0] else {
|
||||
panic!("def should be an assignment")
|
||||
};
|
||||
let Some(def_node) = node_key.resolve(ast.into()) else {
|
||||
panic!("node key should resolve")
|
||||
};
|
||||
let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
|
||||
value: ast::Number::Int(num),
|
||||
..
|
||||
}) = &*def_node.value
|
||||
else {
|
||||
panic!("should be a number literal")
|
||||
};
|
||||
assert_eq!(*num, 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expression_scope() {
|
||||
let parsed = parse("x = 1;\ndef test():\n y = 4");
|
||||
let ast = parsed.syntax();
|
||||
let index = SemanticIndex::from_ast(ast);
|
||||
let table = &index.symbol_table;
|
||||
|
||||
let x_sym = table
|
||||
.root_symbol_by_name("x")
|
||||
.expect("x symbol should exist");
|
||||
|
||||
let x_stmt = ast.body[0].as_assign_stmt().unwrap();
|
||||
|
||||
let x_id = index.expression_id(&x_stmt.targets[0]);
|
||||
|
||||
assert_eq!(table.scope_of_expression(x_id).kind(), ScopeKind::Module);
|
||||
assert_eq!(table.scope_id_of_expression(x_id), x_sym.scope_id());
|
||||
|
||||
let def = ast.body[1].as_function_def_stmt().unwrap();
|
||||
let y_stmt = def.body[0].as_assign_stmt().unwrap();
|
||||
let y_id = index.expression_id(&y_stmt.targets[0]);
|
||||
|
||||
assert_eq!(table.scope_of_expression(y_id).kind(), ScopeKind::Function);
|
||||
}
|
||||
}
|
||||
52
crates/red_knot/src/semantic/definitions.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
use crate::ast_ids::TypedNodeKey;
|
||||
use crate::semantic::ModuleName;
|
||||
use crate::Name;
|
||||
use ruff_python_ast as ast;
|
||||
|
||||
// TODO storing TypedNodeKey for definitions means we have to search to find them again in the AST;
|
||||
// this is at best O(log n). If looking up definitions is a bottleneck we should look for
|
||||
// alternatives here.
|
||||
// TODO intern Definitions in SymbolTable and reference using IDs?
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum Definition {
|
||||
// For the import cases, we don't need reference to any arbitrary AST subtrees (annotations,
|
||||
// RHS), and referencing just the import statement node is imprecise (a single import statement
|
||||
// can assign many symbols, we'd have to re-search for the one we care about), so we just copy
|
||||
// the small amount of information we need from the AST.
|
||||
Import(ImportDefinition),
|
||||
ImportFrom(ImportFromDefinition),
|
||||
ClassDef(TypedNodeKey<ast::StmtClassDef>),
|
||||
FunctionDef(TypedNodeKey<ast::StmtFunctionDef>),
|
||||
Assignment(TypedNodeKey<ast::StmtAssign>),
|
||||
AnnotatedAssignment(TypedNodeKey<ast::StmtAnnAssign>),
|
||||
NamedExpr(TypedNodeKey<ast::ExprNamed>),
|
||||
/// represents the implicit initial definition of every name as "unbound"
|
||||
Unbound,
|
||||
// TODO with statements, except handlers, function args...
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ImportDefinition {
|
||||
pub module: ModuleName,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ImportFromDefinition {
|
||||
pub module: Option<ModuleName>,
|
||||
pub name: Name,
|
||||
pub level: u32,
|
||||
}
|
||||
|
||||
impl ImportFromDefinition {
|
||||
pub fn module(&self) -> Option<&ModuleName> {
|
||||
self.module.as_ref()
|
||||
}
|
||||
|
||||
pub fn name(&self) -> &Name {
|
||||
&self.name
|
||||
}
|
||||
|
||||
pub fn level(&self) -> u32 {
|
||||
self.level
|
||||
}
|
||||
}
|
||||
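As a quick illustration of how these structs are meant to be filled in (a sketch, not part of the diff; the exact builder behaviour is an assumption), a statement like `from b import C as D` would give the symbol `D` a definition along these lines:

// Assumed mapping: the source name `C` is recorded here, the alias `D` is the
// symbol-table entry; `level` counts leading dots (0 for an absolute import).
let d_definition = Definition::ImportFrom(ImportFromDefinition {
    module: Some(ModuleName::new("b")),
    name: Name::new("C"),
    level: 0,
});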
270
crates/red_knot/src/semantic/flow_graph.rs
Normal file
@@ -0,0 +1,270 @@
|
||||
use super::symbol_table::SymbolId;
|
||||
use crate::semantic::{Definition, ExpressionId};
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
use std::iter::FusedIterator;
|
||||
use std::ops::Range;
|
||||
|
||||
#[newtype_index]
|
||||
pub struct FlowNodeId;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum FlowNode {
|
||||
Start,
|
||||
Definition(DefinitionFlowNode),
|
||||
Branch(BranchFlowNode),
|
||||
Phi(PhiFlowNode),
|
||||
Constraint(ConstraintFlowNode),
|
||||
}
|
||||
|
||||
/// A point in control flow where a symbol is defined
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct DefinitionFlowNode {
|
||||
symbol_id: SymbolId,
|
||||
definition: Definition,
|
||||
predecessor: FlowNodeId,
|
||||
}
|
||||
|
||||
/// A branch in control flow
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct BranchFlowNode {
|
||||
predecessor: FlowNodeId,
|
||||
}
|
||||
|
||||
/// A join point where control flow paths come together
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct PhiFlowNode {
|
||||
first_predecessor: FlowNodeId,
|
||||
second_predecessor: FlowNodeId,
|
||||
}
|
||||
|
||||
/// A branch test which may apply constraints to a symbol's type
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct ConstraintFlowNode {
|
||||
predecessor: FlowNodeId,
|
||||
test_expression: ExpressionId,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct FlowGraph {
|
||||
flow_nodes_by_id: IndexVec<FlowNodeId, FlowNode>,
|
||||
expression_map: IndexVec<ExpressionId, FlowNodeId>,
|
||||
}
|
||||
|
||||
impl FlowGraph {
|
||||
pub fn start() -> FlowNodeId {
|
||||
FlowNodeId::from_usize(0)
|
||||
}
|
||||
|
||||
pub fn for_expr(&self, expr: ExpressionId) -> FlowNodeId {
|
||||
self.expression_map[expr]
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct FlowGraphBuilder {
|
||||
flow_graph: FlowGraph,
|
||||
}
|
||||
|
||||
impl FlowGraphBuilder {
|
||||
pub(crate) fn new() -> Self {
|
||||
let mut graph = FlowGraph {
|
||||
flow_nodes_by_id: IndexVec::default(),
|
||||
expression_map: IndexVec::default(),
|
||||
};
|
||||
graph.flow_nodes_by_id.push(FlowNode::Start);
|
||||
Self { flow_graph: graph }
|
||||
}
|
||||
|
||||
pub(crate) fn add(&mut self, node: FlowNode) -> FlowNodeId {
|
||||
self.flow_graph.flow_nodes_by_id.push(node)
|
||||
}
|
||||
|
||||
pub(crate) fn add_definition(
|
||||
&mut self,
|
||||
symbol_id: SymbolId,
|
||||
definition: Definition,
|
||||
predecessor: FlowNodeId,
|
||||
) -> FlowNodeId {
|
||||
self.add(FlowNode::Definition(DefinitionFlowNode {
|
||||
symbol_id,
|
||||
definition,
|
||||
predecessor,
|
||||
}))
|
||||
}
|
||||
|
||||
pub(crate) fn add_branch(&mut self, predecessor: FlowNodeId) -> FlowNodeId {
|
||||
self.add(FlowNode::Branch(BranchFlowNode { predecessor }))
|
||||
}
|
||||
|
||||
pub(crate) fn add_phi(
|
||||
&mut self,
|
||||
first_predecessor: FlowNodeId,
|
||||
second_predecessor: FlowNodeId,
|
||||
) -> FlowNodeId {
|
||||
self.add(FlowNode::Phi(PhiFlowNode {
|
||||
first_predecessor,
|
||||
second_predecessor,
|
||||
}))
|
||||
}
|
||||
|
||||
pub(crate) fn add_constraint(
|
||||
&mut self,
|
||||
predecessor: FlowNodeId,
|
||||
test_expression: ExpressionId,
|
||||
) -> FlowNodeId {
|
||||
self.add(FlowNode::Constraint(ConstraintFlowNode {
|
||||
predecessor,
|
||||
test_expression,
|
||||
}))
|
||||
}
|
||||
|
||||
pub(super) fn record_expr(&mut self, node_id: FlowNodeId) -> ExpressionId {
|
||||
self.flow_graph.expression_map.push(node_id)
|
||||
}
|
||||
|
||||
pub(super) fn finish(mut self) -> FlowGraph {
|
||||
self.flow_graph.flow_nodes_by_id.shrink_to_fit();
|
||||
self.flow_graph.expression_map.shrink_to_fit();
|
||||
self.flow_graph
|
||||
}
|
||||
}
|
||||
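To make the intended use of the builder concrete, here is a minimal sketch (not part of this diff) of lowering an `if flag: x = 1` / `else: x = 2` statement; the symbol, expression, and definition values are placeholders that the semantic index builder would supply:

fn lower_if_else_sketch(
    builder: &mut FlowGraphBuilder,
    x: SymbolId,
    flag_test: ExpressionId,
    x_eq_1: Definition,
    x_eq_2: Definition,
) -> FlowNodeId {
    // Both arms fork off the same branch node.
    let branch = builder.add_branch(FlowGraph::start());
    // The branch test constrains each arm before its definition applies.
    let then_arm = builder.add_constraint(branch, flag_test);
    let then_def = builder.add_definition(x, x_eq_1, then_arm);
    let else_arm = builder.add_constraint(branch, flag_test);
    let else_def = builder.add_definition(x, x_eq_2, else_arm);
    // Control flow re-joins after the `if`; reads of `x` below the statement
    // walk back through this phi node and reach both definitions.
    builder.add_phi(then_def, else_def)
}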
|
||||
/// A definition, and the set of constraints between a use and the definition
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ConstrainedDefinition {
|
||||
pub definition: Definition,
|
||||
pub constraints: Vec<ExpressionId>,
|
||||
}
|
||||
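A brief worked example of what this struct ends up holding (added for illustration, not part of the diff):

// Given
//     x = 1 if flag else None
//     if x is not None:
//         y = x
// the reaching definition of `x` at the read inside the `if` body is the
// assignment on the first line, with the `x is not None` test expression
// recorded in `constraints`; `infer_constrained_definition_type` later
// intersects the definition's type with the constraint's type.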
|
||||
/// A flow node and the constraints we passed through to reach it
|
||||
#[derive(Debug)]
|
||||
struct FlowState {
|
||||
node_id: FlowNodeId,
|
||||
constraints_range: Range<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ReachableDefinitionsIterator<'a> {
|
||||
flow_graph: &'a FlowGraph,
|
||||
symbol_id: SymbolId,
|
||||
pending: Vec<FlowState>,
|
||||
constraints: Vec<ExpressionId>,
|
||||
}
|
||||
|
||||
impl<'a> ReachableDefinitionsIterator<'a> {
|
||||
pub fn new(flow_graph: &'a FlowGraph, symbol_id: SymbolId, start_node_id: FlowNodeId) -> Self {
|
||||
Self {
|
||||
flow_graph,
|
||||
symbol_id,
|
||||
pending: vec![FlowState {
|
||||
node_id: start_node_id,
|
||||
constraints_range: 0..0,
|
||||
}],
|
||||
constraints: vec![],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for ReachableDefinitionsIterator<'a> {
|
||||
type Item = ConstrainedDefinition;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let FlowState {
|
||||
mut node_id,
|
||||
mut constraints_range,
|
||||
} = self.pending.pop()?;
|
||||
self.constraints.truncate(constraints_range.end + 1);
|
||||
loop {
|
||||
match &self.flow_graph.flow_nodes_by_id[node_id] {
|
||||
FlowNode::Start => {
|
||||
// constraints on unbound are irrelevant
|
||||
return Some(ConstrainedDefinition {
|
||||
definition: Definition::Unbound,
|
||||
constraints: vec![],
|
||||
});
|
||||
}
|
||||
FlowNode::Definition(def_node) => {
|
||||
if def_node.symbol_id == self.symbol_id {
|
||||
return Some(ConstrainedDefinition {
|
||||
definition: def_node.definition.clone(),
|
||||
constraints: self.constraints[constraints_range].to_vec(),
|
||||
});
|
||||
}
|
||||
node_id = def_node.predecessor;
|
||||
}
|
||||
FlowNode::Branch(branch_node) => {
|
||||
node_id = branch_node.predecessor;
|
||||
}
|
||||
FlowNode::Phi(phi_node) => {
|
||||
self.pending.push(FlowState {
|
||||
node_id: phi_node.first_predecessor,
|
||||
constraints_range: constraints_range.clone(),
|
||||
});
|
||||
node_id = phi_node.second_predecessor;
|
||||
}
|
||||
FlowNode::Constraint(constraint_node) => {
|
||||
node_id = constraint_node.predecessor;
|
||||
self.constraints.push(constraint_node.test_expression);
|
||||
constraints_range.end += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> FusedIterator for ReachableDefinitionsIterator<'a> {}
|
||||
|
||||
impl std::fmt::Display for FlowGraph {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
writeln!(f, "flowchart TD")?;
|
||||
for (id, node) in self.flow_nodes_by_id.iter_enumerated() {
|
||||
write!(f, " id{}", id.as_u32())?;
|
||||
match node {
|
||||
FlowNode::Start => writeln!(f, r"[\Start/]")?,
|
||||
FlowNode::Definition(def_node) => {
|
||||
writeln!(f, r"(Define symbol {})", def_node.symbol_id.as_u32())?;
|
||||
writeln!(
|
||||
f,
|
||||
r" id{}-->id{}",
|
||||
def_node.predecessor.as_u32(),
|
||||
id.as_u32()
|
||||
)?;
|
||||
}
|
||||
FlowNode::Branch(branch_node) => {
|
||||
writeln!(f, r"{{Branch}}")?;
|
||||
writeln!(
|
||||
f,
|
||||
r" id{}-->id{}",
|
||||
branch_node.predecessor.as_u32(),
|
||||
id.as_u32()
|
||||
)?;
|
||||
}
|
||||
FlowNode::Phi(phi_node) => {
|
||||
writeln!(f, r"((Phi))")?;
|
||||
writeln!(
|
||||
f,
|
||||
r" id{}-->id{}",
|
||||
phi_node.second_predecessor.as_u32(),
|
||||
id.as_u32()
|
||||
)?;
|
||||
writeln!(
|
||||
f,
|
||||
r" id{}-->id{}",
|
||||
phi_node.first_predecessor.as_u32(),
|
||||
id.as_u32()
|
||||
)?;
|
||||
}
|
||||
FlowNode::Constraint(constraint_node) => {
|
||||
writeln!(f, r"((Constraint))")?;
|
||||
writeln!(
|
||||
f,
|
||||
r" id{}-->id{}",
|
||||
constraint_node.predecessor.as_u32(),
|
||||
id.as_u32()
|
||||
)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
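For reference, the Mermaid text this `Display` impl emits for a graph holding just the start node and one definition of symbol 0 would look roughly as follows (an approximation; exact ids and indentation depend on insertion order and the format strings above):

flowchart TD
 id0[\Start/]
 id1(Define symbol 0)
 id0-->id1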
560
crates/red_knot/src/semantic/symbol_table.rs
Normal file
@@ -0,0 +1,560 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::iter::{Copied, DoubleEndedIterator, FusedIterator};
|
||||
use std::num::NonZeroU32;
|
||||
|
||||
use bitflags::bitflags;
|
||||
use hashbrown::hash_map::{Keys, RawEntryMut};
|
||||
use rustc_hash::{FxHashMap, FxHasher};
|
||||
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
|
||||
use crate::ast_ids::NodeKey;
|
||||
use crate::module::ModuleName;
|
||||
use crate::semantic::{Definition, ExpressionId};
|
||||
use crate::Name;
|
||||
|
||||
type Map<K, V> = hashbrown::HashMap<K, V, ()>;
|
||||
|
||||
#[newtype_index]
|
||||
pub struct ScopeId;
|
||||
|
||||
impl ScopeId {
|
||||
pub fn scope(self, table: &SymbolTable) -> &Scope {
|
||||
&table.scopes_by_id[self]
|
||||
}
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct SymbolId;
|
||||
|
||||
impl SymbolId {
|
||||
pub fn symbol(self, table: &SymbolTable) -> &Symbol {
|
||||
&table.symbols_by_id[self]
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||
pub enum ScopeKind {
|
||||
Module,
|
||||
Annotation,
|
||||
Class,
|
||||
Function,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Scope {
|
||||
name: Name,
|
||||
kind: ScopeKind,
|
||||
parent: Option<ScopeId>,
|
||||
children: Vec<ScopeId>,
|
||||
/// the definition (e.g. class or function) that created this scope
|
||||
definition: Option<Definition>,
|
||||
/// the symbol (e.g. class or function) that owns this scope
|
||||
defining_symbol: Option<SymbolId>,
|
||||
/// symbol IDs, hashed by symbol name
|
||||
symbols_by_name: Map<SymbolId, ()>,
|
||||
}
|
||||
|
||||
impl Scope {
|
||||
pub fn name(&self) -> &str {
|
||||
self.name.as_str()
|
||||
}
|
||||
|
||||
pub fn kind(&self) -> ScopeKind {
|
||||
self.kind
|
||||
}
|
||||
|
||||
pub fn definition(&self) -> Option<Definition> {
|
||||
self.definition.clone()
|
||||
}
|
||||
|
||||
pub fn defining_symbol(&self) -> Option<SymbolId> {
|
||||
self.defining_symbol
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum Kind {
|
||||
FreeVar,
|
||||
CellVar,
|
||||
CellVarAssigned,
|
||||
ExplicitGlobal,
|
||||
ImplicitGlobal,
|
||||
}
|
||||
|
||||
bitflags! {
|
||||
#[derive(Copy,Clone,Debug)]
|
||||
pub struct SymbolFlags: u8 {
|
||||
const IS_USED = 1 << 0;
|
||||
const IS_DEFINED = 1 << 1;
|
||||
/// TODO: This flag is not yet set by anything
|
||||
const MARKED_GLOBAL = 1 << 2;
|
||||
/// TODO: This flag is not yet set by anything
|
||||
const MARKED_NONLOCAL = 1 << 3;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Symbol {
|
||||
name: Name,
|
||||
flags: SymbolFlags,
|
||||
scope_id: ScopeId,
|
||||
// kind: Kind,
|
||||
}
|
||||
|
||||
impl Symbol {
|
||||
pub fn name(&self) -> &str {
|
||||
self.name.as_str()
|
||||
}
|
||||
|
||||
pub fn scope_id(&self) -> ScopeId {
|
||||
self.scope_id
|
||||
}
|
||||
|
||||
/// Is the symbol used in its containing scope?
|
||||
pub fn is_used(&self) -> bool {
|
||||
self.flags.contains(SymbolFlags::IS_USED)
|
||||
}
|
||||
|
||||
/// Is the symbol defined in its containing scope?
|
||||
pub fn is_defined(&self) -> bool {
|
||||
self.flags.contains(SymbolFlags::IS_DEFINED)
|
||||
}
|
||||
|
||||
// TODO: implement Symbol.kind 2-pass analysis to categorize as: free-var, cell-var,
|
||||
// explicit-global, implicit-global and implement Symbol.kind by modifying the preorder
|
||||
// traversal code
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Dependency {
|
||||
Module(ModuleName),
|
||||
Relative {
|
||||
level: NonZeroU32,
|
||||
module: Option<ModuleName>,
|
||||
},
|
||||
}
|
||||
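How these variants map back to source is not shown in this diff, but the assumed correspondence is roughly:

// `import a.b`          -> Dependency::Module(ModuleName::new("a.b"))
// `from ..pkg import x` -> Dependency::Relative {
//                              level: NonZeroU32::new(2).unwrap(), // two leading dots
//                              module: Some(ModuleName::new("pkg")),
//                          }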
|
||||
/// Table of all symbols in all scopes for a module.
|
||||
#[derive(Debug)]
|
||||
pub struct SymbolTable {
|
||||
scopes_by_id: IndexVec<ScopeId, Scope>,
|
||||
symbols_by_id: IndexVec<SymbolId, Symbol>,
|
||||
/// the definitions for each symbol
|
||||
defs: FxHashMap<SymbolId, Vec<Definition>>,
|
||||
/// map of AST node (e.g. class/function def) to sub-scope it creates
|
||||
scopes_by_node: FxHashMap<NodeKey, ScopeId>,
|
||||
/// Maps expressions to their enclosing scope.
|
||||
expression_scopes: IndexVec<ExpressionId, ScopeId>,
|
||||
/// dependencies of this module
|
||||
dependencies: Vec<Dependency>,
|
||||
}
|
||||
|
||||
impl SymbolTable {
|
||||
pub fn dependencies(&self) -> &[Dependency] {
|
||||
&self.dependencies
|
||||
}
|
||||
|
||||
pub const fn root_scope_id() -> ScopeId {
|
||||
ScopeId::from_usize(0)
|
||||
}
|
||||
|
||||
pub fn root_scope(&self) -> &Scope {
|
||||
&self.scopes_by_id[SymbolTable::root_scope_id()]
|
||||
}
|
||||
|
||||
pub fn symbol_ids_for_scope(&self, scope_id: ScopeId) -> Copied<Keys<SymbolId, ()>> {
|
||||
self.scopes_by_id[scope_id].symbols_by_name.keys().copied()
|
||||
}
|
||||
|
||||
pub fn symbols_for_scope(
|
||||
&self,
|
||||
scope_id: ScopeId,
|
||||
) -> SymbolIterator<Copied<Keys<SymbolId, ()>>> {
|
||||
SymbolIterator {
|
||||
table: self,
|
||||
ids: self.symbol_ids_for_scope(scope_id),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn root_symbol_ids(&self) -> Copied<Keys<SymbolId, ()>> {
|
||||
self.symbol_ids_for_scope(SymbolTable::root_scope_id())
|
||||
}
|
||||
|
||||
pub fn root_symbols(&self) -> SymbolIterator<Copied<Keys<SymbolId, ()>>> {
|
||||
self.symbols_for_scope(SymbolTable::root_scope_id())
|
||||
}
|
||||
|
||||
pub fn child_scope_ids_of(&self, scope_id: ScopeId) -> &[ScopeId] {
|
||||
&self.scopes_by_id[scope_id].children
|
||||
}
|
||||
|
||||
pub fn child_scopes_of(&self, scope_id: ScopeId) -> ScopeIterator<&[ScopeId]> {
|
||||
ScopeIterator {
|
||||
table: self,
|
||||
ids: self.child_scope_ids_of(scope_id),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn root_child_scope_ids(&self) -> &[ScopeId] {
|
||||
self.child_scope_ids_of(SymbolTable::root_scope_id())
|
||||
}
|
||||
|
||||
pub fn root_child_scopes(&self) -> ScopeIterator<&[ScopeId]> {
|
||||
self.child_scopes_of(SymbolTable::root_scope_id())
|
||||
}
|
||||
|
||||
pub fn symbol_id_by_name(&self, scope_id: ScopeId, name: &str) -> Option<SymbolId> {
|
||||
let scope = &self.scopes_by_id[scope_id];
|
||||
let hash = SymbolTable::hash_name(name);
|
||||
let name = Name::new(name);
|
||||
Some(
|
||||
*scope
|
||||
.symbols_by_name
|
||||
.raw_entry()
|
||||
.from_hash(hash, |symid| self.symbols_by_id[*symid].name == name)?
|
||||
.0,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn symbol_by_name(&self, scope_id: ScopeId, name: &str) -> Option<&Symbol> {
|
||||
Some(&self.symbols_by_id[self.symbol_id_by_name(scope_id, name)?])
|
||||
}
|
||||
|
||||
pub fn root_symbol_id_by_name(&self, name: &str) -> Option<SymbolId> {
|
||||
self.symbol_id_by_name(SymbolTable::root_scope_id(), name)
|
||||
}
|
||||
|
||||
pub fn root_symbol_by_name(&self, name: &str) -> Option<&Symbol> {
|
||||
self.symbol_by_name(SymbolTable::root_scope_id(), name)
|
||||
}
|
||||
|
||||
pub fn scope_id_of_symbol(&self, symbol_id: SymbolId) -> ScopeId {
|
||||
self.symbols_by_id[symbol_id].scope_id
|
||||
}
|
||||
|
||||
pub fn scope_of_symbol(&self, symbol_id: SymbolId) -> &Scope {
|
||||
&self.scopes_by_id[self.scope_id_of_symbol(symbol_id)]
|
||||
}
|
||||
|
||||
pub fn scope_id_of_expression(&self, expression: ExpressionId) -> ScopeId {
|
||||
self.expression_scopes[expression]
|
||||
}
|
||||
|
||||
pub fn scope_of_expression(&self, expr_id: ExpressionId) -> &Scope {
|
||||
&self.scopes_by_id[self.scope_id_of_expression(expr_id)]
|
||||
}
|
||||
|
||||
pub fn parent_scopes(
|
||||
&self,
|
||||
scope_id: ScopeId,
|
||||
) -> ScopeIterator<impl Iterator<Item = ScopeId> + '_> {
|
||||
ScopeIterator {
|
||||
table: self,
|
||||
ids: std::iter::successors(Some(scope_id), |scope| self.scopes_by_id[*scope].parent),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parent_scope(&self, scope_id: ScopeId) -> Option<ScopeId> {
|
||||
self.scopes_by_id[scope_id].parent
|
||||
}
|
||||
|
||||
pub fn scope_id_for_node(&self, node_key: &NodeKey) -> ScopeId {
|
||||
self.scopes_by_node[node_key]
|
||||
}
|
||||
|
||||
pub fn definitions(&self, symbol_id: SymbolId) -> &[Definition] {
|
||||
self.defs
|
||||
.get(&symbol_id)
|
||||
.map(std::vec::Vec::as_slice)
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
pub fn all_definitions(&self) -> impl Iterator<Item = (SymbolId, &Definition)> + '_ {
|
||||
self.defs
|
||||
.iter()
|
||||
.flat_map(|(sym_id, defs)| defs.iter().map(move |def| (*sym_id, def)))
|
||||
}
|
||||
|
||||
fn hash_name(name: &str) -> u64 {
|
||||
let mut hasher = FxHasher::default();
|
||||
name.hash(&mut hasher);
|
||||
hasher.finish()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SymbolIterator<'a, I> {
|
||||
table: &'a SymbolTable,
|
||||
ids: I,
|
||||
}
|
||||
|
||||
impl<'a, I> Iterator for SymbolIterator<'a, I>
|
||||
where
|
||||
I: Iterator<Item = SymbolId>,
|
||||
{
|
||||
type Item = &'a Symbol;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let id = self.ids.next()?;
|
||||
Some(&self.table.symbols_by_id[id])
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.ids.size_hint()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, I> FusedIterator for SymbolIterator<'a, I> where
|
||||
I: Iterator<Item = SymbolId> + FusedIterator
|
||||
{
|
||||
}
|
||||
|
||||
impl<'a, I> DoubleEndedIterator for SymbolIterator<'a, I>
|
||||
where
|
||||
I: Iterator<Item = SymbolId> + DoubleEndedIterator,
|
||||
{
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
let id = self.ids.next_back()?;
|
||||
Some(&self.table.symbols_by_id[id])
|
||||
}
|
||||
}
|
||||
|
||||
// TODO maybe get rid of this and just do all data access via methods on ScopeId?
|
||||
pub struct ScopeIterator<'a, I> {
|
||||
table: &'a SymbolTable,
|
||||
ids: I,
|
||||
}
|
||||
|
||||
/// iterate (`ScopeId`, `Scope`) pairs for given `ScopeId` iterator
|
||||
impl<'a, I> Iterator for ScopeIterator<'a, I>
|
||||
where
|
||||
I: Iterator<Item = ScopeId>,
|
||||
{
|
||||
type Item = (ScopeId, &'a Scope);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let id = self.ids.next()?;
|
||||
Some((id, &self.table.scopes_by_id[id]))
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.ids.size_hint()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, I> FusedIterator for ScopeIterator<'a, I> where I: Iterator<Item = ScopeId> + FusedIterator {}
|
||||
|
||||
impl<'a, I> DoubleEndedIterator for ScopeIterator<'a, I>
|
||||
where
|
||||
I: Iterator<Item = ScopeId> + DoubleEndedIterator,
|
||||
{
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
let id = self.ids.next_back()?;
|
||||
Some((id, &self.table.scopes_by_id[id]))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct SymbolTableBuilder {
|
||||
symbol_table: SymbolTable,
|
||||
}
|
||||
|
||||
impl SymbolTableBuilder {
|
||||
pub(super) fn new() -> Self {
|
||||
let mut table = SymbolTable {
|
||||
scopes_by_id: IndexVec::new(),
|
||||
symbols_by_id: IndexVec::new(),
|
||||
defs: FxHashMap::default(),
|
||||
scopes_by_node: FxHashMap::default(),
|
||||
expression_scopes: IndexVec::new(),
|
||||
dependencies: Vec::new(),
|
||||
};
|
||||
table.scopes_by_id.push(Scope {
|
||||
name: Name::new("<module>"),
|
||||
kind: ScopeKind::Module,
|
||||
parent: None,
|
||||
children: Vec::new(),
|
||||
definition: None,
|
||||
defining_symbol: None,
|
||||
symbols_by_name: Map::default(),
|
||||
});
|
||||
Self {
|
||||
symbol_table: table,
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn finish(self) -> SymbolTable {
|
||||
let mut symbol_table = self.symbol_table;
|
||||
symbol_table.scopes_by_id.shrink_to_fit();
|
||||
symbol_table.symbols_by_id.shrink_to_fit();
|
||||
symbol_table.defs.shrink_to_fit();
|
||||
symbol_table.scopes_by_node.shrink_to_fit();
|
||||
symbol_table.expression_scopes.shrink_to_fit();
|
||||
symbol_table.dependencies.shrink_to_fit();
|
||||
symbol_table
|
||||
}
|
||||
|
||||
pub(super) fn add_or_update_symbol(
|
||||
&mut self,
|
||||
scope_id: ScopeId,
|
||||
name: &str,
|
||||
flags: SymbolFlags,
|
||||
) -> SymbolId {
|
||||
let hash = SymbolTable::hash_name(name);
|
||||
let scope = &mut self.symbol_table.scopes_by_id[scope_id];
|
||||
let name = Name::new(name);
|
||||
|
||||
let entry = scope
|
||||
.symbols_by_name
|
||||
.raw_entry_mut()
|
||||
.from_hash(hash, |existing| {
|
||||
self.symbol_table.symbols_by_id[*existing].name == name
|
||||
});
|
||||
|
||||
match entry {
|
||||
RawEntryMut::Occupied(entry) => {
|
||||
if let Some(symbol) = self.symbol_table.symbols_by_id.get_mut(*entry.key()) {
|
||||
symbol.flags.insert(flags);
|
||||
};
|
||||
*entry.key()
|
||||
}
|
||||
RawEntryMut::Vacant(entry) => {
|
||||
let id = self.symbol_table.symbols_by_id.push(Symbol {
|
||||
name,
|
||||
flags,
|
||||
scope_id,
|
||||
});
|
||||
entry.insert_with_hasher(hash, id, (), |symid| {
|
||||
SymbolTable::hash_name(&self.symbol_table.symbols_by_id[*symid].name)
|
||||
});
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn add_definition(&mut self, symbol_id: SymbolId, definition: Definition) {
|
||||
self.symbol_table
|
||||
.defs
|
||||
.entry(symbol_id)
|
||||
.or_default()
|
||||
.push(definition);
|
||||
}
|
||||
|
||||
pub(super) fn add_child_scope(
|
||||
&mut self,
|
||||
parent_scope_id: ScopeId,
|
||||
name: &str,
|
||||
kind: ScopeKind,
|
||||
definition: Option<Definition>,
|
||||
defining_symbol: Option<SymbolId>,
|
||||
) -> ScopeId {
|
||||
let new_scope_id = self.symbol_table.scopes_by_id.push(Scope {
|
||||
name: Name::new(name),
|
||||
kind,
|
||||
parent: Some(parent_scope_id),
|
||||
children: Vec::new(),
|
||||
definition,
|
||||
defining_symbol,
|
||||
symbols_by_name: Map::default(),
|
||||
});
|
||||
let parent_scope = &mut self.symbol_table.scopes_by_id[parent_scope_id];
|
||||
parent_scope.children.push(new_scope_id);
|
||||
new_scope_id
|
||||
}
|
||||
|
||||
pub(super) fn record_scope_for_node(&mut self, node_key: NodeKey, scope_id: ScopeId) {
|
||||
self.symbol_table.scopes_by_node.insert(node_key, scope_id);
|
||||
}
|
||||
|
||||
pub(super) fn add_dependency(&mut self, dependency: Dependency) {
|
||||
self.symbol_table.dependencies.push(dependency);
|
||||
}
|
||||
|
||||
/// Records the scope for the current expression
|
||||
pub(super) fn record_expression(&mut self, scope: ScopeId) -> ExpressionId {
|
||||
self.symbol_table.expression_scopes.push(scope)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{ScopeKind, SymbolFlags, SymbolTable, SymbolTableBuilder};
|
||||
|
||||
#[test]
|
||||
fn insert_same_name_symbol_twice() {
|
||||
let mut builder = SymbolTableBuilder::new();
|
||||
let root_scope_id = SymbolTable::root_scope_id();
|
||||
let symbol_id_1 =
|
||||
builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::IS_DEFINED);
|
||||
let symbol_id_2 = builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::IS_USED);
|
||||
let table = builder.finish();
|
||||
|
||||
assert_eq!(symbol_id_1, symbol_id_2);
|
||||
assert!(symbol_id_1.symbol(&table).is_used(), "flags must merge");
|
||||
assert!(symbol_id_1.symbol(&table).is_defined(), "flags must merge");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_different_named_symbols() {
|
||||
let mut builder = SymbolTableBuilder::new();
|
||||
let root_scope_id = SymbolTable::root_scope_id();
|
||||
let symbol_id_1 = builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
|
||||
let symbol_id_2 = builder.add_or_update_symbol(root_scope_id, "bar", SymbolFlags::empty());
|
||||
|
||||
assert_ne!(symbol_id_1, symbol_id_2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_child_scope_with_symbol() {
|
||||
let mut builder = SymbolTableBuilder::new();
|
||||
let root_scope_id = SymbolTable::root_scope_id();
|
||||
let foo_symbol_top =
|
||||
builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
|
||||
let c_scope = builder.add_child_scope(root_scope_id, "C", ScopeKind::Class, None, None);
|
||||
let foo_symbol_inner = builder.add_or_update_symbol(c_scope, "foo", SymbolFlags::empty());
|
||||
|
||||
assert_ne!(foo_symbol_top, foo_symbol_inner);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scope_from_id() {
|
||||
let table = SymbolTableBuilder::new().finish();
|
||||
let root_scope_id = SymbolTable::root_scope_id();
|
||||
let scope = root_scope_id.scope(&table);
|
||||
|
||||
assert_eq!(scope.name.as_str(), "<module>");
|
||||
assert_eq!(scope.kind, ScopeKind::Module);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn symbol_from_id() {
|
||||
let mut builder = SymbolTableBuilder::new();
|
||||
let root_scope_id = SymbolTable::root_scope_id();
|
||||
let foo_symbol_id =
|
||||
builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
|
||||
let table = builder.finish();
|
||||
let symbol = foo_symbol_id.symbol(&table);
|
||||
|
||||
assert_eq!(symbol.name(), "foo");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bigger_symbol_table() {
|
||||
let mut builder = SymbolTableBuilder::new();
|
||||
let root_scope_id = SymbolTable::root_scope_id();
|
||||
let foo_symbol_id =
|
||||
builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
|
||||
builder.add_or_update_symbol(root_scope_id, "bar", SymbolFlags::empty());
|
||||
builder.add_or_update_symbol(root_scope_id, "baz", SymbolFlags::empty());
|
||||
builder.add_or_update_symbol(root_scope_id, "qux", SymbolFlags::empty());
|
||||
let table = builder.finish();
|
||||
|
||||
let foo_symbol_id_2 = table
|
||||
.root_symbol_id_by_name("foo")
|
||||
.expect("foo symbol to be found");
|
||||
|
||||
assert_eq!(foo_symbol_id_2, foo_symbol_id);
|
||||
}
|
||||
}
|
||||
1119
crates/red_knot/src/semantic/types.rs
Normal file
File diff suppressed because it is too large
762
crates/red_knot/src/semantic/types/infer.rs
Normal file
@@ -0,0 +1,762 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::AstNode;
|
||||
use std::fmt::Debug;
|
||||
|
||||
use crate::db::{QueryResult, SemanticDb, SemanticJar};
|
||||
|
||||
use crate::module::{resolve_module, ModuleName};
|
||||
use crate::parse::parse;
|
||||
use crate::semantic::types::{ModuleTypeId, Type};
|
||||
use crate::semantic::{
|
||||
resolve_global_symbol, semantic_index, ConstrainedDefinition, Definition, GlobalSymbolId,
|
||||
ImportDefinition, ImportFromDefinition,
|
||||
};
|
||||
use crate::{FileId, Name};
|
||||
|
||||
// FIXME: Figure out proper dead-lock free synchronisation now that this takes `&db` instead of `&mut db`.
|
||||
/// Resolve the public-facing type for a symbol (the type seen by other scopes: other modules, or
|
||||
/// nested functions). Because calls to nested functions and imports can occur anywhere in control
|
||||
/// flow, this type must be conservative and consider all definitions of the symbol that could
|
||||
/// possibly be seen by another scope. Currently we take the most conservative approach, which is
|
||||
/// the union of all definitions. We may be able to narrow this in future to eliminate definitions
|
||||
/// which can't possibly (or at least likely) be seen by any other scope, so that e.g. we could
|
||||
/// infer `Literal["1"]` instead of `Literal[1] | Literal["1"]` for `x` in `x = x; x = str(x);`.
|
||||
#[tracing::instrument(level = "trace", skip(db))]
|
||||
pub fn infer_symbol_public_type(db: &dyn SemanticDb, symbol: GlobalSymbolId) -> QueryResult<Type> {
|
||||
let index = semantic_index(db, symbol.file_id)?;
|
||||
let defs = index.symbol_table().definitions(symbol.symbol_id).to_vec();
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
|
||||
if let Some(ty) = jar.type_store.get_cached_symbol_public_type(symbol) {
|
||||
return Ok(ty);
|
||||
}
|
||||
|
||||
let ty = infer_type_from_definitions(db, symbol, defs.iter().cloned())?;
|
||||
|
||||
jar.type_store.cache_symbol_public_type(symbol, ty);
|
||||
|
||||
// TODO record dependencies
|
||||
Ok(ty)
|
||||
}
|
||||
|
||||
/// Infer type of a symbol as union of the given `Definitions`.
|
||||
fn infer_type_from_definitions<T>(
|
||||
db: &dyn SemanticDb,
|
||||
symbol: GlobalSymbolId,
|
||||
definitions: T,
|
||||
) -> QueryResult<Type>
|
||||
where
|
||||
T: Debug + IntoIterator<Item = Definition>,
|
||||
{
|
||||
infer_type_from_constrained_definitions(
|
||||
db,
|
||||
symbol,
|
||||
definitions
|
||||
.into_iter()
|
||||
.map(|definition| ConstrainedDefinition {
|
||||
definition,
|
||||
constraints: vec![],
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
/// Infer type of a symbol as union of the given `ConstrainedDefinitions`.
|
||||
fn infer_type_from_constrained_definitions<T>(
|
||||
db: &dyn SemanticDb,
|
||||
symbol: GlobalSymbolId,
|
||||
constrained_definitions: T,
|
||||
) -> QueryResult<Type>
|
||||
where
|
||||
T: IntoIterator<Item = ConstrainedDefinition>,
|
||||
{
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
let mut tys = constrained_definitions
|
||||
.into_iter()
|
||||
.map(|def| infer_constrained_definition_type(db, symbol, def.clone()))
|
||||
.peekable();
|
||||
if let Some(first) = tys.next() {
|
||||
if tys.peek().is_some() {
|
||||
Ok(jar.type_store.add_union(
|
||||
symbol.file_id,
|
||||
&Iterator::chain(std::iter::once(first), tys).collect::<QueryResult<Vec<_>>>()?,
|
||||
))
|
||||
} else {
|
||||
first
|
||||
}
|
||||
} else {
|
||||
Ok(Type::Unknown)
|
||||
}
|
||||
}
|
||||
|
||||
/// Infer type for a ConstrainedDefinition (intersection of the definition type and the
|
||||
/// constraints)
|
||||
#[tracing::instrument(level = "trace", skip(db))]
|
||||
pub fn infer_constrained_definition_type(
|
||||
db: &dyn SemanticDb,
|
||||
symbol: GlobalSymbolId,
|
||||
constrained_definition: ConstrainedDefinition,
|
||||
) -> QueryResult<Type> {
|
||||
let ConstrainedDefinition {
|
||||
definition,
|
||||
constraints,
|
||||
} = constrained_definition;
|
||||
let index = semantic_index(db, symbol.file_id)?;
|
||||
let parsed = parse(db.upcast(), symbol.file_id)?;
|
||||
let mut intersected_types = vec![infer_definition_type(db, symbol, definition)?];
|
||||
for constraint in constraints {
|
||||
if let Some(constraint_type) = infer_constraint_type(
|
||||
db,
|
||||
symbol,
|
||||
index.resolve_expression_id(parsed.syntax(), constraint),
|
||||
)? {
|
||||
intersected_types.push(constraint_type);
|
||||
}
|
||||
}
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
Ok(jar
|
||||
.type_store
|
||||
.add_intersection(symbol.file_id, &intersected_types, &[]))
|
||||
}
|
||||
|
||||
/// Infer a type for a Definition
|
||||
#[tracing::instrument(level = "trace", skip(db))]
|
||||
pub fn infer_definition_type(
|
||||
db: &dyn SemanticDb,
|
||||
symbol: GlobalSymbolId,
|
||||
definition: Definition,
|
||||
) -> QueryResult<Type> {
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
let type_store = &jar.type_store;
|
||||
let file_id = symbol.file_id;
|
||||
|
||||
match definition {
|
||||
Definition::Unbound => Ok(Type::Unbound),
|
||||
Definition::Import(ImportDefinition {
|
||||
module: module_name,
|
||||
}) => {
|
||||
if let Some(module) = resolve_module(db, module_name.clone())? {
|
||||
Ok(Type::Module(ModuleTypeId { module, file_id }))
|
||||
} else {
|
||||
Ok(Type::Unknown)
|
||||
}
|
||||
}
|
||||
Definition::ImportFrom(ImportFromDefinition {
|
||||
module,
|
||||
name,
|
||||
level,
|
||||
}) => {
|
||||
// TODO relative imports
|
||||
assert!(matches!(level, 0));
|
||||
let module_name = ModuleName::new(module.as_ref().expect("TODO relative imports"));
|
||||
let Some(module) = resolve_module(db, module_name.clone())? else {
|
||||
return Ok(Type::Unknown);
|
||||
};
|
||||
|
||||
if let Some(remote_symbol) = resolve_global_symbol(db, module, &name)? {
|
||||
infer_symbol_public_type(db, remote_symbol)
|
||||
} else {
|
||||
Ok(Type::Unknown)
|
||||
}
|
||||
}
|
||||
Definition::ClassDef(node_key) => {
|
||||
if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
|
||||
Ok(ty)
|
||||
} else {
|
||||
let parsed = parse(db.upcast(), file_id)?;
|
||||
let ast = parsed.syntax();
|
||||
let index = semantic_index(db, file_id)?;
|
||||
let node = node_key.resolve_unwrap(ast.as_any_node_ref());
|
||||
|
||||
let mut bases = Vec::with_capacity(node.bases().len());
|
||||
|
||||
for base in node.bases() {
|
||||
bases.push(infer_expr_type(db, file_id, base)?);
|
||||
}
|
||||
let scope_id = index.symbol_table().scope_id_for_node(node_key.erased());
|
||||
let ty = type_store.add_class(file_id, &node.name.id, scope_id, bases);
|
||||
type_store.cache_node_type(file_id, *node_key.erased(), ty);
|
||||
Ok(ty)
|
||||
}
|
||||
}
|
||||
Definition::FunctionDef(node_key) => {
|
||||
if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
|
||||
Ok(ty)
|
||||
} else {
|
||||
let parsed = parse(db.upcast(), file_id)?;
|
||||
let ast = parsed.syntax();
|
||||
let index = semantic_index(db, file_id)?;
|
||||
let node = node_key
|
||||
.resolve(ast.as_any_node_ref())
|
||||
.expect("node key should resolve");
|
||||
|
||||
let decorator_tys = node
|
||||
.decorator_list
|
||||
.iter()
|
||||
.map(|decorator| infer_expr_type(db, file_id, &decorator.expression))
|
||||
.collect::<QueryResult<_>>()?;
|
||||
let scope_id = index.symbol_table().scope_id_for_node(node_key.erased());
|
||||
let ty = type_store.add_function(
|
||||
file_id,
|
||||
&node.name.id,
|
||||
symbol.symbol_id,
|
||||
scope_id,
|
||||
decorator_tys,
|
||||
);
|
||||
type_store.cache_node_type(file_id, *node_key.erased(), ty);
|
||||
Ok(ty)
|
||||
}
|
||||
}
|
||||
Definition::Assignment(node_key) => {
|
||||
let parsed = parse(db.upcast(), file_id)?;
|
||||
let ast = parsed.syntax();
|
||||
let node = node_key.resolve_unwrap(ast.as_any_node_ref());
|
||||
// TODO handle unpacking assignment
|
||||
infer_expr_type(db, file_id, &node.value)
|
||||
}
|
||||
Definition::AnnotatedAssignment(node_key) => {
|
||||
let parsed = parse(db.upcast(), file_id)?;
|
||||
let ast = parsed.syntax();
|
||||
let node = node_key.resolve_unwrap(ast.as_any_node_ref());
|
||||
// TODO actually look at the annotation
|
||||
let Some(value) = &node.value else {
|
||||
return Ok(Type::Unknown);
|
||||
};
|
||||
// TODO handle unpacking assignment
|
||||
infer_expr_type(db, file_id, value)
|
||||
}
|
||||
Definition::NamedExpr(node_key) => {
|
||||
let parsed = parse(db.upcast(), file_id)?;
|
||||
let ast = parsed.syntax();
|
||||
let node = node_key.resolve_unwrap(ast.as_any_node_ref());
|
||||
infer_expr_type(db, file_id, &node.value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the type that the given constraint (an expression from a control-flow test) requires the
|
||||
/// given symbol to have. For example, returns the Type "~None" as the constraint type if given the
|
||||
/// symbol ID for x and the expression ID for `x is not None`. Returns (Rust) None if the given
|
||||
/// expression applies no constraints on the given symbol.
|
||||
#[tracing::instrument(level = "trace", skip(db))]
|
||||
fn infer_constraint_type(
|
||||
db: &dyn SemanticDb,
|
||||
symbol_id: GlobalSymbolId,
|
||||
// TODO this should preferably take an &ast::Expr instead of AnyNodeRef
|
||||
expression: ast::AnyNodeRef,
|
||||
) -> QueryResult<Option<Type>> {
|
||||
let file_id = symbol_id.file_id;
|
||||
let index = semantic_index(db, file_id)?;
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
let symbol_name = symbol_id.symbol_id.symbol(&index.symbol_table).name();
|
||||
// TODO narrowing attributes
|
||||
// TODO narrowing dict keys
|
||||
// TODO isinstance, ==/!=, type(...), literals, bools...
|
||||
match expression {
|
||||
ast::AnyNodeRef::ExprCompare(ast::ExprCompare {
|
||||
left,
|
||||
ops,
|
||||
comparators,
|
||||
..
|
||||
}) => {
|
||||
// TODO chained comparisons
|
||||
match left.as_ref() {
|
||||
ast::Expr::Name(ast::ExprName { id, .. }) if id == symbol_name => match ops[0] {
|
||||
ast::CmpOp::Is | ast::CmpOp::IsNot => {
|
||||
Ok(match infer_expr_type(db, file_id, &comparators[0])? {
|
||||
Type::None => Some(Type::None),
|
||||
_ => None,
|
||||
}
|
||||
.map(|ty| {
|
||||
if matches!(ops[0], ast::CmpOp::IsNot) {
|
||||
jar.type_store.add_intersection(file_id, &[], &[ty])
|
||||
} else {
|
||||
ty
|
||||
}
|
||||
}))
|
||||
}
|
||||
_ => Ok(None),
|
||||
},
|
||||
_ => Ok(None),
|
||||
}
|
||||
}
|
||||
_ => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
/// Infer type of the given expression.
|
||||
fn infer_expr_type(db: &dyn SemanticDb, file_id: FileId, expr: &ast::Expr) -> QueryResult<Type> {
|
||||
// TODO cache the resolution of the type on the node
|
||||
let index = semantic_index(db, file_id)?;
|
||||
match expr {
|
||||
ast::Expr::NoneLiteral(_) => Ok(Type::None),
|
||||
ast::Expr::NumberLiteral(ast::ExprNumberLiteral { value, .. }) => {
|
||||
match value {
|
||||
ast::Number::Int(n) => {
|
||||
// TODO support big int literals
|
||||
Ok(n.as_i64().map(Type::IntLiteral).unwrap_or(Type::Unknown))
|
||||
}
|
||||
// TODO builtins.float or builtins.complex
|
||||
_ => Ok(Type::Unknown),
|
||||
}
|
||||
}
|
||||
ast::Expr::Name(name) => {
|
||||
// TODO look up in the correct scope, don't assume global
|
||||
if let Some(symbol_id) = index.symbol_table().root_symbol_id_by_name(&name.id) {
|
||||
infer_type_from_constrained_definitions(
|
||||
db,
|
||||
GlobalSymbolId { file_id, symbol_id },
|
||||
index.reachable_definitions(symbol_id, expr),
|
||||
)
|
||||
} else {
|
||||
Ok(Type::Unknown)
|
||||
}
|
||||
}
|
||||
ast::Expr::Attribute(ast::ExprAttribute { value, attr, .. }) => {
|
||||
let value_type = infer_expr_type(db, file_id, value)?;
|
||||
let attr_name = &Name::new(&attr.id);
|
||||
value_type
|
||||
.get_member(db, attr_name)
|
||||
.map(|ty| ty.unwrap_or(Type::Unknown))
|
||||
}
|
||||
ast::Expr::BinOp(ast::ExprBinOp {
|
||||
left, op, right, ..
|
||||
}) => {
|
||||
let left_ty = infer_expr_type(db, file_id, left)?;
|
||||
let right_ty = infer_expr_type(db, file_id, right)?;
|
||||
// TODO add reverse bin op support if right <: left
|
||||
left_ty.resolve_bin_op(db, *op, right_ty)
|
||||
}
|
||||
ast::Expr::Named(ast::ExprNamed { value, .. }) => infer_expr_type(db, file_id, value),
|
||||
ast::Expr::If(ast::ExprIf { body, orelse, .. }) => {
|
||||
// TODO detect statically known truthy or falsy test
|
||||
let body_ty = infer_expr_type(db, file_id, body)?;
|
||||
let else_ty = infer_expr_type(db, file_id, orelse)?;
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
Ok(jar.type_store.add_union(file_id, &[body_ty, else_ty]))
|
||||
}
|
||||
_ => todo!("expression type resolution for {:?}", expr),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::db::{HasJar, SemanticJar};
|
||||
use crate::module::{
|
||||
resolve_module, set_module_search_paths, ModuleName, ModuleResolutionInputs,
|
||||
};
|
||||
use crate::semantic::{infer_symbol_public_type, resolve_global_symbol, Type};
|
||||
use crate::Name;
|
||||
|
||||
// TODO with virtual filesystem we shouldn't have to write files to disk for these
|
||||
// tests
|
||||
|
||||
struct TestCase {
|
||||
temp_dir: tempfile::TempDir,
|
||||
db: TestDb,
|
||||
|
||||
src: PathBuf,
|
||||
}
|
||||
|
||||
fn create_test() -> std::io::Result<TestCase> {
|
||||
let temp_dir = tempfile::tempdir()?;
|
||||
|
||||
let src = temp_dir.path().join("src");
|
||||
std::fs::create_dir(&src)?;
|
||||
let src = src.canonicalize()?;
|
||||
|
||||
let search_paths = ModuleResolutionInputs {
|
||||
extra_paths: vec![],
|
||||
workspace_root: src.clone(),
|
||||
site_packages: None,
|
||||
custom_typeshed: None,
|
||||
};
|
||||
|
||||
let mut db = TestDb::default();
|
||||
set_module_search_paths(&mut db, search_paths);
|
||||
|
||||
Ok(TestCase { temp_dir, db, src })
|
||||
}
|
||||
|
||||
fn write_to_path(case: &TestCase, relative_path: &str, contents: &str) -> anyhow::Result<()> {
|
||||
let path = case.src.join(relative_path);
|
||||
std::fs::write(path, contents)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_public_type(
|
||||
case: &TestCase,
|
||||
module_name: &str,
|
||||
variable_name: &str,
|
||||
) -> anyhow::Result<Type> {
|
||||
let db = &case.db;
|
||||
let module = resolve_module(db, ModuleName::new(module_name))?.expect("Module to exist");
|
||||
let symbol = resolve_global_symbol(db, module, variable_name)?.expect("symbol to exist");
|
||||
|
||||
Ok(infer_symbol_public_type(db, symbol)?)
|
||||
}
|
||||
|
||||
fn assert_public_type(
|
||||
case: &TestCase,
|
||||
module_name: &str,
|
||||
variable_name: &str,
|
||||
type_name: &str,
|
||||
) -> anyhow::Result<()> {
|
||||
let ty = get_public_type(case, module_name, variable_name)?;
|
||||
|
||||
let jar = HasJar::<SemanticJar>::jar(&case.db)?;
|
||||
assert_eq!(format!("{}", ty.display(&jar.type_store)), type_name);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn follow_import_to_class() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(&case, "a.py", "from b import C as D; E = D")?;
|
||||
write_to_path(&case, "b.py", "class C: pass")?;
|
||||
|
||||
assert_public_type(&case, "a", "E", "Literal[C]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_base_class_by_name() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"mod.py",
|
||||
"
|
||||
class Base: pass
|
||||
class Sub(Base): pass
|
||||
",
|
||||
)?;
|
||||
|
||||
let ty = get_public_type(&case, "mod", "Sub")?;
|
||||
|
||||
let Type::Class(class_id) = ty else {
|
||||
panic!("Sub is not a Class")
|
||||
};
|
||||
let jar = HasJar::<SemanticJar>::jar(&case.db)?;
|
||||
let base_names: Vec<_> = jar
|
||||
.type_store
|
||||
.get_class(class_id)
|
||||
.bases()
|
||||
.iter()
|
||||
.map(|base_ty| format!("{}", base_ty.display(&jar.type_store)))
|
||||
.collect();
|
||||
|
||||
assert_eq!(base_names, vec!["Literal[Base]"]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_method() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"mod.py",
|
||||
"
|
||||
class C:
|
||||
def f(self): pass
|
||||
",
|
||||
)?;
|
||||
|
||||
let ty = get_public_type(&case, "mod", "C")?;
|
||||
|
||||
let Type::Class(class_id) = ty else {
|
||||
panic!("C is not a Class");
|
||||
};
|
||||
|
||||
let member_ty = class_id
|
||||
.get_own_class_member(&case.db, &Name::new("f"))
|
||||
.expect("C.f to resolve");
|
||||
|
||||
let Some(Type::Function(func_id)) = member_ty else {
|
||||
panic!("C.f is not a Function");
|
||||
};
|
||||
|
||||
let jar = HasJar::<SemanticJar>::jar(&case.db)?;
|
||||
let function = jar.type_store.get_function(func_id);
|
||||
assert_eq!(function.name(), "f");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_module_member() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(&case, "a.py", "import b; D = b.C")?;
|
||||
write_to_path(&case, "b.py", "class C: pass")?;
|
||||
|
||||
assert_public_type(&case, "a", "D", "Literal[C]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_literal() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(&case, "a.py", "x = 1")?;
|
||||
|
||||
assert_public_type(&case, "a", "x", "Literal[1]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_union() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
if flag:
|
||||
x = 1
|
||||
else:
|
||||
x = 2
|
||||
",
|
||||
)?;
|
||||
|
||||
assert_public_type(&case, "a", "x", "Literal[1, 2]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_visible_def() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
write_to_path(&case, "a.py", "y = 1; y = 2; x = y")?;
|
||||
assert_public_type(&case, "a", "x", "Literal[2]")
|
||||
}
|
||||
|
||||
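The `resolve_visible_def` case above exercises the in-module view of `y`; as a companion sketch (not part of this diff), the union-of-all-definitions rule documented on `infer_symbol_public_type` could be pinned down like this, assuming a two-literal union displays as `Literal[1, 2]` exactly as in `resolve_union`:

#[test]
fn public_type_unions_all_definitions() -> anyhow::Result<()> {
    let case = create_test()?;
    // Both assignments are visible to other modules, so the public type of `y`
    // unions them, even though a read placed after the second assignment would
    // only see `Literal[2]` (as asserted for `x` in `resolve_visible_def`).
    write_to_path(&case, "a.py", "y = 1; y = 2")?;
    assert_public_type(&case, "a", "y", "Literal[1, 2]")
}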
#[test]
|
||||
fn join_paths() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
y = 1
|
||||
y = 2
|
||||
if flag:
|
||||
y = 3
|
||||
x = y
|
||||
",
|
||||
)?;
|
||||
|
||||
assert_public_type(&case, "a", "x", "Literal[2, 3]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn maybe_unbound() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
if flag:
|
||||
y = 1
|
||||
x = y
|
||||
",
|
||||
)?;
|
||||
|
||||
assert_public_type(&case, "a", "x", "Literal[1] | Unbound")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn if_elif_else() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
y = 1
|
||||
y = 2
|
||||
if flag:
|
||||
y = 3
|
||||
elif flag2:
|
||||
y = 4
|
||||
else:
|
||||
r = y
|
||||
y = 5
|
||||
s = y
|
||||
x = y
|
||||
",
|
||||
)?;
|
||||
|
||||
assert_public_type(&case, "a", "x", "Literal[3, 4, 5]")?;
|
||||
assert_public_type(&case, "a", "r", "Literal[2]")?;
|
||||
assert_public_type(&case, "a", "s", "Literal[5]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn if_elif() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
y = 1
|
||||
y = 2
|
||||
if flag:
|
||||
y = 3
|
||||
elif flag2:
|
||||
y = 4
|
||||
x = y
|
||||
",
|
||||
)?;
|
||||
|
||||
assert_public_type(&case, "a", "x", "Literal[2, 3, 4]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn literal_int_arithmetic() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
a = 2 + 1
|
||||
b = a - 4
|
||||
c = a * b
|
||||
d = c / 3
|
||||
e = 5 % 3
|
||||
",
|
||||
)?;
|
||||
|
||||
assert_public_type(&case, "a", "a", "Literal[3]")?;
|
||||
assert_public_type(&case, "a", "b", "Literal[-1]")?;
|
||||
assert_public_type(&case, "a", "c", "Literal[-3]")?;
|
||||
assert_public_type(&case, "a", "d", "Literal[-1]")?;
|
||||
assert_public_type(&case, "a", "e", "Literal[2]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn walrus() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
x = (y := 1) + 1
|
||||
",
|
||||
)?;
|
||||
|
||||
assert_public_type(&case, "a", "x", "Literal[2]")?;
|
||||
assert_public_type(&case, "a", "y", "Literal[1]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ifexpr() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
x = 1 if flag else 2
|
||||
",
|
||||
)?;
|
||||
|
||||
assert_public_type(&case, "a", "x", "Literal[1, 2]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ifexpr_walrus() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
y = z = 0
|
||||
x = (y := 1) if flag else (z := 2)
|
||||
a = y
|
||||
b = z
|
||||
",
|
||||
)?;
|
||||
|
||||
assert_public_type(&case, "a", "x", "Literal[1, 2]")?;
|
||||
assert_public_type(&case, "a", "a", "Literal[0, 1]")?;
|
||||
assert_public_type(&case, "a", "b", "Literal[0, 2]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ifexpr_walrus_2() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
y = 0
|
||||
(y := 1) if flag else (y := 2)
|
||||
a = y
|
||||
",
|
||||
)?;
|
||||
|
||||
assert_public_type(&case, "a", "a", "Literal[1, 2]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ifexpr_nested() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
x = 1 if flag else 2 if flag2 else 3
|
||||
",
|
||||
)?;
|
||||
|
||||
assert_public_type(&case, "a", "x", "Literal[1, 2, 3]")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn none() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
x = 1 if flag else None
|
||||
",
|
||||
)?;
|
||||
|
||||
assert_public_type(&case, "a", "x", "Literal[1] | None")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn narrow_none() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
|
||||
write_to_path(
|
||||
&case,
|
||||
"a.py",
|
||||
"
|
||||
x = 1 if flag else None
|
||||
y = 0
|
||||
if x is not None:
|
||||
y = x
|
||||
z = y
|
||||
",
|
||||
)?;
|
||||
|
||||
// TODO normalization of unions and intersections: this type is technically correct but
|
||||
// begging for normalization
|
||||
assert_public_type(&case, "a", "z", "Literal[0] | Literal[1] | None & ~None")
|
||||
}
|
||||
}
|
||||
105
crates/red_knot/src/source.rs
Normal file
@@ -0,0 +1,105 @@
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::Arc;
|
||||
|
||||
use ruff_notebook::Notebook;
|
||||
use ruff_python_ast::PySourceType;
|
||||
|
||||
use crate::cache::KeyValueCache;
|
||||
use crate::db::{QueryResult, SourceDb};
|
||||
use crate::files::FileId;
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn source_text(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Source> {
|
||||
let jar = db.jar()?;
|
||||
let sources = &jar.sources;
|
||||
|
||||
sources.get(&file_id, |file_id| {
|
||||
let path = db.file_path(*file_id);
|
||||
|
||||
let source_text = std::fs::read_to_string(&path).unwrap_or_else(|err| {
|
||||
tracing::error!("Failed to read file '{path:?}: {err}'. Falling back to empty text");
|
||||
String::new()
|
||||
});
|
||||
|
||||
let python_ty = PySourceType::from(&path);
|
||||
|
||||
let kind = match python_ty {
|
||||
PySourceType::Python => {
|
||||
SourceKind::Python(Arc::from(source_text))
|
||||
}
|
||||
PySourceType::Stub => SourceKind::Stub(Arc::from(source_text)),
|
||||
PySourceType::Ipynb => {
|
||||
let notebook = Notebook::from_source_code(&source_text).unwrap_or_else(|err| {
|
||||
// TODO should this be changed to never fail?
|
||||
// or should we instead add a diagnostic somewhere? But what would we return in this case?
|
||||
tracing::error!(
|
||||
"Failed to parse notebook '{path:?}: {err}'. Falling back to an empty notebook"
|
||||
);
|
||||
Notebook::from_source_code("").unwrap()
|
||||
});
|
||||
|
||||
SourceKind::IpyNotebook(Arc::new(notebook))
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Source { kind })
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum SourceKind {
|
||||
Python(Arc<str>),
|
||||
Stub(Arc<str>),
|
||||
IpyNotebook(Arc<Notebook>),
|
||||
}
|
||||
|
||||
impl<'a> From<&'a SourceKind> for PySourceType {
|
||||
fn from(value: &'a SourceKind) -> Self {
|
||||
match value {
|
||||
SourceKind::Python(_) => PySourceType::Python,
|
||||
SourceKind::Stub(_) => PySourceType::Stub,
|
||||
SourceKind::IpyNotebook(_) => PySourceType::Ipynb,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Source {
|
||||
kind: SourceKind,
|
||||
}
|
||||
|
||||
impl Source {
|
||||
pub fn python<T: Into<Arc<str>>>(source: T) -> Self {
|
||||
Self {
|
||||
kind: SourceKind::Python(source.into()),
|
||||
}
|
||||
}
|
||||
pub fn kind(&self) -> &SourceKind {
|
||||
&self.kind
|
||||
}
|
||||
|
||||
pub fn text(&self) -> &str {
|
||||
match &self.kind {
|
||||
SourceKind::Python(text) => text,
|
||||
SourceKind::Stub(text) => text,
|
||||
SourceKind::IpyNotebook(notebook) => notebook.source_code(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SourceStorage(pub(crate) KeyValueCache<FileId, Source>);
|
||||
|
||||
impl Deref for SourceStorage {
|
||||
type Target = KeyValueCache<FileId, Source>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for SourceStorage {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
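For orientation, here is a minimal sketch (not part of the diff) of how the `Source`/`SourceKind` API introduced above might be used from within the same crate; the `describe`/`example` helpers and the literal input are illustrative only.

```rust
// Illustrative sketch only; assumes it lives alongside `Source` in the same crate.
use ruff_python_ast::PySourceType;

fn describe(source: &Source) -> usize {
    // The kind maps back to a `PySourceType` via the `From` impl above.
    let _source_type: PySourceType = source.kind().into();

    // `text()` yields the raw code for all kinds; for notebooks it is the
    // concatenated cell source.
    source.text().len()
}

fn example() -> usize {
    // `Source::python` accepts anything convertible into `Arc<str>`, including `&str`.
    describe(&Source::python("x = 1\n"))
}
```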
@@ -1,105 +0,0 @@
|
||||
//! Code for representing Red Knot's release version number.
|
||||
use std::fmt;
|
||||
|
||||
/// Information about the git repository where Red Knot was built from.
|
||||
pub(crate) struct CommitInfo {
|
||||
short_commit_hash: String,
|
||||
commit_date: String,
|
||||
commits_since_last_tag: u32,
|
||||
}
|
||||
|
||||
/// Red Knot's version.
|
||||
pub(crate) struct VersionInfo {
|
||||
/// Red Knot's version, such as "0.5.1"
|
||||
version: String,
|
||||
/// Information about the git commit we may have been built from.
|
||||
///
|
||||
/// `None` if not built from a git repo or if retrieval failed.
|
||||
commit_info: Option<CommitInfo>,
|
||||
}
|
||||
|
||||
impl fmt::Display for VersionInfo {
|
||||
/// Formatted version information: `<version>[+<commits>] (<commit> <date>)`
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.version)?;
|
||||
|
||||
if let Some(ref ci) = self.commit_info {
|
||||
if ci.commits_since_last_tag > 0 {
|
||||
write!(f, "+{}", ci.commits_since_last_tag)?;
|
||||
}
|
||||
write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns information about Red Knot's version.
|
||||
pub(crate) fn version() -> VersionInfo {
|
||||
// Environment variables are only read at compile-time
|
||||
macro_rules! option_env_str {
|
||||
($name:expr) => {
|
||||
option_env!($name).map(|s| s.to_string())
|
||||
};
|
||||
}
|
||||
|
||||
// This version is pulled from Cargo.toml and set by Cargo
|
||||
let version = option_env_str!("CARGO_PKG_VERSION").unwrap();
|
||||
|
||||
// Commit info is pulled from git and set by `build.rs`
|
||||
let commit_info =
|
||||
option_env_str!("RED_KNOT_COMMIT_SHORT_HASH").map(|short_commit_hash| CommitInfo {
|
||||
short_commit_hash,
|
||||
commit_date: option_env_str!("RED_KNOT_COMMIT_DATE").unwrap(),
|
||||
commits_since_last_tag: option_env_str!("RED_KNOT_LAST_TAG_DISTANCE")
|
||||
.as_deref()
|
||||
.map_or(0, |value| value.parse::<u32>().unwrap_or(0)),
|
||||
});
|
||||
|
||||
VersionInfo {
|
||||
version,
|
||||
commit_info,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use insta::assert_snapshot;
|
||||
|
||||
use super::{CommitInfo, VersionInfo};
|
||||
|
||||
#[test]
|
||||
fn version_formatting() {
|
||||
let version = VersionInfo {
|
||||
version: "0.0.0".to_string(),
|
||||
commit_info: None,
|
||||
};
|
||||
assert_snapshot!(version, @"0.0.0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_formatting_with_commit_info() {
|
||||
let version = VersionInfo {
|
||||
version: "0.0.0".to_string(),
|
||||
commit_info: Some(CommitInfo {
|
||||
short_commit_hash: "53b0f5d92".to_string(),
|
||||
commit_date: "2023-10-19".to_string(),
|
||||
commits_since_last_tag: 0,
|
||||
}),
|
||||
};
|
||||
assert_snapshot!(version, @"0.0.0 (53b0f5d92 2023-10-19)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_formatting_with_commits_since_last_tag() {
|
||||
let version = VersionInfo {
|
||||
version: "0.0.0".to_string(),
|
||||
commit_info: Some(CommitInfo {
|
||||
short_commit_hash: "53b0f5d92".to_string(),
|
||||
commit_date: "2023-10-19".to_string(),
|
||||
commits_since_last_tag: 24,
|
||||
}),
|
||||
};
|
||||
assert_snapshot!(version, @"0.0.0+24 (53b0f5d92 2023-10-19)");
|
||||
}
|
||||
}
|
||||
77
crates/red_knot/src/watch.rs
Normal file
@@ -0,0 +1,77 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Context;
|
||||
use notify::event::{CreateKind, RemoveKind};
|
||||
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
|
||||
|
||||
use crate::program::{FileChangeKind, FileWatcherChange};
|
||||
|
||||
pub struct FileWatcher {
|
||||
watcher: RecommendedWatcher,
|
||||
}
|
||||
|
||||
pub trait EventHandler: Send + 'static {
|
||||
fn handle(&self, changes: Vec<FileWatcherChange>);
|
||||
}
|
||||
|
||||
impl<F> EventHandler for F
|
||||
where
|
||||
F: Fn(Vec<FileWatcherChange>) + Send + 'static,
|
||||
{
|
||||
fn handle(&self, changes: Vec<FileWatcherChange>) {
|
||||
let f = self;
|
||||
f(changes);
|
||||
}
|
||||
}
|
||||
|
||||
impl FileWatcher {
|
||||
pub fn new<E>(handler: E) -> anyhow::Result<Self>
|
||||
where
|
||||
E: EventHandler,
|
||||
{
|
||||
Self::from_handler(Box::new(handler))
|
||||
}
|
||||
|
||||
fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
|
||||
let watcher = recommended_watcher(move |changes: notify::Result<Event>| {
|
||||
match changes {
|
||||
Ok(event) => {
|
||||
// TODO verify that this handles all events correctly
|
||||
let change_kind = match event.kind {
|
||||
EventKind::Create(CreateKind::File) => FileChangeKind::Created,
|
||||
EventKind::Modify(_) => FileChangeKind::Modified,
|
||||
EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
|
||||
_ => {
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let mut changes = Vec::new();
|
||||
|
||||
for path in event.paths {
|
||||
if path.is_file() {
|
||||
changes.push(FileWatcherChange::new(path, change_kind));
|
||||
}
|
||||
}
|
||||
|
||||
if !changes.is_empty() {
|
||||
handler.handle(changes);
|
||||
}
|
||||
}
|
||||
// TODO proper error handling
|
||||
Err(err) => {
|
||||
panic!("Error: {err}");
|
||||
}
|
||||
}
|
||||
})
|
||||
.context("Failed to create file watcher.")?;
|
||||
|
||||
Ok(Self { watcher })
|
||||
}
|
||||
|
||||
pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
|
||||
self.watcher.watch(path, RecursiveMode::Recursive)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
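A small usage sketch (not part of the diff) for the `FileWatcher` above; the closure satisfies the blanket `EventHandler` impl shown in the file, and the helper name and watched path are placeholders.

```rust
// Illustrative sketch only; assumes access to `FileWatcherChange` from `crate::program`.
use std::path::Path;

use crate::program::FileWatcherChange;

fn watch_project(root: &Path) -> anyhow::Result<()> {
    // Any `Fn(Vec<FileWatcherChange>) + Send + 'static` closure can serve as the
    // handler thanks to the blanket `EventHandler` implementation above.
    let mut watcher = FileWatcher::new(|changes: Vec<FileWatcherChange>| {
        // React to created/modified/deleted files, e.g. by scheduling a re-check.
        tracing::debug!("received {} file change(s)", changes.len());
    })?;

    // Watch the project root recursively; nested directories are included.
    watcher.watch_folder(root)?;
    Ok(())
}
```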
File diff suppressed because it is too large
@@ -1,31 +0,0 @@
|
||||
[package]
|
||||
name = "red_knot_ide"
|
||||
version = "0.0.0"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
homepage = { workspace = true }
|
||||
documentation = { workspace = true }
|
||||
repository = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
ruff_db = { workspace = true }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_python_parser = { workspace = true }
|
||||
ruff_text_size = { workspace = true }
|
||||
red_knot_python_semantic = { workspace = true }
|
||||
|
||||
rustc-hash = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
smallvec = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
red_knot_vendored = { workspace = true }
|
||||
|
||||
insta = { workspace = true, features = ["filters"] }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
@@ -1,138 +0,0 @@
|
||||
use red_knot_python_semantic::Db as SemanticDb;
|
||||
use ruff_db::{Db as SourceDb, Upcast};
|
||||
|
||||
#[salsa::db]
|
||||
pub trait Db: SemanticDb + Upcast<dyn SemanticDb> + Upcast<dyn SourceDb> {}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use std::sync::Arc;
|
||||
|
||||
use super::Db;
|
||||
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
|
||||
use red_knot_python_semantic::{default_lint_registry, Db as SemanticDb, Program};
|
||||
use ruff_db::files::{File, Files};
|
||||
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
|
||||
use ruff_db::vendored::VendoredFileSystem;
|
||||
use ruff_db::{Db as SourceDb, Upcast};
|
||||
|
||||
#[salsa::db]
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct TestDb {
|
||||
storage: salsa::Storage<Self>,
|
||||
files: Files,
|
||||
system: TestSystem,
|
||||
vendored: VendoredFileSystem,
|
||||
events: Arc<std::sync::Mutex<Vec<salsa::Event>>>,
|
||||
rule_selection: Arc<RuleSelection>,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl TestDb {
|
||||
pub(crate) fn new() -> Self {
|
||||
Self {
|
||||
storage: salsa::Storage::default(),
|
||||
system: TestSystem::default(),
|
||||
vendored: red_knot_vendored::file_system().clone(),
|
||||
events: Arc::default(),
|
||||
files: Files::default(),
|
||||
rule_selection: Arc::new(RuleSelection::from_registry(default_lint_registry())),
|
||||
}
|
||||
}
|
||||
|
||||
/// Takes the salsa events.
|
||||
///
|
||||
/// ## Panics
|
||||
/// If there are any pending salsa snapshots.
|
||||
pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
|
||||
let inner = Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");
|
||||
|
||||
let events = inner.get_mut().unwrap();
|
||||
std::mem::take(&mut *events)
|
||||
}
|
||||
|
||||
/// Clears the salsa events.
|
||||
///
|
||||
/// ## Panics
|
||||
/// If there are any pending salsa snapshots.
|
||||
pub(crate) fn clear_salsa_events(&mut self) {
|
||||
self.take_salsa_events();
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithTestSystem for TestDb {
|
||||
fn test_system(&self) -> &TestSystem {
|
||||
&self.system
|
||||
}
|
||||
|
||||
fn test_system_mut(&mut self) -> &mut TestSystem {
|
||||
&mut self.system
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl SourceDb for TestDb {
|
||||
fn vendored(&self) -> &VendoredFileSystem {
|
||||
&self.vendored
|
||||
}
|
||||
|
||||
fn system(&self) -> &dyn System {
|
||||
&self.system
|
||||
}
|
||||
|
||||
fn files(&self) -> &Files {
|
||||
&self.files
|
||||
}
|
||||
|
||||
fn python_version(&self) -> ruff_python_ast::PythonVersion {
|
||||
Program::get(self).python_version(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn SourceDb> for TestDb {
|
||||
fn upcast(&self) -> &(dyn SourceDb + 'static) {
|
||||
self
|
||||
}
|
||||
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn SemanticDb> for TestDb {
|
||||
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
|
||||
self
|
||||
}
|
||||
|
||||
fn upcast_mut(&mut self) -> &mut dyn SemanticDb {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl SemanticDb for TestDb {
|
||||
fn is_file_open(&self, file: File) -> bool {
|
||||
!file.path(self).is_vendored_path()
|
||||
}
|
||||
|
||||
fn rule_selection(&self) -> Arc<RuleSelection> {
|
||||
self.rule_selection.clone()
|
||||
}
|
||||
|
||||
fn lint_registry(&self) -> &LintRegistry {
|
||||
default_lint_registry()
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl Db for TestDb {}
|
||||
|
||||
#[salsa::db]
|
||||
impl salsa::Database for TestDb {
|
||||
fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
|
||||
let event = event();
|
||||
tracing::trace!("event: {event:?}");
|
||||
let mut events = self.events.lock().unwrap();
|
||||
events.push(event);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,106 +0,0 @@
|
||||
use ruff_python_ast::visitor::source_order::{SourceOrderVisitor, TraversalSignal};
|
||||
use ruff_python_ast::AnyNodeRef;
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
use std::fmt;
|
||||
use std::fmt::Formatter;
|
||||
|
||||
/// Returns the node with a minimal range that fully contains `range`.
|
||||
///
|
||||
/// If `range` is empty and falls within a parser *synthesized* node generated during error recovery,
|
||||
/// then the first node with the given range is returned.
|
||||
///
|
||||
/// ## Panics
|
||||
/// Panics if `range` is not contained within `root`.
|
||||
pub(crate) fn covering_node(root: AnyNodeRef, range: TextRange) -> CoveringNode {
|
||||
struct Visitor<'a> {
|
||||
range: TextRange,
|
||||
found: bool,
|
||||
ancestors: Vec<AnyNodeRef<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> SourceOrderVisitor<'a> for Visitor<'a> {
|
||||
fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal {
|
||||
// If the node fully contains the range, than it is a possible match but traverse into its children
|
||||
// to see if there's a node with a narrower range.
|
||||
if !self.found && node.range().contains_range(self.range) {
|
||||
self.ancestors.push(node);
|
||||
TraversalSignal::Traverse
|
||||
} else {
|
||||
TraversalSignal::Skip
|
||||
}
|
||||
}
|
||||
|
||||
fn leave_node(&mut self, node: AnyNodeRef<'a>) {
|
||||
if !self.found && self.ancestors.last() == Some(&node) {
|
||||
self.found = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert!(
|
||||
root.range().contains_range(range),
|
||||
"Range is not contained within root"
|
||||
);
|
||||
|
||||
let mut visitor = Visitor {
|
||||
range,
|
||||
found: false,
|
||||
ancestors: Vec::new(),
|
||||
};
|
||||
|
||||
root.visit_source_order(&mut visitor);
|
||||
|
||||
let minimal = visitor.ancestors.pop().unwrap_or(root);
|
||||
CoveringNode {
|
||||
node: minimal,
|
||||
ancestors: visitor.ancestors,
|
||||
}
|
||||
}
|
||||
|
||||
/// The node with a minimal range that fully contains the search range.
|
||||
pub(crate) struct CoveringNode<'a> {
|
||||
/// The node with a minimal range that fully contains the search range.
|
||||
node: AnyNodeRef<'a>,
|
||||
|
||||
/// The node's ancestors (the spine up to the root).
|
||||
ancestors: Vec<AnyNodeRef<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> CoveringNode<'a> {
|
||||
pub(crate) fn node(&self) -> AnyNodeRef<'a> {
|
||||
self.node
|
||||
}
|
||||
|
||||
/// Returns the node's parent.
|
||||
pub(crate) fn parent(&self) -> Option<AnyNodeRef<'a>> {
|
||||
self.ancestors.last().copied()
|
||||
}
|
||||
|
||||
/// Finds the minimal node that fully covers the range and fulfills the given predicate.
|
||||
pub(crate) fn find(mut self, f: impl Fn(AnyNodeRef<'a>) -> bool) -> Result<Self, Self> {
|
||||
if f(self.node) {
|
||||
return Ok(self);
|
||||
}
|
||||
|
||||
match self.ancestors.iter().rposition(|node| f(*node)) {
|
||||
Some(index) => {
|
||||
let node = self.ancestors[index];
|
||||
self.ancestors.truncate(index);
|
||||
|
||||
Ok(Self {
|
||||
node,
|
||||
ancestors: self.ancestors,
|
||||
})
|
||||
}
|
||||
None => Err(self),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for CoveringNode<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
f.debug_tuple("NodeWithAncestors")
|
||||
.field(&self.node)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
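A minimal sketch (not part of the diff) showing how `covering_node` and `CoveringNode::find` combine to locate the smallest expression around a range, mirroring the call site in the goto code below; the helper name is illustrative.

```rust
// Illustrative sketch only; assumes it lives in the same crate as `covering_node`.
use ruff_db::parsed::ParsedModule;
use ruff_text_size::{Ranged, TextRange};

fn smallest_expression_span(parsed: &ParsedModule, range: TextRange) -> Option<TextRange> {
    // Start from the node with the minimal range that still contains `range`,
    // then widen to the nearest enclosing expression if the hit isn't one itself.
    let covering = covering_node(parsed.syntax().into(), range);

    match covering.find(|node| node.is_expression()) {
        Ok(expression) => Some(expression.node().range()),
        // No enclosing expression, e.g. the range spans a whole statement.
        Err(_) => None,
    }
}
```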
@@ -1,851 +0,0 @@
|
||||
use crate::find_node::covering_node;
|
||||
use crate::{Db, HasNavigationTargets, NavigationTargets, RangedValue};
|
||||
use red_knot_python_semantic::types::Type;
|
||||
use red_knot_python_semantic::{HasType, SemanticModel};
|
||||
use ruff_db::files::{File, FileRange};
|
||||
use ruff_db::parsed::{parsed_module, ParsedModule};
|
||||
use ruff_python_ast::{self as ast, AnyNodeRef};
|
||||
use ruff_python_parser::TokenKind;
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
|
||||
pub fn goto_type_definition(
|
||||
db: &dyn Db,
|
||||
file: File,
|
||||
offset: TextSize,
|
||||
) -> Option<RangedValue<NavigationTargets>> {
|
||||
let parsed = parsed_module(db.upcast(), file);
|
||||
let goto_target = find_goto_target(parsed, offset)?;
|
||||
|
||||
let model = SemanticModel::new(db.upcast(), file);
|
||||
let ty = goto_target.inferred_type(&model)?;
|
||||
|
||||
tracing::debug!(
|
||||
"Inferred type of covering node is {}",
|
||||
ty.display(db.upcast())
|
||||
);
|
||||
|
||||
let navigation_targets = ty.navigation_targets(db);
|
||||
|
||||
Some(RangedValue {
|
||||
range: FileRange::new(file, goto_target.range()),
|
||||
value: navigation_targets,
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub(crate) enum GotoTarget<'a> {
|
||||
Expression(ast::ExprRef<'a>),
|
||||
FunctionDef(&'a ast::StmtFunctionDef),
|
||||
ClassDef(&'a ast::StmtClassDef),
|
||||
Parameter(&'a ast::Parameter),
|
||||
Alias(&'a ast::Alias),
|
||||
|
||||
/// Go to on the module name of an import from
|
||||
/// ```py
|
||||
/// from foo import bar
|
||||
/// ^^^
|
||||
/// ```
|
||||
ImportedModule(&'a ast::StmtImportFrom),
|
||||
|
||||
/// Go to on the exception handler variable
|
||||
/// ```py
|
||||
/// try: ...
|
||||
/// except Exception as e: ...
|
||||
/// ^
|
||||
/// ```
|
||||
ExceptVariable(&'a ast::ExceptHandlerExceptHandler),
|
||||
|
||||
/// Go to on a keyword argument
|
||||
/// ```py
|
||||
/// test(a = 1)
|
||||
/// ^
|
||||
/// ```
|
||||
KeywordArgument(&'a ast::Keyword),
|
||||
|
||||
/// Go to on the rest parameter of a pattern match
|
||||
///
|
||||
/// ```py
|
||||
/// match x:
|
||||
/// case {"a": a, "b": b, **rest}: ...
|
||||
/// ^^^^
|
||||
/// ```
|
||||
PatternMatchRest(&'a ast::PatternMatchMapping),
|
||||
|
||||
/// Go to on a keyword argument of a class pattern
|
||||
///
|
||||
/// ```py
|
||||
/// match Point3D(0, 0, 0):
|
||||
/// case Point3D(x=0, y=0, z=0): ...
|
||||
/// ^ ^ ^
|
||||
/// ```
|
||||
PatternKeywordArgument(&'a ast::PatternKeyword),
|
||||
|
||||
/// Go to on a pattern star argument
|
||||
///
|
||||
/// ```py
|
||||
/// match array:
|
||||
/// case [*args]: ...
|
||||
/// ^^^^
/// ```
|
||||
PatternMatchStarName(&'a ast::PatternMatchStar),
|
||||
|
||||
/// Go to on the name of a pattern match as pattern
|
||||
///
|
||||
/// ```py
|
||||
/// match x:
|
||||
/// case [x] as y: ...
|
||||
/// ^
/// ```
|
||||
PatternMatchAsName(&'a ast::PatternMatchAs),
|
||||
|
||||
/// Go to on the name of a type variable
|
||||
///
|
||||
/// ```py
|
||||
/// type Alias[T: int = bool] = list[T]
|
||||
/// ^
|
||||
/// ```
|
||||
TypeParamTypeVarName(&'a ast::TypeParamTypeVar),
|
||||
|
||||
/// Go to on the name of a type param spec
|
||||
///
|
||||
/// ```py
|
||||
/// type Alias[**P = [int, str]] = Callable[P, int]
|
||||
/// ^
|
||||
/// ```
|
||||
TypeParamParamSpecName(&'a ast::TypeParamParamSpec),
|
||||
|
||||
/// Go to on the name of a type var tuple
|
||||
///
|
||||
/// ```py
|
||||
/// type Alias[*Ts = ()] = tuple[*Ts]
|
||||
/// ^^
|
||||
/// ```
|
||||
TypeParamTypeVarTupleName(&'a ast::TypeParamTypeVarTuple),
|
||||
|
||||
NonLocal {
|
||||
identifier: &'a ast::Identifier,
|
||||
},
|
||||
Globals {
|
||||
identifier: &'a ast::Identifier,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'db> GotoTarget<'db> {
|
||||
pub(crate) fn inferred_type(self, model: &SemanticModel<'db>) -> Option<Type<'db>> {
|
||||
let ty = match self {
|
||||
GotoTarget::Expression(expression) => expression.inferred_type(model),
|
||||
GotoTarget::FunctionDef(function) => function.inferred_type(model),
|
||||
GotoTarget::ClassDef(class) => class.inferred_type(model),
|
||||
GotoTarget::Parameter(parameter) => parameter.inferred_type(model),
|
||||
GotoTarget::Alias(alias) => alias.inferred_type(model),
|
||||
GotoTarget::ExceptVariable(except) => except.inferred_type(model),
|
||||
GotoTarget::KeywordArgument(argument) => {
|
||||
// TODO: Pyright resolves the declared type of the matching parameter. This seems more accurate
|
||||
// than using the inferred value.
|
||||
argument.value.inferred_type(model)
|
||||
}
|
||||
// TODO: Support identifier targets
|
||||
GotoTarget::PatternMatchRest(_)
|
||||
| GotoTarget::PatternKeywordArgument(_)
|
||||
| GotoTarget::PatternMatchStarName(_)
|
||||
| GotoTarget::PatternMatchAsName(_)
|
||||
| GotoTarget::ImportedModule(_)
|
||||
| GotoTarget::TypeParamTypeVarName(_)
|
||||
| GotoTarget::TypeParamParamSpecName(_)
|
||||
| GotoTarget::TypeParamTypeVarTupleName(_)
|
||||
| GotoTarget::NonLocal { .. }
|
||||
| GotoTarget::Globals { .. } => return None,
|
||||
};
|
||||
|
||||
Some(ty)
|
||||
}
|
||||
}
|
||||
|
||||
impl Ranged for GotoTarget<'_> {
|
||||
fn range(&self) -> TextRange {
|
||||
match self {
|
||||
GotoTarget::Expression(expression) => expression.range(),
|
||||
GotoTarget::FunctionDef(function) => function.name.range,
|
||||
GotoTarget::ClassDef(class) => class.name.range,
|
||||
GotoTarget::Parameter(parameter) => parameter.name.range,
|
||||
GotoTarget::Alias(alias) => alias.name.range,
|
||||
GotoTarget::ImportedModule(module) => module.module.as_ref().unwrap().range,
|
||||
GotoTarget::ExceptVariable(except) => except.name.as_ref().unwrap().range,
|
||||
GotoTarget::KeywordArgument(keyword) => keyword.arg.as_ref().unwrap().range,
|
||||
GotoTarget::PatternMatchRest(rest) => rest.rest.as_ref().unwrap().range,
|
||||
GotoTarget::PatternKeywordArgument(keyword) => keyword.attr.range,
|
||||
GotoTarget::PatternMatchStarName(star) => star.name.as_ref().unwrap().range,
|
||||
GotoTarget::PatternMatchAsName(as_name) => as_name.name.as_ref().unwrap().range,
|
||||
GotoTarget::TypeParamTypeVarName(type_var) => type_var.name.range,
|
||||
GotoTarget::TypeParamParamSpecName(spec) => spec.name.range,
|
||||
GotoTarget::TypeParamTypeVarTupleName(tuple) => tuple.name.range,
|
||||
GotoTarget::NonLocal { identifier, .. } => identifier.range,
|
||||
GotoTarget::Globals { identifier, .. } => identifier.range,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn find_goto_target(parsed: &ParsedModule, offset: TextSize) -> Option<GotoTarget> {
|
||||
let token = parsed
|
||||
.tokens()
|
||||
.at_offset(offset)
|
||||
.max_by_key(|token| match token.kind() {
|
||||
TokenKind::Name
|
||||
| TokenKind::String
|
||||
| TokenKind::Complex
|
||||
| TokenKind::Float
|
||||
| TokenKind::Int => 1,
|
||||
_ => 0,
|
||||
})?;
|
||||
|
||||
let covering_node = covering_node(parsed.syntax().into(), token.range())
|
||||
.find(|node| node.is_identifier() || node.is_expression())
|
||||
.ok()?;
|
||||
|
||||
tracing::trace!("Covering node is of kind {:?}", covering_node.node().kind());
|
||||
|
||||
match covering_node.node() {
|
||||
AnyNodeRef::Identifier(identifier) => match covering_node.parent() {
|
||||
Some(AnyNodeRef::StmtFunctionDef(function)) => Some(GotoTarget::FunctionDef(function)),
|
||||
Some(AnyNodeRef::StmtClassDef(class)) => Some(GotoTarget::ClassDef(class)),
|
||||
Some(AnyNodeRef::Parameter(parameter)) => Some(GotoTarget::Parameter(parameter)),
|
||||
Some(AnyNodeRef::Alias(alias)) => Some(GotoTarget::Alias(alias)),
|
||||
Some(AnyNodeRef::StmtImportFrom(from)) => Some(GotoTarget::ImportedModule(from)),
|
||||
Some(AnyNodeRef::ExceptHandlerExceptHandler(handler)) => {
|
||||
Some(GotoTarget::ExceptVariable(handler))
|
||||
}
|
||||
Some(AnyNodeRef::Keyword(keyword)) => Some(GotoTarget::KeywordArgument(keyword)),
|
||||
Some(AnyNodeRef::PatternMatchMapping(mapping)) => {
|
||||
Some(GotoTarget::PatternMatchRest(mapping))
|
||||
}
|
||||
Some(AnyNodeRef::PatternKeyword(keyword)) => {
|
||||
Some(GotoTarget::PatternKeywordArgument(keyword))
|
||||
}
|
||||
Some(AnyNodeRef::PatternMatchStar(star)) => {
|
||||
Some(GotoTarget::PatternMatchStarName(star))
|
||||
}
|
||||
Some(AnyNodeRef::PatternMatchAs(as_pattern)) => {
|
||||
Some(GotoTarget::PatternMatchAsName(as_pattern))
|
||||
}
|
||||
Some(AnyNodeRef::TypeParamTypeVar(var)) => Some(GotoTarget::TypeParamTypeVarName(var)),
|
||||
Some(AnyNodeRef::TypeParamParamSpec(bound)) => {
|
||||
Some(GotoTarget::TypeParamParamSpecName(bound))
|
||||
}
|
||||
Some(AnyNodeRef::TypeParamTypeVarTuple(var_tuple)) => {
|
||||
Some(GotoTarget::TypeParamTypeVarTupleName(var_tuple))
|
||||
}
|
||||
Some(AnyNodeRef::ExprAttribute(attribute)) => {
|
||||
Some(GotoTarget::Expression(attribute.into()))
|
||||
}
|
||||
Some(AnyNodeRef::StmtNonlocal(_)) => Some(GotoTarget::NonLocal { identifier }),
|
||||
Some(AnyNodeRef::StmtGlobal(_)) => Some(GotoTarget::Globals { identifier }),
|
||||
None => None,
|
||||
Some(parent) => {
|
||||
tracing::debug!(
|
||||
"Missing `GoToTarget` for identifier with parent {:?}",
|
||||
parent.kind()
|
||||
);
|
||||
None
|
||||
}
|
||||
},
|
||||
|
||||
node => node.as_expr_ref().map(GotoTarget::Expression),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::tests::{cursor_test, CursorTest, IntoDiagnostic};
|
||||
use crate::{goto_type_definition, NavigationTarget};
|
||||
use insta::assert_snapshot;
|
||||
use ruff_db::diagnostic::{
|
||||
Annotation, Diagnostic, DiagnosticId, LintName, Severity, Span, SubDiagnostic,
|
||||
};
|
||||
use ruff_db::files::FileRange;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
#[test]
|
||||
fn goto_type_of_expression_with_class_type() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
class Test: ...
|
||||
|
||||
a<CURSOR>b = Test()
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r###"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> /main.py:2:19
|
||||
|
|
||||
2 | class Test: ...
|
||||
| ^^^^
|
||||
3 |
|
||||
4 | ab = Test()
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:4:13
|
||||
|
|
||||
2 | class Test: ...
|
||||
3 |
|
||||
4 | ab = Test()
|
||||
| ^^
|
||||
|
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_of_expression_with_function_type() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def foo(a, b): ...
|
||||
|
||||
ab = foo
|
||||
|
||||
a<CURSOR>b
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r###"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> /main.py:2:17
|
||||
|
|
||||
2 | def foo(a, b): ...
|
||||
| ^^^
|
||||
3 |
|
||||
4 | ab = foo
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:6:13
|
||||
|
|
||||
4 | ab = foo
|
||||
5 |
|
||||
6 | ab
|
||||
| ^^
|
||||
|
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_of_expression_with_union_type() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
|
||||
def foo(a, b): ...
|
||||
|
||||
def bar(a, b): ...
|
||||
|
||||
if random.choice():
|
||||
a = foo
|
||||
else:
|
||||
a = bar
|
||||
|
||||
a<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> /main.py:3:17
|
||||
|
|
||||
3 | def foo(a, b): ...
|
||||
| ^^^
|
||||
4 |
|
||||
5 | def bar(a, b): ...
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:12:13
|
||||
|
|
||||
10 | a = bar
|
||||
11 |
|
||||
12 | a
|
||||
| ^
|
||||
|
|
||||
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> /main.py:5:17
|
||||
|
|
||||
3 | def foo(a, b): ...
|
||||
4 |
|
||||
5 | def bar(a, b): ...
|
||||
| ^^^
|
||||
6 |
|
||||
7 | if random.choice():
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:12:13
|
||||
|
|
||||
10 | a = bar
|
||||
11 |
|
||||
12 | a
|
||||
| ^
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_of_expression_with_module() {
|
||||
let mut test = cursor_test(
|
||||
r#"
|
||||
import lib
|
||||
|
||||
lib<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
test.write_file("lib.py", "a = 10").unwrap();
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> /lib.py:1:1
|
||||
|
|
||||
1 | a = 10
|
||||
| ^^^^^^
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:4:13
|
||||
|
|
||||
2 | import lib
|
||||
3 |
|
||||
4 | lib
|
||||
| ^^^
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_of_expression_with_literal_type() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
a: str = "test"
|
||||
|
||||
a<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r#"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> stdlib/builtins.pyi:438:7
|
||||
|
|
||||
436 | def __getitem__(self, key: int, /) -> str | int | None: ...
|
||||
437 |
|
||||
438 | class str(Sequence[str]):
|
||||
| ^^^
|
||||
439 | @overload
|
||||
440 | def __new__(cls, object: object = ...) -> Self: ...
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:4:13
|
||||
|
|
||||
2 | a: str = "test"
|
||||
3 |
|
||||
4 | a
|
||||
| ^
|
||||
|
|
||||
"#);
|
||||
}
|
||||
#[test]
|
||||
fn goto_type_of_expression_with_literal_node() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
a: str = "te<CURSOR>st"
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r#"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> stdlib/builtins.pyi:438:7
|
||||
|
|
||||
436 | def __getitem__(self, key: int, /) -> str | int | None: ...
|
||||
437 |
|
||||
438 | class str(Sequence[str]):
|
||||
| ^^^
|
||||
439 | @overload
|
||||
440 | def __new__(cls, object: object = ...) -> Self: ...
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:2:22
|
||||
|
|
||||
2 | a: str = "test"
|
||||
| ^^^^^^
|
||||
|
|
||||
"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_of_expression_with_type_var_type() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
type Alias[T: int = bool] = list[T<CURSOR>]
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r###"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> /main.py:2:24
|
||||
|
|
||||
2 | type Alias[T: int = bool] = list[T]
|
||||
| ^
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:2:46
|
||||
|
|
||||
2 | type Alias[T: int = bool] = list[T]
|
||||
| ^
|
||||
|
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_of_expression_with_type_param_spec() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
type Alias[**P = [int, str]] = Callable[P<CURSOR>, int]
|
||||
"#,
|
||||
);
|
||||
|
||||
// TODO: Goto type definition currently doesn't work for type param specs
|
||||
// because the inference doesn't support them yet.
|
||||
// This snapshot should show a single target pointing to `P`
|
||||
assert_snapshot!(test.goto_type_definition(), @"No type definitions found");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_of_expression_with_type_var_tuple() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
type Alias[*Ts = ()] = tuple[*Ts<CURSOR>]
|
||||
"#,
|
||||
);
|
||||
|
||||
// TODO: Goto type definition currently doesn't work for type var tuples
|
||||
// because the inference doesn't support them yet.
|
||||
// This snapshot should show a single target pointing to `Ts`
|
||||
assert_snapshot!(test.goto_type_definition(), @"No type definitions found");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_on_keyword_argument() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def test(a: str): ...
|
||||
|
||||
test(a<CURSOR>= "123")
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r#"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> stdlib/builtins.pyi:438:7
|
||||
|
|
||||
436 | def __getitem__(self, key: int, /) -> str | int | None: ...
|
||||
437 |
|
||||
438 | class str(Sequence[str]):
|
||||
| ^^^
|
||||
439 | @overload
|
||||
440 | def __new__(cls, object: object = ...) -> Self: ...
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:4:18
|
||||
|
|
||||
2 | def test(a: str): ...
|
||||
3 |
|
||||
4 | test(a= "123")
|
||||
| ^
|
||||
|
|
||||
"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_on_incorrectly_typed_keyword_argument() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def test(a: str): ...
|
||||
|
||||
test(a<CURSOR>= 123)
|
||||
"#,
|
||||
);
|
||||
|
||||
// TODO: This should jump to `str` and not `int` because
|
||||
// the keyword is typed as a string. It's only the passed argument that
|
||||
// is an int. Navigating to `str` would match pyright's behavior.
|
||||
assert_snapshot!(test.goto_type_definition(), @r"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> stdlib/builtins.pyi:231:7
|
||||
|
|
||||
229 | _LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed
|
||||
230 |
|
||||
231 | class int:
|
||||
| ^^^
|
||||
232 | @overload
|
||||
233 | def __new__(cls, x: ConvertibleToInt = ..., /) -> Self: ...
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:4:18
|
||||
|
|
||||
2 | def test(a: str): ...
|
||||
3 |
|
||||
4 | test(a= 123)
|
||||
| ^
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_on_kwargs() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def f(name: str): ...
|
||||
|
||||
kwargs = { "name": "test"}
|
||||
|
||||
f(**kwargs<CURSOR>)
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r#"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> stdlib/builtins.pyi:1086:7
|
||||
|
|
||||
1084 | def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
|
||||
1085 |
|
||||
1086 | class dict(MutableMapping[_KT, _VT]):
|
||||
| ^^^^
|
||||
1087 | # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics
|
||||
1088 | # Also multiprocessing.managers.SyncManager.dict()
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:6:5
|
||||
|
|
||||
4 | kwargs = { "name": "test"}
|
||||
5 |
|
||||
6 | f(**kwargs)
|
||||
| ^^^^^^
|
||||
|
|
||||
"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_of_expression_with_builtin() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def foo(a: str):
|
||||
a<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> stdlib/builtins.pyi:438:7
|
||||
|
|
||||
436 | def __getitem__(self, key: int, /) -> str | int | None: ...
|
||||
437 |
|
||||
438 | class str(Sequence[str]):
|
||||
| ^^^
|
||||
439 | @overload
|
||||
440 | def __new__(cls, object: object = ...) -> Self: ...
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:3:17
|
||||
|
|
||||
2 | def foo(a: str):
|
||||
3 | a
|
||||
| ^
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_definition_cursor_between_object_and_attribute() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
class X:
|
||||
def foo(a, b): ...
|
||||
|
||||
x = X()
|
||||
|
||||
x<CURSOR>.foo()
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r###"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> /main.py:2:19
|
||||
|
|
||||
2 | class X:
|
||||
| ^
|
||||
3 | def foo(a, b): ...
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:7:13
|
||||
|
|
||||
5 | x = X()
|
||||
6 |
|
||||
7 | x.foo()
|
||||
| ^
|
||||
|
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_between_call_arguments() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def foo(a, b): ...
|
||||
|
||||
foo<CURSOR>()
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r###"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> /main.py:2:17
|
||||
|
|
||||
2 | def foo(a, b): ...
|
||||
| ^^^
|
||||
3 |
|
||||
4 | foo()
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:4:13
|
||||
|
|
||||
2 | def foo(a, b): ...
|
||||
3 |
|
||||
4 | foo()
|
||||
| ^^^
|
||||
|
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_narrowing() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def foo(a: str | None, b):
|
||||
if a is not None:
|
||||
print(a<CURSOR>)
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> stdlib/builtins.pyi:438:7
|
||||
|
|
||||
436 | def __getitem__(self, key: int, /) -> str | int | None: ...
|
||||
437 |
|
||||
438 | class str(Sequence[str]):
|
||||
| ^^^
|
||||
439 | @overload
|
||||
440 | def __new__(cls, object: object = ...) -> Self: ...
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:4:27
|
||||
|
|
||||
2 | def foo(a: str | None, b):
|
||||
3 | if a is not None:
|
||||
4 | print(a)
|
||||
| ^
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_type_none() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def foo(a: str | None, b):
|
||||
a<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.goto_type_definition(), @r"
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> stdlib/types.pyi:671:11
|
||||
|
|
||||
669 | if sys.version_info >= (3, 10):
|
||||
670 | @final
|
||||
671 | class NoneType:
|
||||
| ^^^^^^^^
|
||||
672 | def __bool__(self) -> Literal[False]: ...
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:3:17
|
||||
|
|
||||
2 | def foo(a: str | None, b):
|
||||
3 | a
|
||||
| ^
|
||||
|
|
||||
|
||||
info: lint:goto-type-definition: Type definition
|
||||
--> stdlib/builtins.pyi:438:7
|
||||
|
|
||||
436 | def __getitem__(self, key: int, /) -> str | int | None: ...
|
||||
437 |
|
||||
438 | class str(Sequence[str]):
|
||||
| ^^^
|
||||
439 | @overload
|
||||
440 | def __new__(cls, object: object = ...) -> Self: ...
|
||||
|
|
||||
info: Source
|
||||
--> /main.py:3:17
|
||||
|
|
||||
2 | def foo(a: str | None, b):
|
||||
3 | a
|
||||
| ^
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
impl CursorTest {
|
||||
fn goto_type_definition(&self) -> String {
|
||||
let Some(targets) = goto_type_definition(&self.db, self.file, self.cursor_offset)
|
||||
else {
|
||||
return "No goto target found".to_string();
|
||||
};
|
||||
|
||||
if targets.is_empty() {
|
||||
return "No type definitions found".to_string();
|
||||
}
|
||||
|
||||
let source = targets.range;
|
||||
self.render_diagnostics(
|
||||
targets
|
||||
.into_iter()
|
||||
.map(|target| GotoTypeDefinitionDiagnostic::new(source, &target)),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
struct GotoTypeDefinitionDiagnostic {
|
||||
source: FileRange,
|
||||
target: FileRange,
|
||||
}
|
||||
|
||||
impl GotoTypeDefinitionDiagnostic {
|
||||
fn new(source: FileRange, target: &NavigationTarget) -> Self {
|
||||
Self {
|
||||
source,
|
||||
target: FileRange::new(target.file(), target.focus_range()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoDiagnostic for GotoTypeDefinitionDiagnostic {
|
||||
fn into_diagnostic(self) -> Diagnostic {
|
||||
let mut source = SubDiagnostic::new(Severity::Info, "Source");
|
||||
source.annotate(Annotation::primary(
|
||||
Span::from(self.source.file()).with_range(self.source.range()),
|
||||
));
|
||||
|
||||
let mut main = Diagnostic::new(
|
||||
DiagnosticId::Lint(LintName::of("goto-type-definition")),
|
||||
Severity::Info,
|
||||
"Type definition".to_string(),
|
||||
);
|
||||
main.annotate(Annotation::primary(
|
||||
Span::from(self.target.file()).with_range(self.target.range()),
|
||||
));
|
||||
main.sub(source);
|
||||
|
||||
main
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,602 +0,0 @@
|
||||
use crate::goto::{find_goto_target, GotoTarget};
|
||||
use crate::{Db, MarkupKind, RangedValue};
|
||||
use red_knot_python_semantic::types::Type;
|
||||
use red_knot_python_semantic::SemanticModel;
|
||||
use ruff_db::files::{File, FileRange};
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_text_size::{Ranged, TextSize};
|
||||
use std::fmt;
|
||||
use std::fmt::Formatter;
|
||||
|
||||
pub fn hover(db: &dyn Db, file: File, offset: TextSize) -> Option<RangedValue<Hover>> {
|
||||
let parsed = parsed_module(db.upcast(), file);
|
||||
let goto_target = find_goto_target(parsed, offset)?;
|
||||
|
||||
if let GotoTarget::Expression(expr) = goto_target {
|
||||
if expr.is_literal_expr() {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
let model = SemanticModel::new(db.upcast(), file);
|
||||
let ty = goto_target.inferred_type(&model)?;
|
||||
|
||||
tracing::debug!(
|
||||
"Inferred type of covering node is {}",
|
||||
ty.display(db.upcast())
|
||||
);
|
||||
|
||||
// TODO: Add documentation of the symbol (not the type's definition).
|
||||
// TODO: Render the symbol's signature instead of just its type.
|
||||
let contents = vec![HoverContent::Type(ty)];
|
||||
|
||||
Some(RangedValue {
|
||||
range: FileRange::new(file, goto_target.range()),
|
||||
value: Hover { contents },
|
||||
})
|
||||
}
|
||||
|
||||
pub struct Hover<'db> {
|
||||
contents: Vec<HoverContent<'db>>,
|
||||
}
|
||||
|
||||
impl<'db> Hover<'db> {
|
||||
/// Renders the hover to a string using the specified markup kind.
|
||||
pub const fn display<'a>(&'a self, db: &'a dyn Db, kind: MarkupKind) -> DisplayHover<'a> {
|
||||
DisplayHover {
|
||||
db,
|
||||
hover: self,
|
||||
kind,
|
||||
}
|
||||
}
|
||||
|
||||
fn iter(&self) -> std::slice::Iter<'_, HoverContent<'db>> {
|
||||
self.contents.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> IntoIterator for Hover<'db> {
|
||||
type Item = HoverContent<'db>;
|
||||
type IntoIter = std::vec::IntoIter<Self::Item>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.contents.into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'db> IntoIterator for &'a Hover<'db> {
|
||||
type Item = &'a HoverContent<'db>;
|
||||
type IntoIter = std::slice::Iter<'a, HoverContent<'db>>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.iter()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DisplayHover<'a> {
|
||||
db: &'a dyn Db,
|
||||
hover: &'a Hover<'a>,
|
||||
kind: MarkupKind,
|
||||
}
|
||||
|
||||
impl fmt::Display for DisplayHover<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
let mut first = true;
|
||||
for content in &self.hover.contents {
|
||||
if !first {
|
||||
self.kind.horizontal_line().fmt(f)?;
|
||||
}
|
||||
|
||||
content.display(self.db, self.kind).fmt(f)?;
|
||||
first = false;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub enum HoverContent<'db> {
|
||||
Type(Type<'db>),
|
||||
}
|
||||
|
||||
impl<'db> HoverContent<'db> {
|
||||
fn display(&self, db: &'db dyn Db, kind: MarkupKind) -> DisplayHoverContent<'_, 'db> {
|
||||
DisplayHoverContent {
|
||||
db,
|
||||
content: self,
|
||||
kind,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct DisplayHoverContent<'a, 'db> {
|
||||
db: &'db dyn Db,
|
||||
content: &'a HoverContent<'db>,
|
||||
kind: MarkupKind,
|
||||
}
|
||||
|
||||
impl fmt::Display for DisplayHoverContent<'_, '_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self.content {
|
||||
HoverContent::Type(ty) => self
|
||||
.kind
|
||||
.fenced_code_block(ty.display(self.db.upcast()), "text")
|
||||
.fmt(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::tests::{cursor_test, CursorTest};
|
||||
use crate::{hover, MarkupKind};
|
||||
use insta::assert_snapshot;
|
||||
use ruff_db::diagnostic::{
|
||||
Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, DisplayDiagnosticConfig, LintName,
|
||||
Severity, Span,
|
||||
};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
#[test]
|
||||
fn hover_basic() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
a = 10
|
||||
|
||||
a<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @r"
|
||||
Literal[10]
|
||||
---------------------------------------------
|
||||
```text
|
||||
Literal[10]
|
||||
```
|
||||
---------------------------------------------
|
||||
info: lint:hover: Hovered content is
|
||||
--> /main.py:4:9
|
||||
|
|
||||
2 | a = 10
|
||||
3 |
|
||||
4 | a
|
||||
| ^- Cursor offset
|
||||
| |
|
||||
| source
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_member() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
class Foo:
|
||||
a: int = 10
|
||||
|
||||
def __init__(a: int, b: str):
|
||||
self.a = a
|
||||
self.b: str = b
|
||||
|
||||
foo = Foo()
|
||||
foo.<CURSOR>a
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @r"
|
||||
int
|
||||
---------------------------------------------
|
||||
```text
|
||||
int
|
||||
```
|
||||
---------------------------------------------
|
||||
info: lint:hover: Hovered content is
|
||||
--> /main.py:10:9
|
||||
|
|
||||
9 | foo = Foo()
|
||||
10 | foo.a
|
||||
| ^^^^-
|
||||
| | |
|
||||
| | Cursor offset
|
||||
| source
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_function_typed_variable() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def foo(a, b): ...
|
||||
|
||||
foo<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @r###"
|
||||
def foo(a, b) -> Unknown
|
||||
---------------------------------------------
|
||||
```text
|
||||
def foo(a, b) -> Unknown
|
||||
```
|
||||
---------------------------------------------
|
||||
info: lint:hover: Hovered content is
|
||||
--> /main.py:4:13
|
||||
|
|
||||
2 | def foo(a, b): ...
|
||||
3 |
|
||||
4 | foo
|
||||
| ^^^- Cursor offset
|
||||
| |
|
||||
| source
|
||||
|
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_binary_expression() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def foo(a: int, b: int, c: int):
|
||||
a + b ==<CURSOR> c
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @r"
|
||||
bool
|
||||
---------------------------------------------
|
||||
```text
|
||||
bool
|
||||
```
|
||||
---------------------------------------------
|
||||
info: lint:hover: Hovered content is
|
||||
--> /main.py:3:17
|
||||
|
|
||||
2 | def foo(a: int, b: int, c: int):
|
||||
3 | a + b == c
|
||||
| ^^^^^^^^-^
|
||||
| | |
|
||||
| | Cursor offset
|
||||
| source
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_keyword_parameter() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def test(a: int): ...
|
||||
|
||||
test(a<CURSOR>= 123)
|
||||
"#,
|
||||
);
|
||||
|
||||
// TODO: This should reveal `int` because the user hovers over the parameter and not the value.
|
||||
assert_snapshot!(test.hover(), @r"
|
||||
Literal[123]
|
||||
---------------------------------------------
|
||||
```text
|
||||
Literal[123]
|
||||
```
|
||||
---------------------------------------------
|
||||
info: lint:hover: Hovered content is
|
||||
--> /main.py:4:18
|
||||
|
|
||||
2 | def test(a: int): ...
|
||||
3 |
|
||||
4 | test(a= 123)
|
||||
| ^- Cursor offset
|
||||
| |
|
||||
| source
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_union() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
|
||||
def foo(a, b): ...
|
||||
|
||||
def bar(a, b): ...
|
||||
|
||||
if random.choice([True, False]):
|
||||
a = foo
|
||||
else:
|
||||
a = bar
|
||||
|
||||
a<CURSOR>
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @r###"
|
||||
(def foo(a, b) -> Unknown) | (def bar(a, b) -> Unknown)
|
||||
---------------------------------------------
|
||||
```text
|
||||
(def foo(a, b) -> Unknown) | (def bar(a, b) -> Unknown)
|
||||
```
|
||||
---------------------------------------------
|
||||
info: lint:hover: Hovered content is
|
||||
--> /main.py:12:13
|
||||
|
|
||||
10 | a = bar
|
||||
11 |
|
||||
12 | a
|
||||
| ^- Cursor offset
|
||||
| |
|
||||
| source
|
||||
|
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_module() {
|
||||
let mut test = cursor_test(
|
||||
r#"
|
||||
import lib
|
||||
|
||||
li<CURSOR>b
|
||||
"#,
|
||||
);
|
||||
|
||||
test.write_file("lib.py", "a = 10").unwrap();
|
||||
|
||||
assert_snapshot!(test.hover(), @r"
|
||||
<module 'lib'>
|
||||
---------------------------------------------
|
||||
```text
|
||||
<module 'lib'>
|
||||
```
|
||||
---------------------------------------------
|
||||
info: lint:hover: Hovered content is
|
||||
--> /main.py:4:13
|
||||
|
|
||||
2 | import lib
|
||||
3 |
|
||||
4 | lib
|
||||
| ^^-
|
||||
| | |
|
||||
| | Cursor offset
|
||||
| source
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_type_of_expression_with_type_var_type() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
type Alias[T: int = bool] = list[T<CURSOR>]
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @r"
|
||||
T
|
||||
---------------------------------------------
|
||||
```text
|
||||
T
|
||||
```
|
||||
---------------------------------------------
|
||||
info: lint:hover: Hovered content is
|
||||
--> /main.py:2:46
|
||||
|
|
||||
2 | type Alias[T: int = bool] = list[T]
|
||||
| ^- Cursor offset
|
||||
| |
|
||||
| source
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_type_of_expression_with_type_param_spec() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
type Alias[**P = [int, str]] = Callable[P<CURSOR>, int]
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @r"
|
||||
@Todo
|
||||
---------------------------------------------
|
||||
```text
|
||||
@Todo
|
||||
```
|
||||
---------------------------------------------
|
||||
info: lint:hover: Hovered content is
|
||||
--> /main.py:2:53
|
||||
|
|
||||
2 | type Alias[**P = [int, str]] = Callable[P, int]
|
||||
| ^- Cursor offset
|
||||
| |
|
||||
| source
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_type_of_expression_with_type_var_tuple() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
type Alias[*Ts = ()] = tuple[*Ts<CURSOR>]
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @r"
|
||||
@Todo
|
||||
---------------------------------------------
|
||||
```text
|
||||
@Todo
|
||||
```
|
||||
---------------------------------------------
|
||||
info: lint:hover: Hovered content is
|
||||
--> /main.py:2:43
|
||||
|
|
||||
2 | type Alias[*Ts = ()] = tuple[*Ts]
|
||||
| ^^- Cursor offset
|
||||
| |
|
||||
| source
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_class_member_declaration() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
class Foo:
|
||||
a<CURSOR>: int
|
||||
"#,
|
||||
);
|
||||
|
||||
// TODO: This should be int and not `Never`, https://github.com/astral-sh/ruff/issues/17122
|
||||
assert_snapshot!(test.hover(), @r"
|
||||
Never
|
||||
---------------------------------------------
|
||||
```text
|
||||
Never
|
||||
```
|
||||
---------------------------------------------
|
||||
info: lint:hover: Hovered content is
|
||||
--> /main.py:3:13
|
||||
|
|
||||
2 | class Foo:
|
||||
3 | a: int
|
||||
| ^- Cursor offset
|
||||
| |
|
||||
| source
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_type_narrowing() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def foo(a: str | None, b):
|
||||
if a is not None:
|
||||
print(a<CURSOR>)
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @r"
|
||||
str
|
||||
---------------------------------------------
|
||||
```text
|
||||
str
|
||||
```
|
||||
---------------------------------------------
|
||||
info: lint:hover: Hovered content is
|
||||
--> /main.py:4:27
|
||||
|
|
||||
2 | def foo(a: str | None, b):
|
||||
3 | if a is not None:
|
||||
4 | print(a)
|
||||
| ^- Cursor offset
|
||||
| |
|
||||
| source
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_whitespace() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
class C:
|
||||
<CURSOR>
|
||||
foo: str = 'bar'
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @"Hover provided no content");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_literal_int() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
print(
|
||||
0 + 1<CURSOR>
|
||||
)
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @"Hover provided no content");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_literal_ellipsis() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
print(
|
||||
.<CURSOR>..
|
||||
)
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @"Hover provided no content");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_docstring() {
|
||||
let test = cursor_test(
|
||||
r#"
|
||||
def f():
|
||||
"""Lorem ipsum dolor sit amet.<CURSOR>"""
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.hover(), @"Hover provided no content");
|
||||
}
|
||||
|
||||
impl CursorTest {
|
||||
fn hover(&self) -> String {
|
||||
use std::fmt::Write;
|
||||
|
||||
let Some(hover) = hover(&self.db, self.file, self.cursor_offset) else {
|
||||
return "Hover provided no content".to_string();
|
||||
};
|
||||
|
||||
let source = hover.range;
|
||||
|
||||
let mut buf = String::new();
|
||||
|
||||
write!(
|
||||
&mut buf,
|
||||
"{plaintext}{line}{markdown}{line}",
|
||||
plaintext = hover.display(&self.db, MarkupKind::PlainText),
|
||||
line = MarkupKind::PlainText.horizontal_line(),
|
||||
markdown = hover.display(&self.db, MarkupKind::Markdown),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let config = DisplayDiagnosticConfig::default()
|
||||
.color(false)
|
||||
.format(DiagnosticFormat::Full);
|
||||
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
DiagnosticId::Lint(LintName::of("hover")),
|
||||
Severity::Info,
|
||||
"Hovered content is",
|
||||
);
|
||||
diagnostic.annotate(
|
||||
Annotation::primary(Span::from(source.file()).with_range(source.range()))
|
||||
.message("source"),
|
||||
);
|
||||
diagnostic.annotate(
|
||||
Annotation::secondary(
|
||||
Span::from(source.file()).with_range(TextRange::empty(self.cursor_offset)),
|
||||
)
|
||||
.message("Cursor offset"),
|
||||
);
|
||||
|
||||
write!(buf, "{}", diagnostic.display(&self.db, &config)).unwrap();
|
||||
|
||||
buf
|
||||
}
|
||||
}
|
||||
}
|
||||
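For reference, a small sketch (not part of the diff) of how the removed `hover` API was typically driven, based on the call sites in the tests above; `db`, `file`, and `offset` are assumed to be supplied by the caller, and the helper name is illustrative.

```rust
// Illustrative sketch only; mirrors the calls made in the tests above.
use ruff_db::files::File;
use ruff_text_size::TextSize;

fn hover_markdown(db: &dyn Db, file: File, offset: TextSize) -> Option<String> {
    // `hover` returns `None` for literals, whitespace, and other positions
    // without useful content.
    let hover = hover(db, file, offset)?;

    // Render the hover contents as Markdown; `MarkupKind::PlainText` works the same way.
    Some(hover.display(db, MarkupKind::Markdown).to_string())
}
```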
@@ -1,279 +0,0 @@
use crate::Db;
use red_knot_python_semantic::types::Type;
use red_knot_python_semantic::{HasType, SemanticModel};
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_python_ast::visitor::source_order::{self, SourceOrderVisitor, TraversalSignal};
use ruff_python_ast::{AnyNodeRef, Expr, Stmt};
use ruff_text_size::{Ranged, TextRange, TextSize};
use std::fmt;
use std::fmt::Formatter;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct InlayHint<'db> {
pub position: TextSize,
pub content: InlayHintContent<'db>,
}

impl<'db> InlayHint<'db> {
pub const fn display(&self, db: &'db dyn Db) -> DisplayInlayHint<'_, 'db> {
self.content.display(db)
}
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum InlayHintContent<'db> {
Type(Type<'db>),
ReturnType(Type<'db>),
}

impl<'db> InlayHintContent<'db> {
pub const fn display(&self, db: &'db dyn Db) -> DisplayInlayHint<'_, 'db> {
DisplayInlayHint { db, hint: self }
}
}

pub struct DisplayInlayHint<'a, 'db> {
db: &'db dyn Db,
hint: &'a InlayHintContent<'db>,
}

impl fmt::Display for DisplayInlayHint<'_, '_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self.hint {
InlayHintContent::Type(ty) => {
write!(f, ": {}", ty.display(self.db.upcast()))
}
InlayHintContent::ReturnType(ty) => {
write!(f, " -> {}", ty.display(self.db.upcast()))
}
}
}
}

pub fn inlay_hints(db: &dyn Db, file: File, range: TextRange) -> Vec<InlayHint<'_>> {
let mut visitor = InlayHintVisitor::new(db, file, range);

let ast = parsed_module(db.upcast(), file);

visitor.visit_body(ast.suite());

visitor.hints
}

struct InlayHintVisitor<'db> {
model: SemanticModel<'db>,
hints: Vec<InlayHint<'db>>,
in_assignment: bool,
range: TextRange,
}

impl<'db> InlayHintVisitor<'db> {
fn new(db: &'db dyn Db, file: File, range: TextRange) -> Self {
Self {
model: SemanticModel::new(db.upcast(), file),
hints: Vec::new(),
in_assignment: false,
range,
}
}

fn add_type_hint(&mut self, position: TextSize, ty: Type<'db>) {
self.hints.push(InlayHint {
position,
content: InlayHintContent::Type(ty),
});
}
}

impl SourceOrderVisitor<'_> for InlayHintVisitor<'_> {
fn enter_node(&mut self, node: AnyNodeRef<'_>) -> TraversalSignal {
if self.range.intersect(node.range()).is_some() {
TraversalSignal::Traverse
} else {
TraversalSignal::Skip
}
}

fn visit_stmt(&mut self, stmt: &Stmt) {
let node = AnyNodeRef::from(stmt);

if !self.enter_node(node).is_traverse() {
return;
}

match stmt {
Stmt::Assign(assign) => {
self.in_assignment = true;
for target in &assign.targets {
self.visit_expr(target);
}
self.in_assignment = false;

return;
}
// TODO
Stmt::FunctionDef(_) => {}
Stmt::For(_) => {}
Stmt::Expr(_) => {
// Don't traverse into expression statements because we don't show any hints.
return;
}
_ => {}
}

source_order::walk_stmt(self, stmt);
}

fn visit_expr(&mut self, expr: &'_ Expr) {
if !self.in_assignment {
return;
}

match expr {
Expr::Name(name) => {
if name.ctx.is_store() {
let ty = expr.inferred_type(&self.model);
self.add_type_hint(expr.range().end(), ty);
}
}
_ => {
source_order::walk_expr(self, expr);
}
}
}
}

#[cfg(test)]
mod tests {
use super::*;

use insta::assert_snapshot;
use ruff_db::{
files::{system_path_to_file, File},
source::source_text,
};
use ruff_text_size::TextSize;

use crate::db::tests::TestDb;

use red_knot_python_semantic::{
Program, ProgramSettings, PythonPath, PythonPlatform, SearchPathSettings,
};
use ruff_db::system::{DbWithWritableSystem, SystemPathBuf};
use ruff_python_ast::PythonVersion;

pub(super) fn inlay_hint_test(source: &str) -> InlayHintTest {
const START: &str = "<START>";
const END: &str = "<END>";

let mut db = TestDb::new();

let start = source.find(START);
let end = source
.find(END)
.map(|x| if start.is_some() { x - START.len() } else { x })
.unwrap_or(source.len());

let range = TextRange::new(
TextSize::try_from(start.unwrap_or_default()).unwrap(),
TextSize::try_from(end).unwrap(),
);

let source = source.replace(START, "");
let source = source.replace(END, "");

db.write_file("main.py", source)
.expect("write to memory file system to be successful");

let file = system_path_to_file(&db, "main.py").expect("newly written file to exist");

Program::from_settings(
&db,
ProgramSettings {
python_version: PythonVersion::latest(),
python_platform: PythonPlatform::default(),
search_paths: SearchPathSettings {
extra_paths: vec![],
src_roots: vec![SystemPathBuf::from("/")],
custom_typeshed: None,
python_path: PythonPath::KnownSitePackages(vec![]),
},
},
)
.expect("Default settings to be valid");

InlayHintTest { db, file, range }
}

pub(super) struct InlayHintTest {
pub(super) db: TestDb,
pub(super) file: File,
pub(super) range: TextRange,
}

impl InlayHintTest {
fn inlay_hints(&self) -> String {
let hints = inlay_hints(&self.db, self.file, self.range);

let mut buf = source_text(&self.db, self.file).as_str().to_string();

let mut offset = 0;

for hint in hints {
let end_position = (hint.position.to_u32() as usize) + offset;
let hint_str = format!("[{}]", hint.display(&self.db));
buf.insert_str(end_position, &hint_str);
offset += hint_str.len();
}

buf
}
}

#[test]
fn test_assign_statement() {
let test = inlay_hint_test("x = 1");

assert_snapshot!(test.inlay_hints(), @r"
x[: Literal[1]] = 1
");
}

#[test]
fn test_tuple_assignment() {
let test = inlay_hint_test("x, y = (1, 'abc')");

assert_snapshot!(test.inlay_hints(), @r#"
x[: Literal[1]], y[: Literal["abc"]] = (1, 'abc')
"#);
}

#[test]
fn test_nested_tuple_assignment() {
let test = inlay_hint_test("x, (y, z) = (1, ('abc', 2))");

assert_snapshot!(test.inlay_hints(), @r#"
x[: Literal[1]], (y[: Literal["abc"]], z[: Literal[2]]) = (1, ('abc', 2))
"#);
}

#[test]
fn test_assign_statement_with_type_annotation() {
let test = inlay_hint_test("x: int = 1");

assert_snapshot!(test.inlay_hints(), @r"
x: int = 1
");
}

#[test]
fn test_assign_statement_out_of_range() {
let test = inlay_hint_test("<START>x = 1<END>\ny = 2");

assert_snapshot!(test.inlay_hints(), @r"
x[: Literal[1]] = 1
y = 2
");
}
}
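Editor note (not part of the diff): the removed `inlay_hints` function returns a `Vec<InlayHint>` whose entries carry a position and a displayable label. A minimal sketch of how an editor integration might consume that API, assuming `db`, `file`, and `range` are provided by the caller and reusing the imports from the file above:

```rust
// Sketch only: turn semantic inlay hints into (offset, label) pairs for an editor.
// `db`, `file`, and `range` are assumed inputs from the surrounding request handling.
fn collect_hint_labels(db: &dyn Db, file: File, range: TextRange) -> Vec<(TextSize, String)> {
    inlay_hints(db, file, range)
        .into_iter()
        // Each hint renders itself as ": T" (variable types) or " -> T" (return types).
        .map(|hint| (hint.position, hint.display(db).to_string()))
        .collect()
}
```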
@@ -1,296 +0,0 @@
mod db;
mod find_node;
mod goto;
mod hover;
mod inlay_hints;
mod markup;

pub use db::Db;
pub use goto::goto_type_definition;
pub use hover::hover;
pub use inlay_hints::inlay_hints;
pub use markup::MarkupKind;

use rustc_hash::FxHashSet;
use std::ops::{Deref, DerefMut};

use red_knot_python_semantic::types::{Type, TypeDefinition};
use ruff_db::files::{File, FileRange};
use ruff_text_size::{Ranged, TextRange};

/// Information associated with a text range.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct RangedValue<T> {
pub range: FileRange,
pub value: T,
}

impl<T> RangedValue<T> {
pub fn file_range(&self) -> FileRange {
self.range
}
}

impl<T> Deref for RangedValue<T> {
type Target = T;

fn deref(&self) -> &Self::Target {
&self.value
}
}

impl<T> DerefMut for RangedValue<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.value
}
}

impl<T> IntoIterator for RangedValue<T>
where
T: IntoIterator,
{
type Item = T::Item;
type IntoIter = T::IntoIter;

fn into_iter(self) -> Self::IntoIter {
self.value.into_iter()
}
}

/// Target to which the editor can navigate.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct NavigationTarget {
file: File,

/// The range that should be focused when navigating to the target.
///
/// This is typically not the full range of the node. For example, it's the range of the class's name in a class definition.
///
/// The `focus_range` must be fully covered by `full_range`.
focus_range: TextRange,

/// The range covering the entire target.
full_range: TextRange,
}

impl NavigationTarget {
pub fn file(&self) -> File {
self.file
}

pub fn focus_range(&self) -> TextRange {
self.focus_range
}

pub fn full_range(&self) -> TextRange {
self.full_range
}
}

#[derive(Debug, Clone)]
pub struct NavigationTargets(smallvec::SmallVec<[NavigationTarget; 1]>);

impl NavigationTargets {
fn single(target: NavigationTarget) -> Self {
Self(smallvec::smallvec![target])
}

fn empty() -> Self {
Self(smallvec::SmallVec::new())
}

fn unique(targets: impl IntoIterator<Item = NavigationTarget>) -> Self {
let unique: FxHashSet<_> = targets.into_iter().collect();
if unique.is_empty() {
Self::empty()
} else {
let mut targets = unique.into_iter().collect::<Vec<_>>();
targets.sort_by_key(|target| (target.file, target.focus_range.start()));
Self(targets.into())
}
}

fn iter(&self) -> std::slice::Iter<'_, NavigationTarget> {
self.0.iter()
}

#[cfg(test)]
fn is_empty(&self) -> bool {
self.0.is_empty()
}
}

impl IntoIterator for NavigationTargets {
type Item = NavigationTarget;
type IntoIter = smallvec::IntoIter<[NavigationTarget; 1]>;

fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}

impl<'a> IntoIterator for &'a NavigationTargets {
type Item = &'a NavigationTarget;
type IntoIter = std::slice::Iter<'a, NavigationTarget>;

fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}

impl FromIterator<NavigationTarget> for NavigationTargets {
fn from_iter<T: IntoIterator<Item = NavigationTarget>>(iter: T) -> Self {
Self::unique(iter)
}
}

pub trait HasNavigationTargets {
fn navigation_targets(&self, db: &dyn Db) -> NavigationTargets;
}

impl HasNavigationTargets for Type<'_> {
fn navigation_targets(&self, db: &dyn Db) -> NavigationTargets {
match self {
Type::Union(union) => union
.iter(db.upcast())
.flat_map(|target| target.navigation_targets(db))
.collect(),

Type::Intersection(intersection) => {
// Only consider the positive elements because the negative elements are mainly from narrowing constraints.
let mut targets = intersection
.iter_positive(db.upcast())
.filter(|ty| !ty.is_unknown());

let Some(first) = targets.next() else {
return NavigationTargets::empty();
};

match targets.next() {
Some(_) => {
// If there are multiple types in the intersection, we can't navigate to a single one
// because the type is the intersection of all those types.
NavigationTargets::empty()
}
None => first.navigation_targets(db),
}
}

ty => ty
.definition(db.upcast())
.map(|definition| definition.navigation_targets(db))
.unwrap_or_else(NavigationTargets::empty),
}
}
}

impl HasNavigationTargets for TypeDefinition<'_> {
fn navigation_targets(&self, db: &dyn Db) -> NavigationTargets {
let full_range = self.full_range(db.upcast());
NavigationTargets::single(NavigationTarget {
file: full_range.file(),
focus_range: self.focus_range(db.upcast()).unwrap_or(full_range).range(),
full_range: full_range.range(),
})
}
}

#[cfg(test)]
mod tests {
use crate::db::tests::TestDb;
use insta::internals::SettingsBindDropGuard;
use red_knot_python_semantic::{
Program, ProgramSettings, PythonPath, PythonPlatform, SearchPathSettings,
};
use ruff_db::diagnostic::{Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::{DbWithWritableSystem, SystemPath, SystemPathBuf};
use ruff_python_ast::PythonVersion;
use ruff_text_size::TextSize;

pub(super) fn cursor_test(source: &str) -> CursorTest {
let mut db = TestDb::new();
let cursor_offset = source.find("<CURSOR>").expect(
"`source` should contain a `<CURSOR>` marker, indicating the position of the cursor.",
);

let mut content = source[..cursor_offset].to_string();
content.push_str(&source[cursor_offset + "<CURSOR>".len()..]);

db.write_file("main.py", &content)
.expect("write to memory file system to be successful");

let file = system_path_to_file(&db, "main.py").expect("newly written file to exist");

Program::from_settings(
&db,
ProgramSettings {
python_version: PythonVersion::latest(),
python_platform: PythonPlatform::default(),
search_paths: SearchPathSettings {
extra_paths: vec![],
src_roots: vec![SystemPathBuf::from("/")],
custom_typeshed: None,
python_path: PythonPath::KnownSitePackages(vec![]),
},
},
)
.expect("Default settings to be valid");

let mut insta_settings = insta::Settings::clone_current();
insta_settings.add_filter(r#"\\(\w\w|\s|\.|")"#, "/$1");
// Filter out TODO types because they are different between debug and release builds.
insta_settings.add_filter(r"@Todo\(.+\)", "@Todo");

let insta_settings_guard = insta_settings.bind_to_scope();

CursorTest {
db,
cursor_offset: TextSize::try_from(cursor_offset)
.expect("source to be smaller than 4GB"),
file,
_insta_settings_guard: insta_settings_guard,
}
}

pub(super) struct CursorTest {
pub(super) db: TestDb,
pub(super) cursor_offset: TextSize,
pub(super) file: File,
_insta_settings_guard: SettingsBindDropGuard,
}

impl CursorTest {
pub(super) fn write_file(
&mut self,
path: impl AsRef<SystemPath>,
content: &str,
) -> std::io::Result<()> {
self.db.write_file(path, content)
}

pub(super) fn render_diagnostics<I, D>(&self, diagnostics: I) -> String
where
I: IntoIterator<Item = D>,
D: IntoDiagnostic,
{
use std::fmt::Write;

let mut buf = String::new();

let config = DisplayDiagnosticConfig::default()
.color(false)
.format(DiagnosticFormat::Full);
for diagnostic in diagnostics {
let diag = diagnostic.into_diagnostic();
write!(buf, "{}", diag.display(&self.db, &config)).unwrap();
}

buf
}
}

pub(super) trait IntoDiagnostic {
fn into_diagnostic(self) -> Diagnostic;
}
}
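Editor note (not part of the diff): the `HasNavigationTargets` trait above turns a semantic `Type` into zero or more deduplicated, sorted navigation targets. A minimal sketch of how a goto-type-definition handler might use it, reusing the imports from the file above and assuming `db` and `ty` come from the surrounding request handling:

```rust
// Sketch only: collect (file, focus range) pairs an editor could jump to.
fn goto_targets<'db>(db: &'db dyn Db, ty: Type<'db>) -> Vec<(File, TextRange)> {
    ty.navigation_targets(db)
        .into_iter()
        // Editors typically jump to the focus range (e.g. a class's name),
        // while the full range is used for context highlighting.
        .map(|target| (target.file(), target.focus_range()))
        .collect()
}
```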
@@ -1,66 +0,0 @@
use std::fmt;
use std::fmt::Formatter;

#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum MarkupKind {
PlainText,
Markdown,
}

impl MarkupKind {
pub(crate) const fn fenced_code_block<T>(self, code: T, language: &str) -> FencedCodeBlock<T>
where
T: fmt::Display,
{
FencedCodeBlock {
language,
code,
kind: self,
}
}

pub(crate) const fn horizontal_line(self) -> HorizontalLine {
HorizontalLine { kind: self }
}
}

pub(crate) struct FencedCodeBlock<'a, T> {
language: &'a str,
code: T,
kind: MarkupKind,
}

impl<T> fmt::Display for FencedCodeBlock<'_, T>
where
T: fmt::Display,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.kind {
MarkupKind::PlainText => self.code.fmt(f),
MarkupKind::Markdown => write!(
f,
"```{language}\n{code}\n```",
language = self.language,
code = self.code
),
}
}
}

#[derive(Debug, Copy, Clone)]
pub(crate) struct HorizontalLine {
kind: MarkupKind,
}

impl fmt::Display for HorizontalLine {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self.kind {
MarkupKind::PlainText => {
f.write_str("\n---------------------------------------------\n")
}
MarkupKind::Markdown => {
write!(f, "\n---\n")
}
}
}
}
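Editor note (not part of the diff): since `fenced_code_block` and `horizontal_line` are `pub(crate)`, a caller inside the same crate (for example, a hover renderer) could assemble output for either markup kind like the sketch below. The `"str | None"` label and the trailing sentence are illustrative values only:

```rust
// Sketch only: build a hover payload that degrades gracefully to plain text.
fn hover_markup(kind: MarkupKind) -> String {
    format!(
        "{code}{line}{docs}",
        code = kind.fenced_code_block("str | None", "python"),
        line = kind.horizontal_line(),
        docs = "Inferred type of the symbol under the cursor.",
    )
}
```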
35
crates/red_knot_module_resolver/Cargo.toml
Normal file
@@ -0,0 +1,35 @@
[package]
name = "red_knot_module_resolver"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff_db = { workspace = true }
ruff_python_stdlib = { workspace = true }

rustc-hash = { workspace = true }
salsa = { workspace = true }
smol_str = { workspace = true }
tracing = { workspace = true }
zip = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }

[dev-dependencies]
anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }

[lints]
workspace = true
9
crates/red_knot_module_resolver/README.md
Normal file
@@ -0,0 +1,9 @@
# Red Knot

A work-in-progress multifile module resolver for Ruff.

## Vendored types for the stdlib

This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_module_resolver/vendor/typeshed`. The file `crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.

The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
74
crates/red_knot_module_resolver/build.rs
Normal file
@@ -0,0 +1,74 @@
//! Build script to package our vendored typeshed files
//! into a zip archive that can be included in the Ruff binary.
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_module_resolver/vendor/typeshed` change.

use std::fs::File;
use std::path::Path;

use path_slash::PathExt;
use zip::result::ZipResult;
use zip::write::{FileOptions, ZipWriter};
use zip::CompressionMethod;

const TYPESHED_SOURCE_DIR: &str = "vendor/typeshed";
const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";

/// Recursively zip the contents of an entire directory.
///
/// This routine is adapted from a recipe at
/// <https://github.com/zip-rs/zip-old/blob/5d0f198124946b7be4e5969719a7f29f363118cd/examples/write_dir.rs>
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
let mut zip = ZipWriter::new(writer);

let options = FileOptions::default()
.compression_method(CompressionMethod::Zstd)
.unix_permissions(0o644);

for entry in walkdir::WalkDir::new(directory_path) {
let dir_entry = entry.unwrap();
let absolute_path = dir_entry.path();
let normalized_relative_path = absolute_path
.strip_prefix(Path::new(directory_path))
.unwrap()
.to_slash()
.expect("Unexpected non-utf8 typeshed path!");

// Write file or directory explicitly
// Some unzip tools unzip files with directory paths correctly, some do not!
if absolute_path.is_file() {
println!("adding file {absolute_path:?} as {normalized_relative_path:?} ...");
zip.start_file(normalized_relative_path, options)?;
let mut f = File::open(absolute_path)?;
std::io::copy(&mut f, &mut zip).unwrap();
} else if !normalized_relative_path.is_empty() {
// Only if not root! Avoids path spec / warning
// and mapname conversion failed error on unzip
println!("adding dir {absolute_path:?} as {normalized_relative_path:?} ...");
zip.add_directory(normalized_relative_path, options)?;
}
}
zip.finish()
}

fn main() {
println!("cargo:rerun-if-changed={TYPESHED_SOURCE_DIR}");
assert!(
Path::new(TYPESHED_SOURCE_DIR).is_dir(),
"Where is typeshed?"
);
let out_dir = std::env::var("OUT_DIR").unwrap();

// N.B. Deliberately using `format!()` instead of `Path::join()` here,
// so that we use `/` as a path separator on all platforms.
// That enables us to load the typeshed zip at compile time in `module.rs`
// (otherwise we'd have to dynamically determine the exact path to the typeshed zip
// based on the default path separator for the specific platform we're on,
// which can't be done at compile time.)
let zipped_typeshed_location = format!("{out_dir}{TYPESHED_ZIP_LOCATION}");

let zipped_typeshed = File::create(zipped_typeshed_location).unwrap();
zip_dir(TYPESHED_SOURCE_DIR, zipped_typeshed).unwrap();
}
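Editor note (not part of the diff): the comment in `main()` says the zip is loaded at compile time in `module.rs`, which is not shown here. A sketch of what such an embedding typically looks like, under the assumption that the constant name is hypothetical and only the `OUT_DIR`/`zipped_typeshed.zip` path comes from the build script above:

```rust
// Sketch only: embed the zip produced by the build script into the binary.
// The forward slash works on all platforms because the build script joins the
// path with `format!()` rather than the platform-specific separator.
const TYPESHED_ZIP_BYTES: &[u8] =
    include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
```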
156
crates/red_knot_module_resolver/src/db.rs
Normal file
@@ -0,0 +1,156 @@
use ruff_db::Upcast;

use crate::resolver::{
file_to_module,
internal::{ModuleNameIngredient, ModuleResolverSearchPaths},
resolve_module_query,
};

#[salsa::jar(db=Db)]
pub struct Jar(
ModuleNameIngredient<'_>,
ModuleResolverSearchPaths,
resolve_module_query,
file_to_module,
);

pub trait Db: salsa::DbWithJar<Jar> + ruff_db::Db + Upcast<dyn ruff_db::Db> {}

pub(crate) mod tests {
use std::sync;

use salsa::DebugWithDb;

use ruff_db::file_system::{FileSystem, MemoryFileSystem, OsFileSystem};
use ruff_db::vfs::Vfs;

use super::*;

#[salsa::db(Jar, ruff_db::Jar)]
pub(crate) struct TestDb {
storage: salsa::Storage<Self>,
file_system: TestFileSystem,
events: sync::Arc<sync::Mutex<Vec<salsa::Event>>>,
vfs: Vfs,
}

impl TestDb {
#[allow(unused)]
pub(crate) fn new() -> Self {
Self {
storage: salsa::Storage::default(),
file_system: TestFileSystem::Memory(MemoryFileSystem::default()),
events: sync::Arc::default(),
vfs: Vfs::with_stubbed_vendored(),
}
}

/// Returns the memory file system.
///
/// ## Panics
/// If this test db isn't using a memory file system.
#[allow(unused)]
pub(crate) fn memory_file_system(&self) -> &MemoryFileSystem {
if let TestFileSystem::Memory(fs) = &self.file_system {
fs
} else {
panic!("The test db is not using a memory file system");
}
}

/// Uses the real file system instead of the memory file system.
///
/// This is useful for testing advanced file system features like permissions, symlinks, etc.
///
/// Note that any files written to the memory file system won't be copied over.
#[allow(unused)]
pub(crate) fn with_os_file_system(&mut self) {
self.file_system = TestFileSystem::Os(OsFileSystem);
}

#[allow(unused)]
pub(crate) fn vfs_mut(&mut self) -> &mut Vfs {
&mut self.vfs
}

/// Takes the salsa events.
///
/// ## Panics
/// If there are any pending salsa snapshots.
#[allow(unused)]
pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
let inner = sync::Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");

let events = inner.get_mut().unwrap();
std::mem::take(&mut *events)
}

/// Clears the salsa events.
///
/// ## Panics
/// If there are any pending salsa snapshots.
#[allow(unused)]
pub(crate) fn clear_salsa_events(&mut self) {
self.take_salsa_events();
}
}

impl Upcast<dyn ruff_db::Db> for TestDb {
fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
self
}
}

impl ruff_db::Db for TestDb {
fn file_system(&self) -> &dyn ruff_db::file_system::FileSystem {
self.file_system.inner()
}

fn vfs(&self) -> &ruff_db::vfs::Vfs {
&self.vfs
}
}

impl Db for TestDb {}

impl salsa::Database for TestDb {
fn salsa_event(&self, event: salsa::Event) {
tracing::trace!("event: {:?}", event.debug(self));
let mut events = self.events.lock().unwrap();
events.push(event);
}
}

impl salsa::ParallelDatabase for TestDb {
fn snapshot(&self) -> salsa::Snapshot<Self> {
salsa::Snapshot::new(Self {
storage: self.storage.snapshot(),
file_system: self.file_system.snapshot(),
events: self.events.clone(),
vfs: self.vfs.snapshot(),
})
}
}

enum TestFileSystem {
Memory(MemoryFileSystem),
#[allow(unused)]
Os(OsFileSystem),
}

impl TestFileSystem {
fn inner(&self) -> &dyn FileSystem {
match self {
Self::Memory(inner) => inner,
Self::Os(inner) => inner,
}
}

fn snapshot(&self) -> Self {
match self {
Self::Memory(inner) => Self::Memory(inner.snapshot()),
Self::Os(inner) => Self::Os(inner.snapshot()),
}
}
}
}
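Editor note (not part of the diff): the `TestDb` above records salsa events so tests can assert which queries ran. A minimal sketch of the intended flow, using only the helpers defined in this file; the resolver query itself is elided because it lives in other modules of the crate:

```rust
// Sketch only: construct the test database, run queries (elided), then inspect
// the recorded salsa events to see what was executed or reused from cache.
fn example_test_flow() {
    let mut db = TestDb::new();

    // ... write files through `db.memory_file_system()` and run resolver queries ...

    let _events = db.take_salsa_events();
    // e.g. assert that `resolve_module_query` was not re-executed on the second run.
}
```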
9
crates/red_knot_module_resolver/src/lib.rs
Normal file
@@ -0,0 +1,9 @@
mod db;
mod module;
mod resolver;
mod typeshed;

pub use db::{Db, Jar};
pub use module::{ModuleKind, ModuleName};
pub use resolver::{resolve_module, set_module_resolution_settings, ModuleResolutionSettings};
pub use typeshed::versions::TypeshedVersions;
Some files were not shown because too many files have changed in this diff.