Compare commits
1 Commits
david/allo
...
dhruv/synt
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
909798ffa2 |
@@ -1,14 +1,3 @@
|
||||
[alias]
|
||||
dev = "run --package ruff_dev --bin ruff_dev"
|
||||
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
|
||||
|
||||
# statically link the C runtime so the executable does not depend on
|
||||
# that shared/dynamic library.
|
||||
#
|
||||
# See: https://github.com/astral-sh/ruff/issues/11503
|
||||
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
|
||||
rustflags = ["-C", "target-feature=+crt-static"]
|
||||
|
||||
[target.'wasm32-unknown-unknown']
|
||||
# See https://docs.rs/getrandom/latest/getrandom/#webassembly-support
|
||||
rustflags = ["--cfg", 'getrandom_backend="wasm_js"']
|
||||
@@ -6,10 +6,3 @@ failure-output = "immediate-final"
|
||||
fail-fast = false
|
||||
|
||||
status-level = "skip"
|
||||
|
||||
# Mark tests that take longer than 1s as slow.
|
||||
# Terminate after 60s as a stop-gap measure to terminate on deadlock.
|
||||
slow-timeout = { period = "1s", terminate-after = 60 }
|
||||
|
||||
# Show slow jobs in the final summary
|
||||
final-status-level = "slow"
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
"extensions": [
|
||||
"ms-python.python",
|
||||
"rust-lang.rust-analyzer",
|
||||
"fill-labs.dependi",
|
||||
"serayuzgur.crates",
|
||||
"tamasfe.even-better-toml",
|
||||
"Swellaby.vscode-rust-test-adapter",
|
||||
"charliermarsh.ruff"
|
||||
|
||||
@@ -17,7 +17,4 @@ indent_size = 4
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
[*.md]
|
||||
max_line_length = 100
|
||||
|
||||
[*.toml]
|
||||
indent_size = 4
|
||||
max_line_length = 100
|
||||
15
.gitattributes
vendored
15
.gitattributes
vendored
@@ -8,20 +8,7 @@ crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf
|
||||
crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
|
||||
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
|
||||
|
||||
crates/ruff_python_parser/resources/invalid/re_lexing/line_continuation_windows_eol.py text eol=crlf
|
||||
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_windows_eol.py text eol=crlf
|
||||
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text eol=cr
|
||||
|
||||
crates/ruff_linter/resources/test/fixtures/ruff/RUF046_CR.py text eol=cr
|
||||
crates/ruff_linter/resources/test/fixtures/ruff/RUF046_LF.py text eol=lf
|
||||
|
||||
crates/ruff_linter/resources/test/fixtures/pyupgrade/UP018_CR.py text eol=cr
|
||||
crates/ruff_linter/resources/test/fixtures/pyupgrade/UP018_LF.py text eol=lf
|
||||
|
||||
crates/ruff_python_parser/resources/inline linguist-generated=true
|
||||
|
||||
ruff.schema.json -diff linguist-generated=true text=auto eol=lf
|
||||
ty.schema.json -diff linguist-generated=true text=auto eol=lf
|
||||
crates/ruff_python_ast/src/generated.rs -diff linguist-generated=true text=auto eol=lf
|
||||
crates/ruff_python_formatter/src/generated.rs -diff linguist-generated=true text=auto eol=lf
|
||||
ruff.schema.json linguist-generated=true text=auto eol=lf
|
||||
*.md.snap linguist-language=Markdown
|
||||
|
||||
11
.github/CODEOWNERS
vendored
11
.github/CODEOWNERS
vendored
@@ -9,16 +9,9 @@
|
||||
/crates/ruff_formatter/ @MichaReiser
|
||||
/crates/ruff_python_formatter/ @MichaReiser
|
||||
/crates/ruff_python_parser/ @MichaReiser @dhruvmanila
|
||||
/crates/ruff_annotate_snippets/ @BurntSushi
|
||||
|
||||
# flake8-pyi
|
||||
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood
|
||||
|
||||
# Script for fuzzing the parser/ty etc.
|
||||
/python/py-fuzzer/ @AlexWaygood
|
||||
|
||||
# ty
|
||||
/crates/ty* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
|
||||
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
|
||||
/scripts/ty_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
|
||||
/crates/ty_python_semantic @carljm @AlexWaygood @sharkdp @dcreager
|
||||
# Script for fuzzing the parser
|
||||
/scripts/fuzz-parser/ @AlexWaygood
|
||||
|
||||
12
.github/ISSUE_TEMPLATE.md
vendored
Normal file
12
.github/ISSUE_TEMPLATE.md
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
<!--
|
||||
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.
|
||||
|
||||
If you're filing a bug report, please consider including the following information:
|
||||
|
||||
* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
|
||||
e.g. "RUF001", "unused variable", "Jupyter notebook"
|
||||
* A minimal code snippet that reproduces the bug.
|
||||
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
|
||||
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
|
||||
* The current Ruff version (`ruff --version`).
|
||||
-->
|
||||
31
.github/ISSUE_TEMPLATE/1_bug_report.yaml
vendored
31
.github/ISSUE_TEMPLATE/1_bug_report.yaml
vendored
@@ -1,31 +0,0 @@
|
||||
name: Bug report
|
||||
description: Report an error or unexpected behavior
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.
|
||||
|
||||
**Before reporting, please make sure to search through [existing issues](https://github.com/astral-sh/ruff/issues?q=is:issue+is:open+label:bug) (including [closed](https://github.com/astral-sh/ruff/issues?q=is:issue%20state:closed%20label:bug)).**
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Summary
|
||||
description: |
|
||||
A clear and concise description of the bug, including a minimal reproducible example.
|
||||
|
||||
Be sure to include the command you invoked (e.g., `ruff check /path/to/file.py --fix`), ideally including the `--isolated` flag and
|
||||
the current Ruff settings (e.g., relevant sections from your `pyproject.toml`).
|
||||
|
||||
If possible, try to include the [playground](https://play.ruff.rs) link that reproduces this issue.
|
||||
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
attributes:
|
||||
label: Version
|
||||
description: What version of ruff are you using? (see `ruff version`)
|
||||
placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
|
||||
validations:
|
||||
required: false
|
||||
10
.github/ISSUE_TEMPLATE/2_rule_request.yaml
vendored
10
.github/ISSUE_TEMPLATE/2_rule_request.yaml
vendored
@@ -1,10 +0,0 @@
|
||||
name: Rule request
|
||||
description: Anything related to lint rules (proposing new rules, changes to existing rules, auto-fixes, etc.)
|
||||
body:
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Summary
|
||||
description: |
|
||||
A clear and concise description of the relevant request. If applicable, please describe the current behavior as well.
|
||||
validations:
|
||||
required: true
|
||||
18
.github/ISSUE_TEMPLATE/3_question.yaml
vendored
18
.github/ISSUE_TEMPLATE/3_question.yaml
vendored
@@ -1,18 +0,0 @@
|
||||
name: Question
|
||||
description: Ask a question about Ruff
|
||||
labels: ["question"]
|
||||
body:
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Question
|
||||
description: Describe your question in detail.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
attributes:
|
||||
label: Version
|
||||
description: What version of ruff are you using? (see `ruff version`)
|
||||
placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
|
||||
validations:
|
||||
required: false
|
||||
11
.github/ISSUE_TEMPLATE/config.yml
vendored
11
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,11 +0,0 @@
|
||||
blank_issues_enabled: true
|
||||
contact_links:
|
||||
- name: Report an issue with ty
|
||||
url: https://github.com/astral-sh/ty/issues/new/choose
|
||||
about: Please report issues for our type checker ty in the ty repository.
|
||||
- name: Documentation
|
||||
url: https://docs.astral.sh/ruff
|
||||
about: Please consult the documentation before creating an issue.
|
||||
- name: Community
|
||||
url: https://discord.com/invite/astral-sh
|
||||
about: Join our Discord community to ask questions and collaborate.
|
||||
5
.github/PULL_REQUEST_TEMPLATE.md
vendored
5
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,9 +1,8 @@
|
||||
<!--
|
||||
Thank you for contributing to Ruff/ty! To help us out with reviewing, please consider the following:
|
||||
Thank you for contributing to Ruff! To help us out with reviewing, please consider the following:
|
||||
|
||||
- Does this pull request include a summary of the change? (See below.)
|
||||
- Does this pull request include a descriptive title? (Please prefix with `[ty]` for ty pull
|
||||
requests.)
|
||||
- Does this pull request include a descriptive title?
|
||||
- Does this pull request include references to any relevant issues?
|
||||
-->
|
||||
|
||||
|
||||
11
.github/actionlint.yaml
vendored
11
.github/actionlint.yaml
vendored
@@ -1,11 +0,0 @@
|
||||
# Configuration for the actionlint tool, which we run via pre-commit
|
||||
# to verify the correctness of the syntax in our GitHub Actions workflows.
|
||||
|
||||
self-hosted-runner:
|
||||
# Various runners we use that aren't recognized out-of-the-box by actionlint:
|
||||
labels:
|
||||
- depot-ubuntu-latest-8
|
||||
- depot-ubuntu-22.04-16
|
||||
- depot-ubuntu-22.04-32
|
||||
- github-windows-2025-x86_64-8
|
||||
- github-windows-2025-x86_64-16
|
||||
8
.github/mypy-primer-ty.toml
vendored
8
.github/mypy-primer-ty.toml
vendored
@@ -1,8 +0,0 @@
|
||||
#:schema ../ty.schema.json
|
||||
# Configuration overrides for the mypy primer run
|
||||
|
||||
# Enable off-by-default rules.
|
||||
[rules]
|
||||
possibly-unresolved-reference = "warn"
|
||||
unused-ignore-comment = "warn"
|
||||
division-by-zero = "warn"
|
||||
60
.github/renovate.json5
vendored
60
.github/renovate.json5
vendored
@@ -8,55 +8,27 @@
|
||||
semanticCommits: "disabled",
|
||||
separateMajorMinor: false,
|
||||
prHourlyLimit: 10,
|
||||
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
|
||||
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
|
||||
cargo: {
|
||||
// See https://docs.renovatebot.com/configuration-options/#rangestrategy
|
||||
rangeStrategy: "update-lockfile",
|
||||
},
|
||||
pep621: {
|
||||
// The default for this package manager is to only search for `pyproject.toml` files
|
||||
// found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
|
||||
fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
|
||||
},
|
||||
pip_requirements: {
|
||||
// The default for this package manager is to run on all requirements.txt files:
|
||||
// https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
|
||||
// `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
|
||||
// outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
|
||||
// which takes a regex string, `ignorePaths` takes a glob string, so we have to use
|
||||
// a "negative glob pattern".
|
||||
// See:
|
||||
// - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
|
||||
// - https://docs.renovatebot.com/configuration-options/#ignorepaths
|
||||
// - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
|
||||
ignorePaths: ["!docs/requirements*.txt"]
|
||||
},
|
||||
npm: {
|
||||
// The default for this package manager is to only search for `package.json` files
|
||||
// found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
|
||||
fileMatch: ["^playground/.*package\\.json$"],
|
||||
},
|
||||
"pre-commit": {
|
||||
enabled: true,
|
||||
},
|
||||
packageRules: [
|
||||
// Pin GitHub Actions to immutable SHAs.
|
||||
{
|
||||
matchDepTypes: ["action"],
|
||||
pinDigests: true,
|
||||
},
|
||||
// Annotate GitHub Actions SHAs with a SemVer version.
|
||||
{
|
||||
extends: ["helpers:pinGitHubActionDigests"],
|
||||
extractVersion: "^(?<version>v?\\d+\\.\\d+\\.\\d+)$",
|
||||
versioning: "regex:^v?(?<major>\\d+)(\\.(?<minor>\\d+)\\.(?<patch>\\d+))?$",
|
||||
},
|
||||
{
|
||||
// Group upload/download artifact updates, the versions are dependent
|
||||
groupName: "Artifact GitHub Actions dependencies",
|
||||
matchManagers: ["github-actions"],
|
||||
matchDatasources: ["gitea-tags", "github-tags"],
|
||||
matchPackageNames: ["actions/.*-artifact"],
|
||||
matchPackagePatterns: ["actions/.*-artifact"],
|
||||
description: "Weekly update of artifact-related GitHub Actions dependencies",
|
||||
},
|
||||
{
|
||||
@@ -69,21 +41,6 @@
|
||||
description: "Disable PRs updating GitHub runners (e.g. 'runs-on: macos-14')",
|
||||
enabled: false,
|
||||
},
|
||||
{
|
||||
// Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
|
||||
// See: https://github.com/astral-sh/uv/issues/3642
|
||||
matchPackageNames: ["zip"],
|
||||
matchManagers: ["cargo"],
|
||||
enabled: false,
|
||||
},
|
||||
{
|
||||
// `mkdocs-material` requires a manual update to keep the version in sync
|
||||
// with `mkdocs-material-insider`.
|
||||
// See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
|
||||
matchManagers: ["pip_requirements"],
|
||||
matchPackageNames: ["mkdocs-material"],
|
||||
enabled: false,
|
||||
},
|
||||
{
|
||||
groupName: "pre-commit dependencies",
|
||||
matchManagers: ["pre-commit"],
|
||||
@@ -98,15 +55,22 @@
|
||||
{
|
||||
groupName: "Monaco",
|
||||
matchManagers: ["npm"],
|
||||
matchPackageNames: ["monaco"],
|
||||
matchPackagePatterns: ["monaco"],
|
||||
description: "Weekly update of the Monaco editor",
|
||||
},
|
||||
{
|
||||
groupName: "strum",
|
||||
matchManagers: ["cargo"],
|
||||
matchPackageNames: ["strum"],
|
||||
matchPackagePatterns: ["strum"],
|
||||
description: "Weekly update of strum dependencies",
|
||||
}
|
||||
},
|
||||
{
|
||||
groupName: "ESLint",
|
||||
matchManagers: ["npm"],
|
||||
matchPackageNames: ["eslint"],
|
||||
allowedVersions: "<9",
|
||||
description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
|
||||
},
|
||||
],
|
||||
vulnerabilityAlerts: {
|
||||
commitMessageSuffix: "",
|
||||
|
||||
480
.github/workflows/build-binaries.yml
vendored
480
.github/workflows/build-binaries.yml
vendored
@@ -1,480 +0,0 @@
|
||||
# Build ruff on all platforms.
|
||||
#
|
||||
# Generates both wheels (for PyPI) and archived binaries (for GitHub releases).
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
|
||||
# artifacts job within `cargo-dist`.
|
||||
name: "Build binaries"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
pull_request:
|
||||
paths:
|
||||
# When we change pyproject.toml, we want to ensure that the maturin builds still work.
|
||||
- pyproject.toml
|
||||
# And when we change this workflow itself...
|
||||
- .github/workflows/build-binaries.yml
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
env:
|
||||
PACKAGE_NAME: ruff
|
||||
MODULE_NAME: ruff
|
||||
PYTHON_VERSION: "3.13"
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
sdist:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build sdist"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
command: sdist
|
||||
args: --out dist
|
||||
- name: "Test sdist"
|
||||
run: |
|
||||
pip install dist/"${PACKAGE_NAME}"-*.tar.gz --force-reinstall
|
||||
"${MODULE_NAME}" --help
|
||||
python -m "${MODULE_NAME}" --help
|
||||
- name: "Upload sdist"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-sdist
|
||||
path: dist
|
||||
|
||||
macos-x86_64:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: macos-14
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - x86_64"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: x86_64
|
||||
args: --release --locked --out dist
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-macos-x86_64
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
TARGET=x86_64-apple-darwin
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-macos-x86_64
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
macos-aarch64:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: macos-14
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: arm64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - aarch64"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: aarch64
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel - aarch64"
|
||||
run: |
|
||||
pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-aarch64-apple-darwin
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
TARGET=aarch64-apple-darwin
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-aarch64-apple-darwin
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
windows:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: windows-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: x86_64-pc-windows-msvc
|
||||
arch: x64
|
||||
- target: i686-pc-windows-msvc
|
||||
arch: x86
|
||||
- target: aarch64-pc-windows-msvc
|
||||
arch: x64
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: ${{ matrix.platform.arch }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
args: --release --locked --out dist
|
||||
env:
|
||||
# aarch64 build fails, see https://github.com/PyO3/maturin/issues/2110
|
||||
XWIN_VERSION: 16
|
||||
- name: "Test wheel"
|
||||
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
|
||||
"${MODULE_NAME}" --help
|
||||
python -m "${MODULE_NAME}" --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
|
||||
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
||||
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.zip
|
||||
*.sha256
|
||||
|
||||
linux:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-gnu
|
||||
- i686-unknown-linux-gnu
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: auto
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
||||
run: |
|
||||
pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
|
||||
"${MODULE_NAME}" --help
|
||||
python -m "${MODULE_NAME}" --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-${{ matrix.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TARGET=${{ matrix.target }}
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-${{ matrix.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
linux-cross:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-gnu
|
||||
arch: aarch64
|
||||
# see https://github.com/astral-sh/ruff/issues/3791
|
||||
# and https://github.com/gnzlbg/jemallocator/issues/170#issuecomment-1503228963
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: armv7-unknown-linux-gnueabihf
|
||||
arch: armv7
|
||||
- target: s390x-unknown-linux-gnu
|
||||
arch: s390x
|
||||
- target: powerpc64le-unknown-linux-gnu
|
||||
arch: ppc64le
|
||||
# see https://github.com/astral-sh/ruff/issues/10073
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: powerpc64-unknown-linux-gnu
|
||||
arch: ppc64
|
||||
# see https://github.com/astral-sh/ruff/issues/10073
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: arm-unknown-linux-musleabihf
|
||||
arch: arm
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: auto
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
args: --release --locked --out dist
|
||||
- uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
|
||||
if: ${{ matrix.platform.arch != 'ppc64' && matrix.platform.arch != 'ppc64le'}}
|
||||
name: Test wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }}
|
||||
distro: ${{ matrix.platform.arch == 'arm' && 'bullseye' || 'ubuntu20.04' }}
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apt-get update
|
||||
apt-get install -y --no-install-recommends python3 python3-pip
|
||||
pip3 install -U pip
|
||||
run: |
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TARGET=${{ matrix.platform.target }}
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
musllinux:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-musl
|
||||
- i686-unknown-linux-musl
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||
uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # v3
|
||||
with:
|
||||
image: alpine:latest
|
||||
options: -v ${{ github.workspace }}:/io -w /io
|
||||
run: |
|
||||
apk add python3
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/${{ env.MODULE_NAME }} --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-${{ matrix.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TARGET=${{ matrix.target }}
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-${{ matrix.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
musllinux-cross:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-musl
|
||||
arch: aarch64
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: armv7-unknown-linux-musleabihf
|
||||
arch: armv7
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
- uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
|
||||
name: Test wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
distro: alpine_latest
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apk add python3
|
||||
run: |
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/${{ env.MODULE_NAME }} --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TARGET=${{ matrix.platform.target }}
|
||||
ARCHIVE_NAME=ruff-$TARGET
|
||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
||||
|
||||
mkdir -p $ARCHIVE_NAME
|
||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: artifacts-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
299
.github/workflows/build-docker.yml
vendored
299
.github/workflows/build-docker.yml
vendored
@@ -1,299 +0,0 @@
|
||||
# Build and publish a Docker image.
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
|
||||
# artifacts job within `cargo-dist`.
|
||||
#
|
||||
# TODO(charlie): Ideally, the publish step would happen as a publish job within `cargo-dist`, but
|
||||
# sharing the built image as an artifact between jobs is challenging.
|
||||
name: "[ruff] Build Docker image"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/build-docker.yml
|
||||
|
||||
env:
|
||||
RUFF_BASE_IMG: ghcr.io/${{ github.repository_owner }}/ruff
|
||||
|
||||
jobs:
|
||||
docker-build:
|
||||
name: Build Docker image (ghcr.io/astral-sh/ruff) for ${{ matrix.platform }}
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux/amd64
|
||||
- linux/arm64
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
|
||||
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
|
||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Check tag consistency
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
env:
|
||||
TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
|
||||
run: |
|
||||
version=$(grep -m 1 "^version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||
if [ "${TAG}" != "${version}" ]; then
|
||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||
echo "${TAG}" >&2
|
||||
echo "${version}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo "Releasing ${version}"
|
||||
fi
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
with:
|
||||
images: ${{ env.RUFF_BASE_IMG }}
|
||||
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
|
||||
tags: |
|
||||
type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
|
||||
- name: Normalize Platform Pair (replace / with -)
|
||||
run: |
|
||||
platform=${{ matrix.platform }}
|
||||
echo "PLATFORM_TUPLE=${platform//\//-}" >> "$GITHUB_ENV"
|
||||
|
||||
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
|
||||
- name: Build and push by digest
|
||||
id: build
|
||||
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
|
||||
with:
|
||||
context: .
|
||||
platforms: ${{ matrix.platform }}
|
||||
cache-from: type=gha,scope=ruff-${{ env.PLATFORM_TUPLE }}
|
||||
cache-to: type=gha,mode=min,scope=ruff-${{ env.PLATFORM_TUPLE }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
outputs: type=image,name=${{ env.RUFF_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
|
||||
- name: Export digests
|
||||
env:
|
||||
digest: ${{ steps.build.outputs.digest }}
|
||||
run: |
|
||||
mkdir -p /tmp/digests
|
||||
touch "/tmp/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digests
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: digests-${{ env.PLATFORM_TUPLE }}
|
||||
path: /tmp/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
docker-publish:
|
||||
name: Publish Docker image (ghcr.io/astral-sh/ruff)
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
needs:
|
||||
- docker-build
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
with:
|
||||
images: ${{ env.RUFF_BASE_IMG }}
|
||||
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
|
||||
tags: |
|
||||
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||
|
||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
|
||||
- name: Create manifest list and push
|
||||
working-directory: /tmp/digests
|
||||
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
|
||||
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
|
||||
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
|
||||
run: |
|
||||
# shellcheck disable=SC2046
|
||||
docker buildx imagetools create \
|
||||
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf "${RUFF_BASE_IMG}@sha256:%s " *)
|
||||
|
||||
docker-publish-extra:
|
||||
name: Publish additional Docker image based on ${{ matrix.image-mapping }}
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
needs:
|
||||
- docker-publish
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# Mapping of base image followed by a comma followed by one or more base tags (comma separated)
|
||||
# Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
|
||||
image-mapping:
|
||||
- alpine:3.21,alpine3.21,alpine
|
||||
- debian:bookworm-slim,bookworm-slim,debian-slim
|
||||
- buildpack-deps:bookworm,bookworm,debian
|
||||
steps:
|
||||
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
|
||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate Dynamic Dockerfile Tags
|
||||
shell: bash
|
||||
env:
|
||||
TAG_VALUE: ${{ fromJson(inputs.plan).announcement_tag }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the image and tags from the matrix variable
|
||||
IFS=',' read -r BASE_IMAGE BASE_TAGS <<< "${{ matrix.image-mapping }}"
|
||||
|
||||
# Generate Dockerfile content
|
||||
cat <<EOF > Dockerfile
|
||||
FROM ${BASE_IMAGE}
|
||||
COPY --from=${RUFF_BASE_IMG}:latest /ruff /usr/local/bin/ruff
|
||||
ENTRYPOINT []
|
||||
CMD ["/usr/local/bin/ruff"]
|
||||
EOF
|
||||
|
||||
# Initialize a variable to store all tag docker metadata patterns
|
||||
TAG_PATTERNS=""
|
||||
|
||||
# Loop through all base tags and append its docker metadata pattern to the list
|
||||
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
|
||||
IFS=','; for TAG in ${BASE_TAGS}; do
|
||||
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${TAG_VALUE}\n"
|
||||
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${TAG_VALUE}\n"
|
||||
TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
|
||||
done
|
||||
|
||||
# Remove the trailing newline from the pattern list
|
||||
TAG_PATTERNS="${TAG_PATTERNS%\\n}"
|
||||
|
||||
# Export image cache name
|
||||
echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> "$GITHUB_ENV"
|
||||
|
||||
# Export tag patterns using the multiline env var syntax
|
||||
{
|
||||
echo "TAG_PATTERNS<<EOF"
|
||||
echo -e "${TAG_PATTERNS}"
|
||||
echo EOF
|
||||
} >> "$GITHUB_ENV"
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
# ghcr.io prefers index level annotations
|
||||
env:
|
||||
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
|
||||
with:
|
||||
images: ${{ env.RUFF_BASE_IMG }}
|
||||
flavor: |
|
||||
latest=false
|
||||
tags: |
|
||||
${{ env.TAG_PATTERNS }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
# We do not really need to cache here as the Dockerfile is tiny
|
||||
#cache-from: type=gha,scope=ruff-${{ env.IMAGE_REF }}
|
||||
#cache-to: type=gha,mode=min,scope=ruff-${{ env.IMAGE_REF }}
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
annotations: ${{ steps.meta.outputs.annotations }}
|
||||
|
||||
# This is effectively a duplicate of `docker-publish` to make https://github.com/astral-sh/ruff/pkgs/container/ruff
|
||||
# show the ruff base image first since GitHub always shows the last updated image digests
|
||||
# This works by annotating the original digests (previously non-annotated) which triggers an update to ghcr.io
|
||||
docker-republish:
|
||||
name: Annotate Docker image (ghcr.io/astral-sh/ruff)
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
needs:
|
||||
- docker-publish-extra
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
env:
|
||||
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
|
||||
with:
|
||||
images: ${{ env.RUFF_BASE_IMG }}
|
||||
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
|
||||
tags: |
|
||||
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||
|
||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
|
||||
- name: Create manifest list and push
|
||||
working-directory: /tmp/digests
|
||||
# The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
|
||||
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
|
||||
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
|
||||
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
|
||||
run: |
|
||||
readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
|
||||
|
||||
# shellcheck disable=SC2046
|
||||
docker buildx imagetools create \
|
||||
"${annotations[@]}" \
|
||||
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf "${RUFF_BASE_IMG}@sha256:%s " *)
|
||||
691
.github/workflows/ci.yaml
vendored
691
.github/workflows/ci.yaml
vendored
File diff suppressed because it is too large
Load Diff
30
.github/workflows/daily_fuzz.yaml
vendored
30
.github/workflows/daily_fuzz.yaml
vendored
@@ -31,31 +31,25 @@ jobs:
|
||||
# Don't run the cron job on forks:
|
||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
|
||||
python-version: "3.12"
|
||||
- name: Install uv
|
||||
run: curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
- name: Install Python requirements
|
||||
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
uses: rui314/setup-mold@v1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: Build ruff
|
||||
# A debug build means the script runs slower once it gets started,
|
||||
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
|
||||
run: cargo build --locked
|
||||
- name: Fuzz
|
||||
run: |
|
||||
# shellcheck disable=SC2046
|
||||
(
|
||||
uvx \
|
||||
--python=3.12 \
|
||||
--from=./python/py-fuzzer \
|
||||
fuzz \
|
||||
--test-executable=target/debug/ruff \
|
||||
--bin=ruff \
|
||||
$(shuf -i 0-9999999999999999999 -n 1000)
|
||||
)
|
||||
run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff
|
||||
|
||||
create-issue-on-failure:
|
||||
name: Create an issue if the daily fuzz surfaced any bugs
|
||||
@@ -65,7 +59,7 @@ jobs:
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
- uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
@@ -73,6 +67,6 @@ jobs:
|
||||
owner: "astral-sh",
|
||||
repo: "ruff",
|
||||
title: `Daily parser fuzz failed on ${new Date().toDateString()}`,
|
||||
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
|
||||
body: "Runs listed here: https://github.com/astral-sh/ruff/actions/workflows/daily_fuzz.yml",
|
||||
labels: ["bug", "parser", "fuzzer"],
|
||||
})
|
||||
|
||||
55
.github/workflows/docs.yaml
vendored
Normal file
55
.github/workflows/docs.yaml
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
name: mkdocs
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
|
||||
default: ""
|
||||
type: string
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
mkdocs:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
- uses: actions/setup-python@v5
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@v0.9.0
|
||||
with:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: pip install -r docs/requirements-insiders.txt
|
||||
- name: "Install dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: pip install -r docs/requirements.txt
|
||||
- name: "Copy README File"
|
||||
run: |
|
||||
python scripts/transform_readme.py --target mkdocs
|
||||
python scripts/generate_mkdocs.py
|
||||
- name: "Build Insiders docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||
- name: "Build docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.public.yml
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@v3.5.0
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
||||
command: pages deploy site --project-name=astral-docs --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
||||
101
.github/workflows/mypy_primer.yaml
vendored
101
.github/workflows/mypy_primer.yaml
vendored
@@ -1,101 +0,0 @@
|
||||
name: Run mypy_primer
|
||||
|
||||
permissions: {}
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "crates/ty*/**"
|
||||
- "crates/ruff_db"
|
||||
- "crates/ruff_python_ast"
|
||||
- "crates/ruff_python_parser"
|
||||
- ".github/workflows/mypy_primer.yaml"
|
||||
- ".github/workflows/mypy_primer_comment.yaml"
|
||||
- "Cargo.lock"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
RUST_BACKTRACE: 1
|
||||
|
||||
jobs:
|
||||
mypy_primer:
|
||||
name: Run mypy_primer
|
||||
runs-on: depot-ubuntu-22.04-32
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
path: ruff
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
|
||||
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
with:
|
||||
workspaces: "ruff"
|
||||
|
||||
- name: Install Rust toolchain
|
||||
run: rustup show
|
||||
|
||||
- name: Run mypy_primer
|
||||
shell: bash
|
||||
run: |
|
||||
cd ruff
|
||||
|
||||
echo "Enabling mypy primer specific configuration overloads (see .github/mypy-primer-ty.toml)"
|
||||
mkdir -p ~/.config/ty
|
||||
cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml
|
||||
|
||||
PRIMER_SELECTOR="$(paste -s -d'|' crates/ty_python_semantic/resources/primer/good.txt)"
|
||||
|
||||
echo "new commit"
|
||||
git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
|
||||
|
||||
MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
|
||||
git checkout -b base_commit "$MERGE_BASE"
|
||||
echo "base commit"
|
||||
git rev-list --format=%s --max-count=1 base_commit
|
||||
|
||||
cd ..
|
||||
|
||||
echo "Project selector: $PRIMER_SELECTOR"
|
||||
# Allow the exit code to be 0 or 1, only fail for actual mypy_primer crashes/bugs
|
||||
uvx \
|
||||
--from="git+https://github.com/hauntsaninja/mypy_primer@01a7ca325f674433c58e02416a867178d1571128" \
|
||||
mypy_primer \
|
||||
--repo ruff \
|
||||
--type-checker ty \
|
||||
--old base_commit \
|
||||
--new "$GITHUB_SHA" \
|
||||
--project-selector "/($PRIMER_SELECTOR)\$" \
|
||||
--output concise \
|
||||
--debug > mypy_primer.diff || [ $? -eq 1 ]
|
||||
|
||||
# Output diff with ANSI color codes
|
||||
cat mypy_primer.diff
|
||||
|
||||
# Remove ANSI color codes before uploading
|
||||
sed -ie 's/\x1b\[[0-9;]*m//g' mypy_primer.diff
|
||||
|
||||
echo ${{ github.event.number }} > pr-number
|
||||
|
||||
- name: Upload diff
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: mypy_primer_diff
|
||||
path: mypy_primer.diff
|
||||
|
||||
- name: Upload pr-number
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: pr-number
|
||||
path: pr-number
|
||||
97
.github/workflows/mypy_primer_comment.yaml
vendored
97
.github/workflows/mypy_primer_comment.yaml
vendored
@@ -1,97 +0,0 @@
name: PR comment (mypy_primer)

on: # zizmor: ignore[dangerous-triggers]
workflow_run:
workflows: [Run mypy_primer]
types: [completed]
workflow_dispatch:
inputs:
workflow_run_id:
description: The mypy_primer workflow that triggers the workflow run
required: true

jobs:
comment:
runs-on: ubuntu-24.04
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: Download PR number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true

- name: Parse pull request number
id: pr-number
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download mypy_primer results"
id: download-mypy_primer_diff
if: steps.pr-number.outputs.pr-number
with:
name: mypy_primer_diff
workflow: mypy_primer.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/mypy_primer_diff
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true

- name: Generate comment content
id: generate-comment
if: steps.download-mypy_primer_diff.outputs.found_artifact == 'true'
run: |
# Guard against malicious mypy_primer results that symlink to a secret
# file on this runner
if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]]
then
echo "Error: mypy_primer.diff cannot be a symlink"
exit 1
fi

# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment mypy_primer -->' >> comment.txt

echo '## `mypy_primer` results' >> comment.txt
if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then
echo '<details>' >> comment.txt
echo '<summary>Changes were detected when running on open source projects</summary>' >> comment.txt
echo '' >> comment.txt
echo '```diff' >> comment.txt
cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt
echo '```' >> comment.txt
echo '</details>' >> comment.txt
else
echo 'No ecosystem changes detected ✅' >> comment.txt
fi

echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"

- name: Find existing comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment mypy_primer -->"

- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}
body-path: comment.txt
edit-mode: replace
29
.github/workflows/notify-dependents.yml
vendored
29
.github/workflows/notify-dependents.yml
vendored
@@ -1,29 +0,0 @@
# Notify downstream repositories of a new release.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
# job within `cargo-dist`.
name: "[ruff] Notify dependents"

on:
workflow_call:
inputs:
plan:
required: true
type: string

jobs:
update-dependents:
name: Notify dependents
runs-on: ubuntu-latest
steps:
- name: "Update pre-commit mirror"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
script: |
github.rest.actions.createWorkflowDispatch({
owner: 'astral-sh',
repo: 'ruff-pre-commit',
workflow_id: 'main.yml',
ref: 'main',
})
48
.github/workflows/playground.yaml
vendored
Normal file
48
.github/workflows/playground.yaml
vendored
Normal file
@@ -0,0 +1,48 @@
name: "[Playground] Release"

on:
workflow_dispatch:
release:
types: [published]

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10

jobs:
publish:
runs-on: ubuntu-latest
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
with:
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-bindgen-action@v0.2.0
- name: "Run wasm-pack"
run: wasm-pack build --target web --out-dir ../../playground/src/pkg crates/ruff_wasm
- name: "Install Node dependencies"
run: npm ci
working-directory: playground
- name: "Run TypeScript checks"
run: npm run check
working-directory: playground
- name: "Build JavaScript bundle"
run: npm run build
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.5.0
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
command: pages deploy playground/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
31
.github/workflows/pr-comment.yaml
vendored
31
.github/workflows/pr-comment.yaml
vendored
@@ -10,29 +10,29 @@ on:
description: The ecosystem workflow that triggers the workflow run
required: true

permissions:
pull-requests: write

jobs:
comment:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
- uses: dawidd6/action-download-artifact@v3
name: Download pull request number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true

- name: Parse pull request number
id: pr-number
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
fi

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
- uses: dawidd6/action-download-artifact@v3
name: "Download ecosystem results"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
@@ -43,20 +43,11 @@ jobs:
path: pr/ecosystem
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true

- name: Generate comment content
id: generate-comment
if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
run: |
# Guard against malicious ecosystem results that symlink to a secret
# file on this runner
if [[ -L pr/ecosystem/ecosystem-result ]]
then
echo "Error: ecosystem-result cannot be a symlink"
exit 1
fi

# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment ecosystem -->' >> comment.txt
@@ -65,12 +56,12 @@ jobs:
cat pr/ecosystem/ecosystem-result >> comment.txt
echo "" >> comment.txt

echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"
echo 'comment<<EOF' >> $GITHUB_OUTPUT
cat comment.txt >> $GITHUB_OUTPUT
echo 'EOF' >> $GITHUB_OUTPUT

- name: Find existing comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
uses: peter-evans/find-comment@v3
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
@@ -80,7 +71,7 @@ jobs:

- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
uses: peter-evans/create-or-update-comment@v4
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}
144
.github/workflows/publish-docs.yml
vendored
144
.github/workflows/publish-docs.yml
vendored
@@ -1,144 +0,0 @@
|
||||
# Publish the Ruff documentation.
|
||||
#
|
||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
|
||||
# job within `cargo-dist`.
|
||||
name: mkdocs
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
|
||||
default: ""
|
||||
type: string
|
||||
workflow_call:
|
||||
inputs:
|
||||
plan:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
mkdocs:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
persist-credentials: true
|
||||
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: 3.12
|
||||
|
||||
- name: "Set docs version"
|
||||
env:
|
||||
version: ${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}
|
||||
run: |
|
||||
# if version is missing, use 'latest'
|
||||
if [ -z "$version" ]; then
|
||||
echo "Using 'latest' as version"
|
||||
version="latest"
|
||||
fi
|
||||
|
||||
# Use version as display name for now
|
||||
display_name="$version"
|
||||
|
||||
echo "version=$version" >> "$GITHUB_ENV"
|
||||
echo "display_name=$display_name" >> "$GITHUB_ENV"
|
||||
|
||||
- name: "Set branch name"
|
||||
run: |
|
||||
timestamp="$(date +%s)"
|
||||
|
||||
# create branch_display_name from display_name by replacing all
|
||||
# characters disallowed in git branch names with hyphens
|
||||
branch_display_name="$(echo "${display_name}" | tr -c '[:alnum:]._' '-' | tr -s '-')"
|
||||
|
||||
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV"
|
||||
echo "timestamp=$timestamp" >> "$GITHUB_ENV"
|
||||
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
|
||||
with:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: pip install -r docs/requirements-insiders.txt
|
||||
|
||||
- name: "Install dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: pip install -r docs/requirements.txt
|
||||
|
||||
- name: "Copy README File"
|
||||
run: |
|
||||
python scripts/transform_readme.py --target mkdocs
|
||||
python scripts/generate_mkdocs.py
|
||||
|
||||
- name: "Build Insiders docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||
|
||||
- name: "Build docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.public.yml
|
||||
|
||||
- name: "Clone docs repo"
|
||||
run: git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
|
||||
|
||||
- name: "Copy docs"
|
||||
run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/
|
||||
|
||||
- name: "Commit docs"
|
||||
working-directory: astral-docs
|
||||
run: |
|
||||
git config user.name "astral-docs-bot"
|
||||
git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
|
||||
|
||||
git checkout -b "${branch_name}"
|
||||
git add site/ruff
|
||||
git commit -m "Update ruff documentation for $version"
|
||||
|
||||
- name: "Create Pull Request"
|
||||
working-directory: astral-docs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
run: |
|
||||
# set the PR title
|
||||
pull_request_title="Update ruff documentation for ${display_name}"
|
||||
|
||||
# Delete any existing pull requests that are open for this version
|
||||
# by checking against pull_request_title because the new PR will
|
||||
# supersede the old one.
|
||||
gh pr list --state open --json title --jq '.[] | select(.title == "$pull_request_title") | .number' | \
|
||||
xargs -I {} gh pr close {}
|
||||
|
||||
# push the branch to GitHub
|
||||
git push origin "${branch_name}"
|
||||
|
||||
# create the PR
|
||||
gh pr create \
|
||||
--base=main \
|
||||
--head="${branch_name}" \
|
||||
--title="${pull_request_title}" \
|
||||
--body="Automated documentation update for ${display_name}" \
|
||||
--label="documentation"
|
||||
|
||||
- name: "Merge Pull Request"
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
working-directory: astral-docs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
run: |
|
||||
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
|
||||
# give the PR a few seconds to be created before trying to auto-merge it
|
||||
sleep 10
|
||||
gh pr merge --squash "${branch_name}"
|
||||
54
.github/workflows/publish-playground.yml
vendored
54
.github/workflows/publish-playground.yml
vendored
@@ -1,54 +0,0 @@
# Publish the Ruff playground.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
# job within `cargo-dist`.
name: "[Playground] Release"

on:
workflow_dispatch:
workflow_call:
inputs:
plan:
required: true
type: string

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10

jobs:
publish:
runs-on: ubuntu-latest
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 22
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Install Node dependencies"
run: npm ci
working-directory: playground
- name: "Run TypeScript checks"
run: npm run check
working-directory: playground
- name: "Build Ruff playground"
run: npm run build --workspace ruff-playground
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
command: pages deploy playground/ruff/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
32
.github/workflows/publish-pypi.yml
vendored
32
.github/workflows/publish-pypi.yml
vendored
@@ -1,32 +0,0 @@
# Publish a release to PyPI.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job
# within `cargo-dist`.
name: "[ruff] Publish to PyPI"

on:
workflow_call:
inputs:
plan:
required: true
type: string

jobs:
pypi-publish:
name: Upload to PyPI
runs-on: ubuntu-latest
environment:
name: release
permissions:
# For PyPI's trusted publishing.
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels-*
path: wheels
merge-multiple: true
- name: Publish to PyPi
run: uv publish -v wheels/*
58
.github/workflows/publish-ty-playground.yml
vendored
58
.github/workflows/publish-ty-playground.yml
vendored
@@ -1,58 +0,0 @@
# Publish the ty playground.
name: "[ty Playground] Release"

permissions: {}

on:
push:
branches: [main]
paths:
- "crates/ty*/**"
- "crates/ruff_db/**"
- "crates/ruff_python_ast/**"
- "crates/ruff_python_parser/**"
- "playground/**"
- ".github/workflows/publish-ty-playground.yml"

concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}
cancel-in-progress: true

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10

jobs:
publish:
runs-on: ubuntu-latest
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 22
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Install Node dependencies"
run: npm ci
working-directory: playground
- name: "Run TypeScript checks"
run: npm run check
working-directory: playground
- name: "Build ty playground"
run: npm run build --workspace ty-playground
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
command: pages deploy playground/ty/dist --project-name=ty-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
59
.github/workflows/publish-wasm.yml
vendored
59
.github/workflows/publish-wasm.yml
vendored
@@ -1,59 +0,0 @@
# Build and publish ruff-api for wasm.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish
# job within `cargo-dist`.
name: "Build and publish wasm"

on:
workflow_dispatch:
workflow_call:
inputs:
plan:
required: true
type: string

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10

jobs:
ruff_wasm:
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
strategy:
matrix:
target: [web, bundler, nodejs]
fail-fast: false
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
with:
version: v0.13.1
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Run wasm-pack build"
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
- name: "Rename generated package"
run: | # Replace the package name w/ jq
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
mv /tmp/package.json crates/ruff_wasm/pkg
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 20
registry-url: "https://registry.npmjs.org"
- name: "Publish (dry-run)"
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
run: npm publish --dry-run crates/ruff_wasm/pkg
- name: "Publish"
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
run: npm publish --provenance --access public crates/ruff_wasm/pkg
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
600
.github/workflows/release.yaml
vendored
Normal file
600
.github/workflows/release.yaml
vendored
Normal file
@@ -0,0 +1,600 @@
|
||||
name: "[ruff] Release"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: "The version to tag, without the leading 'v'. If omitted, will initiate a dry run (no uploads)."
|
||||
type: string
|
||||
sha:
|
||||
description: "The full sha of the commit to be released. If omitted, the latest commit on the default branch will be used."
|
||||
default: ""
|
||||
type: string
|
||||
pull_request:
|
||||
paths:
|
||||
# When we change pyproject.toml, we want to ensure that the maturin builds still work
|
||||
- pyproject.toml
|
||||
# And when we change this workflow itself...
|
||||
- .github/workflows/release.yaml
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
PACKAGE_NAME: ruff
|
||||
PYTHON_VERSION: "3.11"
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
sdist:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build sdist"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
command: sdist
|
||||
args: --out dist
|
||||
- name: "Test sdist"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload sdist"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-sdist
|
||||
path: dist
|
||||
|
||||
macos-x86_64:
|
||||
runs-on: macos-12
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - x86_64"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: x86_64
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel - x86_64"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-macos-x86_64
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-macos-x86_64
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
macos-aarch64:
|
||||
runs-on: macos-14
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: arm64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - aarch64"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: aarch64
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel - aarch64"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-aarch64-apple-darwin
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-aarch64-apple-darwin
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
windows:
|
||||
runs-on: windows-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: x86_64-pc-windows-msvc
|
||||
arch: x64
|
||||
- target: i686-pc-windows-msvc
|
||||
arch: x86
|
||||
- target: aarch64-pc-windows-msvc
|
||||
arch: x64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: ${{ matrix.platform.arch }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
shell: bash
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip
|
||||
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
||||
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.zip
|
||||
*.sha256
|
||||
|
||||
linux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-gnu
|
||||
- i686-unknown-linux-gnu
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: auto
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-${{ matrix.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
linux-cross:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-gnu
|
||||
arch: aarch64
|
||||
# see https://github.com/astral-sh/ruff/issues/3791
|
||||
# and https://github.com/gnzlbg/jemallocator/issues/170#issuecomment-1503228963
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: armv7-unknown-linux-gnueabihf
|
||||
arch: armv7
|
||||
- target: s390x-unknown-linux-gnu
|
||||
arch: s390x
|
||||
- target: powerpc64le-unknown-linux-gnu
|
||||
arch: ppc64le
|
||||
# see https://github.com/astral-sh/ruff/issues/10073
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: powerpc64-unknown-linux-gnu
|
||||
arch: ppc64
|
||||
# see https://github.com/astral-sh/ruff/issues/10073
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: auto
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
args: --release --locked --out dist
|
||||
- uses: uraimo/run-on-arch-action@v2
|
||||
if: matrix.platform.arch != 'ppc64'
|
||||
name: Test wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
distro: ubuntu20.04
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apt-get update
|
||||
apt-get install -y --no-install-recommends python3 python3-pip
|
||||
pip3 install -U pip
|
||||
run: |
|
||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
musllinux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-unknown-linux-musl
|
||||
- i686-unknown-linux-musl
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel"
|
||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||
uses: addnab/docker-run-action@v3
|
||||
with:
|
||||
image: alpine:latest
|
||||
options: -v ${{ github.workspace }}:/io -w /io
|
||||
run: |
|
||||
apk add python3
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/ruff check --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-${{ matrix.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
musllinux-cross:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- target: aarch64-unknown-linux-musl
|
||||
arch: aarch64
|
||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
||||
- target: armv7-unknown-linux-musleabihf
|
||||
arch: armv7
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.platform.target }}
|
||||
manylinux: musllinux_1_2
|
||||
args: --release --locked --out dist
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
- uses: uraimo/run-on-arch-action@v2
|
||||
name: Test wheel
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
distro: alpine_latest
|
||||
githubToken: ${{ github.token }}
|
||||
install: |
|
||||
apk add python3
|
||||
run: |
|
||||
python -m venv .venv
|
||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||
.venv/bin/ruff check --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.platform.target }}
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries-${{ matrix.platform.target }}
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
validate-tag:
|
||||
name: Validate tag
|
||||
runs-on: ubuntu-latest
|
||||
# If you don't set an input tag, it's a dry run (no uploads).
|
||||
if: ${{ inputs.tag }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: main # We checkout the main branch to check for the commit
|
||||
- name: Check main branch
|
||||
if: ${{ inputs.sha }}
|
||||
run: |
|
||||
# Fetch the main branch since a shallow checkout is used by default
|
||||
git fetch origin main --unshallow
|
||||
if ! git branch --contains ${{ inputs.sha }} | grep -E '(^|\s)main$'; then
|
||||
echo "The specified sha is not on the main branch" >&2
|
||||
exit 1
|
||||
fi
|
||||
- name: Check tag consistency
|
||||
run: |
|
||||
# Switch to the commit we want to release
|
||||
git checkout ${{ inputs.sha }}
|
||||
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||
if [ "${{ inputs.tag }}" != "${version}" ]; then
|
||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||
echo "${{ inputs.tag }}" >&2
|
||||
echo "${version}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo "Releasing ${version}"
|
||||
fi
|
||||
|
||||
upload-release:
|
||||
name: Upload to PyPI
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- macos-aarch64
|
||||
- macos-x86_64
|
||||
- windows
|
||||
- linux
|
||||
- linux-cross
|
||||
- musllinux
|
||||
- musllinux-cross
|
||||
- validate-tag
|
||||
# If you don't set an input tag, it's a dry run (no uploads).
|
||||
if: ${{ inputs.tag }}
|
||||
environment:
|
||||
name: release
|
||||
permissions:
|
||||
# For pypi trusted publishing
|
||||
id-token: write
|
||||
steps:
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: wheels-*
|
||||
path: wheels
|
||||
merge-multiple: true
|
||||
- name: Publish to PyPi
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
skip-existing: true
|
||||
packages-dir: wheels
|
||||
verbose: true
|
||||
|
||||
tag-release:
|
||||
name: Tag release
|
||||
runs-on: ubuntu-latest
|
||||
needs: upload-release
|
||||
# If you don't set an input tag, it's a dry run (no uploads).
|
||||
if: ${{ inputs.tag }}
|
||||
permissions:
|
||||
# For git tag
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- name: git tag
|
||||
run: |
|
||||
git config user.email "hey@astral.sh"
|
||||
git config user.name "Ruff Release CI"
|
||||
git tag -m "v${{ inputs.tag }}" "v${{ inputs.tag }}"
|
||||
# If there is duplicate tag, this will fail. The publish to pypi action will have been a noop (due to skip
|
||||
# existing), so we make a non-destructive exit here
|
||||
git push --tags
|
||||
|
||||
publish-release:
|
||||
name: Publish to GitHub
|
||||
runs-on: ubuntu-latest
|
||||
needs: tag-release
|
||||
# If you don't set an input tag, it's a dry run (no uploads).
|
||||
if: ${{ inputs.tag }}
|
||||
permissions:
|
||||
# For GitHub release publishing
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: binaries-*
|
||||
path: binaries
|
||||
merge-multiple: true
|
||||
- name: "Publish to GitHub"
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
draft: true
|
||||
files: binaries/*
|
||||
tag_name: v${{ inputs.tag }}
|
||||
|
||||
docker-publish:
|
||||
# This action doesn't need to wait on any other task, it's easy to re-tag if something failed and we're validating
|
||||
# the tag here also
|
||||
name: Push Docker image ghcr.io/astral-sh/ruff
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: release
|
||||
permissions:
|
||||
# For the docker push
|
||||
packages: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ghcr.io/astral-sh/ruff
|
||||
|
||||
- name: Check tag consistency
|
||||
# Unlike validate-tag we don't check if the commit is on the main branch, but it seems good enough since we can
|
||||
# change docker tags
|
||||
if: ${{ inputs.tag }}
|
||||
run: |
|
||||
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||
if [ "${{ inputs.tag }}" != "${version}" ]; then
|
||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||
echo "${{ inputs.tag }}" >&2
|
||||
echo "${version}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo "Releasing ${version}"
|
||||
fi
|
||||
|
||||
- name: "Build and push Docker image"
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
# Reuse the builder
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
push: ${{ inputs.tag != '' }}
|
||||
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ inputs.tag || 'dry-run' }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
|
||||
# After the release has been published, we update downstream repositories
|
||||
# This is separate because if this fails the release is still fine, we just need to do some manual workflow triggers
|
||||
update-dependents:
|
||||
name: Update dependents
|
||||
runs-on: ubuntu-latest
|
||||
needs: publish-release
|
||||
steps:
|
||||
- name: "Update pre-commit mirror"
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
|
||||
script: |
|
||||
github.rest.actions.createWorkflowDispatch({
|
||||
owner: 'astral-sh',
|
||||
repo: 'ruff-pre-commit',
|
||||
workflow_id: 'main.yml',
|
||||
ref: 'main',
|
||||
})
|
||||
306
.github/workflows/release.yml
vendored
306
.github/workflows/release.yml
vendored
@@ -1,306 +0,0 @@
|
||||
# This file was autogenerated by dist: https://github.com/astral-sh/cargo-dist
|
||||
#
|
||||
# Copyright 2022-2024, axodotdev
|
||||
# Copyright 2025 Astral Software Inc.
|
||||
# SPDX-License-Identifier: MIT or Apache-2.0
|
||||
#
|
||||
# CI that:
|
||||
#
|
||||
# * checks for a Git Tag that looks like a release
|
||||
# * builds artifacts with dist (archives, installers, hashes)
|
||||
# * uploads those artifacts to temporary workflow zip
|
||||
# * on success, uploads the artifacts to a GitHub Release
|
||||
#
|
||||
# Note that the GitHub Release will be created with a generated
|
||||
# title/body based on your changelogs.
|
||||
|
||||
name: Release
|
||||
permissions:
|
||||
"contents": "write"
|
||||
|
||||
# This task will run whenever you workflow_dispatch with a tag that looks like a version
|
||||
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
|
||||
# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
|
||||
# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
|
||||
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
|
||||
#
|
||||
# If PACKAGE_NAME is specified, then the announcement will be for that
|
||||
# package (erroring out if it doesn't have the given version or isn't dist-able).
|
||||
#
|
||||
# If PACKAGE_NAME isn't specified, then the announcement will be for all
|
||||
# (dist-able) packages in the workspace with that version (this mode is
|
||||
# intended for workspaces with only one dist-able package, or with all dist-able
|
||||
# packages versioned/released in lockstep).
|
||||
#
|
||||
# If you push multiple tags at once, separate instances of this workflow will
|
||||
# spin up, creating an independent announcement for each one. However, GitHub
|
||||
# will hard limit this to 3 tags per commit, as it will assume more tags is a
|
||||
# mistake.
|
||||
#
|
||||
# If there's a prerelease-style suffix to the version, then the release(s)
|
||||
# will be marked as a prerelease.
|
||||
on:
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: Release Tag
|
||||
required: true
|
||||
default: dry-run
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
# Run 'dist plan' (or host) to determine what tasks we need to do
|
||||
plan:
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
outputs:
|
||||
val: ${{ steps.plan.outputs.manifest }}
|
||||
tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
|
||||
tag-flag: ${{ inputs.tag && inputs.tag != 'dry-run' && format('--tag={0}', inputs.tag) || '' }}
|
||||
publishing: ${{ inputs.tag && inputs.tag != 'dry-run' }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Install dist
|
||||
# we specify bash to get pipefail; it guards against the `curl` command
|
||||
# failing. otherwise `sh` won't catch that `curl` returned non-0
|
||||
shell: bash
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.5-prerelease.1/cargo-dist-installer.sh | sh"
|
||||
- name: Cache dist
|
||||
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/dist
|
||||
# sure would be cool if github gave us proper conditionals...
|
||||
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
|
||||
# functionality based on whether this is a pull_request, and whether it's from a fork.
|
||||
# (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
|
||||
# but also really annoying to build CI around when it needs secrets to work right.)
|
||||
- id: plan
|
||||
run: |
|
||||
dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
|
||||
echo "dist ran successfully"
|
||||
cat plan-dist-manifest.json
|
||||
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
|
||||
with:
|
||||
name: artifacts-plan-dist-manifest
|
||||
path: plan-dist-manifest.json
|
||||
|
||||
custom-build-binaries:
|
||||
needs:
|
||||
- plan
|
||||
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
|
||||
uses: ./.github/workflows/build-binaries.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-build-docker:
|
||||
needs:
|
||||
- plan
|
||||
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
|
||||
uses: ./.github/workflows/build-docker.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
permissions:
|
||||
"contents": "read"
|
||||
"packages": "write"
|
||||
|
||||
# Build and package all the platform-agnostic(ish) things
|
||||
build-global-artifacts:
|
||||
needs:
|
||||
- plan
|
||||
- custom-build-binaries
|
||||
- custom-build-docker
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
|
||||
steps:
|
||||
- uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Install cached dist
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/dist
|
||||
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
|
||||
- name: Fetch local artifacts
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
- id: cargo-dist
|
||||
shell: bash
|
||||
run: |
|
||||
dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
|
||||
echo "dist ran successfully"
|
||||
|
||||
# Parse out what we just built and upload it to scratch storage
|
||||
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
|
||||
jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
|
||||
echo "EOF" >> "$GITHUB_OUTPUT"
|
||||
|
||||
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
|
||||
- name: "Upload artifacts"
|
||||
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
|
||||
with:
|
||||
name: artifacts-build-global
|
||||
path: |
|
||||
${{ steps.cargo-dist.outputs.paths }}
|
||||
${{ env.BUILD_MANIFEST_NAME }}
|
||||
# Determines if we should publish/announce
|
||||
host:
|
||||
needs:
|
||||
- plan
|
||||
- custom-build-binaries
|
||||
- custom-build-docker
|
||||
- build-global-artifacts
|
||||
# Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
|
||||
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
outputs:
|
||||
val: ${{ steps.host.outputs.manifest }}
|
||||
steps:
|
||||
- uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Install cached dist
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/dist
|
||||
# Fetch artifacts from scratch-storage
|
||||
- name: Fetch artifacts
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
# This is a harmless no-op for GitHub Releases, hosting for that happens in "announce"
|
||||
- id: host
|
||||
shell: bash
|
||||
run: |
|
||||
dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
|
||||
echo "artifacts uploaded and released successfully"
|
||||
cat dist-manifest.json
|
||||
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
|
||||
with:
|
||||
# Overwrite the previous copy
|
||||
name: artifacts-dist-manifest
|
||||
path: dist-manifest.json
|
||||
|
||||
custom-publish-pypi:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
|
||||
uses: ./.github/workflows/publish-pypi.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
# publish jobs get escalated permissions
|
||||
permissions:
|
||||
"id-token": "write"
|
||||
"packages": "write"
|
||||
|
||||
custom-publish-wasm:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
|
||||
uses: ./.github/workflows/publish-wasm.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
# publish jobs get escalated permissions
|
||||
permissions:
|
||||
"contents": "read"
|
||||
"id-token": "write"
|
||||
"packages": "write"
|
||||
|
||||
# Create a GitHub Release while uploading all files to it
|
||||
announce:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
- custom-publish-pypi
|
||||
- custom-publish-wasm
|
||||
# use "always() && ..." to allow us to wait for all publish jobs while
|
||||
# still allowing individual publish jobs to skip themselves (for prereleases).
|
||||
# "host" however must run to completion, no skipping allowed!
|
||||
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
# Create a GitHub Release while uploading all files to it
|
||||
- name: "Download GitHub Artifacts"
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: artifacts
|
||||
merge-multiple: true
|
||||
- name: Cleanup
|
||||
run: |
|
||||
# Remove the granular manifests
|
||||
rm -f artifacts/*-dist-manifest.json
|
||||
- name: Create GitHub Release
|
||||
env:
|
||||
PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"
|
||||
ANNOUNCEMENT_TITLE: "${{ fromJson(needs.host.outputs.val).announcement_title }}"
|
||||
ANNOUNCEMENT_BODY: "${{ fromJson(needs.host.outputs.val).announcement_github_body }}"
|
||||
RELEASE_COMMIT: "${{ github.sha }}"
|
||||
run: |
|
||||
# Write and read notes from a file to avoid quoting breaking things
|
||||
echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt
|
||||
|
||||
gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*
|
||||
|
||||
custom-notify-dependents:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/notify-dependents.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-publish-docs:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/publish-docs.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-publish-playground:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/publish-playground.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
27
.github/workflows/sync_typeshed.yaml
vendored
27
.github/workflows/sync_typeshed.yaml
vendored
@@ -21,17 +21,15 @@ jobs:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
name: Checkout Ruff
|
||||
with:
|
||||
path: ruff
|
||||
persist-credentials: true
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
name: Checkout typeshed
|
||||
with:
|
||||
repository: python/typeshed
|
||||
path: typeshed
|
||||
persist-credentials: false
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config --global user.name typeshedbot
|
||||
@@ -39,13 +37,13 @@ jobs:
|
||||
- name: Sync typeshed
|
||||
id: sync
|
||||
run: |
|
||||
rm -rf ruff/crates/ty_vendored/vendor/typeshed
|
||||
mkdir ruff/crates/ty_vendored/vendor/typeshed
|
||||
cp typeshed/README.md ruff/crates/ty_vendored/vendor/typeshed
|
||||
cp typeshed/LICENSE ruff/crates/ty_vendored/vendor/typeshed
|
||||
cp -r typeshed/stdlib ruff/crates/ty_vendored/vendor/typeshed/stdlib
|
||||
rm -rf ruff/crates/ty_vendored/vendor/typeshed/stdlib/@tests
|
||||
git -C typeshed rev-parse HEAD > ruff/crates/ty_vendored/vendor/typeshed/source_commit.txt
|
||||
rm -rf ruff/crates/red_knot/vendor/typeshed
|
||||
mkdir ruff/crates/red_knot/vendor/typeshed
|
||||
cp typeshed/README.md ruff/crates/red_knot/vendor/typeshed
|
||||
cp typeshed/LICENSE ruff/crates/red_knot/vendor/typeshed
|
||||
cp -r typeshed/stdlib ruff/crates/red_knot/vendor/typeshed/stdlib
|
||||
rm -rf ruff/crates/red_knot/vendor/typeshed/stdlib/@tests
|
||||
git -C typeshed rev-parse HEAD > ruff/crates/red_knot/vendor/typeshed/source_commit.txt
|
||||
- name: Commit the changes
|
||||
id: commit
|
||||
if: ${{ steps.sync.outcome == 'success' }}
|
||||
@@ -59,7 +57,7 @@ jobs:
|
||||
run: |
|
||||
cd ruff
|
||||
git push --force origin typeshedbot/sync-typeshed
|
||||
gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
|
||||
gh pr list --repo $GITHUB_REPOSITORY --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
|
||||
gh pr create --title "Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "internal"
|
||||
|
||||
create-issue-on-failure:
|
||||
@@ -70,7 +68,7 @@ jobs:
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
- uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
@@ -78,6 +76,5 @@ jobs:
|
||||
owner: "astral-sh",
|
||||
repo: "ruff",
|
||||
title: `Automated typeshed sync failed on ${new Date().toDateString()}`,
|
||||
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
|
||||
labels: ["bug", "ty"],
|
||||
body: "Runs are listed here: https://github.com/astral-sh/ruff/actions/workflows/sync_typeshed.yaml",
|
||||
})
|
||||
|
||||
19
.github/zizmor.yml
vendored
19
.github/zizmor.yml
vendored
@@ -1,19 +0,0 @@
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
# https://woodruffw.github.io/zizmor/configuration/
#
# TODO: can we remove the ignores here so that our workflows are more secure?
rules:
dangerous-triggers:
ignore:
- pr-comment.yaml
cache-poisoning:
ignore:
- build-docker.yml
- publish-playground.yml
excessive-permissions:
# it's hard to test what the impact of removing these ignores would be
# without actually running the release workflow...
ignore:
- build-docker.yml
- publish-playground.yml
- publish-docs.yml
12
.gitignore
vendored
12
.gitignore
vendored
@@ -21,18 +21,6 @@ flamegraph.svg
# `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
/target*

# samply profiles
profile.json

# tracing-flame traces
tracing.folded
tracing-flamechart.svg
tracing-flamegraph.svg

# insta
*.rs.pending-snap


###
# Rust.gitignore
###
@@ -14,22 +14,7 @@ MD041: false
# MD013/line-length
MD013: false

# MD014/commands-show-output
MD014: false

# MD024/no-duplicate-heading
MD024:
# Allow when nested under different parents e.g. CHANGELOG.md
siblings_only: true

# MD046/code-block-style
#
# Ignore this because it conflicts with the code block style used in content
# tabs of mkdocs-material which is to add a blank line after the content title.
#
# Ref: https://github.com/astral-sh/ruff/pull/15011#issuecomment-2544790854
MD046: false

# Link text should be descriptive
# Disallows link text like *here* which is annoying.
MD059: false
@@ -1,16 +1,10 @@
|
||||
fail_fast: false
|
||||
fail_fast: true
|
||||
|
||||
exclude: |
|
||||
(?x)^(
|
||||
.github/workflows/release.yml|
|
||||
crates/ty_vendored/vendor/.*|
|
||||
crates/ty_project/resources/.*|
|
||||
crates/ty/docs/(configuration|rules|cli).md|
|
||||
crates/ruff_benchmark/resources/.*|
|
||||
crates/red_knot/vendor/.*|
|
||||
crates/ruff_linter/resources/.*|
|
||||
crates/ruff_linter/src/rules/.*/snapshots/.*|
|
||||
crates/ruff_notebook/resources/.*|
|
||||
crates/ruff_server/resources/.*|
|
||||
crates/ruff/resources/.*|
|
||||
crates/ruff_python_formatter/resources/.*|
|
||||
crates/ruff_python_formatter/tests/snapshots/.*|
|
||||
@@ -19,23 +13,18 @@ exclude: |
|
||||
)$
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
- id: check-merge-conflict
|
||||
|
||||
- repo: https://github.com/abravalheri/validate-pyproject
|
||||
rev: v0.24.1
|
||||
rev: v0.17
|
||||
hooks:
|
||||
- id: validate-pyproject
|
||||
|
||||
- repo: https://github.com/executablebooks/mdformat
|
||||
rev: 0.7.22
|
||||
rev: 0.7.17
|
||||
hooks:
|
||||
- id: mdformat
|
||||
additional_dependencies:
|
||||
- mdformat-mkdocs==4.0.0
|
||||
- mdformat-footnote==0.1.1
|
||||
- mdformat-mkdocs
|
||||
- mdformat-admon
|
||||
exclude: |
|
||||
(?x)^(
|
||||
docs/formatter/black\.md
|
||||
@@ -43,7 +32,7 @@ repos:
|
||||
)$
|
||||
|
||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||
rev: v0.45.0
|
||||
rev: v0.40.0
|
||||
hooks:
|
||||
- id: markdownlint-fix
|
||||
exclude: |
|
||||
@@ -52,21 +41,8 @@ repos:
|
||||
| docs/\w+\.md
|
||||
)$
|
||||
|
||||
- repo: https://github.com/adamchainz/blacken-docs
|
||||
rev: 1.19.1
|
||||
hooks:
|
||||
- id: blacken-docs
|
||||
args: ["--pyi", "--line-length", "130"]
|
||||
files: '^crates/.*/resources/mdtest/.*\.md'
|
||||
exclude: |
|
||||
(?x)^(
|
||||
.*?invalid(_.+)*_syntax\.md
|
||||
)$
|
||||
additional_dependencies:
|
||||
- black==25.1.0
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.32.0
|
||||
rev: v1.21.0
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
@@ -80,52 +56,25 @@ repos:
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.11.10
|
||||
rev: v0.4.4
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
types_or: [python, pyi]
|
||||
require_serial: true
|
||||
exclude: |
|
||||
(?x)^(
|
||||
crates/ruff_linter/resources/.*|
|
||||
crates/ruff_python_formatter/resources/.*
|
||||
)$
|
||||
|
||||
# Prettier
|
||||
- repo: https://github.com/rbubley/mirrors-prettier
|
||||
rev: v3.5.3
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v3.1.0
|
||||
hooks:
|
||||
- id: prettier
|
||||
types: [yaml]
|
||||
|
||||
# zizmor detects security vulnerabilities in GitHub Actions workflows.
|
||||
# Additional configuration for the tool is found in `.github/zizmor.yml`
|
||||
- repo: https://github.com/woodruffw/zizmor-pre-commit
|
||||
rev: v1.7.0
|
||||
hooks:
|
||||
- id: zizmor
|
||||
|
||||
- repo: https://github.com/python-jsonschema/check-jsonschema
|
||||
rev: 0.33.0
|
||||
hooks:
|
||||
- id: check-github-workflows
|
||||
|
||||
# `actionlint` hook, for verifying correct syntax in GitHub Actions workflows.
|
||||
# Some additional configuration for `actionlint` can be found in `.github/actionlint.yaml`.
|
||||
- repo: https://github.com/rhysd/actionlint
|
||||
rev: v1.7.7
|
||||
hooks:
|
||||
- id: actionlint
|
||||
stages:
|
||||
# This hook is disabled by default, since it's quite slow.
|
||||
# To run all hooks *including* this hook, use `uvx pre-commit run -a --hook-stage=manual`.
|
||||
# To run *just* this hook, use `uvx pre-commit run -a actionlint --hook-stage=manual`.
|
||||
- manual
|
||||
args:
|
||||
- "-ignore=SC2129" # ignorable stylistic lint from shellcheck
|
||||
- "-ignore=SC2016" # another shellcheck lint: seems to have false positives?
|
||||
additional_dependencies:
|
||||
# actionlint has a shellcheck integration which extracts shell scripts in `run:` steps from GitHub Actions
|
||||
# and checks these with shellcheck. This is arguably its most useful feature,
|
||||
# but the integration only works if shellcheck is installed
|
||||
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"
|
||||
|
||||
ci:
|
||||
skip: [cargo-fmt, dev-generate-all]
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
# Auto-generated by `cargo-dist`.
|
||||
.github/workflows/release.yml
|
||||
5
.vscode/extensions.json
vendored
5
.vscode/extensions.json
vendored
@@ -1,5 +0,0 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"rust-lang.rust-analyzer"
|
||||
]
|
||||
}
|
||||
6
.vscode/settings.json
vendored
6
.vscode/settings.json
vendored
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"rust-analyzer.check.extraArgs": [
|
||||
"--all-features"
|
||||
],
|
||||
"rust-analyzer.check.command": "clippy",
|
||||
}
|
||||
@@ -1,142 +1,5 @@
|
||||
# Breaking Changes
|
||||
|
||||
## 0.11.0
|
||||
|
||||
This is a follow-up to release 0.10.0. Because of a mistake in the release process, the `requires-python` inference changes were not included in that release. Ruff 0.11.0 now includes this change as well as the stabilization of the preview behavior for `PGH004`.
|
||||
|
||||
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
|
||||
|
||||
In previous versions of Ruff, you could specify your Python version with:
|
||||
|
||||
- The `target-version` option in a `ruff.toml` file or the `[tool.ruff]` section of a pyproject.toml file.
|
||||
- The `project.requires-python` field in a `pyproject.toml` file with a `[tool.ruff]` section.
|
||||
|
||||
These options worked well in most cases, and are still recommended for fine control of the Python version. However, because of the way Ruff discovers config files, `pyproject.toml` files without a `[tool.ruff]` section would be ignored, including the `requires-python` setting. Ruff would then use the default Python version (3.9 as of this writing) instead, which is surprising when you've attempted to request another version.
|
||||
|
||||
In v0.10, config discovery has been updated to address this issue:
|
||||
|
||||
- If Ruff finds a `ruff.toml` file without a `target-version`, it will check
|
||||
for a `pyproject.toml` file in the same directory and respect its
|
||||
`requires-python` version, even if it does not contain a `[tool.ruff]`
|
||||
section.
|
||||
- If Ruff finds a user-level configuration, the `requires-python` field of the closest `pyproject.toml` in a parent directory will take precedence.
|
||||
- If there is no config file (`ruff.toml`or `pyproject.toml` with a
|
||||
`[tool.ruff]` section) in the directory of the file being checked, Ruff will
|
||||
search for the closest `pyproject.toml` in the parent directories and use its
|
||||
`requires-python` setting.
|
||||
|
||||
## 0.10.0
|
||||
|
||||
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
|
||||
|
||||
Because of a mistake in the release process, the `requires-python` inference changes are not included in this release and instead shipped as part of 0.11.0.
|
||||
You can find a description of this change in the 0.11.0 section.
|
||||
|
||||
- **Updated `TYPE_CHECKING` behavior** ([#16669](https://github.com/astral-sh/ruff/pull/16669))
|
||||
|
||||
Previously, Ruff only recognized typechecking blocks that tested the `typing.TYPE_CHECKING` symbol. Now, Ruff recognizes any local variable named `TYPE_CHECKING`. This release also removes support for the legacy `if 0:` and `if False:` typechecking checks. Use a local `TYPE_CHECKING` variable instead.
|
||||
|
||||
- **More robust noqa parsing** ([#16483](https://github.com/astral-sh/ruff/pull/16483))
|
||||
|
||||
The syntax for both file-level and in-line suppression comments has been unified and made more robust to certain errors. In most cases, this will result in more suppression comments being read by Ruff, but there are a few instances where previously read comments will now log an error to the user instead. Please refer to the documentation on [_Error suppression_](https://docs.astral.sh/ruff/linter/#error-suppression) for the full specification.
|
||||
|
||||
- **Avoid unnecessary parentheses around with statements with a single context manager and a trailing comment** ([#14005](https://github.com/astral-sh/ruff/pull/14005))
|
||||
|
||||
This change fixes a bug in the formatter where it introduced unnecessary parentheses around with statements with a single context manager and a trailing comment. This change may result in a change in formatting for some users.
|
||||
|
||||
- **Bump alpine default tag to 3.21 for derived Docker images** ([#16456](https://github.com/astral-sh/ruff/pull/16456))
|
||||
|
||||
Alpine 3.21 was released in Dec 2024 and is used in the official Alpine-based Python images. Now the ruff:alpine image will use 3.21 instead of 3.20 and ruff:alpine3.20 will no longer be updated.
|
||||
|
||||
- **\[`unsafe-markup-use`\]: `RUF035` has been recoded to `S704`** ([#15957](https://github.com/astral-sh/ruff/pull/15957))
|
||||
|
||||
## 0.9.0
|
||||
|
||||
Ruff now formats your code according to the 2025 style guide. As a result, your code might now get formatted differently. See the [changelog](./CHANGELOG.md#090) for a detailed list of changes.
|
||||
|
||||
## 0.8.0
|
||||
|
||||
- **Default to Python 3.9**
|
||||
|
||||
Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))
|
||||
|
||||
- **Changed location of `pydoclint` diagnostics**
|
||||
|
||||
[`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first-line of the problematic docstring. Previously, this was not the case.
|
||||
|
||||
If you've opted into these preview rules but have them suppressed using
|
||||
[`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
|
||||
some places, this change may mean that you need to move the `noqa` suppression
|
||||
comments. Most users should be unaffected by this change.
|
||||
|
||||
- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**
|
||||
|
||||
Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).
|
||||
|
||||
This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.
|
||||
|
||||
- **Changes to the line width calculation**
|
||||
|
||||
Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).
|
||||
|
||||
## 0.7.0
|
||||
|
||||
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
|
||||
([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
|
||||
This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
|
||||
See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
|
||||
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
|
||||
`TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
|
||||
the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
|
||||
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
|
||||
[`lint.pyflakes.allow-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
|
||||
instead.
|
||||
|
||||
## 0.6.0
|
||||
|
||||
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
|
||||
|
||||
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
|
||||
|
||||
- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
|
||||
|
||||
You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):
|
||||
|
||||
```toml
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"*.ipynb" = ["E501"] # disable line-too-long in notebooks
|
||||
```
|
||||
|
||||
If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
|
||||
section-specific `exclude` option to do so. For example, the following would only lint Jupyter
|
||||
Notebook files and not format them:
|
||||
|
||||
```toml
|
||||
[tool.ruff.format]
|
||||
exclude = ["*.ipynb"]
|
||||
```
|
||||
|
||||
And, conversely, the following would only format Jupyter Notebook files and not lint them:
|
||||
|
||||
```toml
|
||||
[tool.ruff.lint]
|
||||
exclude = ["*.ipynb"]
|
||||
```
|
||||
|
||||
You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
extend-exclude = ["*.ipynb"]
|
||||
```
|
||||
|
||||
## 0.5.0
|
||||
|
||||
- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
|
||||
- Selecting `ALL` now excludes deprecated rules
|
||||
- The released archives now include an extra level of nesting, which can be removed with `--strip-components=1` when untarring.
|
||||
- The release artifact's file name no longer includes the version tag. This enables users to install via `/latest` URLs on GitHub.
|
||||
|
||||
## 0.3.0
|
||||
|
||||
### Ruff 2024.2 style
|
||||
@@ -246,7 +109,7 @@ flag or `unsafe-fixes` configuration option can be used to enable unsafe fixes.
|
||||
|
||||
See the [docs](https://docs.astral.sh/ruff/configuration/#fix-safety) for details.
|
||||
|
||||
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
|
||||
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
|
||||
|
||||
Previously, Ruff enabled all implemented rules in Pycodestyle (`E`) by default. Ruff now only includes the
|
||||
Pycodestyle prefixes `E4`, `E7`, and `E9` to exclude rules that conflict with automatic formatters. Consequently,
|
||||
@@ -259,8 +122,8 @@ This change only affects those using Ruff under its default rule set. Users that
|
||||
|
||||
### Remove support for emoji identifiers ([#7212](https://github.com/astral-sh/ruff/pull/7212))
|
||||
|
||||
Previously, Ruff supported non-standards-compliant emoji identifiers such as `📦 = 1`.
|
||||
We decided to remove this non-standard language extension. Ruff now reports syntax errors for invalid emoji identifiers in your code, the same as CPython.
|
||||
Previously, Ruff supported the non-standard compliant emoji identifiers e.g. `📦 = 1`.
|
||||
We decided to remove this non-standard language extension, and Ruff now reports syntax errors for emoji identifiers in your code, the same as CPython.
|
||||
|
||||
### Improved GitLab fingerprints ([#7203](https://github.com/astral-sh/ruff/pull/7203))
|
||||
|
||||
|
||||
2330
CHANGELOG.md
2330
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
@@ -71,7 +71,8 @@ representative at an online or offline event.
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at <hey@astral.sh>.
|
||||
reported to the community leaders responsible for enforcement at
|
||||
<charlie.r.marsh@gmail.com>.
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
|
||||
161
CONTRIBUTING.md
161
CONTRIBUTING.md
@@ -2,10 +2,34 @@
|
||||
|
||||
Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.
|
||||
|
||||
> [!NOTE]
|
||||
>
|
||||
> This guide is for Ruff. If you're looking to contribute to ty, please see [the ty contributing
|
||||
> guide](https://github.com/astral-sh/ruff/blob/main/crates/ty/CONTRIBUTING.md).
|
||||
- [The Basics](#the-basics)
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [Development](#development)
|
||||
- [Project Structure](#project-structure)
|
||||
- [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
|
||||
- [Rule naming convention](#rule-naming-convention)
|
||||
- [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
|
||||
- [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
|
||||
- [MkDocs](#mkdocs)
|
||||
- [Release Process](#release-process)
|
||||
- [Creating a new release](#creating-a-new-release)
|
||||
- [Ecosystem CI](#ecosystem-ci)
|
||||
- [Benchmarking and Profiling](#benchmarking-and-profiling)
|
||||
- [CPython Benchmark](#cpython-benchmark)
|
||||
- [Microbenchmarks](#microbenchmarks)
|
||||
- [Benchmark-driven Development](#benchmark-driven-development)
|
||||
- [PR Summary](#pr-summary)
|
||||
- [Tips](#tips)
|
||||
- [Profiling Projects](#profiling-projects)
|
||||
- [Linux](#linux)
|
||||
- [Mac](#mac)
|
||||
- [`cargo dev`](#cargo-dev)
|
||||
- [Subsystems](#subsystems)
|
||||
- [Compilation Pipeline](#compilation-pipeline)
|
||||
- [Import Categorization](#import-categorization)
|
||||
- [Project root](#project-root)
|
||||
- [Package root](#package-root)
|
||||
- [Import categorization](#import-categorization-1)
|
||||
|
||||
## The Basics
|
||||
|
||||
@@ -34,14 +58,16 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
|
||||
cargo install cargo-insta
|
||||
```
|
||||
|
||||
You'll need [uv](https://docs.astral.sh/uv/getting-started/installation/) (or `pipx` and `pip`) to
|
||||
run Python utility commands.
|
||||
And you'll need pre-commit to run some validation checks:
|
||||
|
||||
```shell
|
||||
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
|
||||
```
|
||||
|
||||
You can optionally install pre-commit hooks to automatically run the validation checks
|
||||
when making a commit:
|
||||
|
||||
```shell
|
||||
uv tool install pre-commit
|
||||
pre-commit install
|
||||
```
|
||||
|
||||
@@ -69,14 +95,12 @@ and that it passes both the lint and test validation checks:
|
||||
```shell
|
||||
cargo clippy --workspace --all-targets --all-features -- -D warnings # Rust linting
|
||||
RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
|
||||
uvx pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
|
||||
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
|
||||
```
|
||||
|
||||
These checks will run on GitHub Actions when you open your pull request, but running them locally
|
||||
will save you time and expedite the merge process.
|
||||
|
||||
If you're using VS Code, you can also install the recommended [rust-analyzer](https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer) extension to get these checks while editing.
|
||||
|
||||
Note that many code changes also require updating the snapshot tests, which is done interactively
|
||||
after running `cargo test` like so:
|
||||
|
||||
@@ -144,7 +168,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
|
||||
1. Create a file for your rule (e.g., `crates/ruff_linter/src/rules/flake8_bugbear/rules/assert_false.rs`).
|
||||
|
||||
1. In that file, define a violation struct (e.g., `pub struct AssertFalse`). You can grep for
|
||||
`#[derive(ViolationMetadata)]` to see examples.
|
||||
`#[violation]` to see examples.
|
||||
|
||||
1. In that file, define a function that adds the violation to the diagnostic list as appropriate
|
||||
(e.g., `pub(crate) fn assert_false`) based on whatever inputs are required for the rule (e.g.,
|
||||
@@ -254,7 +278,7 @@ These represent, respectively: the schema used to parse the `pyproject.toml` fil
|
||||
intermediate representation; and the final, internal representation used to power Ruff.
|
||||
|
||||
To add a new configuration option, you'll likely want to modify these latter few files (along with
|
||||
`args.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
|
||||
`arg.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
|
||||
`dummy_variable_rgx`, which defines a regular expression to match against acceptable unused
|
||||
variables (e.g., `_`).
|
||||
|
||||
@@ -270,20 +294,26 @@ To preview any changes to the documentation locally:
|
||||
|
||||
1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
|
||||
|
||||
1. Install MkDocs and Material for MkDocs with:
|
||||
|
||||
```shell
|
||||
pip install -r docs/requirements.txt
|
||||
```
|
||||
|
||||
1. Generate the MkDocs site with:
|
||||
|
||||
```shell
|
||||
uv run --no-project --isolated --with-requirements docs/requirements.txt scripts/generate_mkdocs.py
|
||||
python scripts/generate_mkdocs.py
|
||||
```
|
||||
|
||||
1. Run the development server with:
|
||||
|
||||
```shell
|
||||
# For contributors.
|
||||
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
|
||||
mkdocs serve -f mkdocs.public.yml
|
||||
|
||||
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
|
||||
uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
|
||||
mkdocs serve -f mkdocs.insiders.yml
|
||||
```
|
||||
|
||||
The documentation should then be available locally at
|
||||
@@ -301,63 +331,41 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
||||
### Creating a new release
|
||||
|
||||
1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
|
||||
|
||||
1. Run `./scripts/release.sh`; this command will:
|
||||
|
||||
1. Run `./scripts/release/bump.sh`; this command will:
|
||||
- Generate a temporary virtual environment with `rooster`
|
||||
- Generate a changelog entry in `CHANGELOG.md`
|
||||
- Update versions in `pyproject.toml` and `Cargo.toml`
|
||||
- Update references to versions in the `README.md` and documentation
|
||||
- Display contributors for the release
|
||||
|
||||
1. The changelog should then be editorialized for consistency
|
||||
|
||||
- Often labels will be missing from pull requests they will need to be manually organized into the proper section
|
||||
- Changes should be edited to be user-facing descriptions, avoiding internal details
|
||||
|
||||
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
|
||||
|
||||
1. Run `cargo check`. This should update the lock file with new versions.
|
||||
|
||||
1. Create a pull request with the changelog and version updates
|
||||
|
||||
1. Merge the PR
|
||||
|
||||
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:
|
||||
|
||||
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yaml) with:
|
||||
- The new version number (without starting `v`)
|
||||
|
||||
- The commit hash of the merged release pull request on `main`
|
||||
1. The release workflow will do the following:
|
||||
|
||||
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
|
||||
uploaded anything, you can restart after pushing a fix. If you just need to rerun the build,
|
||||
make sure you're [re-running all the failed
|
||||
jobs](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-failed-jobs-in-a-workflow) and not just a single failed job.
|
||||
uploaded anything, you can restart after pushing a fix.
|
||||
1. Upload to PyPI.
|
||||
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
|
||||
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
|
||||
1. Attach artifacts to draft GitHub release
|
||||
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
|
||||
downstream jobs manually if needed.
|
||||
|
||||
1. Verify the GitHub release:
|
||||
|
||||
1. The Changelog should match the content of `CHANGELOG.md`
|
||||
1. Append the contributors from the `scripts/release.sh` script
|
||||
|
||||
1. Publish the GitHub release
|
||||
1. Open the draft release in the GitHub release section
|
||||
1. Copy the changelog for the release into the GitHub release
|
||||
- See previous releases for formatting of section headers
|
||||
1. Append the contributors from the `bump.sh` script
|
||||
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
|
||||
|
||||
1. One can determine if an update is needed when
|
||||
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
|
||||
1. Once run successfully, you should follow the link in the output to create a PR.
|
||||
|
||||
1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
|
||||
[`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
|
||||
the release instructions in those repositories. `ruff-lsp` should always be updated
|
||||
before `ruff-vscode`.
|
||||
|
||||
This step is generally not required for a patch release, but should always be done
|
||||
for a minor release.
|
||||
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
|
||||
|
||||
## Ecosystem CI
|
||||
|
||||
@@ -365,21 +373,13 @@ GitHub Actions will run your changes against a number of real-world projects fro
|
||||
report on any linter or formatter differences. You can also run those checks locally via:
|
||||
|
||||
```shell
|
||||
uvx --from ./python/ruff-ecosystem ruff-ecosystem check ruff "./target/debug/ruff"
|
||||
uvx --from ./python/ruff-ecosystem ruff-ecosystem format ruff "./target/debug/ruff"
|
||||
pip install -e ./python/ruff-ecosystem
|
||||
ruff-ecosystem check ruff "./target/debug/ruff"
|
||||
ruff-ecosystem format ruff "./target/debug/ruff"
|
||||
```
|
||||
|
||||
See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
|
||||
|
||||
## Upgrading Rust
|
||||
|
||||
1. Change the `channel` in `./rust-toolchain.toml` to the new Rust version (`<latest>`)
|
||||
1. Change the `rust-version` in the `./Cargo.toml` to `<latest> - 2` (e.g. 1.84 if the latest is 1.86)
|
||||
1. Run `cargo clippy --fix --allow-dirty --allow-staged` to fix new clippy warnings
|
||||
1. Create and merge the PR
|
||||
1. Bump the Rust version in Ruff's conda forge recipe. See [this PR](https://github.com/conda-forge/ruff-feedstock/pull/266) for an example.
|
||||
1. Enjoy the new Rust version!
|
||||
|
||||
## Benchmarking and Profiling
|
||||
|
||||
We have several ways of benchmarking and profiling Ruff:
|
||||
@@ -388,7 +388,7 @@ We have several ways of benchmarking and profiling Ruff:
|
||||
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
|
||||
- Profiling the linter on either the microbenchmarks or entire projects
|
||||
|
||||
> **Note**
|
||||
> \[!NOTE\]
|
||||
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
|
||||
> applications, like web browsers). You may also want to switch your CPU to a "performance"
|
||||
> mode, if it exists, especially when benchmarking short-lived processes.
|
||||
@@ -402,18 +402,12 @@ which makes it a good target for benchmarking.
|
||||
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
|
||||
```
|
||||
|
||||
Install `hyperfine`:
|
||||
|
||||
```shell
|
||||
cargo install hyperfine
|
||||
```
|
||||
|
||||
To benchmark the release build:
|
||||
|
||||
```shell
|
||||
cargo build --release --bin ruff && hyperfine --warmup 10 \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
|
||||
cargo build --release && hyperfine --warmup 10 \
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"
|
||||
|
||||
Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
|
||||
Time (mean ± σ): 293.8 ms ± 3.2 ms [User: 2384.6 ms, System: 90.3 ms]
|
||||
@@ -432,7 +426,7 @@ To benchmark against the ecosystem's existing tools:
|
||||
|
||||
```shell
|
||||
hyperfine --ignore-failure --warmup 5 \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
|
||||
"pyflakes crates/ruff_linter/resources/test/cpython" \
|
||||
"autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
|
||||
"pycodestyle crates/ruff_linter/resources/test/cpython" \
|
||||
@@ -478,10 +472,10 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:
|
||||
|
||||
```shell
|
||||
cargo build --release && hyperfine --warmup 10 \
|
||||
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
|
||||
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
|
||||
```
|
||||
|
||||
You can run `uv venv --project ./scripts/benchmarks`, activate the venv and then run `uv sync --project ./scripts/benchmarks` to create a working environment for the
|
||||
You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
|
||||
above. All reported benchmarks were computed using the versions specified by
|
||||
`./scripts/benchmarks/pyproject.toml` on Python 3.11.
|
||||
|
||||
@@ -535,12 +529,10 @@ You can run the benchmarks with
|
||||
cargo benchmark
|
||||
```
|
||||
|
||||
`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`
|
||||
|
||||
#### Benchmark-driven Development
|
||||
|
||||
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
|
||||
`--save-baseline=<name>` to store an initial baseline benchmark (e.g., on `main`) and then use
|
||||
`--save-baseline=<name>` to store an initial baseline benchmark (e.g. on `main`) and then use
|
||||
`--benchmark=<name>` to compare against that benchmark. Criterion will print a message telling you
|
||||
if the benchmark improved/regressed compared to that baseline.
|
||||
|
||||
@@ -575,7 +567,7 @@ cargo install critcmp
|
||||
|
||||
#### Tips
|
||||
|
||||
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
|
||||
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
|
||||
to only run the lexer benchmarks.
|
||||
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
|
||||
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
|
||||
@@ -610,7 +602,8 @@ Then convert the recorded profile
|
||||
perf script -F +pid > /tmp/test.perf
|
||||
```
|
||||
|
||||
You can now view the converted file with [firefox profiler](https://profiler.firefox.com/). To learn more about Firefox profiler, read the [Firefox profiler profiling-guide](https://profiler.firefox.com/docs/#/./guide-perf-profiling).
|
||||
You can now view the converted file with [firefox profiler](https://profiler.firefox.com/), with a
|
||||
more in-depth guide [here](https://profiler.firefox.com/docs/#/./guide-perf-profiling)
|
||||
|
||||
An alternative is to convert the perf data to `flamegraph.svg` using
|
||||
[flamegraph](https://github.com/flamegraph-rs/flamegraph) (`cargo install flamegraph`):
|
||||
@@ -691,9 +684,9 @@ utils with it:
|
||||
23 Newline 24
|
||||
```
|
||||
|
||||
- `cargo dev print-cst <file>`: Print the CST of a Python file using
|
||||
- `cargo dev print-cst <file>`: Print the CST of a python file using
|
||||
[LibCST](https://github.com/Instagram/LibCST), which is used in addition to the RustPython parser
|
||||
in Ruff. For example, for `if True: pass # comment`, everything, including the whitespace, is represented:
|
||||
in Ruff. E.g. for `if True: pass # comment` everything including the whitespace is represented:
|
||||
|
||||
```text
|
||||
Module {
|
||||
@@ -876,7 +869,7 @@ each configuration file.
|
||||
|
||||
The package root is used to determine a file's "module path". Consider, again, `baz.py`. In that
|
||||
case, `./my_project/src/foo` was identified as the package root, so the module path for `baz.py`
|
||||
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
|
||||
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
|
||||
(inclusive of the root itself). The module path can be thought of as "the path you would use to
|
||||
import the module" (e.g., `import foo.bar.baz`).
|
||||
|
||||
@@ -911,11 +904,15 @@ There are three ways in which an import can be categorized as "first-party":
|
||||
package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
|
||||
automatically. This check is as simple as comparing the first segment of the current file's
|
||||
module path to the first segment of the import.
|
||||
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
|
||||
1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
|
||||
sets the directories to scan when identifying first-party imports. The algorithm is
|
||||
straightforward: given an import, like `import foo`, iterate over the directories enumerated in
|
||||
the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
|
||||
file `foo.py`.
|
||||
|
||||
By default, `src` is set to the project root, along with `"src"` subdirectory in the project root.
|
||||
This ensures that Ruff supports both flat and "src" layouts out of the box.
|
||||
By default, `src` is set to the project root. In the above example, we'd want to set
|
||||
`src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
|
||||
as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
|
||||
unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
|
||||
the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
|
||||
explicitly.
|
||||
|
||||
3186
Cargo.lock
generated
3186
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
152
Cargo.toml
152
Cargo.toml
@@ -3,9 +3,8 @@ members = ["crates/*"]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
# Please update rustfmt.toml when bumping the Rust edition
|
||||
edition = "2024"
|
||||
rust-version = "1.85"
|
||||
edition = "2021"
|
||||
rust-version = "1.71"
|
||||
homepage = "https://docs.astral.sh/ruff"
|
||||
documentation = "https://docs.astral.sh/ruff"
|
||||
repository = "https://github.com/astral-sh/ruff"
|
||||
@@ -14,17 +13,13 @@ license = "MIT"
|
||||
|
||||
[workspace.dependencies]
|
||||
ruff = { path = "crates/ruff" }
|
||||
ruff_annotate_snippets = { path = "crates/ruff_annotate_snippets" }
|
||||
ruff_cache = { path = "crates/ruff_cache" }
|
||||
ruff_db = { path = "crates/ruff_db", default-features = false }
|
||||
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
|
||||
ruff_formatter = { path = "crates/ruff_formatter" }
|
||||
ruff_graph = { path = "crates/ruff_graph" }
|
||||
ruff_index = { path = "crates/ruff_index" }
|
||||
ruff_linter = { path = "crates/ruff_linter" }
|
||||
ruff_macros = { path = "crates/ruff_macros" }
|
||||
ruff_notebook = { path = "crates/ruff_notebook" }
|
||||
ruff_options_metadata = { path = "crates/ruff_options_metadata" }
|
||||
ruff_python_ast = { path = "crates/ruff_python_ast" }
|
||||
ruff_python_codegen = { path = "crates/ruff_python_codegen" }
|
||||
ruff_python_formatter = { path = "crates/ruff_python_formatter" }
|
||||
@@ -39,165 +34,115 @@ ruff_source_file = { path = "crates/ruff_source_file" }
|
||||
ruff_text_size = { path = "crates/ruff_text_size" }
|
||||
ruff_workspace = { path = "crates/ruff_workspace" }
|
||||
|
||||
ty = { path = "crates/ty" }
|
||||
ty_ide = { path = "crates/ty_ide" }
|
||||
ty_project = { path = "crates/ty_project", default-features = false }
|
||||
ty_python_semantic = { path = "crates/ty_python_semantic" }
|
||||
ty_server = { path = "crates/ty_server" }
|
||||
ty_test = { path = "crates/ty_test" }
|
||||
ty_vendored = { path = "crates/ty_vendored" }
|
||||
|
||||
aho-corasick = { version = "1.1.3" }
|
||||
anstream = { version = "0.6.18" }
|
||||
anstyle = { version = "1.0.10" }
|
||||
annotate-snippets = { version = "0.9.2", features = ["color"] }
|
||||
anyhow = { version = "1.0.80" }
|
||||
assert_fs = { version = "1.1.0" }
|
||||
argfile = { version = "0.2.0" }
|
||||
bincode = { version = "2.0.0" }
|
||||
bincode = { version = "1.3.3" }
|
||||
bitflags = { version = "2.5.0" }
|
||||
bstr = { version = "1.9.1" }
|
||||
cachedir = { version = "0.3.1" }
|
||||
camino = { version = "1.1.7" }
|
||||
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
|
||||
clap = { version = "4.5.3", features = ["derive"] }
|
||||
clap_complete_command = { version = "0.6.0" }
|
||||
clearscreen = { version = "4.0.0" }
|
||||
clap_complete_command = { version = "0.5.1" }
|
||||
clearscreen = { version = "3.0.0" }
|
||||
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
|
||||
colored = { version = "3.0.0" }
|
||||
colored = { version = "2.1.0" }
|
||||
console_error_panic_hook = { version = "0.1.7" }
|
||||
console_log = { version = "1.0.0" }
|
||||
countme = { version = "3.0.1" }
|
||||
compact_str = "0.9.0"
|
||||
criterion = { version = "0.6.0", default-features = false }
|
||||
criterion = { version = "0.5.1", default-features = false }
|
||||
crossbeam = { version = "0.8.4" }
|
||||
dashmap = { version = "6.0.1" }
|
||||
dir-test = { version = "0.4.0" }
|
||||
dunce = { version = "1.0.5" }
|
||||
dashmap = { version = "5.5.3" }
|
||||
dirs = { version = "5.0.0" }
|
||||
drop_bomb = { version = "0.1.5" }
|
||||
env_logger = { version = "0.11.0" }
|
||||
etcetera = { version = "0.10.0" }
|
||||
fern = { version = "0.7.0" }
|
||||
fern = { version = "0.6.1" }
|
||||
filetime = { version = "0.2.23" }
|
||||
getrandom = { version = "0.3.1" }
|
||||
glob = { version = "0.3.1" }
|
||||
globset = { version = "0.4.14" }
|
||||
globwalk = { version = "0.9.1" }
|
||||
hashbrown = { version = "0.15.0", default-features = false, features = [
|
||||
"raw-entry",
|
||||
"equivalent",
|
||||
"inline-more",
|
||||
] }
|
||||
heck = "0.5.0"
|
||||
hashbrown = "0.14.3"
|
||||
hexf-parse = { version = "0.2.1" }
|
||||
ignore = { version = "0.4.22" }
|
||||
imara-diff = { version = "0.1.5" }
|
||||
imperative = { version = "1.0.4" }
|
||||
indexmap = { version = "2.6.0" }
|
||||
indexmap = { version = "2.2.6" }
|
||||
indicatif = { version = "0.17.8" }
|
||||
indoc = { version = "2.0.4" }
|
||||
insta = { version = "1.35.1" }
|
||||
insta = { version = "1.35.1", feature = ["filters", "glob"] }
|
||||
insta-cmd = { version = "0.6.0" }
|
||||
is-macro = { version = "0.3.5" }
|
||||
is-wsl = { version = "0.4.0" }
|
||||
itertools = { version = "0.14.0" }
|
||||
jiff = { version = "0.2.0" }
|
||||
itertools = { version = "0.12.1" }
|
||||
js-sys = { version = "0.3.69" }
|
||||
jod-thread = { version = "1.0.0" }
|
||||
jod-thread = { version = "0.1.2" }
|
||||
lexical-parse-float = { version = "0.8.0", features = ["format"] }
|
||||
libc = { version = "0.2.153" }
|
||||
libcst = { version = "1.1.0", default-features = false }
|
||||
log = { version = "0.4.17" }
|
||||
lsp-server = { version = "0.7.6" }
|
||||
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
|
||||
"proposed",
|
||||
] }
|
||||
lsp-types = { version = "0.95.0", features = ["proposed"] }
|
||||
matchit = { version = "0.8.1" }
|
||||
memchr = { version = "2.7.1" }
|
||||
mimalloc = { version = "0.1.39" }
|
||||
natord = { version = "1.0.9" }
|
||||
notify = { version = "8.0.0" }
|
||||
ordermap = { version = "0.5.0" }
|
||||
notify = { version = "6.1.1" }
|
||||
once_cell = { version = "1.19.0" }
|
||||
path-absolutize = { version = "3.1.1" }
|
||||
path-slash = { version = "0.2.1" }
|
||||
pathdiff = { version = "0.2.1" }
|
||||
pep440_rs = { version = "0.7.1" }
|
||||
parking_lot = "0.12.1"
|
||||
pep440_rs = { version = "0.6.0", features = ["serde"] }
|
||||
pretty_assertions = "1.3.0"
|
||||
proc-macro2 = { version = "1.0.79" }
|
||||
pyproject-toml = { version = "0.13.4" }
|
||||
quick-junit = { version = "0.5.0" }
|
||||
pyproject-toml = { version = "0.9.0" }
|
||||
quick-junit = { version = "0.4.0" }
|
||||
quote = { version = "1.0.23" }
|
||||
rand = { version = "0.9.0" }
|
||||
rand = { version = "0.8.5" }
|
||||
rayon = { version = "1.10.0" }
|
||||
regex = { version = "1.10.2" }
|
||||
rustc-hash = { version = "2.0.0" }
|
||||
rustc-stable-hash = { version = "0.1.2" }
|
||||
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
|
||||
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "4818b15f3b7516555d39f5a41cb75970448bee4c" }
|
||||
result-like = { version = "0.5.0" }
|
||||
rustc-hash = { version = "1.1.0" }
|
||||
schemars = { version = "0.8.16" }
|
||||
seahash = { version = "4.1.0" }
|
||||
serde = { version = "1.0.197", features = ["derive"] }
|
||||
serde-wasm-bindgen = { version = "0.6.4" }
|
||||
serde_json = { version = "1.0.113" }
|
||||
serde_test = { version = "1.0.152" }
|
||||
serde_with = { version = "3.6.0", default-features = false, features = [
|
||||
"macros",
|
||||
] }
|
||||
serde_with = { version = "3.6.0", default-features = false, features = ["macros"] }
|
||||
shellexpand = { version = "3.0.0" }
|
||||
similar = { version = "2.4.0", features = ["inline"] }
|
||||
smallvec = { version = "1.13.2" }
|
||||
snapbox = { version = "0.6.0", features = [
|
||||
"diff",
|
||||
"term-svg",
|
||||
"cmd",
|
||||
"examples",
|
||||
] }
|
||||
static_assertions = "1.1.0"
|
||||
strum = { version = "0.27.0", features = ["strum_macros"] }
|
||||
strum_macros = { version = "0.27.0" }
|
||||
strum = { version = "0.26.0", features = ["strum_macros"] }
|
||||
strum_macros = { version = "0.26.0" }
|
||||
syn = { version = "2.0.55" }
|
||||
tempfile = { version = "3.9.0" }
|
||||
test-case = { version = "3.3.1" }
|
||||
thiserror = { version = "2.0.0" }
|
||||
tikv-jemallocator = { version = "0.6.0" }
|
||||
thiserror = { version = "1.0.58" }
|
||||
tikv-jemallocator = { version = "0.5.0" }
|
||||
toml = { version = "0.8.11" }
|
||||
tracing = { version = "0.1.40" }
|
||||
tracing-flame = { version = "0.2.0" }
|
||||
tracing-indicatif = { version = "0.3.6" }
|
||||
tracing-log = { version = "0.2.0" }
|
||||
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
|
||||
"env-filter",
|
||||
"fmt",
|
||||
"ansi",
|
||||
"smallvec"
|
||||
] }
|
||||
tryfn = { version = "0.2.1" }
|
||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
||||
tracing-tree = { version = "0.3.0" }
|
||||
typed-arena = { version = "2.0.2" }
|
||||
unic-ucd-category = { version = "0.9" }
|
||||
unicode-ident = { version = "1.0.12" }
|
||||
unicode-width = { version = "0.2.0" }
|
||||
unicode-width = { version = "0.1.11" }
|
||||
unicode_names2 = { version = "1.2.2" }
|
||||
unicode-normalization = { version = "0.1.23" }
|
||||
ureq = { version = "2.9.6" }
|
||||
url = { version = "2.5.0" }
|
||||
uuid = { version = "1.6.1", features = [
|
||||
"v4",
|
||||
"fast-rng",
|
||||
"macro-diagnostics",
|
||||
"js",
|
||||
] }
|
||||
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
|
||||
walkdir = { version = "2.3.2" }
|
||||
wasm-bindgen = { version = "0.2.92" }
|
||||
wasm-bindgen-test = { version = "0.3.42" }
|
||||
wild = { version = "2" }
|
||||
zip = { version = "0.6.6", default-features = false }
|
||||
|
||||
[workspace.metadata.cargo-shear]
|
||||
ignored = ["getrandom", "ruff_options_metadata"]
|
||||
|
||||
|
||||
[workspace.lints.rust]
|
||||
unsafe_code = "warn"
|
||||
unreachable_pub = "warn"
|
||||
unexpected_cfgs = { level = "warn", check-cfg = [
|
||||
"cfg(fuzzing)",
|
||||
"cfg(codspeed)",
|
||||
] }
|
||||
|
||||
[workspace.lints.clippy]
|
||||
pedantic = { level = "warn", priority = -2 }
|
||||
@@ -213,11 +158,8 @@ missing_panics_doc = "allow"
|
||||
module_name_repetitions = "allow"
|
||||
must_use_candidate = "allow"
|
||||
similar_names = "allow"
|
||||
single_match_else = "allow"
|
||||
too_many_lines = "allow"
|
||||
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
|
||||
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
|
||||
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
|
||||
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
|
||||
needless_raw_string_hashes = "allow"
|
||||
# Disallowed restriction lints
|
||||
print_stdout = "warn"
|
||||
@@ -230,13 +172,6 @@ get_unwrap = "warn"
|
||||
rc_buffer = "warn"
|
||||
rc_mutex = "warn"
|
||||
rest_pat_in_fully_bound_structs = "warn"
|
||||
# nursery rules
|
||||
redundant_clone = "warn"
|
||||
debug_assert_with_mut_call = "warn"
|
||||
unused_peekable = "warn"
|
||||
|
||||
# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
|
||||
large_stack_arrays = "allow"
|
||||
|
||||
[profile.release]
|
||||
# Note that we set these explicitly, and these values
|
||||
@@ -261,9 +196,6 @@ opt-level = 3
|
||||
[profile.dev.package.similar]
|
||||
opt-level = 3
|
||||
|
||||
[profile.dev.package.salsa]
|
||||
opt-level = 3
|
||||
|
||||
# Reduce complexity of a parser function that would trigger a locals limit in a wasm tool.
|
||||
# https://github.com/bytecodealliance/wasm-tools/blob/b5c3d98e40590512a3b12470ef358d5c7b983b15/crates/wasmparser/src/limits.rs#L29
|
||||
[profile.dev.package.ruff_python_parser]
|
||||
@@ -274,7 +206,3 @@ opt-level = 1
|
||||
[profile.profiling]
|
||||
inherits = "release"
|
||||
debug = 1
|
||||
|
||||
# The profile that 'cargo dist' will build with.
|
||||
[profile.dist]
|
||||
inherits = "release"
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM --platform=$BUILDPLATFORM ubuntu AS build
|
||||
FROM --platform=$BUILDPLATFORM ubuntu as build
|
||||
ENV HOME="/root"
|
||||
WORKDIR $HOME
|
||||
|
||||
|
||||
25
LICENSE
25
LICENSE
@@ -1371,28 +1371,3 @@ are:
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
"""
|
||||
|
||||
- pydoclint, licensed as follows:
|
||||
"""
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2023 jsh9
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
"""
|
||||
|
||||
81
README.md
81
README.md
@@ -28,15 +28,15 @@ An extremely fast Python linter and code formatter, written in Rust.
|
||||
- ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
|
||||
- 🐍 Installable via `pip`
|
||||
- 🛠️ `pyproject.toml` support
|
||||
- 🤝 Python 3.13 compatibility
|
||||
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
|
||||
- 🤝 Python 3.12 compatibility
|
||||
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
|
||||
- 📦 Built-in caching, to avoid re-analyzing unchanged files
|
||||
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
|
||||
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
|
||||
of popular Flake8 plugins, like flake8-bugbear
|
||||
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
|
||||
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
|
||||
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)
|
||||
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
|
||||
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)
|
||||
|
||||
Ruff aims to be orders of magnitude faster than alternative tools while integrating more
|
||||
functionality behind a single, common interface.
|
||||
@@ -110,47 +110,16 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
|
||||
1. [Who's Using Ruff?](#whos-using-ruff)
|
||||
1. [License](#license)
|
||||
|
||||
## Getting Started<a id="getting-started"></a>
|
||||
## Getting Started
|
||||
|
||||
For more, see the [documentation](https://docs.astral.sh/ruff/).
|
||||
|
||||
### Installation
|
||||
|
||||
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI.
|
||||
|
||||
Invoke Ruff directly with [`uvx`](https://docs.astral.sh/uv/):
|
||||
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
|
||||
|
||||
```shell
|
||||
uvx ruff check # Lint all files in the current directory.
|
||||
uvx ruff format # Format all files in the current directory.
|
||||
```
|
||||
|
||||
Or install Ruff with `uv` (recommended), `pip`, or `pipx`:
|
||||
|
||||
```shell
|
||||
# With uv.
|
||||
uv tool install ruff@latest # Install Ruff globally.
|
||||
uv add --dev ruff # Or add Ruff to your project.
|
||||
|
||||
# With pip.
|
||||
pip install ruff
|
||||
|
||||
# With pipx.
|
||||
pipx install ruff
|
||||
```
|
||||
|
||||
Starting with version `0.5.0`, Ruff can be installed with our standalone installers:
|
||||
|
||||
```shell
|
||||
# On macOS and Linux.
|
||||
curl -LsSf https://astral.sh/ruff/install.sh | sh
|
||||
|
||||
# On Windows.
|
||||
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
|
||||
|
||||
# For a specific version.
|
||||
curl -LsSf https://astral.sh/ruff/0.11.11/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.11.11/install.ps1 | iex"
|
||||
```
|
||||
|
||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||
@@ -183,7 +152,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.11.11
|
||||
rev: v0.4.4
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
@@ -192,10 +161,11 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
- id: ruff-format
|
||||
```
|
||||
|
||||
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).
|
||||
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
|
||||
alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).
|
||||
|
||||
Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
|
||||
[`ruff-action`](https://github.com/astral-sh/ruff-action):
|
||||
[`ruff-action`](https://github.com/chartboost/ruff-action):
|
||||
|
||||
```yaml
|
||||
name: Ruff
|
||||
@@ -205,10 +175,10 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: astral-sh/ruff-action@v3
|
||||
- uses: chartboost/ruff-action@v1
|
||||
```
|
||||
|
||||
### Configuration<a id="configuration"></a>
|
||||
### Configuration
|
||||
|
||||
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
|
||||
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
|
||||
@@ -251,11 +221,11 @@ exclude = [
|
||||
line-length = 88
|
||||
indent-width = 4
|
||||
|
||||
# Assume Python 3.9
|
||||
target-version = "py39"
|
||||
# Assume Python 3.8
|
||||
target-version = "py38"
|
||||
|
||||
[lint]
|
||||
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
||||
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
||||
select = ["E4", "E7", "E9", "F"]
|
||||
ignore = []
|
||||
|
||||
@@ -304,7 +274,7 @@ features that may change prior to stabilization.
|
||||
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
|
||||
for more on the linting and formatting commands, respectively.
|
||||
|
||||
## Rules<a id="rules"></a>
|
||||
## Rules
|
||||
|
||||
<!-- Begin section: Rules -->
|
||||
|
||||
@@ -364,6 +334,7 @@ quality tools, including:
|
||||
- [flake8-super](https://pypi.org/project/flake8-super/)
|
||||
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
|
||||
- [flake8-todos](https://pypi.org/project/flake8-todos/)
|
||||
- [flake8-trio](https://pypi.org/project/flake8-trio/)
|
||||
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
|
||||
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
|
||||
- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/astral-sh/ruff/issues/2102))
|
||||
@@ -380,21 +351,21 @@ quality tools, including:
|
||||
|
||||
For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).
|
||||
|
||||
## Contributing<a id="contributing"></a>
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome and highly appreciated. To get started, check out the
|
||||
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
|
||||
|
||||
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
|
||||
|
||||
## Support<a id="support"></a>
|
||||
## Support
|
||||
|
||||
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
|
||||
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
|
||||
|
||||
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
|
||||
|
||||
## Acknowledgements<a id="acknowledgements"></a>
|
||||
## Acknowledgements
|
||||
|
||||
Ruff's linter draws on both the APIs and implementation details of many other
|
||||
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
|
||||
@@ -418,7 +389,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a
|
||||
|
||||
Ruff is released under the MIT license.
|
||||
|
||||
## Who's Using Ruff?<a id="whos-using-ruff"></a>
|
||||
## Who's Using Ruff?
|
||||
|
||||
Ruff is used by a number of major open-source projects and companies, including:
|
||||
|
||||
@@ -430,16 +401,13 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- [Babel](https://github.com/python-babel/babel)
|
||||
- Benchling ([Refac](https://github.com/benchling/refac))
|
||||
- [Bokeh](https://github.com/bokeh/bokeh)
|
||||
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
|
||||
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
||||
- CERN ([Indico](https://getindico.io/))
|
||||
- [DVC](https://github.com/iterative/dvc)
|
||||
- [Dagger](https://github.com/dagger/dagger)
|
||||
- [Dagster](https://github.com/dagster-io/dagster)
|
||||
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
|
||||
- [Dify](https://github.com/langgenius/dify)
|
||||
- [FastAPI](https://github.com/tiangolo/fastapi)
|
||||
- [Godot](https://github.com/godotengine/godot)
|
||||
- [Gradio](https://github.com/gradio-app/gradio)
|
||||
- [Great Expectations](https://github.com/great-expectations/great_expectations)
|
||||
- [HTTPX](https://github.com/encode/httpx)
|
||||
@@ -448,11 +416,9 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
|
||||
[Datasets](https://github.com/huggingface/datasets),
|
||||
[Diffusers](https://github.com/huggingface/diffusers))
|
||||
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
|
||||
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
||||
- [Ibis](https://github.com/ibis-project/ibis)
|
||||
- [ivy](https://github.com/unifyai/ivy)
|
||||
- [JAX](https://github.com/jax-ml/jax)
|
||||
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||
- [Kraken Tech](https://kraken.tech/)
|
||||
- [LangChain](https://github.com/hwchase17/langchain)
|
||||
@@ -467,7 +433,6 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python))
|
||||
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
|
||||
- [Mypy](https://github.com/python/mypy)
|
||||
- [Nautobot](https://github.com/nautobot/nautobot)
|
||||
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
|
||||
- [Neon](https://github.com/neondatabase/neon)
|
||||
- [Nokia](https://nokia.com/)
|
||||
@@ -475,7 +440,6 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- [NumPyro](https://github.com/pyro-ppl/numpyro)
|
||||
- [ONNX](https://github.com/onnx/onnx)
|
||||
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
||||
- [Open Wine Components](https://github.com/Open-Wine-Components/umu-launcher)
|
||||
- [PDM](https://github.com/pdm-project/pdm)
|
||||
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
|
||||
- [Pandas](https://github.com/pandas-dev/pandas)
|
||||
@@ -503,7 +467,6 @@ Ruff is used by a number of major open-source projects and companies, including:
|
||||
- [Sphinx](https://github.com/sphinx-doc/sphinx)
|
||||
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
|
||||
- [Starlette](https://github.com/encode/starlette)
|
||||
- [Streamlit](https://github.com/streamlit/streamlit)
|
||||
- [The Algorithms](https://github.com/TheAlgorithms/Python)
|
||||
- [Vega-Altair](https://github.com/altair-viz/altair)
|
||||
- WordPress ([Openverse](https://github.com/WordPress/openverse))
|
||||
@@ -539,7 +502,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
|
||||
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
|
||||
```
|
||||
|
||||
## License<a id="license"></a>
|
||||
## License
|
||||
|
||||
This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
|
||||
|
||||
|
||||
15
SECURITY.md
15
SECURITY.md
@@ -1,15 +0,0 @@
|
||||
# Security policy
|
||||
|
||||
## Reporting a vulnerability
|
||||
|
||||
If you have found a possible vulnerability, please email `security at astral dot sh`.
|
||||
|
||||
## Bug bounties
|
||||
|
||||
While we sincerely appreciate and encourage reports of suspected security problems, please note that
|
||||
Astral does not currently run any bug bounty programs.
|
||||
|
||||
## Vulnerability disclosures
|
||||
|
||||
Critical vulnerabilities will be disclosed via GitHub's
|
||||
[security advisory](https://github.com/astral-sh/ruff/security) system.
|
||||
21
_typos.toml
21
_typos.toml
@@ -1,10 +1,6 @@
|
||||
[files]
|
||||
# https://github.com/crate-ci/typos/issues/868
|
||||
extend-exclude = [
|
||||
"crates/ty_vendored/vendor/**/*",
|
||||
"**/resources/**/*",
|
||||
"**/snapshots/**/*",
|
||||
]
|
||||
extend-exclude = ["crates/red_knot/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
|
||||
|
||||
[default.extend-words]
|
||||
"arange" = "arange" # e.g. `numpy.arange`
|
||||
@@ -12,22 +8,13 @@ hel = "hel"
|
||||
whos = "whos"
|
||||
spawnve = "spawnve"
|
||||
ned = "ned"
|
||||
pn = "pn" # `import panel as pn` is a thing
|
||||
pn = "pn" # `import panel as pd` is a thing
|
||||
poit = "poit"
|
||||
BA = "BA" # acronym for "Bad Allowed", used in testing.
|
||||
jod = "jod" # e.g., `jod-thread`
|
||||
Numer = "Numer" # Library name 'NumerBlox' in "Who's Using Ruff?"
|
||||
|
||||
[default]
|
||||
extend-ignore-re = [
|
||||
# Line ignore with trailing "spellchecker:disable-line"
|
||||
"(?Rm)^.*#\\s*spellchecker:disable-line$",
|
||||
"LICENSEs",
|
||||
# Various third party dependencies uses `typ` as struct field names (e.g., lsp_types::LogMessageParams)
|
||||
"typ",
|
||||
# TODO: Remove this once the `TYP` redirects are removed from `rule_redirects.rs`
|
||||
"TYP",
|
||||
# Line ignore with trailing "spellchecker:disable-line"
|
||||
"(?Rm)^.*#\\s*spellchecker:disable-line$"
|
||||
]
|
||||
|
||||
[default.extend-identifiers]
|
||||
"FrIeNdLy" = "FrIeNdLy"
|
||||
|
||||
35
clippy.toml
35
clippy.toml
@@ -1,26 +1,13 @@
|
||||
doc-valid-idents = [
|
||||
"..",
|
||||
"CodeQL",
|
||||
"CPython",
|
||||
"FastAPI",
|
||||
"IPython",
|
||||
"LangChain",
|
||||
"LibCST",
|
||||
"McCabe",
|
||||
"NumPy",
|
||||
"SCREAMING_SNAKE_CASE",
|
||||
"SQLAlchemy",
|
||||
"StackOverflow",
|
||||
"PyCharm",
|
||||
"SNMPv1",
|
||||
"SNMPv2",
|
||||
"SNMPv3",
|
||||
"PyFlakes",
|
||||
]
|
||||
|
||||
ignore-interior-mutability = [
|
||||
# Interned is read-only. The wrapped `Rc` never gets updated.
|
||||
"ruff_formatter::format_element::Interned",
|
||||
# The expression is read-only.
|
||||
"ruff_python_ast::hashable::HashableExpr",
|
||||
"..",
|
||||
"CodeQL",
|
||||
"FastAPI",
|
||||
"IPython",
|
||||
"LangChain",
|
||||
"LibCST",
|
||||
"McCabe",
|
||||
"NumPy",
|
||||
"SCREAMING_SNAKE_CASE",
|
||||
"SQLAlchemy",
|
||||
"StackOverflow",
|
||||
]
|
||||
|
||||
42
crates/red_knot/Cargo.toml
Normal file
42
crates/red_knot/Cargo.toml
Normal file
@@ -0,0 +1,42 @@
|
||||
[package]
|
||||
name = "red_knot"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
homepage.workspace = true
|
||||
documentation.workspace = true
|
||||
repository.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
ruff_python_parser = { workspace = true }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_text_size = { workspace = true }
|
||||
ruff_index = { workspace = true }
|
||||
ruff_notebook = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
bitflags = { workspace = true }
|
||||
crossbeam = { workspace = true }
|
||||
ctrlc = { version = "3.4.4" }
|
||||
dashmap = { workspace = true }
|
||||
hashbrown = { workspace = true }
|
||||
indexmap = { workspace = true }
|
||||
notify = { workspace = true }
|
||||
parking_lot = { workspace = true }
|
||||
rayon = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
smol_str = { version = "0.2.1" }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
tracing-tree = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
textwrap = { version = "0.16.1" }
|
||||
tempfile = { workspace = true }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
9
crates/red_knot/README.md
Normal file
9
crates/red_knot/README.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# Red Knot
|
||||
|
||||
The Red Knot crate contains code working towards multifile analysis, type inference and, ultimately, type-checking. It's very much a work in progress for now.
|
||||
|
||||
## Vendored types for the stdlib
|
||||
|
||||
Red Knot vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot/vendor/typeshed`. The file `crates/red_knot/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
|
||||
|
||||
The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
|
||||
415
crates/red_knot/src/ast_ids.rs
Normal file
415
crates/red_knot/src/ast_ids.rs
Normal file
@@ -0,0 +1,415 @@
|
||||
use std::any::type_name;
|
||||
use std::fmt::{Debug, Formatter};
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::marker::PhantomData;
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_index::{Idx, IndexVec};
|
||||
use ruff_python_ast::visitor::preorder;
|
||||
use ruff_python_ast::visitor::preorder::{PreorderVisitor, TraversalSignal};
|
||||
use ruff_python_ast::{
|
||||
AnyNodeRef, AstNode, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule,
|
||||
NodeKind, Parameter, Stmt, StmtAnnAssign, StmtAssign, StmtAugAssign, StmtClassDef,
|
||||
StmtFunctionDef, StmtGlobal, StmtImport, StmtImportFrom, StmtNonlocal, StmtTypeAlias,
|
||||
TypeParam, TypeParamParamSpec, TypeParamTypeVar, TypeParamTypeVarTuple, WithItem,
|
||||
};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
/// A type agnostic ID that uniquely identifies an AST node in a file.
|
||||
#[ruff_index::newtype_index]
|
||||
pub struct AstId;
|
||||
|
||||
/// A typed ID that uniquely identifies an AST node in a file.
|
||||
///
|
||||
/// This is different from [`AstId`] in that it is a combination of ID and the type of the node the ID identifies.
|
||||
/// Typing the ID prevents mixing IDs of different node types and allows restricting the API to only accept
|
||||
/// nodes for which an ID has been created (not all AST nodes get an ID).
|
||||
pub struct TypedAstId<N: HasAstId> {
|
||||
erased: AstId,
|
||||
_marker: PhantomData<fn() -> N>,
|
||||
}
|
||||
|
||||
impl<N: HasAstId> TypedAstId<N> {
|
||||
/// Upcasts this ID from a more specific node type to a more general node type.
|
||||
pub fn upcast<M: HasAstId>(self) -> TypedAstId<M>
|
||||
where
|
||||
N: Into<M>,
|
||||
{
|
||||
TypedAstId {
|
||||
erased: self.erased,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Copy for TypedAstId<N> {}
|
||||
impl<N: HasAstId> Clone for TypedAstId<N> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> PartialEq for TypedAstId<N> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.erased == other.erased
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Eq for TypedAstId<N> {}
|
||||
impl<N: HasAstId> Hash for TypedAstId<N> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.erased.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Debug for TypedAstId<N> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_tuple("TypedAstId")
|
||||
.field(&self.erased)
|
||||
.field(&type_name::<N>())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AstIds {
|
||||
ids: IndexVec<AstId, NodeKey>,
|
||||
reverse: FxHashMap<NodeKey, AstId>,
|
||||
}
|
||||
|
||||
impl AstIds {
|
||||
// TODO rust analyzer doesn't allocate an ID for every node. It only allocates ids for
|
||||
// nodes with a corresponding HIR element, that is nodes that are definitions.
|
||||
pub fn from_module(module: &ModModule) -> Self {
|
||||
let mut visitor = AstIdsVisitor::default();
|
||||
|
||||
// TODO: visit_module?
|
||||
// Make sure we visit the root
|
||||
visitor.create_id(module);
|
||||
visitor.visit_body(&module.body);
|
||||
|
||||
while let Some(deferred) = visitor.deferred.pop() {
|
||||
match deferred {
|
||||
DeferredNode::FunctionDefinition(def) => {
|
||||
def.visit_preorder(&mut visitor);
|
||||
}
|
||||
DeferredNode::ClassDefinition(def) => def.visit_preorder(&mut visitor),
|
||||
}
|
||||
}
|
||||
|
||||
AstIds {
|
||||
ids: visitor.ids,
|
||||
reverse: visitor.reverse,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the ID to the root node.
|
||||
pub fn root(&self) -> NodeKey {
|
||||
self.ids[AstId::new(0)]
|
||||
}
|
||||
|
||||
/// Returns the [`TypedAstId`] for a node.
|
||||
pub fn ast_id<N: HasAstId>(&self, node: &N) -> TypedAstId<N> {
|
||||
let key = node.syntax_node_key();
|
||||
TypedAstId {
|
||||
erased: self.reverse.get(&key).copied().unwrap(),
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the [`TypedAstId`] for the node identified with the given [`TypedNodeKey`].
|
||||
pub fn ast_id_for_key<N: HasAstId>(&self, node: &TypedNodeKey<N>) -> TypedAstId<N> {
|
||||
let ast_id = self.ast_id_for_node_key(node.inner);
|
||||
|
||||
TypedAstId {
|
||||
erased: ast_id,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the untyped [`AstId`] for the node identified by the given `node` key.
|
||||
pub fn ast_id_for_node_key(&self, node: NodeKey) -> AstId {
|
||||
self.reverse
|
||||
.get(&node)
|
||||
.copied()
|
||||
.expect("Can't find node in AstIds map.")
|
||||
}
|
||||
|
||||
/// Returns the [`TypedNodeKey`] for the node identified by the given [`TypedAstId`].
|
||||
pub fn key<N: HasAstId>(&self, id: TypedAstId<N>) -> TypedNodeKey<N> {
|
||||
let syntax_key = self.ids[id.erased];
|
||||
|
||||
TypedNodeKey::new(syntax_key).unwrap()
|
||||
}
|
||||
|
||||
pub fn node_key<H: HasAstId>(&self, id: TypedAstId<H>) -> NodeKey {
|
||||
self.ids[id.erased]
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for AstIds {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let mut map = f.debug_map();
|
||||
for (key, value) in self.ids.iter_enumerated() {
|
||||
map.entry(&key, &value);
|
||||
}
|
||||
|
||||
map.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for AstIds {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.ids == other.ids
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for AstIds {}
|
||||
|
||||
#[derive(Default)]
|
||||
struct AstIdsVisitor<'a> {
|
||||
ids: IndexVec<AstId, NodeKey>,
|
||||
reverse: FxHashMap<NodeKey, AstId>,
|
||||
deferred: Vec<DeferredNode<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> AstIdsVisitor<'a> {
|
||||
fn create_id<A: HasAstId>(&mut self, node: &A) {
|
||||
let node_key = node.syntax_node_key();
|
||||
|
||||
let id = self.ids.push(node_key);
|
||||
self.reverse.insert(node_key, id);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> PreorderVisitor<'a> for AstIdsVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
match stmt {
|
||||
Stmt::FunctionDef(def) => {
|
||||
self.create_id(def);
|
||||
self.deferred.push(DeferredNode::FunctionDefinition(def));
|
||||
return;
|
||||
}
|
||||
// TODO defer visiting the assignment body, type alias parameters etc?
|
||||
Stmt::ClassDef(def) => {
|
||||
self.create_id(def);
|
||||
self.deferred.push(DeferredNode::ClassDefinition(def));
|
||||
return;
|
||||
}
|
||||
Stmt::Expr(_) => {
|
||||
// Skip
|
||||
return;
|
||||
}
|
||||
Stmt::Return(_) => {}
|
||||
Stmt::Delete(_) => {}
|
||||
Stmt::Assign(assignment) => self.create_id(assignment),
|
||||
Stmt::AugAssign(assignment) => {
|
||||
self.create_id(assignment);
|
||||
}
|
||||
Stmt::AnnAssign(assignment) => self.create_id(assignment),
|
||||
Stmt::TypeAlias(assignment) => self.create_id(assignment),
|
||||
Stmt::For(_) => {}
|
||||
Stmt::While(_) => {}
|
||||
Stmt::If(_) => {}
|
||||
Stmt::With(_) => {}
|
||||
Stmt::Match(_) => {}
|
||||
Stmt::Raise(_) => {}
|
||||
Stmt::Try(_) => {}
|
||||
Stmt::Assert(_) => {}
|
||||
Stmt::Import(import) => self.create_id(import),
|
||||
Stmt::ImportFrom(import_from) => self.create_id(import_from),
|
||||
Stmt::Global(global) => self.create_id(global),
|
||||
Stmt::Nonlocal(non_local) => self.create_id(non_local),
|
||||
Stmt::Pass(_) => {}
|
||||
Stmt::Break(_) => {}
|
||||
Stmt::Continue(_) => {}
|
||||
Stmt::IpyEscapeCommand(_) => {}
|
||||
}
|
||||
|
||||
preorder::walk_stmt(self, stmt);
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, _expr: &'a Expr) {}
|
||||
|
||||
fn visit_parameter(&mut self, parameter: &'a Parameter) {
|
||||
self.create_id(parameter);
|
||||
preorder::walk_parameter(self, parameter);
|
||||
}
|
||||
|
||||
fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler) {
|
||||
match except_handler {
|
||||
ExceptHandler::ExceptHandler(except_handler) => {
|
||||
self.create_id(except_handler);
|
||||
}
|
||||
}
|
||||
|
||||
preorder::walk_except_handler(self, except_handler);
|
||||
}
|
||||
|
||||
fn visit_with_item(&mut self, with_item: &'a WithItem) {
|
||||
self.create_id(with_item);
|
||||
preorder::walk_with_item(self, with_item);
|
||||
}
|
||||
|
||||
fn visit_match_case(&mut self, match_case: &'a MatchCase) {
|
||||
self.create_id(match_case);
|
||||
preorder::walk_match_case(self, match_case);
|
||||
}
|
||||
|
||||
fn visit_type_param(&mut self, type_param: &'a TypeParam) {
|
||||
self.create_id(type_param);
|
||||
}
|
||||
}
|
||||
|
||||
enum DeferredNode<'a> {
|
||||
FunctionDefinition(&'a StmtFunctionDef),
|
||||
ClassDefinition(&'a StmtClassDef),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub struct TypedNodeKey<N: AstNode> {
|
||||
/// The type erased node key.
|
||||
inner: NodeKey,
|
||||
_marker: PhantomData<fn() -> N>,
|
||||
}
|
||||
|
||||
impl<N: AstNode> TypedNodeKey<N> {
|
||||
pub fn from_node(node: &N) -> Self {
|
||||
let inner = NodeKey {
|
||||
kind: node.as_any_node_ref().kind(),
|
||||
range: node.range(),
|
||||
};
|
||||
Self {
|
||||
inner,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new(node_key: NodeKey) -> Option<Self> {
|
||||
N::can_cast(node_key.kind).then_some(TypedNodeKey {
|
||||
inner: node_key,
|
||||
_marker: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<N::Ref<'a>> {
|
||||
let node_ref = self.inner.resolve(root)?;
|
||||
|
||||
Some(N::cast_ref(node_ref).unwrap())
|
||||
}
|
||||
|
||||
pub fn resolve_unwrap<'a>(&self, root: AnyNodeRef<'a>) -> N::Ref<'a> {
|
||||
self.resolve(root).expect("node should resolve")
|
||||
}
|
||||
|
||||
pub fn erased(&self) -> &NodeKey {
|
||||
&self.inner
|
||||
}
|
||||
}
|
||||
|
||||
struct FindNodeKeyVisitor<'a> {
|
||||
key: NodeKey,
|
||||
result: Option<AnyNodeRef<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> PreorderVisitor<'a> for FindNodeKeyVisitor<'a> {
|
||||
fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal {
|
||||
if self.result.is_some() {
|
||||
return TraversalSignal::Skip;
|
||||
}
|
||||
|
||||
if node.range() == self.key.range && node.kind() == self.key.kind {
|
||||
self.result = Some(node);
|
||||
TraversalSignal::Skip
|
||||
} else if node.range().contains_range(self.key.range) {
|
||||
TraversalSignal::Traverse
|
||||
} else {
|
||||
TraversalSignal::Skip
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_body(&mut self, body: &'a [Stmt]) {
|
||||
// TODO it would be more efficient to use binary search instead of linear
|
||||
for stmt in body {
|
||||
if stmt.range().start() > self.key.range.end() {
|
||||
break;
|
||||
}
|
||||
|
||||
self.visit_stmt(stmt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO an alternative to this is to have a `NodeId` on each node (in increasing order depending on the position).
|
||||
// This would make it possible to reduce the size of this to a u32.
|
||||
// It would be nice if we could use an `Arc::weak_ref` here, but that only works if we use
|
||||
// `Arc` internally
|
||||
// TODO: Implement the logic to resolve a node, given a db (and the correct file).
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub struct NodeKey {
|
||||
kind: NodeKind,
|
||||
range: TextRange,
|
||||
}
|
||||
|
||||
impl NodeKey {
|
||||
pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<AnyNodeRef<'a>> {
|
||||
// We need to do a binary search here. Only traverse into a node if the range is within the node
|
||||
let mut visitor = FindNodeKeyVisitor {
|
||||
key: *self,
|
||||
result: None,
|
||||
};
|
||||
|
||||
if visitor.enter_node(root) == TraversalSignal::Traverse {
|
||||
root.visit_preorder(&mut visitor);
|
||||
}
|
||||
|
||||
visitor.result
|
||||
}
|
||||
}
|
||||
|
||||
/// Marker trait implemented by AST nodes for which we extract the `AstId`.
|
||||
pub trait HasAstId: AstNode {
|
||||
fn node_key(&self) -> TypedNodeKey<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
TypedNodeKey {
|
||||
inner: self.syntax_node_key(),
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
fn syntax_node_key(&self) -> NodeKey {
|
||||
NodeKey {
|
||||
kind: self.as_any_node_ref().kind(),
|
||||
range: self.range(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasAstId for StmtFunctionDef {}
|
||||
impl HasAstId for StmtClassDef {}
|
||||
impl HasAstId for StmtAnnAssign {}
|
||||
impl HasAstId for StmtAugAssign {}
|
||||
impl HasAstId for StmtAssign {}
|
||||
impl HasAstId for StmtTypeAlias {}
|
||||
|
||||
impl HasAstId for ModModule {}
|
||||
|
||||
impl HasAstId for StmtImport {}
|
||||
|
||||
impl HasAstId for StmtImportFrom {}
|
||||
|
||||
impl HasAstId for Parameter {}
|
||||
|
||||
impl HasAstId for TypeParam {}
|
||||
impl HasAstId for Stmt {}
|
||||
impl HasAstId for TypeParamTypeVar {}
|
||||
impl HasAstId for TypeParamTypeVarTuple {}
|
||||
impl HasAstId for TypeParamParamSpec {}
|
||||
impl HasAstId for StmtGlobal {}
|
||||
impl HasAstId for StmtNonlocal {}
|
||||
|
||||
impl HasAstId for ExceptHandlerExceptHandler {}
|
||||
impl HasAstId for WithItem {}
|
||||
impl HasAstId for MatchCase {}
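
To make the pieces above concrete, here is a minimal within-crate sketch. It assumes the `ast_ids` module is reachable as `crate::ast_ids` and that a `ModModule` has already been parsed elsewhere; the helper name is made up:

```rust
use ruff_python_ast::{ModModule, Stmt};

use crate::ast_ids::AstIds;

// Illustrative helper: build the per-file ID table and look up the ID of the
// first top-level function definition, if any.
fn first_function_id(module: &ModModule) {
    let ast_ids = AstIds::from_module(module);

    if let Some(Stmt::FunctionDef(def)) = module.body.first() {
        // A stable, typed ID for this function definition.
        let id = ast_ids.ast_id(def);
        // Map it back to a `TypedNodeKey`, which can later be resolved against
        // the AST root via `resolve` (a kind + range lookup).
        let key = ast_ids.key(id);
        let _ = key.erased();
    }
}
```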
|
||||
165
crates/red_knot/src/cache.rs
Normal file
165
crates/red_knot/src/cache.rs
Normal file
@@ -0,0 +1,165 @@
|
||||
use std::fmt::Formatter;
|
||||
use std::hash::Hash;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
|
||||
use crate::db::QueryResult;
|
||||
use dashmap::mapref::entry::Entry;
|
||||
|
||||
use crate::FxDashMap;
|
||||
|
||||
/// Simple key value cache that locks on a per-key level.
|
||||
pub struct KeyValueCache<K, V> {
|
||||
map: FxDashMap<K, V>,
|
||||
statistics: CacheStatistics,
|
||||
}
|
||||
|
||||
impl<K, V> KeyValueCache<K, V>
|
||||
where
|
||||
K: Eq + Hash + Clone,
|
||||
V: Clone,
|
||||
{
|
||||
pub fn try_get(&self, key: &K) -> Option<V> {
|
||||
if let Some(existing) = self.map.get(key) {
|
||||
self.statistics.hit();
|
||||
Some(existing.clone())
|
||||
} else {
|
||||
self.statistics.miss();
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get<F>(&self, key: &K, compute: F) -> QueryResult<V>
|
||||
where
|
||||
F: FnOnce(&K) -> QueryResult<V>,
|
||||
{
|
||||
Ok(match self.map.entry(key.clone()) {
|
||||
Entry::Occupied(cached) => {
|
||||
self.statistics.hit();
|
||||
|
||||
cached.get().clone()
|
||||
}
|
||||
Entry::Vacant(vacant) => {
|
||||
self.statistics.miss();
|
||||
|
||||
let value = compute(key)?;
|
||||
vacant.insert(value.clone());
|
||||
value
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn set(&mut self, key: K, value: V) {
|
||||
self.map.insert(key, value);
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, key: &K) -> Option<V> {
|
||||
self.map.remove(key).map(|(_, value)| value)
|
||||
}
|
||||
|
||||
pub fn clear(&mut self) {
|
||||
self.map.clear();
|
||||
self.map.shrink_to_fit();
|
||||
}
|
||||
|
||||
pub fn statistics(&self) -> Option<Statistics> {
|
||||
self.statistics.to_statistics()
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> Default for KeyValueCache<K, V>
|
||||
where
|
||||
K: Eq + Hash,
|
||||
V: Clone,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
map: FxDashMap::default(),
|
||||
statistics: CacheStatistics::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> std::fmt::Debug for KeyValueCache<K, V>
|
||||
where
|
||||
K: std::fmt::Debug + Eq + Hash,
|
||||
V: std::fmt::Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let mut debug = f.debug_map();
|
||||
|
||||
for entry in &self.map {
|
||||
debug.entry(&entry.value(), &entry.key());
|
||||
}
|
||||
|
||||
debug.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct Statistics {
|
||||
pub hits: usize,
|
||||
pub misses: usize,
|
||||
}
|
||||
|
||||
impl Statistics {
|
||||
#[allow(clippy::cast_precision_loss)]
|
||||
pub fn hit_rate(&self) -> Option<f64> {
|
||||
if self.hits + self.misses == 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some((self.hits as f64) / (self.hits + self.misses) as f64)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
pub type CacheStatistics = DebugStatistics;
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
pub type CacheStatistics = ReleaseStatistics;
|
||||
|
||||
pub trait StatisticsRecorder {
|
||||
fn hit(&self);
|
||||
fn miss(&self);
|
||||
fn to_statistics(&self) -> Option<Statistics>;
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct DebugStatistics {
|
||||
hits: AtomicUsize,
|
||||
misses: AtomicUsize,
|
||||
}
|
||||
|
||||
impl StatisticsRecorder for DebugStatistics {
|
||||
// TODO figure out appropriate Ordering
|
||||
fn hit(&self) {
|
||||
self.hits.fetch_add(1, Ordering::SeqCst);
|
||||
}
|
||||
|
||||
fn miss(&self) {
|
||||
self.misses.fetch_add(1, Ordering::SeqCst);
|
||||
}
|
||||
|
||||
fn to_statistics(&self) -> Option<Statistics> {
|
||||
let hits = self.hits.load(Ordering::SeqCst);
|
||||
let misses = self.misses.load(Ordering::SeqCst);
|
||||
|
||||
Some(Statistics { hits, misses })
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct ReleaseStatistics;
|
||||
|
||||
impl StatisticsRecorder for ReleaseStatistics {
|
||||
#[inline]
|
||||
fn hit(&self) {}
|
||||
|
||||
#[inline]
|
||||
fn miss(&self) {}
|
||||
|
||||
#[inline]
|
||||
fn to_statistics(&self) -> Option<Statistics> {
|
||||
None
|
||||
}
|
||||
}
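
A rough usage sketch, assuming `KeyValueCache` and `QueryResult` are reachable from sibling modules as below; the key/value types and the computed value are purely illustrative:

```rust
use crate::cache::KeyValueCache;
use crate::db::QueryResult;
use crate::files::FileId;

// Illustrative query helper: a cache from file IDs to some per-file count.
fn cached_count(cache: &KeyValueCache<FileId, usize>, file: FileId) -> QueryResult<usize> {
    // `get` returns the cached value on a hit; on a miss it runs the closure,
    // stores the result, and returns it. Hits/misses are recorded in debug
    // builds via `DebugStatistics`.
    cache.get(&file, |_file| {
        // ... compute the value here, propagating cancellation with `?` ...
        Ok(0)
    })
}
```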
|
||||
42
crates/red_knot/src/cancellation.rs
Normal file
42
crates/red_knot/src/cancellation.rs
Normal file
@@ -0,0 +1,42 @@
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct CancellationTokenSource {
|
||||
signal: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl CancellationTokenSource {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
signal: Arc::new(AtomicBool::new(false)),
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "trace", skip_all)]
|
||||
pub fn cancel(&self) {
|
||||
self.signal.store(true, std::sync::atomic::Ordering::SeqCst);
|
||||
}
|
||||
|
||||
pub fn is_cancelled(&self) -> bool {
|
||||
self.signal.load(std::sync::atomic::Ordering::SeqCst)
|
||||
}
|
||||
|
||||
pub fn token(&self) -> CancellationToken {
|
||||
CancellationToken {
|
||||
signal: self.signal.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct CancellationToken {
|
||||
signal: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl CancellationToken {
|
||||
/// Returns `true` if cancellation has been requested.
|
||||
pub fn is_cancelled(&self) -> bool {
|
||||
self.signal.load(std::sync::atomic::Ordering::SeqCst)
|
||||
}
|
||||
}
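
A small sketch of the intended flow, using only the API above (the worker thread and its loop are illustrative):

```rust
use crate::cancellation::CancellationTokenSource;

fn cancellation_sketch() {
    let source = CancellationTokenSource::new();

    // Tokens share the same `Arc<AtomicBool>` as the source, so a cancel
    // issued on the source becomes visible to every token holder.
    let token = source.token();
    let worker = std::thread::spawn(move || {
        while !token.is_cancelled() {
            // ... do one unit of work, then re-check for cancellation ...
            std::thread::yield_now();
        }
    });

    // Request cancellation from the coordinating thread and wait for the
    // worker to observe it.
    source.cancel();
    worker.join().unwrap();
}
```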
|
||||
248
crates/red_knot/src/db.rs
Normal file
248
crates/red_knot/src/db.rs
Normal file
@@ -0,0 +1,248 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use jars::{HasJar, HasJars};
|
||||
pub use query::{QueryError, QueryResult};
|
||||
pub use runtime::DbRuntime;
|
||||
pub use storage::JarsStorage;
|
||||
|
||||
use crate::files::FileId;
|
||||
use crate::lint::{LintSemanticStorage, LintSyntaxStorage};
|
||||
use crate::module::ModuleResolver;
|
||||
use crate::parse::ParsedStorage;
|
||||
use crate::source::SourceStorage;
|
||||
use crate::symbols::SymbolTablesStorage;
|
||||
use crate::types::TypeStore;
|
||||
|
||||
mod jars;
|
||||
mod query;
|
||||
mod runtime;
|
||||
mod storage;
|
||||
|
||||
pub trait Database {
|
||||
/// Returns a reference to the runtime of the current worker.
|
||||
fn runtime(&self) -> &DbRuntime;
|
||||
|
||||
/// Returns a mutable reference to the runtime. Only one worker can hold a mutable reference to the runtime.
|
||||
fn runtime_mut(&mut self) -> &mut DbRuntime;
|
||||
|
||||
/// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
|
||||
fn cancelled(&self) -> QueryResult<()> {
|
||||
self.runtime().cancelled()
|
||||
}
|
||||
|
||||
/// Returns `true` if the queries have been cancelled.
|
||||
fn is_cancelled(&self) -> bool {
|
||||
self.runtime().is_cancelled()
|
||||
}
|
||||
}
|
||||
|
||||
/// Database that supports running queries from multiple threads.
|
||||
pub trait ParallelDatabase: Database + Send {
|
||||
/// Creates a snapshot of the database state that can be used to query the database in another thread.
|
||||
///
|
||||
/// The snapshot is a read-only view of the database but query results are shared between threads.
|
||||
/// All queries will be automatically cancelled when applying any mutations (calling [`HasJars::jars_mut`])
|
||||
/// to the database (not the snapshot, because they're readonly).
|
||||
///
|
||||
/// ## Creating a snapshot
|
||||
///
|
||||
/// Creating a snapshot of the database's jars is cheap but creating a snapshot of
|
||||
/// other state stored on the database might require deep-cloning data. That's why you should
|
||||
/// avoid creating snapshots in a hot function (e.g. don't create a snapshot for each file, instead
|
||||
/// create a snapshot when scheduling the check of an entire program).
|
||||
///
|
||||
/// ## Salsa compatibility
|
||||
/// Salsa prohibits creating a snapshot while running a local query (it's fine if other workers run a query) [[source](https://github.com/salsa-rs/salsa/issues/80)].
|
||||
/// We should avoid creating snapshots while running a query because we might want to adopt Salsa in the future (if we can figure out persistent caching).
|
||||
/// Unfortunately, the infrastructure doesn't provide an automated way of knowing when a query is run, that's
|
||||
/// why we have to "enforce" this constraint manually.
|
||||
#[must_use]
|
||||
fn snapshot(&self) -> Snapshot<Self>;
|
||||
}
|
||||
|
||||
pub trait DbWithJar<Jar>: Database + HasJar<Jar> {}
|
||||
|
||||
/// Readonly snapshot of a database.
|
||||
///
|
||||
/// ## Dead locks
|
||||
/// A snapshot should always be dropped as soon as it is no longer necessary to run queries.
|
||||
/// Storing the snapshot without running a query or periodically checking if cancellation was requested
|
||||
/// can lead to deadlocks because mutating the [`Database`] requires cancelling all pending queries
|
||||
/// and waiting for all [`Snapshot`]s to be dropped.
|
||||
#[derive(Debug)]
|
||||
pub struct Snapshot<DB: ?Sized>
|
||||
where
|
||||
DB: ParallelDatabase,
|
||||
{
|
||||
db: DB,
|
||||
}
|
||||
|
||||
impl<DB> Snapshot<DB>
|
||||
where
|
||||
DB: ParallelDatabase,
|
||||
{
|
||||
pub fn new(db: DB) -> Self {
|
||||
Snapshot { db }
|
||||
}
|
||||
}
|
||||
|
||||
impl<DB> std::ops::Deref for Snapshot<DB>
|
||||
where
|
||||
DB: ParallelDatabase,
|
||||
{
|
||||
type Target = DB;
|
||||
|
||||
fn deref(&self) -> &DB {
|
||||
&self.db
|
||||
}
|
||||
}
|
||||
|
||||
pub trait Upcast<T: ?Sized> {
|
||||
fn upcast(&self) -> &T;
|
||||
}
|
||||
|
||||
// Red knot specific databases code.
|
||||
|
||||
pub trait SourceDb: DbWithJar<SourceJar> {
|
||||
// queries
|
||||
fn file_id(&self, path: &std::path::Path) -> FileId;
|
||||
|
||||
fn file_path(&self, file_id: FileId) -> Arc<std::path::Path>;
|
||||
}
|
||||
|
||||
pub trait SemanticDb: SourceDb + DbWithJar<SemanticJar> + Upcast<dyn SourceDb> {}
|
||||
|
||||
pub trait LintDb: SemanticDb + DbWithJar<LintJar> + Upcast<dyn SemanticDb> {}
|
||||
|
||||
pub trait Db: LintDb + Upcast<dyn LintDb> {}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SourceJar {
|
||||
pub sources: SourceStorage,
|
||||
pub parsed: ParsedStorage,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SemanticJar {
|
||||
pub module_resolver: ModuleResolver,
|
||||
pub symbol_tables: SymbolTablesStorage,
|
||||
pub type_store: TypeStore,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct LintJar {
|
||||
pub lint_syntax: LintSyntaxStorage,
|
||||
pub lint_semantic: LintSemanticStorage,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::db::{
|
||||
Database, DbRuntime, DbWithJar, HasJar, HasJars, JarsStorage, LintDb, LintJar, QueryResult,
|
||||
SourceDb, SourceJar, Upcast,
|
||||
};
|
||||
use crate::files::{FileId, Files};
|
||||
|
||||
use super::{SemanticDb, SemanticJar};
|
||||
|
||||
// This can be a partial database used in a single crate for testing.
|
||||
// It would hold less data than the full database.
|
||||
#[derive(Debug, Default)]
|
||||
pub(crate) struct TestDb {
|
||||
files: Files,
|
||||
jars: JarsStorage<Self>,
|
||||
}
|
||||
|
||||
impl HasJar<SourceJar> for TestDb {
|
||||
fn jar(&self) -> QueryResult<&SourceJar> {
|
||||
Ok(&self.jars()?.0)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut SourceJar {
|
||||
&mut self.jars_mut().0
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<SemanticJar> for TestDb {
|
||||
fn jar(&self) -> QueryResult<&SemanticJar> {
|
||||
Ok(&self.jars()?.1)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut SemanticJar {
|
||||
&mut self.jars_mut().1
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<LintJar> for TestDb {
|
||||
fn jar(&self) -> QueryResult<&LintJar> {
|
||||
Ok(&self.jars()?.2)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut LintJar {
|
||||
&mut self.jars_mut().2
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceDb for TestDb {
|
||||
fn file_id(&self, path: &Path) -> FileId {
|
||||
self.files.intern(path)
|
||||
}
|
||||
|
||||
fn file_path(&self, file_id: FileId) -> Arc<Path> {
|
||||
self.files.path(file_id)
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithJar<SourceJar> for TestDb {}
|
||||
|
||||
impl Upcast<dyn SourceDb> for TestDb {
|
||||
fn upcast(&self) -> &(dyn SourceDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl SemanticDb for TestDb {}
|
||||
|
||||
impl DbWithJar<SemanticJar> for TestDb {}
|
||||
|
||||
impl Upcast<dyn SemanticDb> for TestDb {
|
||||
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl LintDb for TestDb {}
|
||||
|
||||
impl Upcast<dyn LintDb> for TestDb {
|
||||
fn upcast(&self) -> &(dyn LintDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithJar<LintJar> for TestDb {}
|
||||
|
||||
impl HasJars for TestDb {
|
||||
type Jars = (SourceJar, SemanticJar, LintJar);
|
||||
|
||||
fn jars(&self) -> QueryResult<&Self::Jars> {
|
||||
self.jars.jars()
|
||||
}
|
||||
|
||||
fn jars_mut(&mut self) -> &mut Self::Jars {
|
||||
self.jars.jars_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl Database for TestDb {
|
||||
fn runtime(&self) -> &DbRuntime {
|
||||
self.jars.runtime()
|
||||
}
|
||||
|
||||
fn runtime_mut(&mut self) -> &mut DbRuntime {
|
||||
self.jars.runtime_mut()
|
||||
}
|
||||
}
|
||||
}
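
A sketch of how a snapshot might be handed to another thread, under the assumption that the concrete database type implements `ParallelDatabase` (the `TestDb` above does not); the helper itself is illustrative:

```rust
use crate::db::{ParallelDatabase, QueryResult};

// Illustrative: run work against a read-only snapshot on a background thread
// while the caller keeps the mutable database. Any later mutation (e.g. via
// `HasJars::jars_mut`) cancels the snapshot's pending queries.
fn check_in_background<DB>(db: &DB) -> std::thread::JoinHandle<QueryResult<()>>
where
    DB: ParallelDatabase + 'static,
{
    let snapshot = db.snapshot();
    std::thread::spawn(move || {
        // Long-running queries should call `cancelled()?` regularly so the
        // main thread can interrupt them.
        snapshot.cancelled()?;
        // ... run queries against `snapshot` ...
        Ok(())
    })
}
```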
|
||||
37
crates/red_knot/src/db/jars.rs
Normal file
37
crates/red_knot/src/db/jars.rs
Normal file
@@ -0,0 +1,37 @@
|
||||
use crate::db::query::QueryResult;
|
||||
|
||||
/// Gives access to a specific jar in the database.
|
||||
///
|
||||
/// Nope, the terminology isn't borrowed from Java but from Salsa <https://salsa-rs.github.io/salsa/>,
|
||||
/// which is an analogy to storing the salsa in different jars.
|
||||
///
|
||||
/// The basic idea is that each crate can define its own jar and the jars can be combined to a single
|
||||
/// database in the top level crate. Each crate also defines its own `Database` trait. The combination of
|
||||
/// `Database` trait and the jar makes it possible to write queries in isolation without having to know how they get composed at the upper levels.
|
||||
///
|
||||
/// Salsa further defines a `HasIngredient` trait which slices the jar to a specific storage (e.g. a specific cache).
|
||||
/// We don't need this just yet because we write our queries by hand. We may want a similar trait if we decide
|
||||
/// to use a macro to generate the queries.
|
||||
pub trait HasJar<T> {
|
||||
/// Gives a read-only reference to the jar.
|
||||
fn jar(&self) -> QueryResult<&T>;
|
||||
|
||||
/// Gives a mutable reference to the jar.
|
||||
fn jar_mut(&mut self) -> &mut T;
|
||||
}
|
||||
|
||||
/// Gives access to the jars in a database.
|
||||
pub trait HasJars {
|
||||
/// A type storing the jars.
|
||||
///
|
||||
/// Most commonly, this is a tuple where each jar is a tuple element.
|
||||
type Jars: Default;
|
||||
|
||||
/// Gives access to the underlying jars, but first checks whether the queries have been cancelled.
|
||||
///
|
||||
/// Returns `Err(QueryError::Cancelled)` if the queries have been cancelled.
|
||||
fn jars(&self) -> QueryResult<&Self::Jars>;
|
||||
|
||||
/// Gives mutable access to the underlying jars.
|
||||
fn jars_mut(&mut self) -> &mut Self::Jars;
|
||||
}
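
Stripped down to the essentials (and modelled on the `TestDb` shown in `db.rs` above), a crate-local jar and its `HasJar` implementation look roughly like this; the `Example*` names and the direct field storage are illustrative only:

```rust
use crate::db::{HasJar, QueryResult};

// Illustrative jar: would own the storages/caches for one crate's queries.
#[derive(Debug, Default)]
pub struct ExampleJar;

// Illustrative database. A real database also implements `Database`,
// `HasJars`, and the per-crate `*Db` traits, as `TestDb` does in `db.rs`.
#[derive(Debug, Default)]
pub struct ExampleDb {
    example: ExampleJar,
}

impl HasJar<ExampleJar> for ExampleDb {
    fn jar(&self) -> QueryResult<&ExampleJar> {
        // A production implementation first checks for cancellation
        // (see `JarsStorage::jars`), hence the `QueryResult` return type.
        Ok(&self.example)
    }

    fn jar_mut(&mut self) -> &mut ExampleJar {
        &mut self.example
    }
}
```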
|
||||
20
crates/red_knot/src/db/query.rs
Normal file
20
crates/red_knot/src/db/query.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
/// Reason why a db query operation failed.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum QueryError {
|
||||
/// The query was cancelled because the DB was mutated or the query was cancelled by the host (e.g. on a file change or when pressing CTRL+C).
|
||||
Cancelled,
|
||||
}
|
||||
|
||||
impl Display for QueryError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
QueryError::Cancelled => f.write_str("query was cancelled"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for QueryError {}
|
||||
|
||||
pub type QueryResult<T> = Result<T, QueryError>;
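
Queries thread `QueryResult` through their signatures and bail out early when cancelled; a minimal sketch of that shape (the query itself and its return value are hypothetical):

```rust
use crate::db::{Database, QueryResult};
use crate::files::FileId;

// Illustrative query: check for cancellation up front and propagate
// `QueryError::Cancelled` to the caller via `?`.
fn example_query(db: &dyn Database, _file: FileId) -> QueryResult<usize> {
    db.cancelled()?;
    // ... do the actual work, calling other queries with `?` ...
    Ok(0)
}
```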
|
||||
41
crates/red_knot/src/db/runtime.rs
Normal file
41
crates/red_knot/src/db/runtime.rs
Normal file
@@ -0,0 +1,41 @@
|
||||
use crate::cancellation::CancellationTokenSource;
|
||||
use crate::db::{QueryError, QueryResult};
|
||||
|
||||
/// Holds the jar agnostic state of the database.
|
||||
#[derive(Debug, Default)]
|
||||
pub struct DbRuntime {
|
||||
/// The cancellation token source used to signal other workers that the queries should be aborted and
|
||||
/// exit at the next possible point.
|
||||
cancellation_token: CancellationTokenSource,
|
||||
}
|
||||
|
||||
impl DbRuntime {
|
||||
pub(super) fn snapshot(&self) -> Self {
|
||||
Self {
|
||||
cancellation_token: self.cancellation_token.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Cancels the pending queries of other workers. The current worker cannot have any pending
|
||||
/// queries because we're holding a mutable reference to the runtime.
|
||||
pub(super) fn cancel_other_workers(&mut self) {
|
||||
self.cancellation_token.cancel();
|
||||
// Set a new cancellation token so that we're in a non-cancelled state again when running the next
|
||||
// query.
|
||||
self.cancellation_token = CancellationTokenSource::default();
|
||||
}
|
||||
|
||||
/// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
|
||||
pub(super) fn cancelled(&self) -> QueryResult<()> {
|
||||
if self.cancellation_token.is_cancelled() {
|
||||
Err(QueryError::Cancelled)
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the queries have been cancelled.
|
||||
pub(super) fn is_cancelled(&self) -> bool {
|
||||
self.cancellation_token.is_cancelled()
|
||||
}
|
||||
}
|
||||
117
crates/red_knot/src/db/storage.rs
Normal file
117
crates/red_knot/src/db/storage.rs
Normal file
@@ -0,0 +1,117 @@
|
||||
use std::fmt::Formatter;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crossbeam::sync::WaitGroup;
|
||||
|
||||
use crate::db::query::QueryResult;
|
||||
use crate::db::runtime::DbRuntime;
|
||||
use crate::db::{HasJars, ParallelDatabase};
|
||||
|
||||
/// Stores the jars of a database and the state for each worker.
|
||||
///
|
||||
/// Today, all state is shared across all workers, but it may be desired to store data per worker in the future.
|
||||
pub struct JarsStorage<T>
|
||||
where
|
||||
T: HasJars + Sized,
|
||||
{
|
||||
// It's important that `jars_wait_group` is declared after `jars` to ensure that `jars` is dropped first.
|
||||
// See https://doc.rust-lang.org/reference/destructors.html
|
||||
/// Stores the jars of the database.
|
||||
jars: Arc<T::Jars>,
|
||||
|
||||
/// Used to count the references to `jars`. Allows implementing `jars_mut` without having to clone `jars`.
|
||||
jars_wait_group: WaitGroup,
|
||||
|
||||
/// The data agnostic state.
|
||||
runtime: DbRuntime,
|
||||
}
|
||||
|
||||
impl<Db> JarsStorage<Db>
|
||||
where
|
||||
Db: HasJars,
|
||||
{
|
||||
pub(super) fn new() -> Self {
|
||||
Self {
|
||||
jars: Arc::new(Db::Jars::default()),
|
||||
jars_wait_group: WaitGroup::default(),
|
||||
runtime: DbRuntime::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a snapshot of the jars.
|
||||
///
|
||||
/// Creating the snapshot is cheap because it doesn't clone the jars, it only increments a ref counter.
|
||||
#[must_use]
|
||||
pub fn snapshot(&self) -> JarsStorage<Db>
|
||||
where
|
||||
Db: ParallelDatabase,
|
||||
{
|
||||
Self {
|
||||
jars: self.jars.clone(),
|
||||
jars_wait_group: self.jars_wait_group.clone(),
|
||||
runtime: self.runtime.snapshot(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn jars(&self) -> QueryResult<&Db::Jars> {
|
||||
self.runtime.cancelled()?;
|
||||
Ok(&self.jars)
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the jars without cloning their content.
|
||||
///
|
||||
/// The method cancels any pending queries of other workers and waits for them to complete so that
|
||||
/// this instance is the only instance holding a reference to the jars.
|
||||
pub(crate) fn jars_mut(&mut self) -> &mut Db::Jars {
|
||||
// We have a mutable ref here, so no more workers can be spawned between calling this function and taking the mut ref below.
|
||||
self.cancel_other_workers();
|
||||
|
||||
// Now all other references to `self.jars` should have been released. We can now safely return a mutable reference
|
||||
// to the Arc's content.
|
||||
let jars =
|
||||
Arc::get_mut(&mut self.jars).expect("All references to jars should have been released");
|
||||
|
||||
jars
|
||||
}
|
||||
|
||||
pub(crate) fn runtime(&self) -> &DbRuntime {
|
||||
&self.runtime
|
||||
}
|
||||
|
||||
pub(crate) fn runtime_mut(&mut self) -> &mut DbRuntime {
|
||||
// Note: This method may need to use a similar trick to `jars_mut` if `DbRuntime` is ever to store data that is shared between workers.
|
||||
&mut self.runtime
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "trace", skip(self))]
|
||||
fn cancel_other_workers(&mut self) {
|
||||
self.runtime.cancel_other_workers();
|
||||
|
||||
// Wait for all other workers to complete.
|
||||
let existing_wait = std::mem::take(&mut self.jars_wait_group);
|
||||
existing_wait.wait();
|
||||
}
|
||||
}
|
||||
|
||||
impl<Db> Default for JarsStorage<Db>
|
||||
where
|
||||
Db: HasJars,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> std::fmt::Debug for JarsStorage<T>
|
||||
where
|
||||
T: HasJars,
|
||||
<T as HasJars>::Jars: std::fmt::Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("SharedStorage")
|
||||
.field("jars", &self.jars)
|
||||
.field("jars_wait_group", &self.jars_wait_group)
|
||||
.field("runtime", &self.runtime)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
180
crates/red_knot/src/files.rs
Normal file
180
crates/red_knot/src/files.rs
Normal file
@@ -0,0 +1,180 @@
|
||||
use std::fmt::{Debug, Formatter};
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use hashbrown::hash_map::RawEntryMut;
|
||||
use parking_lot::RwLock;
|
||||
use rustc_hash::FxHasher;
|
||||
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
|
||||
type Map<K, V> = hashbrown::HashMap<K, V, ()>;
|
||||
|
||||
#[newtype_index]
|
||||
pub struct FileId;
|
||||
|
||||
// TODO we'll need a higher level virtual file system abstraction that allows testing if a file exists
|
||||
// or retrieving its content (ideally lazily and in a way that the memory can be retained later)
|
||||
// I suspect that we'll end up with a FileSystem trait and our own Path abstraction.
|
||||
#[derive(Default)]
|
||||
pub struct Files {
|
||||
inner: Arc<RwLock<FilesInner>>,
|
||||
}
|
||||
|
||||
impl Files {
|
||||
#[tracing::instrument(level = "debug", skip(self))]
|
||||
pub fn intern(&self, path: &Path) -> FileId {
|
||||
self.inner.write().intern(path)
|
||||
}
|
||||
|
||||
pub fn try_get(&self, path: &Path) -> Option<FileId> {
|
||||
self.inner.read().try_get(path)
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(self))]
|
||||
pub fn path(&self, id: FileId) -> Arc<Path> {
|
||||
self.inner.read().path(id)
|
||||
}
|
||||
|
||||
/// Snapshots files for a new database snapshot.
|
||||
///
|
||||
/// This method should not be used outside a database snapshot.
|
||||
#[must_use]
|
||||
pub fn snapshot(&self) -> Files {
|
||||
Files {
|
||||
inner: self.inner.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Files {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let files = self.inner.read();
|
||||
let mut debug = f.debug_map();
|
||||
for item in files.iter() {
|
||||
debug.entry(&item.0, &item.1);
|
||||
}
|
||||
|
||||
debug.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Files {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.inner.read().eq(&other.inner.read())
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Files {}
|
||||
|
||||
#[derive(Default)]
|
||||
struct FilesInner {
|
||||
by_path: Map<FileId, ()>,
|
||||
// TODO should we use a map here to reclaim the space for removed files?
|
||||
// TODO I think we should use our own path abstraction here to avoid having to normalize paths
|
||||
// and having to deal with non-UTF-8 paths everywhere.
|
||||
by_id: IndexVec<FileId, Arc<Path>>,
|
||||
}
|
||||
|
||||
impl FilesInner {
|
||||
/// Inserts the path and returns a new id for it or returns the id if it is an existing path.
|
||||
// TODO should this accept Path or PathBuf?
|
||||
pub(crate) fn intern(&mut self, path: &Path) -> FileId {
|
||||
let hash = FilesInner::hash_path(path);
|
||||
|
||||
let entry = self
|
||||
.by_path
|
||||
.raw_entry_mut()
|
||||
.from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path);
|
||||
|
||||
match entry {
|
||||
RawEntryMut::Occupied(entry) => *entry.key(),
|
||||
RawEntryMut::Vacant(entry) => {
|
||||
let id = self.by_id.push(Arc::from(path));
|
||||
entry.insert_with_hasher(hash, id, (), |file| {
|
||||
FilesInner::hash_path(&self.by_id[*file])
|
||||
});
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn hash_path(path: &Path) -> u64 {
|
||||
let mut hasher = FxHasher::default();
|
||||
path.hash(&mut hasher);
|
||||
hasher.finish()
|
||||
}
|
||||
|
||||
pub(crate) fn try_get(&self, path: &Path) -> Option<FileId> {
|
||||
let mut hasher = FxHasher::default();
|
||||
path.hash(&mut hasher);
|
||||
let hash = hasher.finish();
|
||||
|
||||
Some(
|
||||
*self
|
||||
.by_path
|
||||
.raw_entry()
|
||||
.from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path)?
|
||||
.0,
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns the path for the file with the given id.
|
||||
pub(crate) fn path(&self, id: FileId) -> Arc<Path> {
|
||||
self.by_id[id].clone()
|
||||
}
|
||||
|
||||
pub(crate) fn iter(&self) -> impl Iterator<Item = (FileId, Arc<Path>)> + '_ {
|
||||
self.by_path.keys().map(|id| (*id, self.by_id[*id].clone()))
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for FilesInner {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.by_id == other.by_id
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for FilesInner {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[test]
|
||||
fn insert_path_twice_same_id() {
|
||||
let files = Files::default();
|
||||
let path = PathBuf::from("foo/bar");
|
||||
let id1 = files.intern(&path);
|
||||
let id2 = files.intern(&path);
|
||||
assert_eq!(id1, id2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_different_paths_different_ids() {
|
||||
let files = Files::default();
|
||||
let path1 = PathBuf::from("foo/bar");
|
||||
let path2 = PathBuf::from("foo/bar/baz");
|
||||
let id1 = files.intern(&path1);
|
||||
let id2 = files.intern(&path2);
|
||||
assert_ne!(id1, id2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn four_files() {
|
||||
let files = Files::default();
|
||||
let foo_path = PathBuf::from("foo");
|
||||
let foo_id = files.intern(&foo_path);
|
||||
let bar_path = PathBuf::from("bar");
|
||||
files.intern(&bar_path);
|
||||
let baz_path = PathBuf::from("baz");
|
||||
files.intern(&baz_path);
|
||||
let qux_path = PathBuf::from("qux");
|
||||
files.intern(&qux_path);
|
||||
|
||||
let foo_id_2 = files.try_get(&foo_path).expect("foo_path to be found");
|
||||
assert_eq!(foo_id_2, foo_id);
|
||||
}
|
||||
}
|
||||
67
crates/red_knot/src/hir.rs
Normal file
67
crates/red_knot/src/hir.rs
Normal file
@@ -0,0 +1,67 @@
|
||||
//! Key observations
|
||||
//!
|
||||
//! The HIR (High-Level Intermediate Representation) avoids allocations to a large extent by:
|
||||
//! * Using an arena per node type
|
||||
//! * Using ids and id ranges to reference items.
|
||||
//!
|
||||
//! Using a separate arena per node type has the advantage that the IDs are relatively stable, because
|
||||
//! they only change when a node of the same kind has been added or removed. (What's unclear is if that matters or if
|
||||
//! it still triggers a re-compute because the AST-id in the node has changed).
|
||||
//!
|
||||
//! The HIR does not store all details. It mainly stores the *public* interface. There's a reference
|
||||
//! back to the AST node to get more details.
|
||||
//!
|
||||
//!
|
||||
|
||||
use crate::ast_ids::{HasAstId, TypedAstId};
|
||||
use crate::files::FileId;
|
||||
use std::fmt::Formatter;
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
pub struct HirAstId<N: HasAstId> {
|
||||
file_id: FileId,
|
||||
node_id: TypedAstId<N>,
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Copy for HirAstId<N> {}
|
||||
impl<N: HasAstId> Clone for HirAstId<N> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> PartialEq for HirAstId<N> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.file_id == other.file_id && self.node_id == other.node_id
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Eq for HirAstId<N> {}
|
||||
|
||||
impl<N: HasAstId> std::fmt::Debug for HirAstId<N> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("HirAstId")
|
||||
.field("file_id", &self.file_id)
|
||||
.field("node_id", &self.node_id)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Hash for HirAstId<N> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.file_id.hash(state);
|
||||
self.node_id.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> HirAstId<N> {
|
||||
pub fn upcast<M: HasAstId>(self) -> HirAstId<M>
|
||||
where
|
||||
N: Into<M>,
|
||||
{
|
||||
HirAstId {
|
||||
file_id: self.file_id,
|
||||
node_id: self.node_id.upcast(),
|
||||
}
|
||||
}
|
||||
}
|
||||
556
crates/red_knot/src/hir/definition.rs
Normal file
556
crates/red_knot/src/hir/definition.rs
Normal file
@@ -0,0 +1,556 @@
|
||||
use std::ops::{Index, Range};
|
||||
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
use ruff_python_ast::visitor::preorder;
|
||||
use ruff_python_ast::visitor::preorder::PreorderVisitor;
|
||||
use ruff_python_ast::{
|
||||
Decorator, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule, Stmt,
|
||||
StmtAnnAssign, StmtAssign, StmtClassDef, StmtFunctionDef, StmtGlobal, StmtImport,
|
||||
StmtImportFrom, StmtNonlocal, StmtTypeAlias, TypeParam, TypeParamParamSpec, TypeParamTypeVar,
|
||||
TypeParamTypeVarTuple, WithItem,
|
||||
};
|
||||
|
||||
use crate::ast_ids::{AstIds, HasAstId};
|
||||
use crate::files::FileId;
|
||||
use crate::hir::HirAstId;
|
||||
use crate::Name;
|
||||
|
||||
#[newtype_index]
|
||||
pub struct FunctionId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Function {
|
||||
ast_id: HirAstId<StmtFunctionDef>,
|
||||
name: Name,
|
||||
parameters: Range<ParameterId>,
|
||||
type_parameters: Range<TypeParameterId>, // TODO: type_parameters, return expression, decorators
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct ParameterId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Parameter {
|
||||
kind: ParameterKind,
|
||||
name: Name,
|
||||
default: Option<()>, // TODO use expression HIR
|
||||
ast_id: HirAstId<ruff_python_ast::Parameter>,
|
||||
}
|
||||
|
||||
// TODO or should `Parameter` be an enum?
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub enum ParameterKind {
|
||||
PositionalOnly,
|
||||
Arguments,
|
||||
Vararg,
|
||||
KeywordOnly,
|
||||
Kwarg,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct ClassId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Class {
|
||||
name: Name,
|
||||
ast_id: HirAstId<StmtClassDef>,
|
||||
// TODO type parameters, inheritance, decorators, members
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct AssignmentId;
|
||||
|
||||
// This can have more than one name...
|
||||
// but that means we can't implement `name()` on `ModuleItem`.
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Assignment {
|
||||
// TODO: Handle multiple names / targets
|
||||
name: Name,
|
||||
ast_id: HirAstId<StmtAssign>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct AnnotatedAssignment {
|
||||
name: Name,
|
||||
ast_id: HirAstId<StmtAnnAssign>,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct AnnotatedAssignmentId;
|
||||
|
||||
#[newtype_index]
|
||||
pub struct TypeAliasId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TypeAlias {
|
||||
name: Name,
|
||||
ast_id: HirAstId<StmtTypeAlias>,
|
||||
parameters: Range<TypeParameterId>,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct TypeParameterId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub enum TypeParameter {
|
||||
TypeVar(TypeParameterTypeVar),
|
||||
ParamSpec(TypeParameterParamSpec),
|
||||
TypeVarTuple(TypeParameterTypeVarTuple),
|
||||
}
|
||||
|
||||
impl TypeParameter {
|
||||
pub fn ast_id(&self) -> HirAstId<TypeParam> {
|
||||
match self {
|
||||
TypeParameter::TypeVar(type_var) => type_var.ast_id.upcast(),
|
||||
TypeParameter::ParamSpec(param_spec) => param_spec.ast_id.upcast(),
|
||||
TypeParameter::TypeVarTuple(type_var_tuple) => type_var_tuple.ast_id.upcast(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TypeParameterTypeVar {
|
||||
name: Name,
|
||||
ast_id: HirAstId<TypeParamTypeVar>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TypeParameterParamSpec {
|
||||
name: Name,
|
||||
ast_id: HirAstId<TypeParamParamSpec>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TypeParameterTypeVarTuple {
|
||||
name: Name,
|
||||
ast_id: HirAstId<TypeParamTypeVarTuple>,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct GlobalId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Global {
|
||||
// TODO track names
|
||||
ast_id: HirAstId<StmtGlobal>,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct NonLocalId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct NonLocal {
|
||||
// TODO track names
|
||||
ast_id: HirAstId<StmtNonlocal>,
|
||||
}
|
||||
|
||||
pub enum DefinitionId {
|
||||
Function(FunctionId),
|
||||
Parameter(ParameterId),
|
||||
Class(ClassId),
|
||||
Assignment(AssignmentId),
|
||||
AnnotatedAssignment(AnnotatedAssignmentId),
|
||||
Global(GlobalId),
|
||||
NonLocal(NonLocalId),
|
||||
TypeParameter(TypeParameterId),
|
||||
TypeAlias(TypeAlias),
|
||||
}
|
||||
|
||||
pub enum DefinitionItem {
|
||||
Function(Function),
|
||||
Parameter(Parameter),
|
||||
Class(Class),
|
||||
Assignment(Assignment),
|
||||
AnnotatedAssignment(AnnotatedAssignment),
|
||||
Global(Global),
|
||||
NonLocal(NonLocal),
|
||||
TypeParameter(TypeParameter),
|
||||
TypeAlias(TypeAlias),
|
||||
}
|
||||
|
||||
// The closest analog is rust-analyzer's `ItemTree`. It only represents "items", the pieces that make up
// the public interface of a module (it excludes all other statements and expressions). rust-analyzer uses
// it as the main input to its name resolution algorithm:
// > It is the input to the name resolution algorithm, as well as to the queries defined in `adt.rs`,
// > `data.rs`, and most things in `attr.rs`.
//
// > One important purpose of this layer is to provide an "invalidation barrier" for incremental
// > computations: when typing inside an item body, the `ItemTree` of the modified file is typically
// > unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`).
//
// I haven't fully figured this out yet, but I think this composes the "public" interface of a module.
// Maybe that's too optimistic, though.
|
||||
#[derive(Debug, Clone, Default, Eq, PartialEq)]
|
||||
pub struct Definitions {
|
||||
functions: IndexVec<FunctionId, Function>,
|
||||
parameters: IndexVec<ParameterId, Parameter>,
|
||||
classes: IndexVec<ClassId, Class>,
|
||||
assignments: IndexVec<AssignmentId, Assignment>,
|
||||
annotated_assignments: IndexVec<AnnotatedAssignmentId, AnnotatedAssignment>,
|
||||
type_aliases: IndexVec<TypeAliasId, TypeAlias>,
|
||||
type_parameters: IndexVec<TypeParameterId, TypeParameter>,
|
||||
globals: IndexVec<GlobalId, Global>,
|
||||
non_locals: IndexVec<NonLocalId, NonLocal>,
|
||||
}
|
||||
|
||||
impl Definitions {
|
||||
pub fn from_module(module: &ModModule, ast_ids: &AstIds, file_id: FileId) -> Self {
|
||||
let mut visitor = DefinitionsVisitor {
|
||||
definitions: Definitions::default(),
|
||||
ast_ids,
|
||||
file_id,
|
||||
};
|
||||
|
||||
visitor.visit_body(&module.body);
|
||||
|
||||
visitor.definitions
|
||||
}
|
||||
}
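
// Illustration: the `IndexVec` + `Range<Id>` layout used by `Function::parameters` and
// `Function::type_parameters`, reduced to a self-contained sketch with plain `usize` indices.
// Items live in one shared arena; an owner only stores the contiguous range of slots it pushed.
#[allow(dead_code)]
mod arena_range_sketch {
    use std::ops::Range;

    pub struct Arena<T> {
        items: Vec<T>,
    }

    impl<T> Arena<T> {
        pub fn new() -> Self {
            Self { items: Vec::new() }
        }

        /// Index that the next `push` will occupy (mirrors `IndexVec::next_index`).
        pub fn next_index(&self) -> usize {
            self.items.len()
        }

        pub fn push(&mut self, item: T) -> usize {
            self.items.push(item);
            self.items.len() - 1
        }

        pub fn slice(&self, range: Range<usize>) -> &[T] {
            &self.items[range]
        }
    }

    pub fn example() {
        let mut parameters = Arena::new();

        let start = parameters.next_index();
        parameters.push("x");
        parameters.push("y");
        let end = parameters.next_index();

        // The "function" only needs to remember `start..end` to find its parameters later.
        assert_eq!(parameters.slice(start..end), ["x", "y"].as_slice());
    }
}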
|
||||
|
||||
impl Index<FunctionId> for Definitions {
|
||||
type Output = Function;
|
||||
|
||||
fn index(&self, index: FunctionId) -> &Self::Output {
|
||||
&self.functions[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<ParameterId> for Definitions {
|
||||
type Output = Parameter;
|
||||
|
||||
fn index(&self, index: ParameterId) -> &Self::Output {
|
||||
&self.parameters[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<ClassId> for Definitions {
|
||||
type Output = Class;
|
||||
|
||||
fn index(&self, index: ClassId) -> &Self::Output {
|
||||
&self.classes[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<AssignmentId> for Definitions {
|
||||
type Output = Assignment;
|
||||
|
||||
fn index(&self, index: AssignmentId) -> &Self::Output {
|
||||
&self.assignments[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<AnnotatedAssignmentId> for Definitions {
|
||||
type Output = AnnotatedAssignment;
|
||||
|
||||
fn index(&self, index: AnnotatedAssignmentId) -> &Self::Output {
|
||||
&self.annotated_assignments[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<TypeAliasId> for Definitions {
|
||||
type Output = TypeAlias;
|
||||
|
||||
fn index(&self, index: TypeAliasId) -> &Self::Output {
|
||||
&self.type_aliases[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<GlobalId> for Definitions {
|
||||
type Output = Global;
|
||||
|
||||
fn index(&self, index: GlobalId) -> &Self::Output {
|
||||
&self.globals[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<NonLocalId> for Definitions {
|
||||
type Output = NonLocal;
|
||||
|
||||
fn index(&self, index: NonLocalId) -> &Self::Output {
|
||||
&self.non_locals[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<TypeParameterId> for Definitions {
|
||||
type Output = TypeParameter;
|
||||
|
||||
fn index(&self, index: TypeParameterId) -> &Self::Output {
|
||||
&self.type_parameters[index]
|
||||
}
|
||||
}
|
||||
|
||||
struct DefinitionsVisitor<'a> {
|
||||
definitions: Definitions,
|
||||
ast_ids: &'a AstIds,
|
||||
file_id: FileId,
|
||||
}
|
||||
|
||||
impl DefinitionsVisitor<'_> {
|
||||
fn ast_id<N: HasAstId>(&self, node: &N) -> HirAstId<N> {
|
||||
HirAstId {
|
||||
file_id: self.file_id,
|
||||
node_id: self.ast_ids.ast_id(node),
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_function_def(&mut self, function: &StmtFunctionDef) -> FunctionId {
|
||||
let name = Name::new(&function.name);
|
||||
|
||||
let first_type_parameter_id = self.definitions.type_parameters.next_index();
|
||||
let mut last_type_parameter_id = first_type_parameter_id;
|
||||
|
||||
if let Some(type_params) = &function.type_params {
|
||||
for parameter in &type_params.type_params {
|
||||
let id = self.lower_type_parameter(parameter);
|
||||
last_type_parameter_id = id;
|
||||
}
|
||||
}
|
||||
|
||||
let parameters = self.lower_parameters(&function.parameters);
|
||||
|
||||
self.definitions.functions.push(Function {
|
||||
name,
|
||||
ast_id: self.ast_id(function),
|
||||
parameters,
|
||||
type_parameters: first_type_parameter_id..last_type_parameter_id,
|
||||
})
|
||||
}
|
||||
|
||||
fn lower_parameters(&mut self, parameters: &ruff_python_ast::Parameters) -> Range<ParameterId> {
    let first_parameter_id = self.definitions.parameters.next_index();
    let mut last_parameter_id = first_parameter_id;

    for parameter in &parameters.posonlyargs {
        last_parameter_id = self.definitions.parameters.push(Parameter {
            kind: ParameterKind::PositionalOnly,
            name: Name::new(&parameter.parameter.name),
            default: None,
            ast_id: self.ast_id(&parameter.parameter),
        });
    }

    if let Some(vararg) = &parameters.vararg {
        last_parameter_id = self.definitions.parameters.push(Parameter {
            kind: ParameterKind::Vararg,
            name: Name::new(&vararg.name),
            default: None,
            ast_id: self.ast_id(vararg),
        });
    }

    for parameter in &parameters.kwonlyargs {
        last_parameter_id = self.definitions.parameters.push(Parameter {
            kind: ParameterKind::KeywordOnly,
            name: Name::new(&parameter.parameter.name),
            default: None,
            ast_id: self.ast_id(&parameter.parameter),
        });
    }

    if let Some(kwarg) = &parameters.kwarg {
        last_parameter_id = self.definitions.parameters.push(Parameter {
            // `**kwargs` gets its own kind rather than reusing `KeywordOnly`.
            kind: ParameterKind::Kwarg,
            name: Name::new(&kwarg.name),
            default: None,
            ast_id: self.ast_id(kwarg),
        });
    }

    first_parameter_id..last_parameter_id
}
|
||||
|
||||
fn lower_class_def(&mut self, class: &StmtClassDef) -> ClassId {
|
||||
let name = Name::new(&class.name);
|
||||
|
||||
self.definitions.classes.push(Class {
|
||||
name,
|
||||
ast_id: self.ast_id(class),
|
||||
})
|
||||
}
|
||||
|
||||
fn lower_assignment(&mut self, assignment: &StmtAssign) {
|
||||
// FIXME handle multiple names
|
||||
if let Some(Expr::Name(name)) = assignment.targets.first() {
|
||||
self.definitions.assignments.push(Assignment {
|
||||
name: Name::new(&name.id),
|
||||
ast_id: self.ast_id(assignment),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_annotated_assignment(&mut self, annotated_assignment: &StmtAnnAssign) {
|
||||
if let Expr::Name(name) = &*annotated_assignment.target {
|
||||
self.definitions
|
||||
.annotated_assignments
|
||||
.push(AnnotatedAssignment {
|
||||
name: Name::new(&name.id),
|
||||
ast_id: self.ast_id(annotated_assignment),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_type_alias(&mut self, type_alias: &StmtTypeAlias) {
|
||||
if let Expr::Name(name) = &*type_alias.name {
|
||||
let name = Name::new(&name.id);
|
||||
|
||||
let lower_parameters_id = self.definitions.type_parameters.next_index();
|
||||
let mut last_parameter_id = lower_parameters_id;
|
||||
|
||||
if let Some(type_params) = &type_alias.type_params {
|
||||
for type_parameter in &type_params.type_params {
|
||||
let id = self.lower_type_parameter(type_parameter);
|
||||
last_parameter_id = id;
|
||||
}
|
||||
}
|
||||
|
||||
self.definitions.type_aliases.push(TypeAlias {
|
||||
name,
|
||||
ast_id: self.ast_id(type_alias),
|
||||
parameters: lower_parameters_id..last_parameter_id,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_type_parameter(&mut self, type_parameter: &TypeParam) -> TypeParameterId {
|
||||
match type_parameter {
|
||||
TypeParam::TypeVar(type_var) => {
|
||||
self.definitions
|
||||
.type_parameters
|
||||
.push(TypeParameter::TypeVar(TypeParameterTypeVar {
|
||||
name: Name::new(&type_var.name),
|
||||
ast_id: self.ast_id(type_var),
|
||||
}))
|
||||
}
|
||||
TypeParam::ParamSpec(param_spec) => {
|
||||
self.definitions
|
||||
.type_parameters
|
||||
.push(TypeParameter::ParamSpec(TypeParameterParamSpec {
|
||||
name: Name::new(&param_spec.name),
|
||||
ast_id: self.ast_id(param_spec),
|
||||
}))
|
||||
}
|
||||
TypeParam::TypeVarTuple(type_var_tuple) => {
|
||||
self.definitions
|
||||
.type_parameters
|
||||
.push(TypeParameter::TypeVarTuple(TypeParameterTypeVarTuple {
|
||||
name: Name::new(&type_var_tuple.name),
|
||||
ast_id: self.ast_id(type_var_tuple),
|
||||
}))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_import(&mut self, _import: &StmtImport) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fn lower_import_from(&mut self, _import_from: &StmtImportFrom) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fn lower_global(&mut self, global: &StmtGlobal) -> GlobalId {
|
||||
self.definitions.globals.push(Global {
|
||||
ast_id: self.ast_id(global),
|
||||
})
|
||||
}
|
||||
|
||||
fn lower_non_local(&mut self, non_local: &StmtNonlocal) -> NonLocalId {
|
||||
self.definitions.non_locals.push(NonLocal {
|
||||
ast_id: self.ast_id(non_local),
|
||||
})
|
||||
}
|
||||
|
||||
fn lower_except_handler(&mut self, _except_handler: &ExceptHandlerExceptHandler) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fn lower_with_item(&mut self, _with_item: &WithItem) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fn lower_match_case(&mut self, _match_case: &MatchCase) {
|
||||
// TODO
|
||||
}
|
||||
}
|
||||
|
||||
impl PreorderVisitor<'_> for DefinitionsVisitor<'_> {
|
||||
fn visit_stmt(&mut self, stmt: &Stmt) {
|
||||
match stmt {
|
||||
// Definition statements
|
||||
Stmt::FunctionDef(definition) => {
|
||||
self.lower_function_def(definition);
|
||||
self.visit_body(&definition.body);
|
||||
}
|
||||
Stmt::ClassDef(definition) => {
|
||||
self.lower_class_def(definition);
|
||||
self.visit_body(&definition.body);
|
||||
}
|
||||
Stmt::Assign(assignment) => {
|
||||
self.lower_assignment(assignment);
|
||||
}
|
||||
Stmt::AnnAssign(annotated_assignment) => {
|
||||
self.lower_annotated_assignment(annotated_assignment);
|
||||
}
|
||||
Stmt::TypeAlias(type_alias) => {
|
||||
self.lower_type_alias(type_alias);
|
||||
}
|
||||
|
||||
Stmt::Import(import) => self.lower_import(import),
|
||||
Stmt::ImportFrom(import_from) => self.lower_import_from(import_from),
|
||||
Stmt::Global(global) => {
|
||||
self.lower_global(global);
|
||||
}
|
||||
Stmt::Nonlocal(non_local) => {
|
||||
self.lower_non_local(non_local);
|
||||
}
|
||||
|
||||
// Visit the compound statement bodies because they can contain other definitions.
|
||||
Stmt::For(_)
|
||||
| Stmt::While(_)
|
||||
| Stmt::If(_)
|
||||
| Stmt::With(_)
|
||||
| Stmt::Match(_)
|
||||
| Stmt::Try(_) => {
|
||||
preorder::walk_stmt(self, stmt);
|
||||
}
|
||||
|
||||
// Skip over simple statements because they can't contain any other definitions.
|
||||
Stmt::Return(_)
|
||||
| Stmt::Delete(_)
|
||||
| Stmt::AugAssign(_)
|
||||
| Stmt::Raise(_)
|
||||
| Stmt::Assert(_)
|
||||
| Stmt::Expr(_)
|
||||
| Stmt::Pass(_)
|
||||
| Stmt::Break(_)
|
||||
| Stmt::Continue(_)
|
||||
| Stmt::IpyEscapeCommand(_) => {
|
||||
// No op
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, _: &'_ Expr) {}
|
||||
|
||||
fn visit_decorator(&mut self, _decorator: &'_ Decorator) {}
|
||||
|
||||
fn visit_except_handler(&mut self, except_handler: &'_ ExceptHandler) {
|
||||
match except_handler {
|
||||
ExceptHandler::ExceptHandler(except_handler) => {
|
||||
self.lower_except_handler(except_handler);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_with_item(&mut self, with_item: &'_ WithItem) {
|
||||
self.lower_with_item(with_item);
|
||||
}
|
||||
|
||||
fn visit_match_case(&mut self, match_case: &'_ MatchCase) {
|
||||
self.lower_match_case(match_case);
|
||||
self.visit_body(&match_case.body);
|
||||
}
|
||||
}
|
||||
crates/red_knot/src/lib.rs (new file, 109 lines)
@@ -0,0 +1,109 @@
|
||||
use std::fmt::Formatter;
|
||||
use std::hash::BuildHasherDefault;
|
||||
use std::ops::Deref;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use rustc_hash::{FxHashSet, FxHasher};
|
||||
|
||||
use crate::files::FileId;
|
||||
|
||||
pub mod ast_ids;
|
||||
pub mod cache;
|
||||
pub mod cancellation;
|
||||
pub mod db;
|
||||
pub mod files;
|
||||
pub mod hir;
|
||||
pub mod lint;
|
||||
pub mod module;
|
||||
mod parse;
|
||||
pub mod program;
|
||||
pub mod source;
|
||||
mod symbols;
|
||||
mod types;
|
||||
pub mod watch;
|
||||
|
||||
pub(crate) type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
|
||||
#[allow(unused)]
|
||||
pub(crate) type FxDashSet<V> = dashmap::DashSet<V, BuildHasherDefault<FxHasher>>;
|
||||
pub(crate) type FxIndexSet<V> = indexmap::set::IndexSet<V, BuildHasherDefault<FxHasher>>;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Workspace {
|
||||
/// TODO: This should be a resolved path. We should probably use a newtype wrapper that guarantees
/// that the path is UTF-8 and normalized.
|
||||
root: PathBuf,
|
||||
/// The files that are open in the workspace.
|
||||
///
|
||||
/// * Editor: The files that are actively being edited in the editor (the user has a tab open with the file).
|
||||
/// * CLI: The resolved files passed as arguments to the CLI.
|
||||
open_files: FxHashSet<FileId>,
|
||||
}
|
||||
|
||||
impl Workspace {
|
||||
pub fn new(root: PathBuf) -> Self {
|
||||
Self {
|
||||
root,
|
||||
open_files: FxHashSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn root(&self) -> &Path {
|
||||
self.root.as_path()
|
||||
}
|
||||
|
||||
// TODO having the content in workspace feels wrong.
|
||||
pub fn open_file(&mut self, file_id: FileId) {
|
||||
self.open_files.insert(file_id);
|
||||
}
|
||||
|
||||
pub fn close_file(&mut self, file_id: FileId) {
|
||||
self.open_files.remove(&file_id);
|
||||
}
|
||||
|
||||
// TODO introduce an `OpenFile` type instead of using an anonymous tuple.
|
||||
pub fn open_files(&self) -> impl Iterator<Item = FileId> + '_ {
|
||||
self.open_files.iter().copied()
|
||||
}
|
||||
|
||||
pub fn is_file_open(&self, file_id: FileId) -> bool {
|
||||
self.open_files.contains(&file_id)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
|
||||
pub struct Name(smol_str::SmolStr);
|
||||
|
||||
impl Name {
|
||||
#[inline]
|
||||
pub fn new(name: &str) -> Self {
|
||||
Self(smol_str::SmolStr::new(name))
|
||||
}
|
||||
|
||||
pub fn as_str(&self) -> &str {
|
||||
self.0.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Name {
|
||||
type Target = str;
|
||||
|
||||
#[inline]
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> From<T> for Name
|
||||
where
|
||||
T: Into<smol_str::SmolStr>,
|
||||
{
|
||||
fn from(value: T) -> Self {
|
||||
Self(value.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Name {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
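
// Illustration: how `Name` is meant to be used as a cheap, clonable string newtype. A minimal
// sketch relying only on the impls directly above.
#[allow(dead_code)]
fn name_usage_example() {
    let name = Name::new("resolve_module");

    // `Deref<Target = str>` lets a `Name` be passed wherever `&str` methods are needed.
    assert!(name.starts_with("resolve"));

    // The blanket `From` impl accepts anything convertible into a `SmolStr`, including `&str`.
    let other = Name::from("resolve_module");
    assert_eq!(name, other);
    assert_eq!(name.to_string(), "resolve_module");
}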
|
||||
crates/red_knot/src/lint.rs (new file, 321 lines)
@@ -0,0 +1,321 @@
|
||||
use std::cell::RefCell;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use ruff_python_ast::visitor::Visitor;
|
||||
use ruff_python_ast::{ModModule, StringLiteral};
|
||||
|
||||
use crate::cache::KeyValueCache;
|
||||
use crate::db::{LintDb, LintJar, QueryResult};
|
||||
use crate::files::FileId;
|
||||
use crate::module::ModuleName;
|
||||
use crate::parse::{parse, Parsed};
|
||||
use crate::source::{source_text, Source};
|
||||
use crate::symbols::{
|
||||
resolve_global_symbol, symbol_table, Definition, GlobalSymbolId, SymbolId, SymbolTable,
|
||||
};
|
||||
use crate::types::{infer_definition_type, infer_symbol_type, Type};
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn lint_syntax(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
|
||||
let lint_jar: &LintJar = db.jar()?;
|
||||
let storage = &lint_jar.lint_syntax;
|
||||
|
||||
#[allow(clippy::print_stdout)]
|
||||
if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
|
||||
for i in 0..10 {
|
||||
db.cancelled()?;
|
||||
println!("RED_KNOT_SLOW_LINT is set, sleeping for {i}/10 seconds");
|
||||
std::thread::sleep(Duration::from_secs(1));
|
||||
}
|
||||
}
|
||||
|
||||
storage.get(&file_id, |file_id| {
|
||||
let mut diagnostics = Vec::new();
|
||||
|
||||
let source = source_text(db.upcast(), *file_id)?;
|
||||
lint_lines(source.text(), &mut diagnostics);
|
||||
|
||||
let parsed = parse(db.upcast(), *file_id)?;
|
||||
|
||||
if parsed.errors().is_empty() {
|
||||
let ast = parsed.ast();
|
||||
|
||||
let mut visitor = SyntaxLintVisitor {
|
||||
diagnostics,
|
||||
source: source.text(),
|
||||
};
|
||||
visitor.visit_body(&ast.body);
|
||||
diagnostics = visitor.diagnostics;
|
||||
} else {
|
||||
diagnostics.extend(parsed.errors().iter().map(std::string::ToString::to_string));
|
||||
}
|
||||
|
||||
Ok(Diagnostics::from(diagnostics))
|
||||
})
|
||||
}
|
||||
|
||||
fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
|
||||
for (line_number, line) in source.lines().enumerate() {
|
||||
if line.len() < 88 {
|
||||
continue;
|
||||
}
|
||||
|
||||
let char_count = line.chars().count();
|
||||
if char_count > 88 {
|
||||
diagnostics.push(format!(
|
||||
"Line {} is too long ({} characters)",
|
||||
line_number + 1,
|
||||
char_count
|
||||
));
|
||||
}
|
||||
}
|
||||
}
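
// Illustration: the diagnostic `lint_lines` produces for a long line (hypothetical test module;
// it assumes it sits next to the private function above).
#[cfg(test)]
mod lint_lines_tests {
    use super::lint_lines;

    #[test]
    fn flags_lines_longer_than_88_characters() {
        let source = format!("short line\n{}\n", "x".repeat(100));

        let mut diagnostics = Vec::new();
        lint_lines(&source, &mut diagnostics);

        assert_eq!(
            diagnostics,
            vec!["Line 2 is too long (100 characters)".to_string()]
        );
    }
}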
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn lint_semantic(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
|
||||
let lint_jar: &LintJar = db.jar()?;
|
||||
let storage = &lint_jar.lint_semantic;
|
||||
|
||||
storage.get(&file_id, |file_id| {
|
||||
let source = source_text(db.upcast(), *file_id)?;
|
||||
let parsed = parse(db.upcast(), *file_id)?;
|
||||
let symbols = symbol_table(db.upcast(), *file_id)?;
|
||||
|
||||
let context = SemanticLintContext {
|
||||
file_id: *file_id,
|
||||
source,
|
||||
parsed,
|
||||
symbols,
|
||||
db,
|
||||
diagnostics: RefCell::new(Vec::new()),
|
||||
};
|
||||
|
||||
lint_unresolved_imports(&context)?;
|
||||
lint_bad_overrides(&context)?;
|
||||
|
||||
Ok(Diagnostics::from(context.diagnostics.take()))
|
||||
})
|
||||
}
|
||||
|
||||
fn lint_unresolved_imports(context: &SemanticLintContext) -> QueryResult<()> {
|
||||
// TODO: Consider iterating over the dependencies (imports) only instead of all definitions.
|
||||
for (symbol, definition) in context.symbols().all_definitions() {
|
||||
match definition {
|
||||
Definition::Import(import) => {
|
||||
let ty = context.infer_symbol_type(symbol)?;
|
||||
|
||||
if ty.is_unknown() {
|
||||
context.push_diagnostic(format!("Unresolved module {}", import.module));
|
||||
}
|
||||
}
|
||||
Definition::ImportFrom(import) => {
|
||||
let ty = context.infer_symbol_type(symbol)?;
|
||||
|
||||
if ty.is_unknown() {
|
||||
let module_name = import.module().map(Deref::deref).unwrap_or_default();
|
||||
let message = if import.level() > 0 {
|
||||
format!(
|
||||
"Unresolved relative import '{}' from {}{}",
|
||||
import.name(),
|
||||
".".repeat(import.level() as usize),
|
||||
module_name
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"Unresolved import '{}' from '{}'",
|
||||
import.name(),
|
||||
module_name
|
||||
)
|
||||
};
|
||||
|
||||
context.push_diagnostic(message);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn lint_bad_overrides(context: &SemanticLintContext) -> QueryResult<()> {
|
||||
// TODO we should have a special marker on the real typing module (from typeshed) so if you
|
||||
// have your own "typing" module in your project, we don't consider it THE typing module (and
|
||||
// same for other stdlib modules that our lint rules care about)
|
||||
let Some(typing_override) =
|
||||
resolve_global_symbol(context.db.upcast(), ModuleName::new("typing"), "override")?
|
||||
else {
|
||||
// TODO once we bundle typeshed, this should be unreachable!()
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
// TODO we should maybe index definitions by type instead of iterating all, or else iterate all
|
||||
// just once, match, and branch to all lint rules that care about a type of definition
|
||||
for (symbol, definition) in context.symbols().all_definitions() {
|
||||
if !matches!(definition, Definition::FunctionDef(_)) {
|
||||
continue;
|
||||
}
|
||||
let ty = infer_definition_type(
|
||||
context.db.upcast(),
|
||||
GlobalSymbolId {
|
||||
file_id: context.file_id,
|
||||
symbol_id: symbol,
|
||||
},
|
||||
definition.clone(),
|
||||
)?;
|
||||
let Type::Function(func) = ty else {
|
||||
unreachable!("type of a FunctionDef should always be a Function");
|
||||
};
|
||||
let Some(class) = func.get_containing_class(context.db.upcast())? else {
|
||||
// not a method of a class
|
||||
continue;
|
||||
};
|
||||
if func.has_decorator(context.db.upcast(), typing_override)? {
|
||||
let method_name = func.name(context.db.upcast())?;
|
||||
if class
|
||||
.get_super_class_member(context.db.upcast(), &method_name)?
|
||||
.is_none()
|
||||
{
|
||||
// TODO should have a qualname() method to support nested classes
|
||||
context.push_diagnostic(
|
||||
format!(
|
||||
"Method {}.{} is decorated with `typing.override` but does not override any base class method",
|
||||
class.name(context.db.upcast())?,
|
||||
method_name,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub struct SemanticLintContext<'a> {
|
||||
file_id: FileId,
|
||||
source: Source,
|
||||
parsed: Parsed,
|
||||
symbols: Arc<SymbolTable>,
|
||||
db: &'a dyn LintDb,
|
||||
diagnostics: RefCell<Vec<String>>,
|
||||
}
|
||||
|
||||
impl<'a> SemanticLintContext<'a> {
|
||||
pub fn source_text(&self) -> &str {
|
||||
self.source.text()
|
||||
}
|
||||
|
||||
pub fn file_id(&self) -> FileId {
|
||||
self.file_id
|
||||
}
|
||||
|
||||
pub fn ast(&self) -> &ModModule {
|
||||
self.parsed.ast()
|
||||
}
|
||||
|
||||
pub fn symbols(&self) -> &SymbolTable {
|
||||
&self.symbols
|
||||
}
|
||||
|
||||
pub fn infer_symbol_type(&self, symbol_id: SymbolId) -> QueryResult<Type> {
|
||||
infer_symbol_type(
|
||||
self.db.upcast(),
|
||||
GlobalSymbolId {
|
||||
file_id: self.file_id,
|
||||
symbol_id,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
pub fn push_diagnostic(&self, diagnostic: String) {
|
||||
self.diagnostics.borrow_mut().push(diagnostic);
|
||||
}
|
||||
|
||||
pub fn extend_diagnostics(&mut self, diagnostics: impl IntoIterator<Item = String>) {
|
||||
self.diagnostics.get_mut().extend(diagnostics);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct SyntaxLintVisitor<'a> {
|
||||
diagnostics: Vec<String>,
|
||||
source: &'a str,
|
||||
}
|
||||
|
||||
impl Visitor<'_> for SyntaxLintVisitor<'_> {
|
||||
fn visit_string_literal(&mut self, string_literal: &'_ StringLiteral) {
|
||||
// A very naive implementation of a "use double quotes" rule
|
||||
let text = &self.source[string_literal.range];
|
||||
|
||||
if text.starts_with('\'') {
|
||||
self.diagnostics
|
||||
.push("Use double quotes for strings".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Diagnostics {
|
||||
Empty,
|
||||
List(Arc<Vec<String>>),
|
||||
}
|
||||
|
||||
impl Diagnostics {
|
||||
pub fn as_slice(&self) -> &[String] {
|
||||
match self {
|
||||
Diagnostics::Empty => &[],
|
||||
Diagnostics::List(list) => list.as_slice(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Diagnostics {
|
||||
type Target = [String];
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.as_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Vec<String>> for Diagnostics {
|
||||
fn from(value: Vec<String>) -> Self {
|
||||
if value.is_empty() {
|
||||
Diagnostics::Empty
|
||||
} else {
|
||||
Diagnostics::List(Arc::new(value))
|
||||
}
|
||||
}
|
||||
}
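
// Illustration: what the `Empty` / `Arc`-backed split above buys — an empty result never
// allocates, and clones of a non-empty result share one allocation (minimal sketch using only
// the impls above).
#[allow(dead_code)]
fn diagnostics_sharing_example() {
    let diagnostics = Diagnostics::from(vec!["Unresolved module foo".to_string()]);

    // Cloning only bumps the `Arc` reference count; both handles expose the same slice via `Deref`.
    let shared = diagnostics.clone();
    assert_eq!(&*diagnostics, &*shared);

    // The empty case is a plain unit variant, so no `Vec` is ever allocated for it.
    assert!(Diagnostics::from(Vec::<String>::new()).is_empty());
}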
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct LintSyntaxStorage(KeyValueCache<FileId, Diagnostics>);
|
||||
|
||||
impl Deref for LintSyntaxStorage {
|
||||
type Target = KeyValueCache<FileId, Diagnostics>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for LintSyntaxStorage {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct LintSemanticStorage(KeyValueCache<FileId, Diagnostics>);
|
||||
|
||||
impl Deref for LintSemanticStorage {
|
||||
type Target = KeyValueCache<FileId, Diagnostics>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for LintSemanticStorage {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
crates/red_knot/src/main.rs (new file, 359 lines)
@@ -0,0 +1,359 @@
|
||||
#![allow(clippy::dbg_macro)]
|
||||
|
||||
use std::path::Path;
|
||||
use std::sync::Mutex;
|
||||
|
||||
use crossbeam::channel as crossbeam_channel;
|
||||
use tracing::subscriber::Interest;
|
||||
use tracing::{Level, Metadata};
|
||||
use tracing_subscriber::filter::LevelFilter;
|
||||
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
|
||||
use tracing_subscriber::{Layer, Registry};
|
||||
use tracing_tree::time::Uptime;
|
||||
|
||||
use red_knot::db::{HasJar, ParallelDatabase, QueryError, SourceDb, SourceJar};
|
||||
use red_knot::module::{set_module_search_paths, ModuleSearchPath, ModuleSearchPathKind};
|
||||
use red_knot::program::check::ExecutionMode;
|
||||
use red_knot::program::{FileWatcherChange, Program};
|
||||
use red_knot::watch::FileWatcher;
|
||||
use red_knot::Workspace;
|
||||
|
||||
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
|
||||
fn main() -> anyhow::Result<()> {
|
||||
setup_tracing();
|
||||
|
||||
let arguments: Vec<_> = std::env::args().collect();
|
||||
|
||||
if arguments.len() < 2 {
|
||||
eprintln!("Usage: red_knot <path>");
|
||||
return Err(anyhow::anyhow!("Invalid arguments"));
|
||||
}
|
||||
|
||||
let entry_point = Path::new(&arguments[1]);
|
||||
|
||||
if !entry_point.exists() {
|
||||
eprintln!("The entry point does not exist.");
|
||||
return Err(anyhow::anyhow!("Invalid arguments"));
|
||||
}
|
||||
|
||||
if !entry_point.is_file() {
|
||||
eprintln!("The entry point is not a file.");
|
||||
return Err(anyhow::anyhow!("Invalid arguments"));
|
||||
}
|
||||
|
||||
let workspace_folder = entry_point.parent().unwrap();
|
||||
let workspace = Workspace::new(workspace_folder.to_path_buf());
|
||||
|
||||
let workspace_search_path = ModuleSearchPath::new(
|
||||
workspace.root().to_path_buf(),
|
||||
ModuleSearchPathKind::FirstParty,
|
||||
);
|
||||
let mut program = Program::new(workspace);
|
||||
set_module_search_paths(&mut program, vec![workspace_search_path]);
|
||||
|
||||
let entry_id = program.file_id(entry_point);
|
||||
program.workspace_mut().open_file(entry_id);
|
||||
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new();
|
||||
|
||||
// Listen to Ctrl+C and abort the watch mode.
|
||||
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
|
||||
ctrlc::set_handler(move || {
|
||||
let mut lock = main_loop_cancellation_token.lock().unwrap();
|
||||
|
||||
if let Some(token) = lock.take() {
|
||||
token.stop();
|
||||
}
|
||||
})?;
|
||||
|
||||
let file_changes_notifier = main_loop.file_changes_notifier();
|
||||
|
||||
// Watch for file changes and re-trigger the analysis.
|
||||
let mut file_watcher = FileWatcher::new(move |changes| {
|
||||
file_changes_notifier.notify(changes);
|
||||
})?;
|
||||
|
||||
file_watcher.watch_folder(workspace_folder)?;
|
||||
|
||||
main_loop.run(&mut program);
|
||||
|
||||
let source_jar: &SourceJar = program.jar().unwrap();
|
||||
|
||||
dbg!(source_jar.parsed.statistics());
|
||||
dbg!(source_jar.sources.statistics());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct MainLoop {
|
||||
orchestrator_sender: crossbeam_channel::Sender<OrchestratorMessage>,
|
||||
main_loop_receiver: crossbeam_channel::Receiver<MainLoopMessage>,
|
||||
}
|
||||
|
||||
impl MainLoop {
|
||||
fn new() -> (Self, MainLoopCancellationToken) {
|
||||
let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
|
||||
let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);
|
||||
|
||||
let mut orchestrator = Orchestrator {
|
||||
receiver: orchestrator_receiver,
|
||||
sender: main_loop_sender.clone(),
|
||||
revision: 0,
|
||||
};
|
||||
|
||||
std::thread::spawn(move || {
|
||||
orchestrator.run();
|
||||
});
|
||||
|
||||
(
|
||||
Self {
|
||||
orchestrator_sender,
|
||||
main_loop_receiver,
|
||||
},
|
||||
MainLoopCancellationToken {
|
||||
sender: main_loop_sender,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn file_changes_notifier(&self) -> FileChangesNotifier {
|
||||
FileChangesNotifier {
|
||||
sender: self.orchestrator_sender.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn run(self, program: &mut Program) {
|
||||
self.orchestrator_sender
|
||||
.send(OrchestratorMessage::Run)
|
||||
.unwrap();
|
||||
|
||||
for message in &self.main_loop_receiver {
|
||||
tracing::trace!("Main Loop: Tick");
|
||||
|
||||
match message {
|
||||
MainLoopMessage::CheckProgram { revision } => {
|
||||
let program = program.snapshot();
|
||||
let sender = self.orchestrator_sender.clone();
|
||||
|
||||
// Spawn a new task that checks the program. This needs to be done in a separate thread
|
||||
// to prevent blocking the main loop here.
|
||||
rayon::spawn(move || match program.check(ExecutionMode::ThreadPool) {
|
||||
Ok(result) => {
|
||||
sender
|
||||
.send(OrchestratorMessage::CheckProgramCompleted {
|
||||
diagnostics: result,
|
||||
revision,
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
Err(QueryError::Cancelled) => {}
|
||||
});
|
||||
}
|
||||
MainLoopMessage::ApplyChanges(changes) => {
|
||||
// Automatically cancels any pending queries and waits for them to complete.
|
||||
program.apply_changes(changes);
|
||||
}
|
||||
MainLoopMessage::CheckCompleted(diagnostics) => {
|
||||
dbg!(diagnostics);
|
||||
}
|
||||
MainLoopMessage::Exit => {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for MainLoop {
|
||||
fn drop(&mut self) {
|
||||
self.orchestrator_sender
|
||||
.send(OrchestratorMessage::Shutdown)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct FileChangesNotifier {
|
||||
sender: crossbeam_channel::Sender<OrchestratorMessage>,
|
||||
}
|
||||
|
||||
impl FileChangesNotifier {
|
||||
fn notify(&self, changes: Vec<FileWatcherChange>) {
|
||||
self.sender
|
||||
.send(OrchestratorMessage::FileChanges(changes))
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct MainLoopCancellationToken {
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
}
|
||||
|
||||
impl MainLoopCancellationToken {
|
||||
fn stop(self) {
|
||||
self.sender.send(MainLoopMessage::Exit).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
struct Orchestrator {
|
||||
/// Sends messages to the main loop.
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
/// Receives messages from the main loop.
|
||||
receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
|
||||
revision: usize,
|
||||
}
|
||||
|
||||
impl Orchestrator {
|
||||
fn run(&mut self) {
|
||||
while let Ok(message) = self.receiver.recv() {
|
||||
match message {
|
||||
OrchestratorMessage::Run => {
|
||||
self.sender
|
||||
.send(MainLoopMessage::CheckProgram {
|
||||
revision: self.revision,
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
OrchestratorMessage::CheckProgramCompleted {
|
||||
diagnostics,
|
||||
revision,
|
||||
} => {
|
||||
// Only take the diagnostics if they are for the latest revision.
|
||||
if self.revision == revision {
|
||||
self.sender
|
||||
.send(MainLoopMessage::CheckCompleted(diagnostics))
|
||||
.unwrap();
|
||||
} else {
|
||||
tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
|
||||
}
|
||||
}
|
||||
|
||||
OrchestratorMessage::FileChanges(changes) => {
|
||||
// Request cancellation, but wait until all analysis tasks have completed to
|
||||
// avoid stale messages in the next main loop.
|
||||
|
||||
self.revision += 1;
|
||||
self.debounce_changes(changes);
|
||||
}
|
||||
OrchestratorMessage::Shutdown => {
|
||||
return self.shutdown();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
|
||||
loop {
|
||||
// Consume any further incoming file change messages before scheduling a new analysis, but don't wait for more than 10 ms.
|
||||
crossbeam_channel::select! {
|
||||
recv(self.receiver) -> message => {
|
||||
match message {
|
||||
Ok(OrchestratorMessage::Shutdown) => {
|
||||
return self.shutdown();
|
||||
}
|
||||
Ok(OrchestratorMessage::FileChanges(file_changes)) => {
|
||||
changes.extend(file_changes);
|
||||
}
|
||||
|
||||
Ok(OrchestratorMessage::CheckProgramCompleted { .. })=> {
|
||||
// disregard any outdated completion message.
|
||||
}
|
||||
Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),
|
||||
|
||||
Err(_) => {
|
||||
// There are no more senders, no point in waiting for more messages
|
||||
return;
|
||||
}
|
||||
}
|
||||
},
|
||||
default(std::time::Duration::from_millis(10)) => {
|
||||
// No more file changes after 10 ms, send the changes and schedule a new analysis
|
||||
self.sender.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
|
||||
self.sender.send(MainLoopMessage::CheckProgram { revision: self.revision}).unwrap();
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::unused_self)]
|
||||
fn shutdown(&self) {
|
||||
tracing::trace!("Shutting down orchestrator.");
|
||||
}
|
||||
}
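
// Illustration: the debounce shape above in isolation — keep draining changes until the channel
// has been quiet for 10 ms, then act on the whole batch. Self-contained apart from the
// `crossbeam_channel` alias imported at the top of this file.
#[allow(dead_code)]
fn debounce_sketch() {
    let (sender, receiver) = crossbeam_channel::unbounded::<&'static str>();
    sender.send("change-1").unwrap();
    sender.send("change-2").unwrap();

    let mut batch = Vec::new();
    loop {
        crossbeam_channel::select! {
            recv(receiver) -> message => match message {
                Ok(change) => batch.push(change),
                // All senders are gone; nothing more can arrive.
                Err(_) => break,
            },
            // Nothing arrived within 10 ms: treat the batch as complete.
            default(std::time::Duration::from_millis(10)) => break,
        }
    }

    assert_eq!(batch, ["change-1", "change-2"]);
}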
|
||||
|
||||
/// Message sent from the orchestrator to the main loop.
|
||||
#[derive(Debug)]
|
||||
enum MainLoopMessage {
|
||||
CheckProgram { revision: usize },
|
||||
CheckCompleted(Vec<String>),
|
||||
ApplyChanges(Vec<FileWatcherChange>),
|
||||
Exit,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum OrchestratorMessage {
|
||||
Run,
|
||||
Shutdown,
|
||||
|
||||
CheckProgramCompleted {
|
||||
diagnostics: Vec<String>,
|
||||
revision: usize,
|
||||
},
|
||||
|
||||
FileChanges(Vec<FileWatcherChange>),
|
||||
}
|
||||
|
||||
fn setup_tracing() {
|
||||
let subscriber = Registry::default().with(
|
||||
tracing_tree::HierarchicalLayer::default()
|
||||
.with_indent_lines(true)
|
||||
.with_indent_amount(2)
|
||||
.with_bracketed_fields(true)
|
||||
.with_thread_ids(true)
|
||||
.with_targets(true)
|
||||
.with_writer(|| Box::new(std::io::stderr()))
|
||||
.with_timer(Uptime::default())
|
||||
.with_filter(LoggingFilter {
|
||||
trace_level: Level::TRACE,
|
||||
}),
|
||||
);
|
||||
|
||||
tracing::subscriber::set_global_default(subscriber).unwrap();
|
||||
}
|
||||
|
||||
struct LoggingFilter {
|
||||
trace_level: Level,
|
||||
}
|
||||
|
||||
impl LoggingFilter {
|
||||
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
|
||||
let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
|
||||
self.trace_level
|
||||
} else {
|
||||
Level::INFO
|
||||
};
|
||||
|
||||
meta.level() <= &filter
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> Filter<S> for LoggingFilter {
|
||||
fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
|
||||
self.is_enabled(meta)
|
||||
}
|
||||
|
||||
fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
|
||||
if self.is_enabled(meta) {
|
||||
Interest::always()
|
||||
} else {
|
||||
Interest::never()
|
||||
}
|
||||
}
|
||||
|
||||
fn max_level_hint(&self) -> Option<LevelFilter> {
|
||||
Some(LevelFilter::from_level(self.trace_level))
|
||||
}
|
||||
}
|
||||
crates/red_knot/src/module.rs (new file, 1090 lines)
File diff suppressed because it is too large
crates/red_knot/src/parse.rs (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::Arc;
|
||||
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_parser::{Mode, ParseError};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use crate::cache::KeyValueCache;
|
||||
use crate::db::{QueryResult, SourceDb};
|
||||
use crate::files::FileId;
|
||||
use crate::source::source_text;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Parsed {
|
||||
inner: Arc<ParsedInner>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
struct ParsedInner {
|
||||
ast: ast::ModModule,
|
||||
errors: Vec<ParseError>,
|
||||
}
|
||||
|
||||
impl Parsed {
|
||||
fn new(ast: ast::ModModule, errors: Vec<ParseError>) -> Self {
|
||||
Self {
|
||||
inner: Arc::new(ParsedInner { ast, errors }),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn from_text(text: &str) -> Self {
|
||||
let result = ruff_python_parser::parse(text, Mode::Module);
|
||||
|
||||
let (module, errors) = match result {
|
||||
Ok(ast::Mod::Module(module)) => (module, vec![]),
|
||||
Ok(ast::Mod::Expression(expression)) => (
|
||||
ast::ModModule {
|
||||
range: expression.range(),
|
||||
body: vec![ast::Stmt::Expr(ast::StmtExpr {
|
||||
range: expression.range(),
|
||||
value: expression.body,
|
||||
})],
|
||||
},
|
||||
vec![],
|
||||
),
|
||||
Err(errors) => (
|
||||
ast::ModModule {
|
||||
range: TextRange::default(),
|
||||
body: Vec::new(),
|
||||
},
|
||||
vec![errors],
|
||||
),
|
||||
};
|
||||
|
||||
Parsed::new(module, errors)
|
||||
}
|
||||
|
||||
pub fn ast(&self) -> &ast::ModModule {
|
||||
&self.inner.ast
|
||||
}
|
||||
|
||||
pub fn errors(&self) -> &[ParseError] {
|
||||
&self.inner.errors
|
||||
}
|
||||
}
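
// Illustration: the error-recovery contract of `from_text` — it always hands back a module, and
// syntax errors surface through `errors()` instead of a failed query (hypothetical test module
// placed next to the `pub(crate)` constructor).
#[cfg(test)]
mod parsed_tests {
    use super::Parsed;

    #[test]
    fn valid_source_parses_without_errors() {
        let parsed = Parsed::from_text("x = 1\n");
        assert_eq!(parsed.ast().body.len(), 1);
        assert!(parsed.errors().is_empty());
    }

    #[test]
    fn invalid_source_reports_errors() {
        let parsed = Parsed::from_text("def broken(:\n");
        assert!(!parsed.errors().is_empty());
    }
}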
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn parse(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Parsed> {
|
||||
let jar = db.jar()?;
|
||||
|
||||
jar.parsed.get(&file_id, |file_id| {
|
||||
let source = source_text(db, *file_id)?;
|
||||
|
||||
Ok(Parsed::from_text(source.text()))
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct ParsedStorage(KeyValueCache<FileId, Parsed>);
|
||||
|
||||
impl Deref for ParsedStorage {
|
||||
type Target = KeyValueCache<FileId, Parsed>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for ParsedStorage {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
crates/red_knot/src/program/check.rs (new file, 413 lines)
@@ -0,0 +1,413 @@
|
||||
use rayon::{current_num_threads, yield_local};
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use crate::db::{Database, QueryError, QueryResult};
|
||||
use crate::files::FileId;
|
||||
use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
|
||||
use crate::module::{file_to_module, resolve_module};
|
||||
use crate::program::Program;
|
||||
use crate::symbols::{symbol_table, Dependency};
|
||||
|
||||
impl Program {
|
||||
/// Checks all open files in the workspace and its dependencies.
|
||||
#[tracing::instrument(level = "debug", skip_all)]
|
||||
pub fn check(&self, mode: ExecutionMode) -> QueryResult<Vec<String>> {
|
||||
self.cancelled()?;
|
||||
|
||||
let mut context = CheckContext::new(self);
|
||||
|
||||
match mode {
|
||||
ExecutionMode::SingleThreaded => SingleThreadedExecutor.run(&mut context)?,
|
||||
ExecutionMode::ThreadPool => ThreadPoolExecutor.run(&mut context)?,
|
||||
};
|
||||
|
||||
Ok(context.finish())
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(self, context))]
|
||||
fn check_file(&self, file: FileId, context: &CheckFileContext) -> QueryResult<Diagnostics> {
|
||||
self.cancelled()?;
|
||||
|
||||
let symbol_table = symbol_table(self, file)?;
|
||||
let dependencies = symbol_table.dependencies();
|
||||
|
||||
if !dependencies.is_empty() {
|
||||
let module = file_to_module(self, file)?;
|
||||
|
||||
// TODO scheduling all dependencies here is wasteful if we don't infer any types on them
|
||||
// but I think that's unlikely, so it is okay?
|
||||
// Anyway, we need to figure out a way to retrieve the dependencies of a module
|
||||
// from the persistent cache. So maybe it should be a separate query after all.
|
||||
for dependency in dependencies {
|
||||
let dependency_name = match dependency {
|
||||
Dependency::Module(name) => Some(name.clone()),
|
||||
Dependency::Relative { .. } => match &module {
|
||||
Some(module) => module.resolve_dependency(self, dependency)?,
|
||||
None => None,
|
||||
},
|
||||
};
|
||||
|
||||
if let Some(dependency_name) = dependency_name {
|
||||
// TODO We may want to have different check functions for non-first-party
|
||||
// files because we only need to index them and not check them.
|
||||
// Supporting non-first-party code also requires supporting typing stubs.
|
||||
if let Some(dependency) = resolve_module(self, dependency_name)? {
|
||||
if dependency.path(self)?.root().kind().is_first_party() {
|
||||
context.schedule_dependency(dependency.path(self)?.file());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut diagnostics = Vec::new();
|
||||
|
||||
if self.workspace().is_file_open(file) {
|
||||
diagnostics.extend_from_slice(&lint_syntax(self, file)?);
|
||||
diagnostics.extend_from_slice(&lint_semantic(self, file)?);
|
||||
}
|
||||
|
||||
Ok(Diagnostics::from(diagnostics))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum ExecutionMode {
|
||||
SingleThreaded,
|
||||
ThreadPool,
|
||||
}
|
||||
|
||||
/// Context that stores state information about the entire check operation.
|
||||
struct CheckContext<'a> {
|
||||
/// IDs of the files that have been queued for checking.
|
||||
///
|
||||
/// Used to avoid queuing the same file twice.
|
||||
scheduled_files: FxHashSet<FileId>,
|
||||
|
||||
/// Reference to the program that is checked.
|
||||
program: &'a Program,
|
||||
|
||||
/// The aggregated diagnostics
|
||||
diagnostics: Vec<String>,
|
||||
}
|
||||
|
||||
impl<'a> CheckContext<'a> {
|
||||
fn new(program: &'a Program) -> Self {
|
||||
Self {
|
||||
scheduled_files: FxHashSet::default(),
|
||||
program,
|
||||
diagnostics: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the tasks to check all open files in the workspace.
|
||||
fn check_open_files(&mut self) -> Vec<CheckOpenFileTask> {
|
||||
self.scheduled_files
|
||||
.extend(self.program.workspace().open_files());
|
||||
|
||||
self.program
|
||||
.workspace()
|
||||
.open_files()
|
||||
.map(|file_id| CheckOpenFileTask { file_id })
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Returns the task to check a dependency.
|
||||
fn check_dependency(&mut self, file_id: FileId) -> Option<CheckDependencyTask> {
|
||||
if self.scheduled_files.insert(file_id) {
|
||||
Some(CheckDependencyTask { file_id })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Pushes the result for a single file check operation
|
||||
fn push_diagnostics(&mut self, diagnostics: &Diagnostics) {
|
||||
self.diagnostics.extend_from_slice(diagnostics);
|
||||
}
|
||||
|
||||
/// Returns a reference to the program that is being checked.
|
||||
fn program(&self) -> &'a Program {
|
||||
self.program
|
||||
}
|
||||
|
||||
/// Creates a task context that is used to check a single file.
|
||||
fn task_context<'b, S>(&self, dependency_scheduler: &'b S) -> CheckTaskContext<'a, 'b, S>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
CheckTaskContext {
|
||||
program: self.program,
|
||||
dependency_scheduler,
|
||||
}
|
||||
}
|
||||
|
||||
fn finish(self) -> Vec<String> {
|
||||
self.diagnostics
|
||||
}
|
||||
}
|
||||
|
||||
/// Trait that abstracts away how a dependency of a file gets scheduled for checking.
|
||||
trait ScheduleDependency {
|
||||
/// Schedules the file with the given ID for checking.
|
||||
fn schedule(&self, file_id: FileId);
|
||||
}
|
||||
|
||||
impl<T> ScheduleDependency for T
|
||||
where
|
||||
T: Fn(FileId),
|
||||
{
|
||||
fn schedule(&self, file_id: FileId) {
|
||||
let f = self;
|
||||
f(file_id);
|
||||
}
|
||||
}
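
// Illustration: because of the blanket impl above, both the no-op closure used by
// `SingleThreadedExecutor` and the channel-sending closure used by `ThreadPoolExecutor` satisfy
// `ScheduleDependency`. Kept generic so no concrete `FileId` has to be constructed here.
#[allow(dead_code)]
fn schedule_all<S>(scheduler: &S, dependencies: impl IntoIterator<Item = FileId>)
where
    S: ScheduleDependency,
{
    for file_id in dependencies {
        scheduler.schedule(file_id);
    }
}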
|
||||
|
||||
/// Context that is used to run a single file check task.
|
||||
///
|
||||
/// The task is generic over `S` because it is passed across thread boundaries and
|
||||
/// we don't want to add the requirement that [`ScheduleDependency`] must be [`Send`].
|
||||
struct CheckTaskContext<'a, 'scheduler, S>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
dependency_scheduler: &'scheduler S,
|
||||
program: &'a Program,
|
||||
}
|
||||
|
||||
impl<'a, 'scheduler, S> CheckTaskContext<'a, 'scheduler, S>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
fn as_file_context(&self) -> CheckFileContext<'scheduler> {
|
||||
CheckFileContext {
|
||||
dependency_scheduler: self.dependency_scheduler,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Context passed when checking a single file.
|
||||
///
|
||||
/// This is a trimmed down version of [`CheckTaskContext`] with the type parameter `S` erased
|
||||
/// to avoid monomorphization of [`Program::check_file`].
|
||||
struct CheckFileContext<'a> {
|
||||
dependency_scheduler: &'a dyn ScheduleDependency,
|
||||
}
|
||||
|
||||
impl<'a> CheckFileContext<'a> {
|
||||
fn schedule_dependency(&self, file_id: FileId) {
|
||||
self.dependency_scheduler.schedule(file_id);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum CheckFileTask {
|
||||
OpenFile(CheckOpenFileTask),
|
||||
Dependency(CheckDependencyTask),
|
||||
}
|
||||
|
||||
impl CheckFileTask {
|
||||
/// Runs the task and returns the results for checking this file.
|
||||
fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
match self {
|
||||
Self::OpenFile(task) => task.run(context),
|
||||
Self::Dependency(task) => task.run(context),
|
||||
}
|
||||
}
|
||||
|
||||
fn file_id(&self) -> FileId {
|
||||
match self {
|
||||
CheckFileTask::OpenFile(task) => task.file_id,
|
||||
CheckFileTask::Dependency(task) => task.file_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Task to check an open file.
|
||||
|
||||
#[derive(Debug)]
|
||||
struct CheckOpenFileTask {
|
||||
file_id: FileId,
|
||||
}
|
||||
|
||||
impl CheckOpenFileTask {
|
||||
fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
context
|
||||
.program
|
||||
.check_file(self.file_id, &context.as_file_context())
|
||||
}
|
||||
}
|
||||
|
||||
/// Task to check a dependency file.
|
||||
#[derive(Debug)]
|
||||
struct CheckDependencyTask {
|
||||
file_id: FileId,
|
||||
}
|
||||
|
||||
impl CheckDependencyTask {
|
||||
fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
context
|
||||
.program
|
||||
.check_file(self.file_id, &context.as_file_context())
|
||||
}
|
||||
}
|
||||
|
||||
/// Executor that schedules the checking of individual program files.
|
||||
trait CheckExecutor {
|
||||
fn run(self, context: &mut CheckContext) -> QueryResult<()>;
|
||||
}
|
||||
|
||||
/// Executor that runs all check operations on the current thread.
|
||||
///
|
||||
/// The executor does not schedule dependencies for checking.
|
||||
/// The main motivation for scheduling dependencies
|
||||
/// in a multithreaded environment is to parse and index the dependencies concurrently.
|
||||
/// However, that doesn't make sense in a single-threaded environment, because checking the dependencies then competes
/// with checking the open files. Checking dependencies in a single-threaded environment is more likely
|
||||
/// to hurt performance because we end up analyzing files in their entirety, even if we only need to type check parts of them.
|
||||
#[derive(Debug, Default)]
|
||||
struct SingleThreadedExecutor;
|
||||
|
||||
impl CheckExecutor for SingleThreadedExecutor {
|
||||
fn run(self, context: &mut CheckContext) -> QueryResult<()> {
|
||||
let mut queue = context.check_open_files();
|
||||
|
||||
let noop_schedule_dependency = |_| {};
|
||||
|
||||
while let Some(file) = queue.pop() {
|
||||
context.program().cancelled()?;
|
||||
|
||||
let task_context = context.task_context(&noop_schedule_dependency);
|
||||
context.push_diagnostics(&file.run(&task_context)?);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Executor that runs the check operations on a thread pool.
|
||||
///
|
||||
/// The executor runs each check operation as its own task using a thread pool.
|
||||
///
|
||||
/// Unlike [`SingleThreadedExecutor`], this executor schedules dependencies for checking. It
|
||||
/// even schedules dependencies for checking when the thread pool size is 1 for a better debugging experience.
|
||||
#[derive(Debug, Default)]
|
||||
struct ThreadPoolExecutor;
|
||||
|
||||
impl CheckExecutor for ThreadPoolExecutor {
|
||||
fn run(self, context: &mut CheckContext) -> QueryResult<()> {
|
||||
let num_threads = current_num_threads();
|
||||
let single_threaded = num_threads == 1;
|
||||
let span = tracing::trace_span!("ThreadPoolExecutor::run", num_threads);
|
||||
let _ = span.enter();
|
||||
|
||||
let mut queue: Vec<_> = context
|
||||
.check_open_files()
|
||||
.into_iter()
|
||||
.map(CheckFileTask::OpenFile)
|
||||
.collect();
|
||||
|
||||
let (sender, receiver) = if single_threaded {
|
||||
// Use an unbounded queue for single threaded execution to prevent deadlocks
|
||||
// when a single file schedules multiple dependencies.
|
||||
crossbeam::channel::unbounded()
|
||||
} else {
|
||||
// Use a bounded queue to apply backpressure when the orchestration thread isn't able to keep
|
||||
// up processing messages from the worker threads.
|
||||
crossbeam::channel::bounded(num_threads)
|
||||
};
|
||||
|
||||
let schedule_sender = sender.clone();
|
||||
let schedule_dependency = move |file_id| {
|
||||
schedule_sender
|
||||
.send(ThreadPoolMessage::ScheduleDependency(file_id))
|
||||
.unwrap();
|
||||
};
|
||||
|
||||
let result = rayon::in_place_scope(|scope| {
|
||||
let mut pending = 0usize;
|
||||
|
||||
loop {
|
||||
context.program().cancelled()?;
|
||||
|
||||
// 1. Try to get a queued message first, so that there is always remaining space in the channel and the worker threads don't block.
|
||||
// 2. Try to process a queued file
|
||||
// 3. If there's no queued file wait for the next incoming message.
|
||||
// 4. Exit if there are no more messages and no senders.
|
||||
let message = if let Ok(message) = receiver.try_recv() {
|
||||
message
|
||||
} else if let Some(task) = queue.pop() {
|
||||
pending += 1;
|
||||
|
||||
let task_context = context.task_context(&schedule_dependency);
|
||||
let sender = sender.clone();
|
||||
let task_span = tracing::trace_span!(
|
||||
parent: &span,
|
||||
"CheckFileTask::run",
|
||||
file_id = task.file_id().as_u32(),
|
||||
);
|
||||
|
||||
scope.spawn(move |_| {
|
||||
task_span.in_scope(|| match task.run(&task_context) {
|
||||
Ok(result) => {
|
||||
sender.send(ThreadPoolMessage::Completed(result)).unwrap();
|
||||
}
|
||||
Err(err) => sender.send(ThreadPoolMessage::Errored(err)).unwrap(),
|
||||
});
|
||||
});
|
||||
|
||||
// If this is a single threaded rayon thread pool, yield the current thread
|
||||
// or we never start processing the work items.
|
||||
if single_threaded {
|
||||
yield_local();
|
||||
}
|
||||
|
||||
continue;
|
||||
} else if let Ok(message) = receiver.recv() {
|
||||
message
|
||||
} else {
|
||||
break;
|
||||
};
|
||||
|
||||
match message {
|
||||
ThreadPoolMessage::ScheduleDependency(dependency) => {
|
||||
if let Some(task) = context.check_dependency(dependency) {
|
||||
queue.push(CheckFileTask::Dependency(task));
|
||||
}
|
||||
}
|
||||
ThreadPoolMessage::Completed(diagnostics) => {
|
||||
context.push_diagnostics(&diagnostics);
|
||||
pending -= 1;
|
||||
|
||||
if pending == 0 && queue.is_empty() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
ThreadPoolMessage::Errored(err) => {
|
||||
return Err(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
}
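
// Illustration: the worker/orchestrator shape above reduced to a self-contained sketch. Workers
// report over a bounded channel while the coordinating thread drains it, so a slow coordinator
// applies backpressure to the pool. Assumes it is called from a thread outside the rayon pool
// (the real executor additionally handles running on a one-thread pool via `yield_local`).
#[allow(dead_code)]
fn thread_pool_sketch(inputs: Vec<u32>) -> Vec<u32> {
    let (sender, receiver) = crossbeam::channel::bounded(current_num_threads());
    let expected = inputs.len();
    let mut results = Vec::with_capacity(expected);

    rayon::in_place_scope(|scope| {
        for input in inputs {
            let sender = sender.clone();
            scope.spawn(move |_| {
                // Stand-in for `CheckFileTask::run`.
                sender.send(input * 2).unwrap();
            });
        }

        // Drain on the coordinating thread until every spawned task has reported back.
        while results.len() < expected {
            results.push(receiver.recv().unwrap());
        }
    });

    results
}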
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ThreadPoolMessage {
|
||||
ScheduleDependency(FileId),
|
||||
Completed(Diagnostics),
|
||||
Errored(QueryError),
|
||||
}
|
||||
crates/red_knot/src/program/mod.rs (new file, 275 lines)
@@ -0,0 +1,275 @@
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::db::{
|
||||
Database, Db, DbRuntime, DbWithJar, HasJar, HasJars, JarsStorage, LintDb, LintJar,
|
||||
ParallelDatabase, QueryResult, SemanticDb, SemanticJar, Snapshot, SourceDb, SourceJar, Upcast,
|
||||
};
|
||||
use crate::files::{FileId, Files};
|
||||
use crate::Workspace;
|
||||
|
||||
pub mod check;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Program {
|
||||
jars: JarsStorage<Program>,
|
||||
files: Files,
|
||||
workspace: Workspace,
|
||||
}
|
||||
|
||||
impl Program {
|
||||
pub fn new(workspace: Workspace) -> Self {
|
||||
Self {
|
||||
jars: JarsStorage::default(),
|
||||
files: Files::default(),
|
||||
workspace,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn apply_changes<I>(&mut self, changes: I)
|
||||
where
|
||||
I: IntoIterator<Item = FileWatcherChange>,
|
||||
{
|
||||
let mut aggregated_changes = AggregatedChanges::default();
|
||||
|
||||
aggregated_changes.extend(changes.into_iter().map(|change| FileChange {
|
||||
id: self.files.intern(&change.path),
|
||||
kind: change.kind,
|
||||
}));
|
||||
|
||||
let (source, semantic, lint) = self.jars_mut();
|
||||
for change in aggregated_changes.iter() {
|
||||
semantic.module_resolver.remove_module(change.id);
|
||||
semantic.symbol_tables.remove(&change.id);
|
||||
source.sources.remove(&change.id);
|
||||
source.parsed.remove(&change.id);
|
||||
// TODO: remove all dependent modules as well
|
||||
semantic.type_store.remove_module(change.id);
|
||||
lint.lint_syntax.remove(&change.id);
|
||||
lint.lint_semantic.remove(&change.id);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn files(&self) -> &Files {
|
||||
&self.files
|
||||
}
|
||||
|
||||
pub fn workspace(&self) -> &Workspace {
|
||||
&self.workspace
|
||||
}
|
||||
|
||||
pub fn workspace_mut(&mut self) -> &mut Workspace {
|
||||
&mut self.workspace
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceDb for Program {
|
||||
fn file_id(&self, path: &Path) -> FileId {
|
||||
self.files.intern(path)
|
||||
}
|
||||
|
||||
fn file_path(&self, file_id: FileId) -> Arc<Path> {
|
||||
self.files.path(file_id)
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithJar<SourceJar> for Program {}
|
||||
|
||||
impl SemanticDb for Program {}
|
||||
|
||||
impl DbWithJar<SemanticJar> for Program {}
|
||||
|
||||
impl LintDb for Program {}
|
||||
|
||||
impl DbWithJar<LintJar> for Program {}
|
||||
|
||||
impl Upcast<dyn SemanticDb> for Program {
|
||||
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn SourceDb> for Program {
|
||||
fn upcast(&self) -> &(dyn SourceDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn LintDb> for Program {
|
||||
fn upcast(&self) -> &(dyn LintDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Db for Program {}
|
||||
|
||||
impl Database for Program {
|
||||
fn runtime(&self) -> &DbRuntime {
|
||||
self.jars.runtime()
|
||||
}
|
||||
|
||||
fn runtime_mut(&mut self) -> &mut DbRuntime {
|
||||
self.jars.runtime_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl ParallelDatabase for Program {
|
||||
fn snapshot(&self) -> Snapshot<Self> {
|
||||
Snapshot::new(Self {
|
||||
jars: self.jars.snapshot(),
|
||||
files: self.files.snapshot(),
|
||||
workspace: self.workspace.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJars for Program {
|
||||
type Jars = (SourceJar, SemanticJar, LintJar);
|
||||
|
||||
fn jars(&self) -> QueryResult<&Self::Jars> {
|
||||
self.jars.jars()
|
||||
}
|
||||
|
||||
fn jars_mut(&mut self) -> &mut Self::Jars {
|
||||
self.jars.jars_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<SourceJar> for Program {
|
||||
fn jar(&self) -> QueryResult<&SourceJar> {
|
||||
Ok(&self.jars()?.0)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut SourceJar {
|
||||
&mut self.jars_mut().0
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<SemanticJar> for Program {
|
||||
fn jar(&self) -> QueryResult<&SemanticJar> {
|
||||
Ok(&self.jars()?.1)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut SemanticJar {
|
||||
&mut self.jars_mut().1
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<LintJar> for Program {
|
||||
fn jar(&self) -> QueryResult<&LintJar> {
|
||||
Ok(&self.jars()?.2)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut LintJar {
|
||||
&mut self.jars_mut().2
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FileWatcherChange {
|
||||
path: PathBuf,
|
||||
kind: FileChangeKind,
|
||||
}
|
||||
|
||||
impl FileWatcherChange {
|
||||
pub fn new(path: PathBuf, kind: FileChangeKind) -> Self {
|
||||
Self { path, kind }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
struct FileChange {
|
||||
id: FileId,
|
||||
kind: FileChangeKind,
|
||||
}
|
||||
|
||||
impl FileChange {
|
||||
fn file_id(self) -> FileId {
|
||||
self.id
|
||||
}
|
||||
|
||||
fn kind(self) -> FileChangeKind {
|
||||
self.kind
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
||||
pub enum FileChangeKind {
|
||||
Created,
|
||||
Modified,
|
||||
Deleted,
|
||||
}
|
||||
|
||||
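/// Collects file watcher changes and deduplicates them by file, merging successive change kinds
/// for the same file (for example, a creation followed by a deletion cancels out entirely).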
#[derive(Default, Debug)]
|
||||
struct AggregatedChanges {
|
||||
changes: FxHashMap<FileId, FileChangeKind>,
|
||||
}
|
||||
|
||||
impl AggregatedChanges {
|
||||
fn add(&mut self, change: FileChange) {
|
||||
match self.changes.entry(change.file_id()) {
|
||||
Entry::Occupied(mut entry) => {
|
||||
let merged = entry.get_mut();
|
||||
|
||||
match (merged, change.kind()) {
|
||||
(FileChangeKind::Created, FileChangeKind::Deleted) => {
|
||||
// Deletion after creation means that ruff never saw the file.
|
||||
entry.remove();
|
||||
}
|
||||
(FileChangeKind::Created, FileChangeKind::Modified) => {
|
||||
// No-op: for ruff, modifying a file that it doesn't yet know exists is still considered a creation.
|
||||
}
|
||||
|
||||
(FileChangeKind::Modified, FileChangeKind::Created) => {
|
||||
// That shouldn't normally happen; continue considering it a modification.
|
||||
}
|
||||
|
||||
(FileChangeKind::Modified, FileChangeKind::Deleted) => {
|
||||
*entry.get_mut() = FileChangeKind::Deleted;
|
||||
}
|
||||
|
||||
(FileChangeKind::Deleted, FileChangeKind::Created) => {
|
||||
*entry.get_mut() = FileChangeKind::Modified;
|
||||
}
|
||||
|
||||
(FileChangeKind::Deleted, FileChangeKind::Modified) => {
|
||||
// That's weird, but let's consider it a modification.
|
||||
*entry.get_mut() = FileChangeKind::Modified;
|
||||
}
|
||||
|
||||
(FileChangeKind::Created, FileChangeKind::Created)
|
||||
| (FileChangeKind::Modified, FileChangeKind::Modified)
|
||||
| (FileChangeKind::Deleted, FileChangeKind::Deleted) => {
|
||||
// No-op transitions. Some of them should be impossible but we handle them anyway.
|
||||
}
|
||||
}
|
||||
}
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(change.kind());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn extend<I>(&mut self, changes: I)
|
||||
where
|
||||
I: IntoIterator<Item = FileChange>,
|
||||
{
|
||||
let iter = changes.into_iter();
|
||||
let (lower, _) = iter.size_hint();
|
||||
self.changes.reserve(lower);
|
||||
|
||||
for change in iter {
|
||||
self.add(change);
|
||||
}
|
||||
}
|
||||
|
||||
fn iter(&self) -> impl Iterator<Item = FileChange> + '_ {
|
||||
self.changes.iter().map(|(id, kind)| FileChange {
|
||||
id: *id,
|
||||
kind: *kind,
|
||||
})
|
||||
}
|
||||
}
|
||||
95 crates/red_knot/src/source.rs Normal file
@@ -0,0 +1,95 @@
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::Arc;
|
||||
|
||||
use ruff_notebook::Notebook;
|
||||
use ruff_python_ast::PySourceType;
|
||||
|
||||
use crate::cache::KeyValueCache;
|
||||
use crate::db::{QueryResult, SourceDb};
|
||||
use crate::files::FileId;
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn source_text(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Source> {
|
||||
let jar = db.jar()?;
|
||||
let sources = &jar.sources;
|
||||
|
||||
sources.get(&file_id, |file_id| {
|
||||
let path = db.file_path(*file_id);
|
||||
|
||||
let source_text = std::fs::read_to_string(&path).unwrap_or_else(|err| {
|
||||
tracing::error!("Failed to read file '{path:?}: {err}'. Falling back to empty text");
|
||||
String::new()
|
||||
});
|
||||
|
||||
let python_ty = PySourceType::from(&path);
|
||||
|
||||
let kind = match python_ty {
|
||||
PySourceType::Python => {
|
||||
SourceKind::Python(Arc::from(source_text))
|
||||
}
|
||||
PySourceType::Stub => SourceKind::Stub(Arc::from(source_text)),
|
||||
PySourceType::Ipynb => {
|
||||
let notebook = Notebook::from_source_code(&source_text).unwrap_or_else(|err| {
|
||||
// TODO should this be changed to never fail?
|
||||
// or should we instead add a diagnostic somewhere? But what would we return in this case?
|
||||
tracing::error!(
|
||||
"Failed to parse notebook '{path:?}: {err}'. Falling back to an empty notebook"
|
||||
);
|
||||
Notebook::from_source_code("").unwrap()
|
||||
});
|
||||
|
||||
SourceKind::IpyNotebook(Arc::new(notebook))
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Source { kind })
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum SourceKind {
|
||||
Python(Arc<str>),
|
||||
Stub(Arc<str>),
|
||||
IpyNotebook(Arc<Notebook>),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Source {
|
||||
kind: SourceKind,
|
||||
}
|
||||
|
||||
impl Source {
|
||||
pub fn python<T: Into<Arc<str>>>(source: T) -> Self {
|
||||
Self {
|
||||
kind: SourceKind::Python(source.into()),
|
||||
}
|
||||
}
|
||||
pub fn kind(&self) -> &SourceKind {
|
||||
&self.kind
|
||||
}
|
||||
|
||||
pub fn text(&self) -> &str {
|
||||
match &self.kind {
|
||||
SourceKind::Python(text) => text,
|
||||
SourceKind::Stub(text) => text,
|
||||
SourceKind::IpyNotebook(notebook) => notebook.source_code(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SourceStorage(pub(crate) KeyValueCache<FileId, Source>);
|
||||
|
||||
impl Deref for SourceStorage {
|
||||
type Target = KeyValueCache<FileId, Source>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for SourceStorage {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
1089 crates/red_knot/src/symbols.rs Normal file
File diff suppressed because it is too large
745 crates/red_knot/src/types.rs Normal file
@@ -0,0 +1,745 @@
|
||||
#![allow(dead_code)]
|
||||
use crate::ast_ids::NodeKey;
|
||||
use crate::db::{QueryResult, SemanticDb, SemanticJar};
|
||||
use crate::files::FileId;
|
||||
use crate::symbols::{symbol_table, GlobalSymbolId, ScopeId, ScopeKind, SymbolId};
|
||||
use crate::{FxDashMap, FxIndexSet, Name};
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
pub(crate) mod infer;
|
||||
|
||||
pub(crate) use infer::{infer_definition_type, infer_symbol_type};
|
||||
|
||||
/// unique ID for a type
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub enum Type {
|
||||
/// the dynamic or gradual type: a statically-unknown set of values
|
||||
Any,
|
||||
/// the empty set of values
|
||||
Never,
|
||||
/// unknown type (no annotation)
|
||||
/// equivalent to Any, or to object in strict mode
|
||||
Unknown,
|
||||
/// name is not bound to any value
|
||||
Unbound,
|
||||
/// a specific function object
|
||||
Function(FunctionTypeId),
|
||||
/// a specific class object
|
||||
Class(ClassTypeId),
|
||||
/// the set of Python objects with the given class in their __class__'s method resolution order
|
||||
Instance(ClassTypeId),
|
||||
Union(UnionTypeId),
|
||||
Intersection(IntersectionTypeId),
|
||||
// TODO protocols, callable types, overloads, generics, type vars
|
||||
}
|
||||
|
||||
impl Type {
|
||||
fn display<'a>(&'a self, store: &'a TypeStore) -> DisplayType<'a> {
|
||||
DisplayType { ty: self, store }
|
||||
}
|
||||
|
||||
pub const fn is_unbound(&self) -> bool {
|
||||
matches!(self, Type::Unbound)
|
||||
}
|
||||
|
||||
pub const fn is_unknown(&self) -> bool {
|
||||
matches!(self, Type::Unknown)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<FunctionTypeId> for Type {
|
||||
fn from(id: FunctionTypeId) -> Self {
|
||||
Type::Function(id)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<UnionTypeId> for Type {
|
||||
fn from(id: UnionTypeId) -> Self {
|
||||
Type::Union(id)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<IntersectionTypeId> for Type {
|
||||
fn from(id: IntersectionTypeId) -> Self {
|
||||
Type::Intersection(id)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: currently calling `get_function` et al and holding on to the `FunctionTypeRef` will lock a
|
||||
// shard of this dashmap, for as long as you hold the reference. This may be a problem. We could
|
||||
// switch to having all the arenas hold Arc, or we could see if we can split up ModuleTypeStore,
|
||||
// and/or give it inner mutability and finer-grained internal locking.
|
||||
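/// Per-module arenas of all known function, class, union, and intersection types, keyed by `FileId`.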
#[derive(Debug, Default)]
|
||||
pub struct TypeStore {
|
||||
modules: FxDashMap<FileId, ModuleTypeStore>,
|
||||
}
|
||||
|
||||
impl TypeStore {
|
||||
pub fn remove_module(&mut self, file_id: FileId) {
|
||||
self.modules.remove(&file_id);
|
||||
}
|
||||
|
||||
pub fn cache_symbol_type(&self, symbol: GlobalSymbolId, ty: Type) {
|
||||
self.add_or_get_module(symbol.file_id)
|
||||
.symbol_types
|
||||
.insert(symbol.symbol_id, ty);
|
||||
}
|
||||
|
||||
pub fn cache_node_type(&self, file_id: FileId, node_key: NodeKey, ty: Type) {
|
||||
self.add_or_get_module(file_id)
|
||||
.node_types
|
||||
.insert(node_key, ty);
|
||||
}
|
||||
|
||||
pub fn get_cached_symbol_type(&self, symbol: GlobalSymbolId) -> Option<Type> {
|
||||
self.try_get_module(symbol.file_id)?
|
||||
.symbol_types
|
||||
.get(&symbol.symbol_id)
|
||||
.copied()
|
||||
}
|
||||
|
||||
pub fn get_cached_node_type(&self, file_id: FileId, node_key: &NodeKey) -> Option<Type> {
|
||||
self.try_get_module(file_id)?
|
||||
.node_types
|
||||
.get(node_key)
|
||||
.copied()
|
||||
}
|
||||
|
||||
fn add_or_get_module(&self, file_id: FileId) -> ModuleStoreRefMut {
|
||||
self.modules
|
||||
.entry(file_id)
|
||||
.or_insert_with(|| ModuleTypeStore::new(file_id))
|
||||
}
|
||||
|
||||
fn get_module(&self, file_id: FileId) -> ModuleStoreRef {
|
||||
self.try_get_module(file_id).expect("module should exist")
|
||||
}
|
||||
|
||||
fn try_get_module(&self, file_id: FileId) -> Option<ModuleStoreRef> {
|
||||
self.modules.get(&file_id)
|
||||
}
|
||||
|
||||
fn add_function(
|
||||
&self,
|
||||
file_id: FileId,
|
||||
name: &str,
|
||||
symbol_id: SymbolId,
|
||||
scope_id: ScopeId,
|
||||
decorators: Vec<Type>,
|
||||
) -> FunctionTypeId {
|
||||
self.add_or_get_module(file_id)
|
||||
.add_function(name, symbol_id, scope_id, decorators)
|
||||
}
|
||||
|
||||
fn add_class(
|
||||
&self,
|
||||
file_id: FileId,
|
||||
name: &str,
|
||||
scope_id: ScopeId,
|
||||
bases: Vec<Type>,
|
||||
) -> ClassTypeId {
|
||||
self.add_or_get_module(file_id)
|
||||
.add_class(name, scope_id, bases)
|
||||
}
|
||||
|
||||
fn add_union(&mut self, file_id: FileId, elems: &[Type]) -> UnionTypeId {
|
||||
self.add_or_get_module(file_id).add_union(elems)
|
||||
}
|
||||
|
||||
fn add_intersection(
|
||||
&mut self,
|
||||
file_id: FileId,
|
||||
positive: &[Type],
|
||||
negative: &[Type],
|
||||
) -> IntersectionTypeId {
|
||||
self.add_or_get_module(file_id)
|
||||
.add_intersection(positive, negative)
|
||||
}
|
||||
|
||||
fn get_function(&self, id: FunctionTypeId) -> FunctionTypeRef {
|
||||
FunctionTypeRef {
|
||||
module_store: self.get_module(id.file_id),
|
||||
function_id: id.func_id,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_class(&self, id: ClassTypeId) -> ClassTypeRef {
|
||||
ClassTypeRef {
|
||||
module_store: self.get_module(id.file_id),
|
||||
class_id: id.class_id,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_union(&self, id: UnionTypeId) -> UnionTypeRef {
|
||||
UnionTypeRef {
|
||||
module_store: self.get_module(id.file_id),
|
||||
union_id: id.union_id,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_intersection(&self, id: IntersectionTypeId) -> IntersectionTypeRef {
|
||||
IntersectionTypeRef {
|
||||
module_store: self.get_module(id.file_id),
|
||||
intersection_id: id.intersection_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type ModuleStoreRef<'a> = dashmap::mapref::one::Ref<
|
||||
'a,
|
||||
FileId,
|
||||
ModuleTypeStore,
|
||||
std::hash::BuildHasherDefault<rustc_hash::FxHasher>,
|
||||
>;
|
||||
|
||||
type ModuleStoreRefMut<'a> = dashmap::mapref::one::RefMut<
|
||||
'a,
|
||||
FileId,
|
||||
ModuleTypeStore,
|
||||
std::hash::BuildHasherDefault<rustc_hash::FxHasher>,
|
||||
>;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct FunctionTypeRef<'a> {
|
||||
module_store: ModuleStoreRef<'a>,
|
||||
function_id: ModuleFunctionTypeId,
|
||||
}
|
||||
|
||||
impl<'a> std::ops::Deref for FunctionTypeRef<'a> {
|
||||
type Target = FunctionType;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.module_store.get_function(self.function_id)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct ClassTypeRef<'a> {
|
||||
module_store: ModuleStoreRef<'a>,
|
||||
class_id: ModuleClassTypeId,
|
||||
}
|
||||
|
||||
impl<'a> std::ops::Deref for ClassTypeRef<'a> {
|
||||
type Target = ClassType;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.module_store.get_class(self.class_id)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct UnionTypeRef<'a> {
|
||||
module_store: ModuleStoreRef<'a>,
|
||||
union_id: ModuleUnionTypeId,
|
||||
}
|
||||
|
||||
impl<'a> std::ops::Deref for UnionTypeRef<'a> {
|
||||
type Target = UnionType;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.module_store.get_union(self.union_id)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct IntersectionTypeRef<'a> {
|
||||
module_store: ModuleStoreRef<'a>,
|
||||
intersection_id: ModuleIntersectionTypeId,
|
||||
}
|
||||
|
||||
impl<'a> std::ops::Deref for IntersectionTypeRef<'a> {
|
||||
type Target = IntersectionType;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.module_store.get_intersection(self.intersection_id)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
|
||||
pub struct FunctionTypeId {
|
||||
file_id: FileId,
|
||||
func_id: ModuleFunctionTypeId,
|
||||
}
|
||||
|
||||
impl FunctionTypeId {
|
||||
fn function(self, db: &dyn SemanticDb) -> QueryResult<FunctionTypeRef> {
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
Ok(jar.type_store.get_function(self))
|
||||
}
|
||||
|
||||
pub(crate) fn name(self, db: &dyn SemanticDb) -> QueryResult<Name> {
|
||||
Ok(self.function(db)?.name().into())
|
||||
}
|
||||
|
||||
pub(crate) fn global_symbol(self, db: &dyn SemanticDb) -> QueryResult<GlobalSymbolId> {
|
||||
Ok(GlobalSymbolId {
|
||||
file_id: self.file(),
|
||||
symbol_id: self.symbol(db)?,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn file(self) -> FileId {
|
||||
self.file_id
|
||||
}
|
||||
|
||||
pub(crate) fn symbol(self, db: &dyn SemanticDb) -> QueryResult<SymbolId> {
|
||||
let FunctionType { symbol_id, .. } = *self.function(db)?;
|
||||
Ok(symbol_id)
|
||||
}
|
||||
|
||||
pub(crate) fn get_containing_class(
|
||||
self,
|
||||
db: &dyn SemanticDb,
|
||||
) -> QueryResult<Option<ClassTypeId>> {
|
||||
let table = symbol_table(db, self.file_id)?;
|
||||
let FunctionType { symbol_id, .. } = *self.function(db)?;
|
||||
let scope_id = symbol_id.symbol(&table).scope_id();
|
||||
let scope = scope_id.scope(&table);
|
||||
if !matches!(scope.kind(), ScopeKind::Class) {
|
||||
return Ok(None);
|
||||
};
|
||||
let Some(def) = scope.definition() else {
|
||||
return Ok(None);
|
||||
};
|
||||
let Some(symbol_id) = scope.defining_symbol() else {
|
||||
return Ok(None);
|
||||
};
|
||||
let Type::Class(class) = infer_definition_type(
|
||||
db,
|
||||
GlobalSymbolId {
|
||||
file_id: self.file_id,
|
||||
symbol_id,
|
||||
},
|
||||
def,
|
||||
)?
|
||||
else {
|
||||
return Ok(None);
|
||||
};
|
||||
Ok(Some(class))
|
||||
}
|
||||
|
||||
pub(crate) fn has_decorator(
|
||||
self,
|
||||
db: &dyn SemanticDb,
|
||||
decorator_symbol: GlobalSymbolId,
|
||||
) -> QueryResult<bool> {
|
||||
for deco_ty in self.function(db)?.decorators() {
|
||||
let Type::Function(deco_func) = deco_ty else {
|
||||
continue;
|
||||
};
|
||||
if deco_func.global_symbol(db)? == decorator_symbol {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
Ok(false)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
|
||||
pub struct ClassTypeId {
|
||||
file_id: FileId,
|
||||
class_id: ModuleClassTypeId,
|
||||
}
|
||||
|
||||
impl ClassTypeId {
|
||||
fn class(self, db: &dyn SemanticDb) -> QueryResult<ClassTypeRef> {
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
Ok(jar.type_store.get_class(self))
|
||||
}
|
||||
|
||||
pub(crate) fn name(self, db: &dyn SemanticDb) -> QueryResult<Name> {
|
||||
Ok(self.class(db)?.name().into())
|
||||
}
|
||||
|
||||
pub(crate) fn get_super_class_member(
|
||||
self,
|
||||
db: &dyn SemanticDb,
|
||||
name: &Name,
|
||||
) -> QueryResult<Option<Type>> {
|
||||
// TODO we should linearize the MRO instead of doing this recursively
|
||||
let class = self.class(db)?;
|
||||
for base in class.bases() {
|
||||
if let Type::Class(base) = base {
|
||||
if let Some(own_member) = base.get_own_class_member(db, name)? {
|
||||
return Ok(Some(own_member));
|
||||
}
|
||||
if let Some(base_member) = base.get_super_class_member(db, name)? {
|
||||
return Ok(Some(base_member));
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
fn get_own_class_member(self, db: &dyn SemanticDb, name: &Name) -> QueryResult<Option<Type>> {
|
||||
// TODO: this should distinguish instance-only members (e.g. `x: int`) and not return them
|
||||
let ClassType { scope_id, .. } = *self.class(db)?;
|
||||
let table = symbol_table(db, self.file_id)?;
|
||||
if let Some(symbol_id) = table.symbol_id_by_name(scope_id, name) {
|
||||
Ok(Some(infer_symbol_type(
|
||||
db,
|
||||
GlobalSymbolId {
|
||||
file_id: self.file_id,
|
||||
symbol_id,
|
||||
},
|
||||
)?))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: get_own_instance_member, get_class_member, get_instance_member
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
|
||||
pub struct UnionTypeId {
|
||||
file_id: FileId,
|
||||
union_id: ModuleUnionTypeId,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
|
||||
pub struct IntersectionTypeId {
|
||||
file_id: FileId,
|
||||
intersection_id: ModuleIntersectionTypeId,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
struct ModuleFunctionTypeId;
|
||||
|
||||
#[newtype_index]
|
||||
struct ModuleClassTypeId;
|
||||
|
||||
#[newtype_index]
|
||||
struct ModuleUnionTypeId;
|
||||
|
||||
#[newtype_index]
|
||||
struct ModuleIntersectionTypeId;
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ModuleTypeStore {
|
||||
file_id: FileId,
|
||||
/// arena of all function types defined in this module
|
||||
functions: IndexVec<ModuleFunctionTypeId, FunctionType>,
|
||||
/// arena of all class types defined in this module
|
||||
classes: IndexVec<ModuleClassTypeId, ClassType>,
|
||||
/// arena of all union types created in this module
|
||||
unions: IndexVec<ModuleUnionTypeId, UnionType>,
|
||||
/// arena of all intersection types created in this module
|
||||
intersections: IndexVec<ModuleIntersectionTypeId, IntersectionType>,
|
||||
/// cached types of symbols in this module
|
||||
symbol_types: FxHashMap<SymbolId, Type>,
|
||||
/// cached types of AST nodes in this module
|
||||
node_types: FxHashMap<NodeKey, Type>,
|
||||
}
|
||||
|
||||
impl ModuleTypeStore {
|
||||
fn new(file_id: FileId) -> Self {
|
||||
Self {
|
||||
file_id,
|
||||
functions: IndexVec::default(),
|
||||
classes: IndexVec::default(),
|
||||
unions: IndexVec::default(),
|
||||
intersections: IndexVec::default(),
|
||||
symbol_types: FxHashMap::default(),
|
||||
node_types: FxHashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn add_function(
|
||||
&mut self,
|
||||
name: &str,
|
||||
symbol_id: SymbolId,
|
||||
scope_id: ScopeId,
|
||||
decorators: Vec<Type>,
|
||||
) -> FunctionTypeId {
|
||||
let func_id = self.functions.push(FunctionType {
|
||||
name: Name::new(name),
|
||||
symbol_id,
|
||||
scope_id,
|
||||
decorators,
|
||||
});
|
||||
FunctionTypeId {
|
||||
file_id: self.file_id,
|
||||
func_id,
|
||||
}
|
||||
}
|
||||
|
||||
fn add_class(&mut self, name: &str, scope_id: ScopeId, bases: Vec<Type>) -> ClassTypeId {
|
||||
let class_id = self.classes.push(ClassType {
|
||||
name: Name::new(name),
|
||||
scope_id,
|
||||
// TODO: if no bases are given, that should imply [object]
|
||||
bases,
|
||||
});
|
||||
ClassTypeId {
|
||||
file_id: self.file_id,
|
||||
class_id,
|
||||
}
|
||||
}
|
||||
|
||||
fn add_union(&mut self, elems: &[Type]) -> UnionTypeId {
|
||||
let union_id = self.unions.push(UnionType {
|
||||
elements: elems.iter().copied().collect(),
|
||||
});
|
||||
UnionTypeId {
|
||||
file_id: self.file_id,
|
||||
union_id,
|
||||
}
|
||||
}
|
||||
|
||||
fn add_intersection(&mut self, positive: &[Type], negative: &[Type]) -> IntersectionTypeId {
|
||||
let intersection_id = self.intersections.push(IntersectionType {
|
||||
positive: positive.iter().copied().collect(),
|
||||
negative: negative.iter().copied().collect(),
|
||||
});
|
||||
IntersectionTypeId {
|
||||
file_id: self.file_id,
|
||||
intersection_id,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_function(&self, func_id: ModuleFunctionTypeId) -> &FunctionType {
|
||||
&self.functions[func_id]
|
||||
}
|
||||
|
||||
fn get_class(&self, class_id: ModuleClassTypeId) -> &ClassType {
|
||||
&self.classes[class_id]
|
||||
}
|
||||
|
||||
fn get_union(&self, union_id: ModuleUnionTypeId) -> &UnionType {
|
||||
&self.unions[union_id]
|
||||
}
|
||||
|
||||
fn get_intersection(&self, intersection_id: ModuleIntersectionTypeId) -> &IntersectionType {
|
||||
&self.intersections[intersection_id]
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
struct DisplayType<'a> {
|
||||
ty: &'a Type,
|
||||
store: &'a TypeStore,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for DisplayType<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self.ty {
|
||||
Type::Any => f.write_str("Any"),
|
||||
Type::Never => f.write_str("Never"),
|
||||
Type::Unknown => f.write_str("Unknown"),
|
||||
Type::Unbound => f.write_str("Unbound"),
|
||||
// TODO functions and classes should display using a fully qualified name
|
||||
Type::Class(class_id) => {
|
||||
f.write_str("Literal[")?;
|
||||
f.write_str(self.store.get_class(*class_id).name())?;
|
||||
f.write_str("]")
|
||||
}
|
||||
Type::Instance(class_id) => f.write_str(self.store.get_class(*class_id).name()),
|
||||
Type::Function(func_id) => f.write_str(self.store.get_function(*func_id).name()),
|
||||
Type::Union(union_id) => self
|
||||
.store
|
||||
.get_module(union_id.file_id)
|
||||
.get_union(union_id.union_id)
|
||||
.display(f, self.store),
|
||||
Type::Intersection(int_id) => self
|
||||
.store
|
||||
.get_module(int_id.file_id)
|
||||
.get_intersection(int_id.intersection_id)
|
||||
.display(f, self.store),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct ClassType {
|
||||
/// Name of the class at definition
|
||||
name: Name,
|
||||
/// `ScopeId` of the class body
|
||||
scope_id: ScopeId,
|
||||
/// Types of all class bases
|
||||
bases: Vec<Type>,
|
||||
}
|
||||
|
||||
impl ClassType {
|
||||
fn name(&self) -> &str {
|
||||
self.name.as_str()
|
||||
}
|
||||
|
||||
fn bases(&self) -> &[Type] {
|
||||
self.bases.as_slice()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct FunctionType {
|
||||
/// name of the function at definition
|
||||
name: Name,
|
||||
/// symbol which this function is a definition of
|
||||
symbol_id: SymbolId,
|
||||
/// scope of this function's body
|
||||
scope_id: ScopeId,
|
||||
/// types of all decorators on this function
|
||||
decorators: Vec<Type>,
|
||||
}
|
||||
|
||||
impl FunctionType {
|
||||
fn name(&self) -> &str {
|
||||
self.name.as_str()
|
||||
}
|
||||
|
||||
fn scope_id(&self) -> ScopeId {
|
||||
self.scope_id
|
||||
}
|
||||
|
||||
pub(crate) fn decorators(&self) -> &[Type] {
|
||||
self.decorators.as_slice()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct UnionType {
|
||||
// the union type includes values in any of these types
|
||||
elements: FxIndexSet<Type>,
|
||||
}
|
||||
|
||||
impl UnionType {
|
||||
fn display(&self, f: &mut std::fmt::Formatter<'_>, store: &TypeStore) -> std::fmt::Result {
|
||||
f.write_str("(")?;
|
||||
let mut first = true;
|
||||
for ty in &self.elements {
|
||||
if !first {
|
||||
f.write_str(" | ")?;
|
||||
};
|
||||
first = false;
|
||||
write!(f, "{}", ty.display(store))?;
|
||||
}
|
||||
f.write_str(")")
|
||||
}
|
||||
}
|
||||
|
||||
// Negation types aren't expressible in annotations, and are most likely to arise from type
|
||||
// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
|
||||
// directly in intersections rather than as a separate type. This sacrifices some efficiency in the
|
||||
// case where a Not appears outside an intersection (unclear when that could even happen, but we'd
|
||||
// have to represent it as a single-element intersection if it did) in exchange for better
|
||||
// efficiency in the within-intersection case.
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct IntersectionType {
|
||||
// the intersection type includes only values in all of these types
|
||||
positive: FxIndexSet<Type>,
|
||||
// the intersection type does not include any value in any of these types
|
||||
negative: FxIndexSet<Type>,
|
||||
}
|
||||
|
||||
impl IntersectionType {
|
||||
fn display(&self, f: &mut std::fmt::Formatter<'_>, store: &TypeStore) -> std::fmt::Result {
|
||||
f.write_str("(")?;
|
||||
let mut first = true;
|
||||
for (neg, ty) in self
|
||||
.positive
|
||||
.iter()
|
||||
.map(|ty| (false, ty))
|
||||
.chain(self.negative.iter().map(|ty| (true, ty)))
|
||||
{
|
||||
if !first {
|
||||
f.write_str(" & ")?;
|
||||
};
|
||||
first = false;
|
||||
if neg {
|
||||
f.write_str("~")?;
|
||||
};
|
||||
write!(f, "{}", ty.display(store))?;
|
||||
}
|
||||
f.write_str(")")
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use crate::files::Files;
|
||||
use crate::symbols::{SymbolFlags, SymbolTable};
|
||||
use crate::types::{Type, TypeStore};
|
||||
use crate::FxIndexSet;
|
||||
|
||||
#[test]
|
||||
fn add_class() {
|
||||
let store = TypeStore::default();
|
||||
let files = Files::default();
|
||||
let file_id = files.intern(Path::new("/foo"));
|
||||
let id = store.add_class(file_id, "C", SymbolTable::root_scope_id(), Vec::new());
|
||||
assert_eq!(store.get_class(id).name(), "C");
|
||||
let inst = Type::Instance(id);
|
||||
assert_eq!(format!("{}", inst.display(&store)), "C");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_function() {
|
||||
let store = TypeStore::default();
|
||||
let files = Files::default();
|
||||
let file_id = files.intern(Path::new("/foo"));
|
||||
let mut table = SymbolTable::new();
|
||||
let func_symbol = table.add_or_update_symbol(
|
||||
SymbolTable::root_scope_id(),
|
||||
"func",
|
||||
SymbolFlags::IS_DEFINED,
|
||||
);
|
||||
|
||||
let id = store.add_function(
|
||||
file_id,
|
||||
"func",
|
||||
func_symbol,
|
||||
SymbolTable::root_scope_id(),
|
||||
vec![Type::Unknown],
|
||||
);
|
||||
assert_eq!(store.get_function(id).name(), "func");
|
||||
assert_eq!(store.get_function(id).decorators(), vec![Type::Unknown]);
|
||||
let func = Type::Function(id);
|
||||
assert_eq!(format!("{}", func.display(&store)), "func");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_union() {
|
||||
let mut store = TypeStore::default();
|
||||
let files = Files::default();
|
||||
let file_id = files.intern(Path::new("/foo"));
|
||||
let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
|
||||
let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
|
||||
let elems = vec![Type::Instance(c1), Type::Instance(c2)];
|
||||
let id = store.add_union(file_id, &elems);
|
||||
assert_eq!(
|
||||
store.get_union(id).elements,
|
||||
elems.into_iter().collect::<FxIndexSet<_>>()
|
||||
);
|
||||
let union = Type::Union(id);
|
||||
assert_eq!(format!("{}", union.display(&store)), "(C1 | C2)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_intersection() {
|
||||
let mut store = TypeStore::default();
|
||||
let files = Files::default();
|
||||
let file_id = files.intern(Path::new("/foo"));
|
||||
let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
|
||||
let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
|
||||
let c3 = store.add_class(file_id, "C3", SymbolTable::root_scope_id(), Vec::new());
|
||||
let pos = vec![Type::Instance(c1), Type::Instance(c2)];
|
||||
let neg = vec![Type::Instance(c3)];
|
||||
let id = store.add_intersection(file_id, &pos, &neg);
|
||||
assert_eq!(
|
||||
store.get_intersection(id).positive,
|
||||
pos.into_iter().collect::<FxIndexSet<_>>()
|
||||
);
|
||||
assert_eq!(
|
||||
store.get_intersection(id).negative,
|
||||
neg.into_iter().collect::<FxIndexSet<_>>()
|
||||
);
|
||||
let intersection = Type::Intersection(id);
|
||||
assert_eq!(
|
||||
format!("{}", intersection.display(&store)),
|
||||
"(C1 & C2 & ~C3)"
|
||||
);
|
||||
}
|
||||
}
|
||||
292 crates/red_knot/src/types/infer.rs Normal file
@@ -0,0 +1,292 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::AstNode;
|
||||
|
||||
use crate::db::{QueryResult, SemanticDb, SemanticJar};
|
||||
|
||||
use crate::module::ModuleName;
|
||||
use crate::parse::parse;
|
||||
use crate::symbols::{
|
||||
resolve_global_symbol, symbol_table, Definition, GlobalSymbolId, ImportFromDefinition,
|
||||
};
|
||||
use crate::types::Type;
|
||||
use crate::FileId;
|
||||
|
||||
// FIXME: Figure out proper deadlock-free synchronisation now that this takes `&db` instead of `&mut db`.
|
||||
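/// Infers the type of the given global symbol, caching the result in the semantic jar's type store.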
#[tracing::instrument(level = "trace", skip(db))]
|
||||
pub fn infer_symbol_type(db: &dyn SemanticDb, symbol: GlobalSymbolId) -> QueryResult<Type> {
|
||||
let symbols = symbol_table(db, symbol.file_id)?;
|
||||
let defs = symbols.definitions(symbol.symbol_id);
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
|
||||
if let Some(ty) = jar.type_store.get_cached_symbol_type(symbol) {
|
||||
return Ok(ty);
|
||||
}
|
||||
|
||||
// TODO handle multiple defs, conditional defs...
|
||||
assert_eq!(defs.len(), 1);
|
||||
|
||||
let ty = infer_definition_type(db, symbol, defs[0].clone())?;
|
||||
|
||||
jar.type_store.cache_symbol_type(symbol, ty);
|
||||
|
||||
// TODO record dependencies
|
||||
Ok(ty)
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "trace", skip(db))]
|
||||
pub fn infer_definition_type(
|
||||
db: &dyn SemanticDb,
|
||||
symbol: GlobalSymbolId,
|
||||
definition: Definition,
|
||||
) -> QueryResult<Type> {
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
let type_store = &jar.type_store;
|
||||
let file_id = symbol.file_id;
|
||||
|
||||
match definition {
|
||||
Definition::ImportFrom(ImportFromDefinition {
|
||||
module,
|
||||
name,
|
||||
level,
|
||||
}) => {
|
||||
// TODO relative imports
|
||||
assert!(matches!(level, 0));
|
||||
let module_name = ModuleName::new(module.as_ref().expect("TODO relative imports"));
|
||||
if let Some(remote_symbol) = resolve_global_symbol(db, module_name, &name)? {
|
||||
infer_symbol_type(db, remote_symbol)
|
||||
} else {
|
||||
Ok(Type::Unknown)
|
||||
}
|
||||
}
|
||||
Definition::ClassDef(node_key) => {
|
||||
if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
|
||||
Ok(ty)
|
||||
} else {
|
||||
let parsed = parse(db.upcast(), file_id)?;
|
||||
let ast = parsed.ast();
|
||||
let table = symbol_table(db, file_id)?;
|
||||
let node = node_key.resolve_unwrap(ast.as_any_node_ref());
|
||||
|
||||
let mut bases = Vec::with_capacity(node.bases().len());
|
||||
|
||||
for base in node.bases() {
|
||||
bases.push(infer_expr_type(db, file_id, base)?);
|
||||
}
|
||||
let scope_id = table.scope_id_for_node(node_key.erased());
|
||||
let ty = Type::Class(type_store.add_class(file_id, &node.name.id, scope_id, bases));
|
||||
type_store.cache_node_type(file_id, *node_key.erased(), ty);
|
||||
Ok(ty)
|
||||
}
|
||||
}
|
||||
Definition::FunctionDef(node_key) => {
|
||||
if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
|
||||
Ok(ty)
|
||||
} else {
|
||||
let parsed = parse(db.upcast(), file_id)?;
|
||||
let ast = parsed.ast();
|
||||
let table = symbol_table(db, file_id)?;
|
||||
let node = node_key
|
||||
.resolve(ast.as_any_node_ref())
|
||||
.expect("node key should resolve");
|
||||
|
||||
let decorator_tys = node
|
||||
.decorator_list
|
||||
.iter()
|
||||
.map(|decorator| infer_expr_type(db, file_id, &decorator.expression))
|
||||
.collect::<QueryResult<_>>()?;
|
||||
let scope_id = table.scope_id_for_node(node_key.erased());
|
||||
let ty = type_store
|
||||
.add_function(
|
||||
file_id,
|
||||
&node.name.id,
|
||||
symbol.symbol_id,
|
||||
scope_id,
|
||||
decorator_tys,
|
||||
)
|
||||
.into();
|
||||
type_store.cache_node_type(file_id, *node_key.erased(), ty);
|
||||
Ok(ty)
|
||||
}
|
||||
}
|
||||
Definition::Assignment(node_key) => {
|
||||
let parsed = parse(db.upcast(), file_id)?;
|
||||
let ast = parsed.ast();
|
||||
let node = node_key.resolve_unwrap(ast.as_any_node_ref());
|
||||
// TODO handle unpacking assignment correctly
|
||||
infer_expr_type(db, file_id, &node.value)
|
||||
}
|
||||
_ => todo!("other kinds of definitions"),
|
||||
}
|
||||
}
|
||||
|
||||
fn infer_expr_type(db: &dyn SemanticDb, file_id: FileId, expr: &ast::Expr) -> QueryResult<Type> {
|
||||
// TODO cache the resolution of the type on the node
|
||||
let symbols = symbol_table(db, file_id)?;
|
||||
match expr {
|
||||
ast::Expr::Name(name) => {
|
||||
// TODO look up in the correct scope, don't assume global
|
||||
if let Some(symbol_id) = symbols.root_symbol_id_by_name(&name.id) {
|
||||
infer_symbol_type(db, GlobalSymbolId { file_id, symbol_id })
|
||||
} else {
|
||||
Ok(Type::Unknown)
|
||||
}
|
||||
}
|
||||
_ => todo!("full expression type resolution"),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::db::{HasJar, SemanticJar};
|
||||
use crate::module::{
|
||||
resolve_module, set_module_search_paths, ModuleName, ModuleSearchPath, ModuleSearchPathKind,
|
||||
};
|
||||
use crate::symbols::{symbol_table, GlobalSymbolId};
|
||||
use crate::types::{infer_symbol_type, Type};
|
||||
use crate::Name;
|
||||
|
||||
// TODO with virtual filesystem we shouldn't have to write files to disk for these
|
||||
// tests
|
||||
|
||||
struct TestCase {
|
||||
temp_dir: tempfile::TempDir,
|
||||
db: TestDb,
|
||||
|
||||
src: ModuleSearchPath,
|
||||
}
|
||||
|
||||
fn create_test() -> std::io::Result<TestCase> {
|
||||
let temp_dir = tempfile::tempdir()?;
|
||||
|
||||
let src = temp_dir.path().join("src");
|
||||
std::fs::create_dir(&src)?;
|
||||
let src = ModuleSearchPath::new(src.canonicalize()?, ModuleSearchPathKind::FirstParty);
|
||||
|
||||
let roots = vec![src.clone()];
|
||||
|
||||
let mut db = TestDb::default();
|
||||
set_module_search_paths(&mut db, roots);
|
||||
|
||||
Ok(TestCase { temp_dir, db, src })
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn follow_import_to_class() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
let db = &case.db;
|
||||
|
||||
let a_path = case.src.path().join("a.py");
|
||||
let b_path = case.src.path().join("b.py");
|
||||
std::fs::write(a_path, "from b import C as D; E = D")?;
|
||||
std::fs::write(b_path, "class C: pass")?;
|
||||
let a_file = resolve_module(db, ModuleName::new("a"))?
|
||||
.expect("module should be found")
|
||||
.path(db)?
|
||||
.file();
|
||||
let a_syms = symbol_table(db, a_file)?;
|
||||
let e_sym = a_syms
|
||||
.root_symbol_id_by_name("E")
|
||||
.expect("E symbol should be found");
|
||||
|
||||
let ty = infer_symbol_type(
|
||||
db,
|
||||
GlobalSymbolId {
|
||||
file_id: a_file,
|
||||
symbol_id: e_sym,
|
||||
},
|
||||
)?;
|
||||
|
||||
let jar = HasJar::<SemanticJar>::jar(db)?;
|
||||
assert!(matches!(ty, Type::Class(_)));
|
||||
assert_eq!(format!("{}", ty.display(&jar.type_store)), "Literal[C]");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_base_class_by_name() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
let db = &case.db;
|
||||
|
||||
let path = case.src.path().join("mod.py");
|
||||
std::fs::write(path, "class Base: pass\nclass Sub(Base): pass")?;
|
||||
let file = resolve_module(db, ModuleName::new("mod"))?
|
||||
.expect("module should be found")
|
||||
.path(db)?
|
||||
.file();
|
||||
let syms = symbol_table(db, file)?;
|
||||
let sym = syms
|
||||
.root_symbol_id_by_name("Sub")
|
||||
.expect("Sub symbol should be found");
|
||||
|
||||
let ty = infer_symbol_type(
|
||||
db,
|
||||
GlobalSymbolId {
|
||||
file_id: file,
|
||||
symbol_id: sym,
|
||||
},
|
||||
)?;
|
||||
|
||||
let Type::Class(class_id) = ty else {
|
||||
panic!("Sub is not a Class")
|
||||
};
|
||||
let jar = HasJar::<SemanticJar>::jar(db)?;
|
||||
let base_names: Vec<_> = jar
|
||||
.type_store
|
||||
.get_class(class_id)
|
||||
.bases()
|
||||
.iter()
|
||||
.map(|base_ty| format!("{}", base_ty.display(&jar.type_store)))
|
||||
.collect();
|
||||
|
||||
assert_eq!(base_names, vec!["Literal[Base]"]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_method() -> anyhow::Result<()> {
|
||||
let case = create_test()?;
|
||||
let db = &case.db;
|
||||
|
||||
let path = case.src.path().join("mod.py");
|
||||
std::fs::write(path, "class C:\n def f(self): pass")?;
|
||||
let file = resolve_module(db, ModuleName::new("mod"))?
|
||||
.expect("module should be found")
|
||||
.path(db)?
|
||||
.file();
|
||||
let syms = symbol_table(db, file)?;
|
||||
let sym = syms
|
||||
.root_symbol_id_by_name("C")
|
||||
.expect("C symbol should be found");
|
||||
|
||||
let ty = infer_symbol_type(
|
||||
db,
|
||||
GlobalSymbolId {
|
||||
file_id: file,
|
||||
symbol_id: sym,
|
||||
},
|
||||
)?;
|
||||
|
||||
let Type::Class(class_id) = ty else {
|
||||
panic!("C is not a Class");
|
||||
};
|
||||
|
||||
let member_ty = class_id
|
||||
.get_own_class_member(db, &Name::new("f"))
|
||||
.expect("C.f to resolve");
|
||||
|
||||
let Some(Type::Function(func_id)) = member_ty else {
|
||||
panic!("C.f is not a Function");
|
||||
};
|
||||
|
||||
let jar = HasJar::<SemanticJar>::jar(db)?;
|
||||
let function = jar.type_store.get_function(func_id);
|
||||
assert_eq!(function.name(), "f");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
77 crates/red_knot/src/watch.rs Normal file
@@ -0,0 +1,77 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Context;
|
||||
use notify::event::{CreateKind, RemoveKind};
|
||||
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
|
||||
|
||||
use crate::program::{FileChangeKind, FileWatcherChange};
|
||||
|
||||
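/// Watches paths using the platform's recommended `notify` backend and forwards file creation,
/// modification, and deletion events to the registered [`EventHandler`].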
pub struct FileWatcher {
|
||||
watcher: RecommendedWatcher,
|
||||
}
|
||||
|
||||
pub trait EventHandler: Send + 'static {
|
||||
fn handle(&self, changes: Vec<FileWatcherChange>);
|
||||
}
|
||||
|
||||
impl<F> EventHandler for F
|
||||
where
|
||||
F: Fn(Vec<FileWatcherChange>) + Send + 'static,
|
||||
{
|
||||
fn handle(&self, changes: Vec<FileWatcherChange>) {
|
||||
let f = self;
|
||||
f(changes);
|
||||
}
|
||||
}
|
||||
|
||||
impl FileWatcher {
|
||||
pub fn new<E>(handler: E) -> anyhow::Result<Self>
|
||||
where
|
||||
E: EventHandler,
|
||||
{
|
||||
Self::from_handler(Box::new(handler))
|
||||
}
|
||||
|
||||
fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
|
||||
let watcher = recommended_watcher(move |changes: notify::Result<Event>| {
|
||||
match changes {
|
||||
Ok(event) => {
|
||||
// TODO verify that this handles all events correctly
|
||||
let change_kind = match event.kind {
|
||||
EventKind::Create(CreateKind::File) => FileChangeKind::Created,
|
||||
EventKind::Modify(_) => FileChangeKind::Modified,
|
||||
EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
|
||||
_ => {
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let mut changes = Vec::new();
|
||||
|
||||
for path in event.paths {
|
||||
if path.is_file() {
|
||||
changes.push(FileWatcherChange::new(path, change_kind));
|
||||
}
|
||||
}
|
||||
|
||||
if !changes.is_empty() {
|
||||
handler.handle(changes);
|
||||
}
|
||||
}
|
||||
// TODO proper error handling
|
||||
Err(err) => {
|
||||
panic!("Error: {err}");
|
||||
}
|
||||
}
|
||||
})
|
||||
.context("Failed to create file watcher.")?;
|
||||
|
||||
Ok(Self { watcher })
|
||||
}
|
||||
|
||||
pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
|
||||
self.watcher.watch(path, RecursiveMode::Recursive)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
124 crates/red_knot/vendor/typeshed/README.md vendored Normal file
@@ -0,0 +1,124 @@
|
||||
# typeshed
|
||||
|
||||
[Tests](https://github.com/python/typeshed/actions/workflows/tests.yml)
[Chat on Gitter](https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[Contributing](https://github.com/python/typeshed/blob/main/CONTRIBUTING.md)
|
||||
|
||||
## About
|
||||
|
||||
Typeshed contains external type annotations for the Python standard library
|
||||
and Python builtins, as well as third party packages as contributed by
|
||||
people external to those projects.
|
||||
|
||||
This data can e.g. be used for static analysis, type checking, type inference,
|
||||
and autocompletion.
|
||||
|
||||
For information on how to use typeshed, read below. Information for
|
||||
contributors can be found in [CONTRIBUTING.md](CONTRIBUTING.md). **Please read
|
||||
it before submitting pull requests; do not report issues with annotations to
|
||||
the project the stubs are for, but instead report them here to typeshed.**
|
||||
|
||||
Further documentation on stub files, typeshed, and Python's typing system in
|
||||
general, can also be found at https://typing.readthedocs.io/en/latest/.
|
||||
|
||||
Typeshed supports Python versions 3.8 and up.
|
||||
|
||||
## Using
|
||||
|
||||
If you're just using a type checker ([mypy](https://github.com/python/mypy/),
|
||||
[pyright](https://github.com/microsoft/pyright),
|
||||
[pytype](https://github.com/google/pytype/), PyCharm, ...), as opposed to
|
||||
developing it, you don't need to interact with the typeshed repo at
|
||||
all: a copy of the standard library part of typeshed is bundled with type checkers.
|
||||
And type stubs for third party packages and modules you are using can
|
||||
be installed from PyPI. For example, if you are using `html5lib` and `requests`,
|
||||
you can install the type stubs using
|
||||
|
||||
```bash
|
||||
$ pip install types-html5lib types-requests
|
||||
```
|
||||
|
||||
These PyPI packages follow [PEP 561](http://www.python.org/dev/peps/pep-0561/)
|
||||
and are automatically released (up to once a day) by
|
||||
[typeshed internal machinery](https://github.com/typeshed-internal/stub_uploader).
|
||||
|
||||
Type checkers should be able to use these stub packages when installed. For more
|
||||
details, see the documentation for your type checker.
|
||||
|
||||
### Package versioning for third-party stubs
|
||||
|
||||
Version numbers of third-party stub packages consist of at least four parts.
|
||||
All parts of the stub version, except for the last part, correspond to the
|
||||
version of the runtime package being stubbed. For example, if the `types-foo`
|
||||
package has version `1.2.0.20240309`, this guarantees that the `types-foo` package
|
||||
contains stubs targeted against `foo==1.2.*` and tested against the latest
|
||||
version of `foo` matching that specifier. In this example, the final element
|
||||
of the version number (20240309) indicates that the stub package was pushed on
|
||||
March 9, 2024.
|
||||
|
||||
At typeshed, we try to keep breaking changes to a minimum. However, due to the
|
||||
nature of stubs, any version bump can introduce changes that might make your
|
||||
code fail to type check.
|
||||
|
||||
There are several strategies available for specifying the version of a stubs
|
||||
package you're using, each with its own tradeoffs:
|
||||
|
||||
1. Use the same bounds that you use for the package being stubbed. For example,
|
||||
if you use `requests>=2.30.0,<2.32`, you can use
|
||||
`types-requests>=2.30.0,<2.32`. This ensures that the stubs are compatible
|
||||
with the package you are using, but it carries a small risk of breaking
|
||||
type checking due to changes in the stubs.
|
||||
|
||||
Another risk of this strategy is that stubs often lag behind
|
||||
the package being stubbed. You might want to force the package being stubbed
|
||||
to a certain minimum version because it fixes a critical bug, but if
|
||||
correspondingly updated stubs have not been released, your type
|
||||
checking results may not be fully accurate.
|
||||
2. Pin the stubs to a known good version and update the pin from time to time
|
||||
(either manually, or using a tool such as dependabot or renovate).
|
||||
|
||||
For example, if you use `types-requests==2.31.0.1`, you can have confidence
|
||||
that upgrading dependencies will not break type checking. However, you will
|
||||
miss out on improvements in the stubs that could potentially improve type
|
||||
checking until you update the pin. This strategy also has the risk that the
|
||||
stubs you are using might become incompatible with the package being stubbed.
|
||||
3. Don't pin the stubs. This is the option that demands the least work from
|
||||
you when it comes to updating version pins, and has the advantage that you
|
||||
will automatically benefit from improved stubs whenever a new version of the
|
||||
stubs package is released. However, it carries the risk that the stubs
|
||||
become incompatible with the package being stubbed.
|
||||
|
||||
For example, if a new major version of the package is released, there's a
|
||||
chance the stubs might be updated to reflect the new version of the runtime
|
||||
package before you update the package being stubbed.
|
||||
|
||||
You can also switch between the different strategies as needed. For example,
|
||||
you could default to strategy (1), but fall back to strategy (2) when
|
||||
a problem arises that can't easily be fixed.
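As a rough illustration, the three strategies above could look as follows for the `requests`
example, written as `pip` requirement specifiers (the version numbers are only the placeholder
values used in the examples above):

```bash
# Strategy 1: mirror the bounds used for the runtime package
$ pip install "requests>=2.30.0,<2.32" "types-requests>=2.30.0,<2.32"

# Strategy 2: pin the stubs to a known good version
$ pip install "types-requests==2.31.0.1"

# Strategy 3: leave the stubs unpinned
$ pip install types-requests
```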
|
||||
|
||||
### The `_typeshed` package
|
||||
|
||||
typeshed includes a package `_typeshed` as part of the standard library.
|
||||
This package and its submodules contain utility types, but are not
|
||||
available at runtime. For more information about how to use this package,
|
||||
[see the `stdlib/_typeshed` directory](https://github.com/python/typeshed/tree/main/stdlib/_typeshed).
|
||||
|
||||
## Discussion
|
||||
|
||||
If you've run into behavior in the type checker that suggests the type
|
||||
stubs for a given library are incorrect or incomplete,
|
||||
we want to hear from you!
|
||||
|
||||
Our main forum for discussion is the project's [GitHub issue
|
||||
tracker](https://github.com/python/typeshed/issues). This is the right
|
||||
place to start a discussion of any of the above or most any other
|
||||
topic concerning the project.
|
||||
|
||||
If you have general questions about typing with Python, or you need
|
||||
a review of your type annotations or stubs outside of typeshed, head over to
|
||||
[our discussion forum](https://github.com/python/typing/discussions).
|
||||
For less formal discussion, try the typing chat room on
|
||||
[gitter.im](https://gitter.im/python/typing). Some typeshed maintainers
|
||||
are almost always present; feel free to find us there and we're happy
|
||||
to chat. Substantive technical discussion will be directed to the
|
||||
issue tracker.
|
||||
1 crates/red_knot/vendor/typeshed/source_commit.txt vendored Normal file
@@ -0,0 +1 @@
|
||||
a9d7e861f7a46ae7acd56569326adef302e10f29
|
||||
@@ -20,53 +20,34 @@
|
||||
__future__: 3.0-
|
||||
__main__: 3.0-
|
||||
_ast: 3.0-
|
||||
_asyncio: 3.0-
|
||||
_bisect: 3.0-
|
||||
_blake2: 3.6-
|
||||
_bootlocale: 3.4-3.9
|
||||
_bz2: 3.3-
|
||||
_codecs: 3.0-
|
||||
_collections_abc: 3.3-
|
||||
_compat_pickle: 3.1-
|
||||
_compression: 3.5-3.13
|
||||
_contextvars: 3.7-
|
||||
_compression: 3.5-
|
||||
_csv: 3.0-
|
||||
_ctypes: 3.0-
|
||||
_curses: 3.0-
|
||||
_curses_panel: 3.0-
|
||||
_dbm: 3.0-
|
||||
_decimal: 3.3-
|
||||
_frozen_importlib: 3.0-
|
||||
_frozen_importlib_external: 3.5-
|
||||
_gdbm: 3.0-
|
||||
_hashlib: 3.0-
|
||||
_dummy_thread: 3.0-3.8
|
||||
_dummy_threading: 3.0-3.8
|
||||
_heapq: 3.0-
|
||||
_imp: 3.0-
|
||||
_interpchannels: 3.13-
|
||||
_interpqueues: 3.13-
|
||||
_interpreters: 3.13-
|
||||
_io: 3.0-
|
||||
_json: 3.0-
|
||||
_locale: 3.0-
|
||||
_lsprof: 3.0-
|
||||
_lzma: 3.3-
|
||||
_markupbase: 3.0-
|
||||
_msi: 3.0-3.12
|
||||
_multibytecodec: 3.0-
|
||||
_msi: 3.0-
|
||||
_operator: 3.4-
|
||||
_osx_support: 3.0-
|
||||
_pickle: 3.0-
|
||||
_posixsubprocess: 3.2-
|
||||
_py_abc: 3.7-
|
||||
_pydecimal: 3.5-
|
||||
_queue: 3.7-
|
||||
_random: 3.0-
|
||||
_sitebuiltins: 3.4-
|
||||
_socket: 3.0- # present in 3.0 at runtime, but not in typeshed
|
||||
_sqlite3: 3.0-
|
||||
_ssl: 3.0-
|
||||
_stat: 3.4-
|
||||
_struct: 3.0-
|
||||
_thread: 3.0-
|
||||
_threading_local: 3.0-
|
||||
_tkinter: 3.0-
|
||||
@@ -78,17 +59,15 @@ _weakrefset: 3.0-
|
||||
_winapi: 3.3-
|
||||
abc: 3.0-
|
||||
aifc: 3.0-3.12
|
||||
annotationlib: 3.14-
|
||||
antigravity: 3.0-
|
||||
argparse: 3.0-
|
||||
array: 3.0-
|
||||
ast: 3.0-
|
||||
asynchat: 3.0-3.11
|
||||
asyncio: 3.4-
|
||||
asyncio.mixins: 3.10-
|
||||
asyncio.exceptions: 3.8-
|
||||
asyncio.format_helpers: 3.7-
|
||||
asyncio.graph: 3.14-
|
||||
asyncio.mixins: 3.10-
|
||||
asyncio.runners: 3.7-
|
||||
asyncio.staggered: 3.8-
|
||||
asyncio.taskgroups: 3.11-
|
||||
@@ -119,9 +98,7 @@ collections: 3.0-
|
||||
collections.abc: 3.3-
|
||||
colorsys: 3.0-
|
||||
compileall: 3.0-
|
||||
compression: 3.14-
|
||||
concurrent: 3.2-
|
||||
concurrent.futures.interpreter: 3.14-
|
||||
configparser: 3.0-
|
||||
contextlib: 3.0-
|
||||
contextvars: 3.7-
|
||||
@@ -134,7 +111,6 @@ curses: 3.0-
|
||||
dataclasses: 3.7-
|
||||
datetime: 3.0-
|
||||
dbm: 3.0-
|
||||
dbm.sqlite3: 3.13-
|
||||
decimal: 3.0-
|
||||
difflib: 3.0-
|
||||
dis: 3.0-
|
||||
@@ -142,13 +118,9 @@ distutils: 3.0-3.11
|
||||
distutils.command.bdist_msi: 3.0-3.10
|
||||
distutils.command.bdist_wininst: 3.0-3.9
|
||||
doctest: 3.0-
|
||||
dummy_threading: 3.0-3.8
|
||||
email: 3.0-
|
||||
encodings: 3.0-
|
||||
encodings.cp1125: 3.4-
|
||||
encodings.cp273: 3.4-
|
||||
encodings.cp858: 3.2-
|
||||
encodings.koi8_t: 3.5-
|
||||
encodings.kz1048: 3.5-
|
||||
ensurepip: 3.0-
|
||||
enum: 3.4-
|
||||
errno: 3.0-
|
||||
@@ -180,15 +152,10 @@ imghdr: 3.0-3.12
|
||||
imp: 3.0-3.11
|
||||
importlib: 3.0-
|
||||
importlib._abc: 3.10-
|
||||
importlib._bootstrap: 3.0-
|
||||
importlib._bootstrap_external: 3.5-
|
||||
importlib.metadata: 3.8-
|
||||
importlib.metadata._meta: 3.10-
|
||||
importlib.metadata.diagnose: 3.13-
|
||||
importlib.readers: 3.10-
|
||||
importlib.resources: 3.7-
|
||||
importlib.resources._common: 3.11-
|
||||
importlib.resources._functional: 3.13-
|
||||
importlib.resources.abc: 3.11-
|
||||
importlib.resources.readers: 3.11-
|
||||
importlib.resources.simple: 3.11-
|
||||
@@ -199,7 +166,7 @@ ipaddress: 3.3-
|
||||
itertools: 3.0-
|
||||
json: 3.0-
|
||||
keyword: 3.0-
|
||||
lib2to3: 3.0-3.12
|
||||
lib2to3: 3.0-
|
||||
linecache: 3.0-
|
||||
locale: 3.0-
|
||||
logging: 3.0-
|
||||
@@ -230,7 +197,6 @@ os: 3.0-
|
||||
ossaudiodev: 3.0-3.12
|
||||
parser: 3.0-3.9
|
||||
pathlib: 3.4-
|
||||
pathlib.types: 3.14-
|
||||
pdb: 3.0-
|
||||
pickle: 3.0-
|
||||
pickletools: 3.0-
|
||||
@@ -283,7 +249,6 @@ ssl: 3.0-
|
||||
stat: 3.0-
|
||||
statistics: 3.4-
|
||||
string: 3.0-
|
||||
string.templatelib: 3.14-
|
||||
stringprep: 3.0-
|
||||
struct: 3.0-
|
||||
subprocess: 3.0-
|
||||
@@ -305,7 +270,6 @@ threading: 3.0-
|
||||
time: 3.0-
|
||||
timeit: 3.0-
|
||||
tkinter: 3.0-
|
||||
tkinter.tix: 3.0-3.12
|
||||
token: 3.0-
|
||||
tokenize: 3.0-
|
||||
tomllib: 3.11-
|
||||
3 crates/red_knot/vendor/typeshed/stdlib/__main__.pyi vendored Normal file
@@ -0,0 +1,3 @@
|
||||
from typing import Any
|
||||
|
||||
def __getattr__(name: str) -> Any: ...
|
||||
591 crates/red_knot/vendor/typeshed/stdlib/_ast.pyi vendored Normal file
@@ -0,0 +1,591 @@
|
||||
import sys
|
||||
import typing_extensions
|
||||
from typing import Any, ClassVar, Literal
|
||||
|
||||
PyCF_ONLY_AST: Literal[1024]
|
||||
PyCF_TYPE_COMMENTS: Literal[4096]
|
||||
PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192]
|
||||
|
||||
# Alias used for fields that must always be valid identifiers
|
||||
# A string `x` counts as a valid identifier if both the following are True
|
||||
# (1) `x.isidentifier()` evaluates to `True`
|
||||
# (2) `keyword.iskeyword(x)` evaluates to `False`
|
||||
_Identifier: typing_extensions.TypeAlias = str
|
||||
|
||||
class AST:
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ()
|
||||
_attributes: ClassVar[tuple[str, ...]]
|
||||
_fields: ClassVar[tuple[str, ...]]
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
|
||||
# TODO: Not all nodes have all of the following attributes
|
||||
lineno: int
|
||||
col_offset: int
|
||||
end_lineno: int | None
|
||||
end_col_offset: int | None
|
||||
type_comment: str | None
|
||||
|
||||
class mod(AST): ...
|
||||
class type_ignore(AST): ...
|
||||
|
||||
class TypeIgnore(type_ignore):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("lineno", "tag")
|
||||
tag: str
|
||||
|
||||
class FunctionType(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("argtypes", "returns")
|
||||
argtypes: list[expr]
|
||||
returns: expr
|
||||
|
||||
class Module(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body", "type_ignores")
|
||||
body: list[stmt]
|
||||
type_ignores: list[TypeIgnore]
|
||||
|
||||
class Interactive(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body",)
|
||||
body: list[stmt]
|
||||
|
||||
class Expression(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body",)
|
||||
body: expr
|
||||
|
||||
class stmt(AST): ...
|
||||
|
||||
class FunctionDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
|
||||
elif sys.version_info >= (3, 10):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment")
|
||||
name: _Identifier
|
||||
args: arguments
|
||||
body: list[stmt]
|
||||
decorator_list: list[expr]
|
||||
returns: expr | None
|
||||
if sys.version_info >= (3, 12):
|
||||
type_params: list[type_param]
|
||||
|
||||
class AsyncFunctionDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
|
||||
elif sys.version_info >= (3, 10):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment")
|
||||
name: _Identifier
|
||||
args: arguments
|
||||
body: list[stmt]
|
||||
decorator_list: list[expr]
|
||||
returns: expr | None
|
||||
if sys.version_info >= (3, 12):
|
||||
type_params: list[type_param]
|
||||
|
||||
class ClassDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params")
|
||||
elif sys.version_info >= (3, 10):
|
||||
__match_args__ = ("name", "bases", "keywords", "body", "decorator_list")
|
||||
name: _Identifier
|
||||
bases: list[expr]
|
||||
keywords: list[keyword]
|
||||
body: list[stmt]
|
||||
decorator_list: list[expr]
|
||||
if sys.version_info >= (3, 12):
|
||||
type_params: list[type_param]
|
||||
|
||||
class Return(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr | None
|
||||
|
||||
class Delete(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("targets",)
|
||||
targets: list[expr]
|
||||
|
||||
class Assign(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("targets", "value", "type_comment")
|
||||
targets: list[expr]
|
||||
value: expr
|
||||
|
||||
class AugAssign(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "op", "value")
|
||||
target: Name | Attribute | Subscript
|
||||
op: operator
|
||||
value: expr
|
||||
|
||||
class AnnAssign(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "annotation", "value", "simple")
|
||||
target: Name | Attribute | Subscript
|
||||
annotation: expr
|
||||
value: expr | None
|
||||
simple: int
|
||||
|
||||
class For(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
|
||||
target: expr
|
||||
iter: expr
|
||||
body: list[stmt]
|
||||
orelse: list[stmt]
|
||||
|
||||
class AsyncFor(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
|
||||
target: expr
|
||||
iter: expr
|
||||
body: list[stmt]
|
||||
orelse: list[stmt]
|
||||
|
||||
class While(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "body", "orelse")
|
||||
test: expr
|
||||
body: list[stmt]
|
||||
orelse: list[stmt]
|
||||
|
||||
class If(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "body", "orelse")
|
||||
test: expr
|
||||
body: list[stmt]
|
||||
orelse: list[stmt]
|
||||
|
||||
class With(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("items", "body", "type_comment")
|
||||
items: list[withitem]
|
||||
body: list[stmt]
|
||||
|
||||
class AsyncWith(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("items", "body", "type_comment")
|
||||
items: list[withitem]
|
||||
body: list[stmt]
|
||||
|
||||
class Raise(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("exc", "cause")
|
||||
exc: expr | None
|
||||
cause: expr | None
|
||||
|
||||
class Try(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body", "handlers", "orelse", "finalbody")
|
||||
body: list[stmt]
|
||||
handlers: list[ExceptHandler]
|
||||
orelse: list[stmt]
|
||||
finalbody: list[stmt]
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
class TryStar(stmt):
|
||||
__match_args__ = ("body", "handlers", "orelse", "finalbody")
|
||||
body: list[stmt]
|
||||
handlers: list[ExceptHandler]
|
||||
orelse: list[stmt]
|
||||
finalbody: list[stmt]
|
||||
|
||||
class Assert(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "msg")
|
||||
test: expr
|
||||
msg: expr | None
|
||||
|
||||
class Import(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("names",)
|
||||
names: list[alias]
|
||||
|
||||
class ImportFrom(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("module", "names", "level")
|
||||
module: str | None
|
||||
names: list[alias]
|
||||
level: int
|
||||
|
||||
class Global(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("names",)
|
||||
names: list[_Identifier]
|
||||
|
||||
class Nonlocal(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("names",)
|
||||
names: list[_Identifier]
|
||||
|
||||
class Expr(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
|
||||
class Pass(stmt): ...
|
||||
class Break(stmt): ...
|
||||
class Continue(stmt): ...
|
||||
class expr(AST): ...
|
||||
|
||||
class BoolOp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("op", "values")
|
||||
op: boolop
|
||||
values: list[expr]
|
||||
|
||||
class BinOp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("left", "op", "right")
|
||||
left: expr
|
||||
op: operator
|
||||
right: expr
|
||||
|
||||
class UnaryOp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("op", "operand")
|
||||
op: unaryop
|
||||
operand: expr
|
||||
|
||||
class Lambda(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("args", "body")
|
||||
args: arguments
|
||||
body: expr
|
||||
|
||||
class IfExp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "body", "orelse")
|
||||
test: expr
|
||||
body: expr
|
||||
orelse: expr
|
||||
|
||||
class Dict(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("keys", "values")
|
||||
keys: list[expr | None]
|
||||
values: list[expr]
|
||||
|
||||
class Set(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elts",)
|
||||
elts: list[expr]
|
||||
|
||||
class ListComp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elt", "generators")
|
||||
elt: expr
|
||||
generators: list[comprehension]
|
||||
|
||||
class SetComp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elt", "generators")
|
||||
elt: expr
|
||||
generators: list[comprehension]
|
||||
|
||||
class DictComp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("key", "value", "generators")
|
||||
key: expr
|
||||
value: expr
|
||||
generators: list[comprehension]
|
||||
|
||||
class GeneratorExp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elt", "generators")
|
||||
elt: expr
|
||||
generators: list[comprehension]
|
||||
|
||||
class Await(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
|
||||
class Yield(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr | None
|
||||
|
||||
class YieldFrom(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
|
||||
class Compare(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("left", "ops", "comparators")
|
||||
left: expr
|
||||
ops: list[cmpop]
|
||||
comparators: list[expr]
|
||||
|
||||
class Call(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("func", "args", "keywords")
|
||||
func: expr
|
||||
args: list[expr]
|
||||
keywords: list[keyword]
|
||||
|
||||
class FormattedValue(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "conversion", "format_spec")
|
||||
value: expr
|
||||
conversion: int
|
||||
format_spec: expr | None
|
||||
|
||||
class JoinedStr(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("values",)
|
||||
values: list[expr]
|
||||
|
||||
class Constant(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "kind")
|
||||
value: Any # None, str, bytes, bool, int, float, complex, Ellipsis
|
||||
kind: str | None
|
||||
# Aliases for value, for backwards compatibility
|
||||
s: Any
|
||||
n: int | float | complex
|
||||
|
||||
class NamedExpr(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "value")
|
||||
target: Name
|
||||
value: expr
|
||||
|
||||
class Attribute(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "attr", "ctx")
|
||||
value: expr
|
||||
attr: _Identifier
|
||||
ctx: expr_context
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
_Slice: typing_extensions.TypeAlias = expr
|
||||
else:
|
||||
class slice(AST): ...
|
||||
_Slice: typing_extensions.TypeAlias = slice
|
||||
|
||||
class Slice(_Slice):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("lower", "upper", "step")
|
||||
lower: expr | None
|
||||
upper: expr | None
|
||||
step: expr | None
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
class ExtSlice(slice):
|
||||
dims: list[slice]
|
||||
|
||||
class Index(slice):
|
||||
value: expr
|
||||
|
||||
class Subscript(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "slice", "ctx")
|
||||
value: expr
|
||||
slice: _Slice
|
||||
ctx: expr_context
|
||||
|
||||
class Starred(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "ctx")
|
||||
value: expr
|
||||
ctx: expr_context
|
||||
|
||||
class Name(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("id", "ctx")
|
||||
id: _Identifier
|
||||
ctx: expr_context
|
||||
|
||||
class List(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elts", "ctx")
|
||||
elts: list[expr]
|
||||
ctx: expr_context
|
||||
|
||||
class Tuple(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elts", "ctx")
|
||||
elts: list[expr]
|
||||
ctx: expr_context
|
||||
if sys.version_info >= (3, 9):
|
||||
dims: list[expr]
|
||||
|
||||
class expr_context(AST): ...
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
class AugLoad(expr_context): ...
|
||||
class AugStore(expr_context): ...
|
||||
class Param(expr_context): ...
|
||||
|
||||
class Suite(mod):
|
||||
body: list[stmt]
|
||||
|
||||
class Del(expr_context): ...
|
||||
class Load(expr_context): ...
|
||||
class Store(expr_context): ...
|
||||
class boolop(AST): ...
|
||||
class And(boolop): ...
|
||||
class Or(boolop): ...
|
||||
class operator(AST): ...
|
||||
class Add(operator): ...
|
||||
class BitAnd(operator): ...
|
||||
class BitOr(operator): ...
|
||||
class BitXor(operator): ...
|
||||
class Div(operator): ...
|
||||
class FloorDiv(operator): ...
|
||||
class LShift(operator): ...
|
||||
class Mod(operator): ...
|
||||
class Mult(operator): ...
|
||||
class MatMult(operator): ...
|
||||
class Pow(operator): ...
|
||||
class RShift(operator): ...
|
||||
class Sub(operator): ...
|
||||
class unaryop(AST): ...
|
||||
class Invert(unaryop): ...
|
||||
class Not(unaryop): ...
|
||||
class UAdd(unaryop): ...
|
||||
class USub(unaryop): ...
|
||||
class cmpop(AST): ...
|
||||
class Eq(cmpop): ...
|
||||
class Gt(cmpop): ...
|
||||
class GtE(cmpop): ...
|
||||
class In(cmpop): ...
|
||||
class Is(cmpop): ...
|
||||
class IsNot(cmpop): ...
|
||||
class Lt(cmpop): ...
|
||||
class LtE(cmpop): ...
|
||||
class NotEq(cmpop): ...
|
||||
class NotIn(cmpop): ...
|
||||
|
||||
class comprehension(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "iter", "ifs", "is_async")
|
||||
target: expr
|
||||
iter: expr
|
||||
ifs: list[expr]
|
||||
is_async: int
|
||||
|
||||
class excepthandler(AST): ...
|
||||
|
||||
class ExceptHandler(excepthandler):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("type", "name", "body")
|
||||
type: expr | None
|
||||
name: _Identifier | None
|
||||
body: list[stmt]
|
||||
|
||||
class arguments(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults")
|
||||
posonlyargs: list[arg]
|
||||
args: list[arg]
|
||||
vararg: arg | None
|
||||
kwonlyargs: list[arg]
|
||||
kw_defaults: list[expr | None]
|
||||
kwarg: arg | None
|
||||
defaults: list[expr]
|
||||
|
||||
class arg(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("arg", "annotation", "type_comment")
|
||||
arg: _Identifier
|
||||
annotation: expr | None
|
||||
|
||||
class keyword(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("arg", "value")
|
||||
arg: _Identifier | None
|
||||
value: expr
|
||||
|
||||
class alias(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("name", "asname")
|
||||
name: str
|
||||
asname: _Identifier | None
|
||||
|
||||
class withitem(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("context_expr", "optional_vars")
|
||||
context_expr: expr
|
||||
optional_vars: expr | None
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
class Match(stmt):
|
||||
__match_args__ = ("subject", "cases")
|
||||
subject: expr
|
||||
cases: list[match_case]
|
||||
|
||||
class pattern(AST): ...
|
||||
# Without the alias, Pyright complains variables named pattern are recursively defined
|
||||
_Pattern: typing_extensions.TypeAlias = pattern
|
||||
|
||||
class match_case(AST):
|
||||
__match_args__ = ("pattern", "guard", "body")
|
||||
pattern: _Pattern
|
||||
guard: expr | None
|
||||
body: list[stmt]
|
||||
|
||||
class MatchValue(pattern):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
|
||||
class MatchSingleton(pattern):
|
||||
__match_args__ = ("value",)
|
||||
value: Literal[True, False] | None
|
||||
|
||||
class MatchSequence(pattern):
|
||||
__match_args__ = ("patterns",)
|
||||
patterns: list[pattern]
|
||||
|
||||
class MatchStar(pattern):
|
||||
__match_args__ = ("name",)
|
||||
name: _Identifier | None
|
||||
|
||||
class MatchMapping(pattern):
|
||||
__match_args__ = ("keys", "patterns", "rest")
|
||||
keys: list[expr]
|
||||
patterns: list[pattern]
|
||||
rest: _Identifier | None
|
||||
|
||||
class MatchClass(pattern):
|
||||
__match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns")
|
||||
cls: expr
|
||||
patterns: list[pattern]
|
||||
kwd_attrs: list[_Identifier]
|
||||
kwd_patterns: list[pattern]
|
||||
|
||||
class MatchAs(pattern):
|
||||
__match_args__ = ("pattern", "name")
|
||||
pattern: _Pattern | None
|
||||
name: _Identifier | None
|
||||
|
||||
class MatchOr(pattern):
|
||||
__match_args__ = ("patterns",)
|
||||
patterns: list[pattern]
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
class type_param(AST):
|
||||
end_lineno: int
|
||||
end_col_offset: int
|
||||
|
||||
class TypeVar(type_param):
|
||||
__match_args__ = ("name", "bound")
|
||||
name: _Identifier
|
||||
bound: expr | None
|
||||
|
||||
class ParamSpec(type_param):
|
||||
__match_args__ = ("name",)
|
||||
name: _Identifier
|
||||
|
||||
class TypeVarTuple(type_param):
|
||||
__match_args__ = ("name",)
|
||||
name: _Identifier
|
||||
|
||||
class TypeAlias(stmt):
|
||||
__match_args__ = ("name", "type_params", "value")
|
||||
name: Name
|
||||
type_params: list[type_param]
|
||||
value: expr
|
||||
@@ -2,13 +2,10 @@ import codecs
import sys
from _typeshed import ReadableBuffer
from collections.abc import Callable
from typing import Literal, final, overload, type_check_only
from typing import Literal, overload
from typing_extensions import TypeAlias

# This type is not exposed; it is defined in unicodeobject.c
# At runtime it calls itself builtins.EncodingMap
@final
@type_check_only
class _EncodingMap:
def size(self) -> int: ...

@@ -81,12 +78,26 @@ def escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> t
def escape_encode(data: bytes, errors: str | None = None, /) -> tuple[bytes, int]: ...
def latin_1_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
def latin_1_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def raw_unicode_escape_decode(
data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /
) -> tuple[str, int]: ...

if sys.version_info >= (3, 9):
def raw_unicode_escape_decode(
data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /
) -> tuple[str, int]: ...

else:
def raw_unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...

def raw_unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def readbuffer_encode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[bytes, int]: ...
def unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /) -> tuple[str, int]: ...

if sys.version_info >= (3, 9):
def unicode_escape_decode(
data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /
) -> tuple[str, int]: ...

else:
def unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...

def unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_16_be_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_16_be_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
@@ -1,14 +1,14 @@
import sys
from abc import abstractmethod
from types import MappingProxyType
from typing import ( # noqa: Y022,Y038,UP035
from typing import ( # noqa: Y022,Y038,Y057
AbstractSet as Set,
AsyncGenerator as AsyncGenerator,
AsyncIterable as AsyncIterable,
AsyncIterator as AsyncIterator,
Awaitable as Awaitable,
ByteString as ByteString,
Callable as Callable,
ClassVar,
Collection as Collection,
Container as Container,
Coroutine as Coroutine,
@@ -59,12 +59,8 @@ __all__ = [
"ValuesView",
"Sequence",
"MutableSequence",
"ByteString",
]
if sys.version_info < (3, 14):
from typing import ByteString as ByteString # noqa: Y057,UP035

__all__ += ["ByteString"]

if sys.version_info >= (3, 12):
__all__ += ["Buffer"]

@@ -74,17 +70,12 @@ _VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers.
@final
class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented
def __eq__(self, value: object, /) -> bool: ...
def __reversed__(self) -> Iterator[_KT_co]: ...
__hash__: ClassVar[None] # type: ignore[assignment]
if sys.version_info >= (3, 13):
def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: ...
if sys.version_info >= (3, 10):
@property
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

@final
class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented
def __reversed__(self) -> Iterator[_VT_co]: ...
if sys.version_info >= (3, 10):
@property
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
@@ -92,10 +83,6 @@ class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented
@final
class dict_items(ItemsView[_KT_co, _VT_co]): # undocumented
def __eq__(self, value: object, /) -> bool: ...
def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
__hash__: ClassVar[None] # type: ignore[assignment]
if sys.version_info >= (3, 13):
def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: ...
if sys.version_info >= (3, 10):
@property
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
25
crates/red_knot/vendor/typeshed/stdlib/_compression.pyi
vendored
Normal file
@@ -0,0 +1,25 @@
from _typeshed import WriteableBuffer
from collections.abc import Callable
from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase
from typing import Any, Protocol

BUFFER_SIZE = DEFAULT_BUFFER_SIZE

class _Reader(Protocol):
def read(self, n: int, /) -> bytes: ...
def seekable(self) -> bool: ...
def seek(self, n: int, /) -> Any: ...

class BaseStream(BufferedIOBase): ...

class DecompressReader(RawIOBase):
def __init__(
self,
fp: _Reader,
decomp_factory: Callable[..., object],
trailing_error: type[Exception] | tuple[type[Exception], ...] = (),
**decomp_args: Any,
) -> None: ...
def readinto(self, b: WriteableBuffer) -> int: ...
def read(self, size: int = -1) -> bytes: ...
def seek(self, offset: int, whence: int = 0) -> int: ...
90
crates/red_knot/vendor/typeshed/stdlib/_csv.pyi
vendored
Normal file
@@ -0,0 +1,90 @@
import sys
from _typeshed import SupportsWrite
from collections.abc import Iterable, Iterator
from typing import Any, Final, Literal
from typing_extensions import TypeAlias

__version__: Final[str]

QUOTE_ALL: Literal[1]
QUOTE_MINIMAL: Literal[0]
QUOTE_NONE: Literal[3]
QUOTE_NONNUMERIC: Literal[2]
if sys.version_info >= (3, 12):
QUOTE_STRINGS: Literal[4]
QUOTE_NOTNULL: Literal[5]

# Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC`
# However, using literals in situations like these can cause false-positives (see #7258)
_QuotingType: TypeAlias = int

class Error(Exception): ...

class Dialect:
delimiter: str
quotechar: str | None
escapechar: str | None
doublequote: bool
skipinitialspace: bool
lineterminator: str
quoting: _QuotingType
strict: bool
def __init__(self) -> None: ...

_DialectLike: TypeAlias = str | Dialect | type[Dialect]

class _reader(Iterator[list[str]]):
@property
def dialect(self) -> Dialect: ...
line_num: int
def __next__(self) -> list[str]: ...

class _writer:
@property
def dialect(self) -> Dialect: ...
def writerow(self, row: Iterable[Any]) -> Any: ...
def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ...

def writer(
csvfile: SupportsWrite[str],
dialect: _DialectLike = "excel",
*,
delimiter: str = ",",
quotechar: str | None = '"',
escapechar: str | None = None,
doublequote: bool = True,
skipinitialspace: bool = False,
lineterminator: str = "\r\n",
quoting: _QuotingType = 0,
strict: bool = False,
) -> _writer: ...
def reader(
csvfile: Iterable[str],
dialect: _DialectLike = "excel",
*,
delimiter: str = ",",
quotechar: str | None = '"',
escapechar: str | None = None,
doublequote: bool = True,
skipinitialspace: bool = False,
lineterminator: str = "\r\n",
quoting: _QuotingType = 0,
strict: bool = False,
) -> _reader: ...
def register_dialect(
name: str,
dialect: type[Dialect] = ...,
*,
delimiter: str = ",",
quotechar: str | None = '"',
escapechar: str | None = None,
doublequote: bool = True,
skipinitialspace: bool = False,
lineterminator: str = "\r\n",
quoting: _QuotingType = 0,
strict: bool = False,
) -> None: ...
def unregister_dialect(name: str) -> None: ...
def get_dialect(name: str) -> Dialect: ...
def list_dialects() -> list[str]: ...
def field_size_limit(new_limit: int = ...) -> int: ...
211
crates/red_knot/vendor/typeshed/stdlib/_ctypes.pyi
vendored
Normal file
@@ -0,0 +1,211 @@
|
||||
import sys
|
||||
from _typeshed import ReadableBuffer, WriteableBuffer
|
||||
from abc import abstractmethod
|
||||
from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
|
||||
from ctypes import CDLL, ArgumentError as ArgumentError
|
||||
from typing import Any, ClassVar, Generic, TypeVar, overload
|
||||
from typing_extensions import Self, TypeAlias
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
from types import GenericAlias
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_CT = TypeVar("_CT", bound=_CData)
|
||||
|
||||
FUNCFLAG_CDECL: int
|
||||
FUNCFLAG_PYTHONAPI: int
|
||||
FUNCFLAG_USE_ERRNO: int
|
||||
FUNCFLAG_USE_LASTERROR: int
|
||||
RTLD_GLOBAL: int
|
||||
RTLD_LOCAL: int
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
CTYPES_MAX_ARGCOUNT: int
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
SIZEOF_TIME_T: int
|
||||
|
||||
if sys.platform == "win32":
|
||||
# Description, Source, HelpFile, HelpContext, scode
|
||||
_COMError_Details: TypeAlias = tuple[str | None, str | None, str | None, int | None, int | None]
|
||||
|
||||
class COMError(Exception):
|
||||
hresult: int
|
||||
text: str | None
|
||||
details: _COMError_Details
|
||||
|
||||
def __init__(self, hresult: int, text: str | None, details: _COMError_Details) -> None: ...
|
||||
|
||||
def CopyComPointer(src: _PointerLike, dst: _PointerLike | _CArgObject) -> int: ...
|
||||
|
||||
FUNCFLAG_HRESULT: int
|
||||
FUNCFLAG_STDCALL: int
|
||||
|
||||
def FormatError(code: int = ...) -> str: ...
|
||||
def get_last_error() -> int: ...
|
||||
def set_last_error(value: int) -> int: ...
|
||||
def LoadLibrary(name: str, load_flags: int = 0, /) -> int: ...
|
||||
def FreeLibrary(handle: int, /) -> None: ...
|
||||
|
||||
class _CDataMeta(type):
|
||||
# By default mypy complains about the following two methods, because strictly speaking cls
|
||||
# might not be a Type[_CT]. However this can never actually happen, because the only class that
|
||||
# uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here.
|
||||
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc]
|
||||
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc]
|
||||
|
||||
class _CData(metaclass=_CDataMeta):
|
||||
_b_base_: int
|
||||
_b_needsfree_: bool
|
||||
_objects: Mapping[Any, int] | None
|
||||
# At runtime the following classmethods are available only on classes, not
|
||||
# on instances. This can't be reflected properly in the type system:
|
||||
#
|
||||
# Structure.from_buffer(...) # valid at runtime
|
||||
# Structure(...).from_buffer(...) # invalid at runtime
|
||||
#
|
||||
|
||||
@classmethod
|
||||
def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ...
|
||||
@classmethod
|
||||
def from_buffer_copy(cls, source: ReadableBuffer, offset: int = ...) -> Self: ...
|
||||
@classmethod
|
||||
def from_address(cls, address: int) -> Self: ...
|
||||
@classmethod
|
||||
def from_param(cls, obj: Any) -> Self | _CArgObject: ...
|
||||
@classmethod
|
||||
def in_dll(cls, library: CDLL, name: str) -> Self: ...
|
||||
def __buffer__(self, flags: int, /) -> memoryview: ...
|
||||
def __release_buffer__(self, buffer: memoryview, /) -> None: ...
|
||||
|
||||
class _SimpleCData(_CData, Generic[_T]):
|
||||
value: _T
|
||||
# The TypeVar can be unsolved here,
|
||||
# but we can't use overloads without creating many, many mypy false-positive errors
|
||||
def __init__(self, value: _T = ...) -> None: ... # pyright: ignore[reportInvalidTypeVarUse]
|
||||
|
||||
class _CanCastTo(_CData): ...
|
||||
class _PointerLike(_CanCastTo): ...
|
||||
|
||||
class _Pointer(_PointerLike, _CData, Generic[_CT]):
|
||||
_type_: type[_CT]
|
||||
contents: _CT
|
||||
@overload
|
||||
def __init__(self) -> None: ...
|
||||
@overload
|
||||
def __init__(self, arg: _CT) -> None: ...
|
||||
@overload
|
||||
def __getitem__(self, key: int, /) -> Any: ...
|
||||
@overload
|
||||
def __getitem__(self, key: slice, /) -> list[Any]: ...
|
||||
def __setitem__(self, key: int, value: Any, /) -> None: ...
|
||||
|
||||
def POINTER(type: type[_CT]) -> type[_Pointer[_CT]]: ...
|
||||
def pointer(arg: _CT, /) -> _Pointer[_CT]: ...
|
||||
|
||||
class _CArgObject: ...
|
||||
|
||||
def byref(obj: _CData, offset: int = ...) -> _CArgObject: ...
|
||||
|
||||
_ECT: TypeAlias = Callable[[_CData | None, CFuncPtr, tuple[_CData, ...]], _CData]
|
||||
_PF: TypeAlias = tuple[int] | tuple[int, str | None] | tuple[int, str | None, Any]
|
||||
|
||||
class CFuncPtr(_PointerLike, _CData):
|
||||
restype: type[_CData] | Callable[[int], Any] | None
|
||||
argtypes: Sequence[type[_CData]]
|
||||
errcheck: _ECT
|
||||
# Abstract attribute that must be defined on subclasses
|
||||
_flags_: ClassVar[int]
|
||||
@overload
|
||||
def __init__(self) -> None: ...
|
||||
@overload
|
||||
def __init__(self, address: int, /) -> None: ...
|
||||
@overload
|
||||
def __init__(self, callable: Callable[..., Any], /) -> None: ...
|
||||
@overload
|
||||
def __init__(self, func_spec: tuple[str | int, CDLL], paramflags: tuple[_PF, ...] | None = ..., /) -> None: ...
|
||||
if sys.platform == "win32":
|
||||
@overload
|
||||
def __init__(
|
||||
self, vtbl_index: int, name: str, paramflags: tuple[_PF, ...] | None = ..., iid: _CData | None = ..., /
|
||||
) -> None: ...
|
||||
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
|
||||
|
||||
_GetT = TypeVar("_GetT")
|
||||
_SetT = TypeVar("_SetT")
|
||||
|
||||
class _CField(Generic[_CT, _GetT, _SetT]):
|
||||
offset: int
|
||||
size: int
|
||||
@overload
|
||||
def __get__(self, instance: None, owner: type[Any] | None, /) -> Self: ...
|
||||
@overload
|
||||
def __get__(self, instance: Any, owner: type[Any] | None, /) -> _GetT: ...
|
||||
def __set__(self, instance: Any, value: _SetT, /) -> None: ...
|
||||
|
||||
class _StructUnionMeta(_CDataMeta):
|
||||
_fields_: Sequence[tuple[str, type[_CData]] | tuple[str, type[_CData], int]]
|
||||
_pack_: int
|
||||
_anonymous_: Sequence[str]
|
||||
def __getattr__(self, name: str) -> _CField[Any, Any, Any]: ...
|
||||
|
||||
class _StructUnionBase(_CData, metaclass=_StructUnionMeta):
|
||||
def __init__(self, *args: Any, **kw: Any) -> None: ...
|
||||
def __getattr__(self, name: str) -> Any: ...
|
||||
def __setattr__(self, name: str, value: Any) -> None: ...
|
||||
|
||||
class Union(_StructUnionBase): ...
|
||||
class Structure(_StructUnionBase): ...
|
||||
|
||||
class Array(_CData, Generic[_CT]):
|
||||
@property
|
||||
@abstractmethod
|
||||
def _length_(self) -> int: ...
|
||||
@_length_.setter
|
||||
def _length_(self, value: int) -> None: ...
|
||||
@property
|
||||
@abstractmethod
|
||||
def _type_(self) -> type[_CT]: ...
|
||||
@_type_.setter
|
||||
def _type_(self, value: type[_CT]) -> None: ...
|
||||
# Note: only available if _CT == c_char
|
||||
@property
|
||||
def raw(self) -> bytes: ...
|
||||
@raw.setter
|
||||
def raw(self, value: ReadableBuffer) -> None: ...
|
||||
value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise
|
||||
# TODO These methods cannot be annotated correctly at the moment.
|
||||
# All of these "Any"s stand for the array's element type, but it's not possible to use _CT
|
||||
# here, because of a special feature of ctypes.
|
||||
# By default, when accessing an element of an Array[_CT], the returned object has type _CT.
|
||||
# However, when _CT is a "simple type" like c_int, ctypes automatically "unboxes" the object
|
||||
# and converts it to the corresponding Python primitive. For example, when accessing an element
|
||||
# of an Array[c_int], a Python int object is returned, not a c_int.
|
||||
# This behavior does *not* apply to subclasses of "simple types".
|
||||
# If MyInt is a subclass of c_int, then accessing an element of an Array[MyInt] returns
|
||||
# a MyInt, not an int.
|
||||
# This special behavior is not easy to model in a stub, so for now all places where
|
||||
# the array element type would belong are annotated with Any instead.
|
||||
def __init__(self, *args: Any) -> None: ...
|
||||
@overload
|
||||
def __getitem__(self, key: int, /) -> Any: ...
|
||||
@overload
|
||||
def __getitem__(self, key: slice, /) -> list[Any]: ...
|
||||
@overload
|
||||
def __setitem__(self, key: int, value: Any, /) -> None: ...
|
||||
@overload
|
||||
def __setitem__(self, key: slice, value: Iterable[Any], /) -> None: ...
|
||||
def __iter__(self) -> Iterator[Any]: ...
|
||||
# Can't inherit from Sized because the metaclass conflict between
|
||||
# Sized and _CData prevents using _CDataMeta.
|
||||
def __len__(self) -> int: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
def __class_getitem__(cls, item: Any) -> GenericAlias: ...
|
||||
|
||||
def addressof(obj: _CData) -> int: ...
|
||||
def alignment(obj_or_type: _CData | type[_CData]) -> int: ...
|
||||
def get_errno() -> int: ...
|
||||
def resize(obj: _CData, size: int) -> None: ...
|
||||
def set_errno(value: int) -> int: ...
|
||||
def sizeof(obj_or_type: _CData | type[_CData]) -> int: ...
|
||||
@@ -1,7 +1,6 @@
|
||||
import sys
|
||||
from _typeshed import ReadOnlyBuffer, SupportsRead, SupportsWrite
|
||||
from curses import _ncurses_version
|
||||
from typing import Any, final, overload
|
||||
from _typeshed import ReadOnlyBuffer, SupportsRead
|
||||
from typing import IO, Any, NamedTuple, final, overload
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
# NOTE: This module is ordinarily only available on Unix, but the windows-curses
|
||||
@@ -64,7 +63,8 @@ A_COLOR: int
|
||||
A_DIM: int
|
||||
A_HORIZONTAL: int
|
||||
A_INVIS: int
|
||||
A_ITALIC: int
|
||||
if sys.platform != "darwin":
|
||||
A_ITALIC: int
|
||||
A_LEFT: int
|
||||
A_LOW: int
|
||||
A_NORMAL: int
|
||||
@@ -95,14 +95,13 @@ BUTTON4_DOUBLE_CLICKED: int
|
||||
BUTTON4_PRESSED: int
|
||||
BUTTON4_RELEASED: int
|
||||
BUTTON4_TRIPLE_CLICKED: int
|
||||
# Darwin ncurses doesn't provide BUTTON5_* constants prior to 3.12.10 and 3.13.3
|
||||
if sys.version_info >= (3, 10):
|
||||
if sys.version_info >= (3, 12) or sys.platform != "darwin":
|
||||
BUTTON5_PRESSED: int
|
||||
BUTTON5_RELEASED: int
|
||||
BUTTON5_CLICKED: int
|
||||
BUTTON5_DOUBLE_CLICKED: int
|
||||
BUTTON5_TRIPLE_CLICKED: int
|
||||
# Darwin ncurses doesn't provide BUTTON5_* constants
|
||||
if sys.version_info >= (3, 10) and sys.platform != "darwin":
|
||||
BUTTON5_PRESSED: int
|
||||
BUTTON5_RELEASED: int
|
||||
BUTTON5_CLICKED: int
|
||||
BUTTON5_DOUBLE_CLICKED: int
|
||||
BUTTON5_TRIPLE_CLICKED: int
|
||||
BUTTON_ALT: int
|
||||
BUTTON_CTRL: int
|
||||
BUTTON_SHIFT: int
|
||||
@@ -293,11 +292,14 @@ def erasechar() -> bytes: ...
|
||||
def filter() -> None: ...
|
||||
def flash() -> None: ...
|
||||
def flushinp() -> None: ...
|
||||
def get_escdelay() -> int: ...
|
||||
def get_tabsize() -> int: ...
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
def get_escdelay() -> int: ...
|
||||
def get_tabsize() -> int: ...
|
||||
|
||||
def getmouse() -> tuple[int, int, int, int, int]: ...
|
||||
def getsyx() -> tuple[int, int]: ...
|
||||
def getwin(file: SupportsRead[bytes], /) -> window: ...
|
||||
def getwin(file: SupportsRead[bytes], /) -> _CursesWindow: ...
|
||||
def halfdelay(tenths: int, /) -> None: ...
|
||||
def has_colors() -> bool: ...
|
||||
|
||||
@@ -309,7 +311,7 @@ def has_il() -> bool: ...
|
||||
def has_key(key: int, /) -> bool: ...
|
||||
def init_color(color_number: int, r: int, g: int, b: int, /) -> None: ...
|
||||
def init_pair(pair_number: int, fg: int, bg: int, /) -> None: ...
|
||||
def initscr() -> window: ...
|
||||
def initscr() -> _CursesWindow: ...
|
||||
def intrflush(flag: bool, /) -> None: ...
|
||||
def is_term_resized(nlines: int, ncols: int, /) -> bool: ...
|
||||
def isendwin() -> bool: ...
|
||||
@@ -320,8 +322,8 @@ def meta(yes: bool, /) -> None: ...
|
||||
def mouseinterval(interval: int, /) -> None: ...
|
||||
def mousemask(newmask: int, /) -> tuple[int, int]: ...
|
||||
def napms(ms: int, /) -> int: ...
|
||||
def newpad(nlines: int, ncols: int, /) -> window: ...
|
||||
def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ..., /) -> window: ...
|
||||
def newpad(nlines: int, ncols: int, /) -> _CursesWindow: ...
|
||||
def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ..., /) -> _CursesWindow: ...
|
||||
def nl(flag: bool = True, /) -> None: ...
|
||||
def nocbreak() -> None: ...
|
||||
def noecho() -> None: ...
|
||||
@@ -339,8 +341,11 @@ def resetty() -> None: ...
|
||||
def resize_term(nlines: int, ncols: int, /) -> None: ...
|
||||
def resizeterm(nlines: int, ncols: int, /) -> None: ...
|
||||
def savetty() -> None: ...
|
||||
def set_escdelay(ms: int, /) -> None: ...
|
||||
def set_tabsize(size: int, /) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
def set_escdelay(ms: int, /) -> None: ...
|
||||
def set_tabsize(size: int, /) -> None: ...
|
||||
|
||||
def setsyx(y: int, x: int, /) -> None: ...
|
||||
def setupterm(term: str | None = None, fd: int = -1) -> None: ...
|
||||
def start_color() -> None: ...
|
||||
@@ -364,7 +369,11 @@ def tparm(
|
||||
) -> bytes: ...
|
||||
def typeahead(fd: int, /) -> None: ...
|
||||
def unctrl(ch: _ChType, /) -> bytes: ...
|
||||
def unget_wch(ch: int | str, /) -> None: ...
|
||||
|
||||
if sys.version_info < (3, 12) or sys.platform != "darwin":
|
||||
# The support for macos was dropped in 3.12
|
||||
def unget_wch(ch: int | str, /) -> None: ...
|
||||
|
||||
def ungetch(ch: _ChType, /) -> None: ...
|
||||
def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: ...
|
||||
def update_lines_cols() -> None: ...
|
||||
@@ -374,7 +383,7 @@ def use_env(flag: bool, /) -> None: ...
|
||||
class error(Exception): ...
|
||||
|
||||
@final
|
||||
class window: # undocumented
|
||||
class _CursesWindow:
|
||||
encoding: str
|
||||
@overload
|
||||
def addch(self, ch: _ChType, attr: int = ...) -> None: ...
|
||||
@@ -427,9 +436,9 @@ class window: # undocumented
|
||||
def delch(self, y: int, x: int) -> None: ...
|
||||
def deleteln(self) -> None: ...
|
||||
@overload
|
||||
def derwin(self, begin_y: int, begin_x: int) -> window: ...
|
||||
def derwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
|
||||
@overload
|
||||
def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ...
|
||||
def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
|
||||
def echochar(self, ch: _ChType, attr: int = ..., /) -> None: ...
|
||||
def enclose(self, y: int, x: int, /) -> bool: ...
|
||||
def erase(self) -> None: ...
|
||||
@@ -439,10 +448,13 @@ class window: # undocumented
|
||||
def getch(self) -> int: ...
|
||||
@overload
|
||||
def getch(self, y: int, x: int) -> int: ...
|
||||
@overload
|
||||
def get_wch(self) -> int | str: ...
|
||||
@overload
|
||||
def get_wch(self, y: int, x: int) -> int | str: ...
|
||||
if sys.version_info < (3, 12) or sys.platform != "darwin":
|
||||
# The support for macos was dropped in 3.12
|
||||
@overload
|
||||
def get_wch(self) -> int | str: ...
|
||||
@overload
|
||||
def get_wch(self, y: int, x: int) -> int | str: ...
|
||||
|
||||
@overload
|
||||
def getkey(self) -> str: ...
|
||||
@overload
|
||||
@@ -489,7 +501,7 @@ class window: # undocumented
|
||||
def instr(self, y: int, x: int, n: int = ...) -> bytes: ...
|
||||
def is_linetouched(self, line: int, /) -> bool: ...
|
||||
def is_wintouched(self) -> bool: ...
|
||||
def keypad(self, yes: bool, /) -> None: ...
|
||||
def keypad(self, yes: bool) -> None: ...
|
||||
def leaveok(self, yes: bool) -> None: ...
|
||||
def move(self, new_y: int, new_x: int) -> None: ...
|
||||
def mvderwin(self, y: int, x: int) -> None: ...
|
||||
@@ -501,18 +513,18 @@ class window: # undocumented
|
||||
@overload
|
||||
def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ...
|
||||
@overload
|
||||
def overlay(self, destwin: window) -> None: ...
|
||||
def overlay(self, destwin: _CursesWindow) -> None: ...
|
||||
@overload
|
||||
def overlay(
|
||||
self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
|
||||
self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
|
||||
) -> None: ...
|
||||
@overload
|
||||
def overwrite(self, destwin: window) -> None: ...
|
||||
def overwrite(self, destwin: _CursesWindow) -> None: ...
|
||||
@overload
|
||||
def overwrite(
|
||||
self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
|
||||
self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
|
||||
) -> None: ...
|
||||
def putwin(self, file: SupportsWrite[bytes], /) -> None: ...
|
||||
def putwin(self, file: IO[Any], /) -> None: ...
|
||||
def redrawln(self, beg: int, num: int, /) -> None: ...
|
||||
def redrawwin(self) -> None: ...
|
||||
@overload
|
||||
@@ -526,13 +538,13 @@ class window: # undocumented
|
||||
def standend(self) -> None: ...
|
||||
def standout(self) -> None: ...
|
||||
@overload
|
||||
def subpad(self, begin_y: int, begin_x: int) -> window: ...
|
||||
def subpad(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
|
||||
@overload
|
||||
def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ...
|
||||
def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
|
||||
@overload
|
||||
def subwin(self, begin_y: int, begin_x: int) -> window: ...
|
||||
def subwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
|
||||
@overload
|
||||
def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ...
|
||||
def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
|
||||
def syncdown(self) -> None: ...
|
||||
def syncok(self, flag: bool) -> None: ...
|
||||
def syncup(self) -> None: ...
|
||||
@@ -545,4 +557,10 @@ class window: # undocumented
|
||||
@overload
|
||||
def vline(self, y: int, x: int, ch: _ChType, n: int) -> None: ...
|
||||
|
||||
class _ncurses_version(NamedTuple):
|
||||
major: int
|
||||
minor: int
|
||||
patch: int
|
||||
|
||||
ncurses_version: _ncurses_version
|
||||
window = _CursesWindow # undocumented
|
||||
281
crates/red_knot/vendor/typeshed/stdlib/_decimal.pyi
vendored
Normal file
@@ -0,0 +1,281 @@
|
||||
import numbers
|
||||
import sys
|
||||
from collections.abc import Container, Sequence
|
||||
from types import TracebackType
|
||||
from typing import Any, ClassVar, Final, Literal, NamedTuple, overload
|
||||
from typing_extensions import Self, TypeAlias
|
||||
|
||||
_Decimal: TypeAlias = Decimal | int
|
||||
_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int]
|
||||
_ComparableNum: TypeAlias = Decimal | float | numbers.Rational
|
||||
|
||||
__version__: Final[str]
|
||||
__libmpdec_version__: Final[str]
|
||||
|
||||
class DecimalTuple(NamedTuple):
|
||||
sign: int
|
||||
digits: tuple[int, ...]
|
||||
exponent: int | Literal["n", "N", "F"]
|
||||
|
||||
ROUND_DOWN: str
|
||||
ROUND_HALF_UP: str
|
||||
ROUND_HALF_EVEN: str
|
||||
ROUND_CEILING: str
|
||||
ROUND_FLOOR: str
|
||||
ROUND_UP: str
|
||||
ROUND_HALF_DOWN: str
|
||||
ROUND_05UP: str
|
||||
HAVE_CONTEXTVAR: bool
|
||||
HAVE_THREADS: bool
|
||||
MAX_EMAX: int
|
||||
MAX_PREC: int
|
||||
MIN_EMIN: int
|
||||
MIN_ETINY: int
|
||||
|
||||
class DecimalException(ArithmeticError): ...
|
||||
class Clamped(DecimalException): ...
|
||||
class InvalidOperation(DecimalException): ...
|
||||
class ConversionSyntax(InvalidOperation): ...
|
||||
class DivisionByZero(DecimalException, ZeroDivisionError): ...
|
||||
class DivisionImpossible(InvalidOperation): ...
|
||||
class DivisionUndefined(InvalidOperation, ZeroDivisionError): ...
|
||||
class Inexact(DecimalException): ...
|
||||
class InvalidContext(InvalidOperation): ...
|
||||
class Rounded(DecimalException): ...
|
||||
class Subnormal(DecimalException): ...
|
||||
class Overflow(Inexact, Rounded): ...
|
||||
class Underflow(Inexact, Rounded, Subnormal): ...
|
||||
class FloatOperation(DecimalException, TypeError): ...
|
||||
|
||||
def setcontext(context: Context, /) -> None: ...
|
||||
def getcontext() -> Context: ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
def localcontext(
|
||||
ctx: Context | None = None,
|
||||
*,
|
||||
prec: int | None = ...,
|
||||
rounding: str | None = ...,
|
||||
Emin: int | None = ...,
|
||||
Emax: int | None = ...,
|
||||
capitals: int | None = ...,
|
||||
clamp: int | None = ...,
|
||||
traps: dict[_TrapType, bool] | None = ...,
|
||||
flags: dict[_TrapType, bool] | None = ...,
|
||||
) -> _ContextManager: ...
|
||||
|
||||
else:
|
||||
def localcontext(ctx: Context | None = None) -> _ContextManager: ...
|
||||
|
||||
class Decimal:
|
||||
def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ...
|
||||
@classmethod
|
||||
def from_float(cls, f: float, /) -> Self: ...
|
||||
def __bool__(self) -> bool: ...
|
||||
def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def __hash__(self) -> int: ...
|
||||
def as_tuple(self) -> DecimalTuple: ...
|
||||
def as_integer_ratio(self) -> tuple[int, int]: ...
|
||||
def to_eng_string(self, context: Context | None = None) -> str: ...
|
||||
def __abs__(self) -> Decimal: ...
|
||||
def __add__(self, value: _Decimal, /) -> Decimal: ...
|
||||
def __divmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: ...
|
||||
def __eq__(self, value: object, /) -> bool: ...
|
||||
def __floordiv__(self, value: _Decimal, /) -> Decimal: ...
|
||||
def __ge__(self, value: _ComparableNum, /) -> bool: ...
|
||||
def __gt__(self, value: _ComparableNum, /) -> bool: ...
|
||||
def __le__(self, value: _ComparableNum, /) -> bool: ...
|
||||
def __lt__(self, value: _ComparableNum, /) -> bool: ...
|
||||
def __mod__(self, value: _Decimal, /) -> Decimal: ...
|
||||
def __mul__(self, value: _Decimal, /) -> Decimal: ...
|
||||
def __neg__(self) -> Decimal: ...
|
||||
def __pos__(self) -> Decimal: ...
|
||||
def __pow__(self, value: _Decimal, mod: _Decimal | None = None, /) -> Decimal: ...
|
||||
def __radd__(self, value: _Decimal, /) -> Decimal: ...
|
||||
def __rdivmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: ...
|
||||
def __rfloordiv__(self, value: _Decimal, /) -> Decimal: ...
|
||||
def __rmod__(self, value: _Decimal, /) -> Decimal: ...
|
||||
def __rmul__(self, value: _Decimal, /) -> Decimal: ...
|
||||
def __rsub__(self, value: _Decimal, /) -> Decimal: ...
|
||||
def __rtruediv__(self, value: _Decimal, /) -> Decimal: ...
|
||||
def __sub__(self, value: _Decimal, /) -> Decimal: ...
|
||||
def __truediv__(self, value: _Decimal, /) -> Decimal: ...
|
||||
def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def __float__(self) -> float: ...
|
||||
def __int__(self) -> int: ...
|
||||
def __trunc__(self) -> int: ...
|
||||
@property
|
||||
def real(self) -> Decimal: ...
|
||||
@property
|
||||
def imag(self) -> Decimal: ...
|
||||
def conjugate(self) -> Decimal: ...
|
||||
def __complex__(self) -> complex: ...
|
||||
@overload
|
||||
def __round__(self) -> int: ...
|
||||
@overload
|
||||
def __round__(self, ndigits: int, /) -> Decimal: ...
|
||||
def __floor__(self) -> int: ...
|
||||
def __ceil__(self) -> int: ...
|
||||
def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def __rpow__(self, value: _Decimal, mod: Context | None = None, /) -> Decimal: ...
|
||||
def normalize(self, context: Context | None = None) -> Decimal: ...
|
||||
def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
|
||||
def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: ...
|
||||
def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
|
||||
def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
|
||||
def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
|
||||
def sqrt(self, context: Context | None = None) -> Decimal: ...
|
||||
def max(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def min(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def adjusted(self) -> int: ...
|
||||
def canonical(self) -> Decimal: ...
|
||||
def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def copy_abs(self) -> Decimal: ...
|
||||
def copy_negate(self) -> Decimal: ...
|
||||
def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def exp(self, context: Context | None = None) -> Decimal: ...
|
||||
def is_canonical(self) -> bool: ...
|
||||
def is_finite(self) -> bool: ...
|
||||
def is_infinite(self) -> bool: ...
|
||||
def is_nan(self) -> bool: ...
|
||||
def is_normal(self, context: Context | None = None) -> bool: ...
|
||||
def is_qnan(self) -> bool: ...
|
||||
def is_signed(self) -> bool: ...
|
||||
def is_snan(self) -> bool: ...
|
||||
def is_subnormal(self, context: Context | None = None) -> bool: ...
|
||||
def is_zero(self) -> bool: ...
|
||||
def ln(self, context: Context | None = None) -> Decimal: ...
|
||||
def log10(self, context: Context | None = None) -> Decimal: ...
|
||||
def logb(self, context: Context | None = None) -> Decimal: ...
|
||||
def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def logical_invert(self, context: Context | None = None) -> Decimal: ...
|
||||
def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def next_minus(self, context: Context | None = None) -> Decimal: ...
|
||||
def next_plus(self, context: Context | None = None) -> Decimal: ...
|
||||
def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
||||
def number_class(self, context: Context | None = None) -> str: ...
|
||||
def radix(self) -> Decimal: ...
|
||||
def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
    def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def __reduce__(self) -> tuple[type[Self], tuple[str]]: ...
    def __copy__(self) -> Self: ...
    def __deepcopy__(self, memo: Any, /) -> Self: ...
    def __format__(self, specifier: str, context: Context | None = ..., /) -> str: ...

class _ContextManager:
    new_context: Context
    saved_context: Context
    def __init__(self, new_context: Context) -> None: ...
    def __enter__(self) -> Context: ...
    def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ...

_TrapType: TypeAlias = type[DecimalException]

class Context:
    # TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime,
    # even settable attributes like `prec` and `rounding`,
    # but that's inexpressable in the stub.
    # Type checkers either ignore it or misinterpret it
    # if you add a `def __delattr__(self, name: str, /) -> NoReturn` method to the stub
    prec: int
    rounding: str
    Emin: int
    Emax: int
    capitals: int
    clamp: int
    traps: dict[_TrapType, bool]
    flags: dict[_TrapType, bool]
    def __init__(
        self,
        prec: int | None = ...,
        rounding: str | None = ...,
        Emin: int | None = ...,
        Emax: int | None = ...,
        capitals: int | None = ...,
        clamp: int | None = ...,
        flags: None | dict[_TrapType, bool] | Container[_TrapType] = ...,
        traps: None | dict[_TrapType, bool] | Container[_TrapType] = ...,
        _ignored_flags: list[_TrapType] | None = ...,
    ) -> None: ...
    def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ...
    def clear_flags(self) -> None: ...
    def clear_traps(self) -> None: ...
    def copy(self) -> Context: ...
    def __copy__(self) -> Context: ...
    # see https://github.com/python/cpython/issues/94107
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def Etiny(self) -> int: ...
    def Etop(self) -> int: ...
    def create_decimal(self, num: _DecimalNew = "0", /) -> Decimal: ...
    def create_decimal_from_float(self, f: float, /) -> Decimal: ...
    def abs(self, x: _Decimal, /) -> Decimal: ...
    def add(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def canonical(self, x: Decimal, /) -> Decimal: ...
    def compare(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def compare_signal(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def compare_total(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def compare_total_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def copy_abs(self, x: _Decimal, /) -> Decimal: ...
    def copy_decimal(self, x: _Decimal, /) -> Decimal: ...
    def copy_negate(self, x: _Decimal, /) -> Decimal: ...
    def copy_sign(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def divide(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def divide_int(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def divmod(self, x: _Decimal, y: _Decimal, /) -> tuple[Decimal, Decimal]: ...
    def exp(self, x: _Decimal, /) -> Decimal: ...
    def fma(self, x: _Decimal, y: _Decimal, z: _Decimal, /) -> Decimal: ...
    def is_canonical(self, x: _Decimal, /) -> bool: ...
    def is_finite(self, x: _Decimal, /) -> bool: ...
    def is_infinite(self, x: _Decimal, /) -> bool: ...
    def is_nan(self, x: _Decimal, /) -> bool: ...
    def is_normal(self, x: _Decimal, /) -> bool: ...
    def is_qnan(self, x: _Decimal, /) -> bool: ...
    def is_signed(self, x: _Decimal, /) -> bool: ...
    def is_snan(self, x: _Decimal, /) -> bool: ...
    def is_subnormal(self, x: _Decimal, /) -> bool: ...
    def is_zero(self, x: _Decimal, /) -> bool: ...
    def ln(self, x: _Decimal, /) -> Decimal: ...
    def log10(self, x: _Decimal, /) -> Decimal: ...
    def logb(self, x: _Decimal, /) -> Decimal: ...
    def logical_and(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def logical_invert(self, x: _Decimal, /) -> Decimal: ...
    def logical_or(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def logical_xor(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def max(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def max_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def min(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def min_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def minus(self, x: _Decimal, /) -> Decimal: ...
    def multiply(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def next_minus(self, x: _Decimal, /) -> Decimal: ...
    def next_plus(self, x: _Decimal, /) -> Decimal: ...
    def next_toward(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def normalize(self, x: _Decimal, /) -> Decimal: ...
    def number_class(self, x: _Decimal, /) -> str: ...
    def plus(self, x: _Decimal, /) -> Decimal: ...
    def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: ...
    def quantize(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def radix(self) -> Decimal: ...
    def remainder(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def remainder_near(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def rotate(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def same_quantum(self, x: _Decimal, y: _Decimal, /) -> bool: ...
    def scaleb(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def shift(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def sqrt(self, x: _Decimal, /) -> Decimal: ...
    def subtract(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def to_eng_string(self, x: _Decimal, /) -> str: ...
    def to_sci_string(self, x: _Decimal, /) -> str: ...
    def to_integral_exact(self, x: _Decimal, /) -> Decimal: ...
    def to_integral_value(self, x: _Decimal, /) -> Decimal: ...
    def to_integral(self, x: _Decimal, /) -> Decimal: ...

DefaultContext: Context
BasicContext: Context
ExtendedContext: Context
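For orientation while reviewing this hunk: the `Context` stub above mirrors the public surface of `decimal.Context`. A minimal runtime sketch of that API follows (illustrative only, not part of the diff; the printed values assume CPython's default decimal behavior):

```python
# Illustrative use of the decimal.Context API covered by the stub above.
from decimal import Context, Decimal, Inexact, localcontext

ctx = Context(prec=4, rounding="ROUND_HALF_UP")  # prec/rounding match the stubbed attributes
print(ctx.divide(Decimal("1"), Decimal("3")))    # Decimal('0.3333') -- limited to 4 significant digits
print(ctx.flags[Inexact])                        # truthy: the division above was inexact

with localcontext(ctx) as local:                 # localcontext() yields a Context (see _ContextManager)
    local.prec = 10
    print(Decimal("1") / Decimal("7"))           # computed under the temporarily active context
```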
33
crates/red_knot/vendor/typeshed/stdlib/_dummy_thread.pyi
vendored
Normal file
@@ -0,0 +1,33 @@
from collections.abc import Callable
from types import TracebackType
from typing import Any, NoReturn, overload
from typing_extensions import TypeVarTuple, Unpack

__all__ = ["error", "start_new_thread", "exit", "get_ident", "allocate_lock", "interrupt_main", "LockType", "RLock"]

_Ts = TypeVarTuple("_Ts")

TIMEOUT_MAX: int
error = RuntimeError

@overload
def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]]) -> None: ...
@overload
def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any]) -> None: ...
def exit() -> NoReturn: ...
def get_ident() -> int: ...
def allocate_lock() -> LockType: ...
def stack_size(size: int | None = None) -> int: ...

class LockType:
    locked_status: bool
    def acquire(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ...
    def __enter__(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ...
    def __exit__(self, typ: type[BaseException] | None, val: BaseException | None, tb: TracebackType | None) -> None: ...
    def release(self) -> bool: ...
    def locked(self) -> bool: ...

class RLock(LockType):
    def release(self) -> None: ...  # type: ignore[override]

def interrupt_main() -> None: ...
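The stub above describes `_dummy_thread`, the drop-in stand-in for `_thread` on builds without thread support (only present on Python < 3.9). A hedged usage sketch of that API shape follows; it imports the real `_thread`, which exposes the same names on modern interpreters:

```python
# Illustrative use of the _thread-style API described by the stub above.
# On Python < 3.9, `_dummy_thread` provided these same names for thread-less builds.
import time
import _thread as thread_api  # swap in `_dummy_thread` only on old, thread-less builds

lock = thread_api.allocate_lock()   # returns a lock object (LockType in the stub)

def worker(n: int) -> None:
    with lock:                      # the lock supports the context-manager protocol
        print("worker ran with", n)

thread_api.start_new_thread(worker, (42,))  # matches the first start_new_thread overload
time.sleep(0.1)                              # give the worker a moment before the sketch exits
```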
164
crates/red_knot/vendor/typeshed/stdlib/_dummy_threading.pyi
vendored
Normal file
@@ -0,0 +1,164 @@
import sys
from _thread import _excepthook, _ExceptHookArgs
from _typeshed import ProfileFunction, TraceFunction
from collections.abc import Callable, Iterable, Mapping
from types import TracebackType
from typing import Any, TypeVar

_T = TypeVar("_T")

__all__ = [
    "get_ident",
    "active_count",
    "Condition",
    "current_thread",
    "enumerate",
    "main_thread",
    "TIMEOUT_MAX",
    "Event",
    "Lock",
    "RLock",
    "Semaphore",
    "BoundedSemaphore",
    "Thread",
    "Barrier",
    "BrokenBarrierError",
    "Timer",
    "ThreadError",
    "setprofile",
    "settrace",
    "local",
    "stack_size",
    "ExceptHookArgs",
    "excepthook",
]

def active_count() -> int: ...
def current_thread() -> Thread: ...
def currentThread() -> Thread: ...
def get_ident() -> int: ...
def enumerate() -> list[Thread]: ...
def main_thread() -> Thread: ...
def settrace(func: TraceFunction) -> None: ...
def setprofile(func: ProfileFunction | None) -> None: ...
def stack_size(size: int | None = None) -> int: ...

TIMEOUT_MAX: float

class ThreadError(Exception): ...

class local:
    def __getattribute__(self, name: str) -> Any: ...
    def __setattr__(self, name: str, value: Any) -> None: ...
    def __delattr__(self, name: str) -> None: ...

class Thread:
    name: str
    daemon: bool
    @property
    def ident(self) -> int | None: ...
    def __init__(
        self,
        group: None = None,
        target: Callable[..., object] | None = None,
        name: str | None = None,
        args: Iterable[Any] = (),
        kwargs: Mapping[str, Any] | None = None,
        *,
        daemon: bool | None = None,
    ) -> None: ...
    def start(self) -> None: ...
    def run(self) -> None: ...
    def join(self, timeout: float | None = None) -> None: ...
    def getName(self) -> str: ...
    def setName(self, name: str) -> None: ...
    @property
    def native_id(self) -> int | None: ...  # only available on some platforms
    def is_alive(self) -> bool: ...
    if sys.version_info < (3, 9):
        def isAlive(self) -> bool: ...

    def isDaemon(self) -> bool: ...
    def setDaemon(self, daemonic: bool) -> None: ...

class _DummyThread(Thread): ...

class Lock:
    def __enter__(self) -> bool: ...
    def __exit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
    ) -> bool | None: ...
    def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
    def release(self) -> None: ...
    def locked(self) -> bool: ...

class _RLock:
    def __enter__(self) -> bool: ...
    def __exit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
    ) -> bool | None: ...
    def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ...
    def release(self) -> None: ...

RLock = _RLock

class Condition:
    def __init__(self, lock: Lock | _RLock | None = None) -> None: ...
    def __enter__(self) -> bool: ...
    def __exit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
    ) -> bool | None: ...
    def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
    def release(self) -> None: ...
    def wait(self, timeout: float | None = None) -> bool: ...
    def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ...
    def notify(self, n: int = 1) -> None: ...
    def notify_all(self) -> None: ...
    def notifyAll(self) -> None: ...

class Semaphore:
    def __init__(self, value: int = 1) -> None: ...
    def __exit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
    ) -> bool | None: ...
    def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ...
    def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ...
    if sys.version_info >= (3, 9):
        def release(self, n: int = ...) -> None: ...
    else:
        def release(self) -> None: ...

class BoundedSemaphore(Semaphore): ...

class Event:
    def is_set(self) -> bool: ...
    def set(self) -> None: ...
    def clear(self) -> None: ...
    def wait(self, timeout: float | None = None) -> bool: ...

excepthook = _excepthook
ExceptHookArgs = _ExceptHookArgs

class Timer(Thread):
    def __init__(
        self,
        interval: float,
        function: Callable[..., object],
        args: Iterable[Any] | None = None,
        kwargs: Mapping[str, Any] | None = None,
    ) -> None: ...
    def cancel(self) -> None: ...

class Barrier:
    @property
    def parties(self) -> int: ...
    @property
    def n_waiting(self) -> int: ...
    @property
    def broken(self) -> bool: ...
    def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ...
    def wait(self, timeout: float | None = None) -> int: ...
    def reset(self) -> None: ...
    def abort(self) -> None: ...

class BrokenBarrierError(RuntimeError): ...
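The `_dummy_threading` stub above mirrors the public surface of the `threading` module (Thread, Lock, Condition, Event, Barrier, Timer, and so on). A short sketch of that surface follows, written against the real `threading` module since `_dummy_threading` only exists on Python < 3.9 (illustrative, not part of the diff):

```python
# Sketch of the threading-style API the stub above mirrors.
import threading

done = threading.Event()

def worker() -> None:
    print("running in", threading.current_thread().name)
    done.set()

t = threading.Thread(target=worker, name="demo", daemon=True)  # matches the stubbed __init__
t.start()
done.wait(timeout=1.0)   # Event.wait returns True once set() has been called
t.join(timeout=1.0)
```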
@@ -1,6 +1,5 @@
from collections.abc import Callable
from typing import Any, final
from typing_extensions import Self

@final
class make_encoder:
@@ -20,8 +19,8 @@ class make_encoder:
    def encoder(self) -> Callable[[str], str]: ...
    @property
    def item_separator(self) -> str: ...
    def __new__(
        cls,
    def __init__(
        self,
        markers: dict[int, Any] | None,
        default: Callable[[Any], Any],
        encoder: Callable[[str], str],
@@ -31,7 +30,7 @@ class make_encoder:
        sort_keys: bool,
        skipkeys: bool,
        allow_nan: bool,
    ) -> Self: ...
    ) -> None: ...
    def __call__(self, obj: object, _current_indent_level: int) -> Any: ...

@final
@@ -43,9 +42,8 @@ class make_scanner:
    parse_float: Any
    strict: bool
    # TODO: 'context' needs the attrs above (ducktype), but not __call__.
    def __new__(cls, context: make_scanner) -> Self: ...
    def __init__(self, context: make_scanner) -> None: ...
    def __call__(self, string: str, index: int) -> tuple[Any, int]: ...

def encode_basestring(s: str, /) -> str: ...
def encode_basestring_ascii(s: str, /) -> str: ...
def encode_basestring_ascii(s: str) -> str: ...
def scanstring(string: str, end: int, strict: bool = ...) -> tuple[str, int]: ...
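The hunk above adjusts the stubs for CPython's `_json` accelerator module. For context, a hedged runtime sketch of the module-level helpers it types follows; `make_scanner` is normally reached indirectly through `json.decoder`, but a `JSONDecoder` instance satisfies the "context" duck type it expects (illustrative only, not part of the diff):

```python
# Illustrative use of the _json helpers whose stubs are adjusted above.
import _json
import json

print(_json.encode_basestring_ascii("héllo"))  # "h\u00e9llo" -- ASCII-escaped, JSON quotes included
print(_json.scanstring('"abc" tail', 1))       # ('abc', 5): decoded string plus index past the closing quote

# make_scanner expects a context object carrying decoder attributes (strict,
# parse_float, object_hook, ...); a JSONDecoder instance provides them.
scan = _json.make_scanner(json.JSONDecoder())
print(scan('{"a": 1}', 0))                     # ({'a': 1}, 8)
```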
Some files were not shown because too many files have changed in this diff