Compare commits
1 Commits
dhruv/form
...
zb/debug-c
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
17e77fe515 |
@@ -1,8 +0,0 @@
|
|||||||
[profile.ci]
|
|
||||||
# Print out output for failing tests as soon as they fail, and also at the end
|
|
||||||
# of the run (for easy scrollability).
|
|
||||||
failure-output = "immediate-final"
|
|
||||||
# Do not cancel the test run on the first failure.
|
|
||||||
fail-fast = false
|
|
||||||
|
|
||||||
status-level = "skip"
|
|
||||||
4
.gitattributes
vendored
4
.gitattributes
vendored
@@ -2,13 +2,9 @@
|
|||||||
|
|
||||||
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
|
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
|
||||||
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
|
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
|
||||||
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py text eol=crlf
|
|
||||||
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf
|
|
||||||
|
|
||||||
crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
|
crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
|
||||||
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
|
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
|
||||||
|
|
||||||
crates/ruff_python_parser/resources/inline linguist-generated=true
|
|
||||||
|
|
||||||
ruff.schema.json linguist-generated=true text=auto eol=lf
|
ruff.schema.json linguist-generated=true text=auto eol=lf
|
||||||
*.md.snap linguist-language=Markdown
|
*.md.snap linguist-language=Markdown
|
||||||
|
|||||||
12
.github/CODEOWNERS
vendored
12
.github/CODEOWNERS
vendored
@@ -5,13 +5,5 @@
|
|||||||
# - The '*' pattern is global owners.
|
# - The '*' pattern is global owners.
|
||||||
# - Order is important. The last matching pattern has the most precedence.
|
# - Order is important. The last matching pattern has the most precedence.
|
||||||
|
|
||||||
/crates/ruff_notebook/ @dhruvmanila
|
# Jupyter
|
||||||
/crates/ruff_formatter/ @MichaReiser
|
/crates/ruff_linter/src/jupyter/ @dhruvmanila
|
||||||
/crates/ruff_python_formatter/ @MichaReiser
|
|
||||||
/crates/ruff_python_parser/ @MichaReiser @dhruvmanila
|
|
||||||
|
|
||||||
# flake8-pyi
|
|
||||||
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood
|
|
||||||
|
|
||||||
# Script for fuzzing the parser
|
|
||||||
/scripts/fuzz-parser/ @AlexWaygood
|
|
||||||
|
|||||||
2
.github/ISSUE_TEMPLATE.md
vendored
2
.github/ISSUE_TEMPLATE.md
vendored
@@ -3,8 +3,6 @@ Thank you for taking the time to report an issue! We're glad to have you involve
|
|||||||
|
|
||||||
If you're filing a bug report, please consider including the following information:
|
If you're filing a bug report, please consider including the following information:
|
||||||
|
|
||||||
* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
|
|
||||||
e.g. "RUF001", "unused variable", "Jupyter notebook"
|
|
||||||
* A minimal code snippet that reproduces the bug.
|
* A minimal code snippet that reproduces the bug.
|
||||||
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
|
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
|
||||||
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
|
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
|
||||||
|
|||||||
13
.github/dependabot.yml
vendored
Normal file
13
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: "github-actions"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
labels: ["internal"]
|
||||||
|
|
||||||
|
- package-ecosystem: "cargo"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
labels: ["internal"]
|
||||||
79
.github/renovate.json5
vendored
79
.github/renovate.json5
vendored
@@ -1,79 +0,0 @@
|
|||||||
{
|
|
||||||
$schema: "https://docs.renovatebot.com/renovate-schema.json",
|
|
||||||
dependencyDashboard: true,
|
|
||||||
suppressNotifications: ["prEditedNotification"],
|
|
||||||
extends: ["config:recommended"],
|
|
||||||
labels: ["internal"],
|
|
||||||
schedule: ["before 4am on Monday"],
|
|
||||||
semanticCommits: "disabled",
|
|
||||||
separateMajorMinor: false,
|
|
||||||
prHourlyLimit: 10,
|
|
||||||
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
|
|
||||||
cargo: {
|
|
||||||
// See https://docs.renovatebot.com/configuration-options/#rangestrategy
|
|
||||||
rangeStrategy: "update-lockfile",
|
|
||||||
},
|
|
||||||
pep621: {
|
|
||||||
fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
|
|
||||||
},
|
|
||||||
npm: {
|
|
||||||
fileMatch: ["^playground/.*package\\.json$"],
|
|
||||||
},
|
|
||||||
"pre-commit": {
|
|
||||||
enabled: true,
|
|
||||||
},
|
|
||||||
packageRules: [
|
|
||||||
{
|
|
||||||
// Group upload/download artifact updates, the versions are dependent
|
|
||||||
groupName: "Artifact GitHub Actions dependencies",
|
|
||||||
matchManagers: ["github-actions"],
|
|
||||||
matchDatasources: ["gitea-tags", "github-tags"],
|
|
||||||
matchPackagePatterns: ["actions/.*-artifact"],
|
|
||||||
description: "Weekly update of artifact-related GitHub Actions dependencies",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// This package rule disables updates for GitHub runners:
|
|
||||||
// we'd only pin them to a specific version
|
|
||||||
// if there was a deliberate reason to do so
|
|
||||||
groupName: "GitHub runners",
|
|
||||||
matchManagers: ["github-actions"],
|
|
||||||
matchDatasources: ["github-runners"],
|
|
||||||
description: "Disable PRs updating GitHub runners (e.g. 'runs-on: macos-14')",
|
|
||||||
enabled: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
groupName: "pre-commit dependencies",
|
|
||||||
matchManagers: ["pre-commit"],
|
|
||||||
description: "Weekly update of pre-commit dependencies",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
groupName: "NPM Development dependencies",
|
|
||||||
matchManagers: ["npm"],
|
|
||||||
matchDepTypes: ["devDependencies"],
|
|
||||||
description: "Weekly update of NPM development dependencies",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
groupName: "Monaco",
|
|
||||||
matchManagers: ["npm"],
|
|
||||||
matchPackagePatterns: ["monaco"],
|
|
||||||
description: "Weekly update of the Monaco editor",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
groupName: "strum",
|
|
||||||
matchManagers: ["cargo"],
|
|
||||||
matchPackagePatterns: ["strum"],
|
|
||||||
description: "Weekly update of strum dependencies",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
groupName: "ESLint",
|
|
||||||
matchManagers: ["npm"],
|
|
||||||
matchPackageNames: ["eslint"],
|
|
||||||
allowedVersions: "<9",
|
|
||||||
description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
|
|
||||||
},
|
|
||||||
],
|
|
||||||
vulnerabilityAlerts: {
|
|
||||||
commitMessageSuffix: "",
|
|
||||||
labels: ["internal", "security"],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
201
.github/workflows/ci.yaml
vendored
201
.github/workflows/ci.yaml
vendored
@@ -23,8 +23,6 @@ jobs:
|
|||||||
name: "Determine changes"
|
name: "Determine changes"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
outputs:
|
outputs:
|
||||||
# Flag that is raised when any code that affects parser is changed
|
|
||||||
parser: ${{ steps.changed.outputs.parser_any_changed }}
|
|
||||||
# Flag that is raised when any code that affects linter is changed
|
# Flag that is raised when any code that affects linter is changed
|
||||||
linter: ${{ steps.changed.outputs.linter_any_changed }}
|
linter: ${{ steps.changed.outputs.linter_any_changed }}
|
||||||
# Flag that is raised when any code that affects formatter is changed
|
# Flag that is raised when any code that affects formatter is changed
|
||||||
@@ -37,21 +35,10 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- uses: tj-actions/changed-files@v44
|
- uses: tj-actions/changed-files@v40
|
||||||
id: changed
|
id: changed
|
||||||
with:
|
with:
|
||||||
files_yaml: |
|
files_yaml: |
|
||||||
parser:
|
|
||||||
- Cargo.toml
|
|
||||||
- Cargo.lock
|
|
||||||
- crates/ruff_python_trivia/**
|
|
||||||
- crates/ruff_source_file/**
|
|
||||||
- crates/ruff_text_size/**
|
|
||||||
- crates/ruff_python_ast/**
|
|
||||||
- crates/ruff_python_parser/**
|
|
||||||
- scripts/fuzz-parser/**
|
|
||||||
- .github/workflows/ci.yaml
|
|
||||||
|
|
||||||
linter:
|
linter:
|
||||||
- Cargo.toml
|
- Cargo.toml
|
||||||
- Cargo.lock
|
- Cargo.lock
|
||||||
@@ -59,6 +46,7 @@ jobs:
|
|||||||
- "!crates/ruff_python_formatter/**"
|
- "!crates/ruff_python_formatter/**"
|
||||||
- "!crates/ruff_formatter/**"
|
- "!crates/ruff_formatter/**"
|
||||||
- "!crates/ruff_dev/**"
|
- "!crates/ruff_dev/**"
|
||||||
|
- "!crates/ruff_shrinking/**"
|
||||||
- scripts/*
|
- scripts/*
|
||||||
- python/**
|
- python/**
|
||||||
- .github/workflows/ci.yaml
|
- .github/workflows/ci.yaml
|
||||||
@@ -88,7 +76,6 @@ jobs:
|
|||||||
cargo-fmt:
|
cargo-fmt:
|
||||||
name: "cargo fmt"
|
name: "cargo fmt"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 10
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -100,7 +87,6 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -109,26 +95,45 @@ jobs:
|
|||||||
rustup target add wasm32-unknown-unknown
|
rustup target add wasm32-unknown-unknown
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Clippy"
|
- name: "Clippy"
|
||||||
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
|
run: cargo clippy --workspace --all-targets --all-features -- -D warnings
|
||||||
- name: "Clippy (wasm)"
|
- name: "Clippy (wasm)"
|
||||||
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
|
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings
|
||||||
|
|
||||||
cargo-test-linux:
|
cargo-test-linux:
|
||||||
name: "cargo test (linux)"
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 20
|
name: "cargo test (linux)"
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
- name: "Install mold"
|
- name: "Install cargo insta"
|
||||||
uses: rui314/setup-mold@v1
|
|
||||||
- name: "Install cargo nextest"
|
|
||||||
uses: taiki-e/install-action@v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
tool: cargo-nextest
|
tool: cargo-insta
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
- name: "Run tests"
|
||||||
|
run: cargo insta test --all --all-features --unreferenced reject
|
||||||
|
# Check for broken links in the documentation.
|
||||||
|
- run: cargo doc --all --no-deps
|
||||||
|
env:
|
||||||
|
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
|
||||||
|
RUSTDOCFLAGS: "-D warnings"
|
||||||
|
- uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: ruff
|
||||||
|
path: target/debug/ruff
|
||||||
|
|
||||||
|
cargo-test-windows:
|
||||||
|
runs-on: windows-latest
|
||||||
|
needs: determine_changes
|
||||||
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
|
name: "cargo test (windows)"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: "Install Rust toolchain"
|
||||||
|
run: rustup show
|
||||||
- name: "Install cargo insta"
|
- name: "Install cargo insta"
|
||||||
uses: taiki-e/install-action@v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
@@ -136,47 +141,14 @@ jobs:
|
|||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Run tests"
|
- name: "Run tests"
|
||||||
shell: bash
|
shell: bash
|
||||||
env:
|
# We can't reject unreferenced snapshots on windows because flake8_executable can't run on windows
|
||||||
NEXTEST_PROFILE: "ci"
|
run: cargo insta test --all --all-features
|
||||||
run: cargo insta test --all-features --unreferenced reject --test-runner nextest
|
|
||||||
|
|
||||||
# Check for broken links in the documentation.
|
|
||||||
- run: cargo doc --all --no-deps
|
|
||||||
env:
|
|
||||||
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
|
|
||||||
RUSTDOCFLAGS: "-D warnings"
|
|
||||||
- uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ruff
|
|
||||||
path: target/debug/ruff
|
|
||||||
|
|
||||||
cargo-test-windows:
|
|
||||||
name: "cargo test (windows)"
|
|
||||||
runs-on: windows-latest
|
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- name: "Install Rust toolchain"
|
|
||||||
run: rustup show
|
|
||||||
- name: "Install cargo nextest"
|
|
||||||
uses: taiki-e/install-action@v2
|
|
||||||
with:
|
|
||||||
tool: cargo-nextest
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
- name: "Run tests"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
cargo nextest run --all-features --profile ci
|
|
||||||
cargo test --all-features --doc
|
|
||||||
|
|
||||||
cargo-test-wasm:
|
cargo-test-wasm:
|
||||||
name: "cargo test (wasm)"
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 10
|
name: "cargo test (wasm)"
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -193,28 +165,11 @@ jobs:
|
|||||||
cd crates/ruff_wasm
|
cd crates/ruff_wasm
|
||||||
wasm-pack test --node
|
wasm-pack test --node
|
||||||
|
|
||||||
cargo-build-release:
|
|
||||||
name: "cargo build (release)"
|
|
||||||
runs-on: macos-latest
|
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- name: "Install Rust toolchain"
|
|
||||||
run: rustup show
|
|
||||||
- name: "Install mold"
|
|
||||||
uses: rui314/setup-mold@v1
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
- name: "Build"
|
|
||||||
run: cargo build --release --locked
|
|
||||||
|
|
||||||
cargo-fuzz:
|
cargo-fuzz:
|
||||||
name: "cargo fuzz"
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 10
|
name: "cargo fuzz"
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -225,47 +180,14 @@ jobs:
|
|||||||
- name: "Install cargo-fuzz"
|
- name: "Install cargo-fuzz"
|
||||||
uses: taiki-e/install-action@v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
tool: cargo-fuzz@0.11.2
|
tool: cargo-fuzz@0.11
|
||||||
- run: cargo fuzz build -s none
|
- run: cargo fuzz build -s none
|
||||||
|
|
||||||
fuzz-parser:
|
|
||||||
name: "Fuzz the parser"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs:
|
|
||||||
- cargo-test-linux
|
|
||||||
- determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.parser == 'true' }}
|
|
||||||
timeout-minutes: 20
|
|
||||||
env:
|
|
||||||
FORCE_COLOR: 1
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
|
||||||
- name: Install uv
|
|
||||||
run: curl -LsSf https://astral.sh/uv/install.sh | sh
|
|
||||||
- name: Install Python requirements
|
|
||||||
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
|
|
||||||
- uses: actions/download-artifact@v4
|
|
||||||
name: Download Ruff binary to test
|
|
||||||
id: download-cached-binary
|
|
||||||
with:
|
|
||||||
name: ruff
|
|
||||||
path: ruff-to-test
|
|
||||||
- name: Fuzz
|
|
||||||
run: |
|
|
||||||
# Make executable, since artifact download doesn't preserve this
|
|
||||||
chmod +x ${{ steps.download-cached-binary.outputs.download-path }}/ruff
|
|
||||||
|
|
||||||
python scripts/fuzz-parser/fuzz.py 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff
|
|
||||||
|
|
||||||
scripts:
|
scripts:
|
||||||
name: "test scripts"
|
name: "test scripts"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 5
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -288,27 +210,27 @@ jobs:
|
|||||||
- determine_changes
|
- determine_changes
|
||||||
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
|
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
|
||||||
# Ecosystem check needs linter and/or formatter changes.
|
# Ecosystem check needs linter and/or formatter changes.
|
||||||
if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
|
if: github.event_name == 'pull_request' && ${{
|
||||||
timeout-minutes: 20
|
needs.determine_changes.outputs.code == 'true'
|
||||||
|
}}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
- uses: actions/download-artifact@v4
|
- uses: actions/download-artifact@v3
|
||||||
name: Download comparison Ruff binary
|
name: Download comparison Ruff binary
|
||||||
id: ruff-target
|
id: ruff-target
|
||||||
with:
|
with:
|
||||||
name: ruff
|
name: ruff
|
||||||
path: target/debug
|
path: target/debug
|
||||||
|
|
||||||
- uses: dawidd6/action-download-artifact@v3
|
- uses: dawidd6/action-download-artifact@v2
|
||||||
name: Download baseline Ruff binary
|
name: Download baseline Ruff binary
|
||||||
with:
|
with:
|
||||||
name: ruff
|
name: ruff
|
||||||
branch: ${{ github.event.pull_request.base.ref }}
|
branch: ${{ github.event.pull_request.base.ref }}
|
||||||
workflow: "ci.yaml"
|
|
||||||
check_artifacts: true
|
check_artifacts: true
|
||||||
|
|
||||||
- name: Install ruff-ecosystem
|
- name: Install ruff-ecosystem
|
||||||
@@ -383,36 +305,40 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo ${{ github.event.number }} > pr-number
|
echo ${{ github.event.number }} > pr-number
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v4
|
- uses: actions/upload-artifact@v3
|
||||||
name: Upload PR Number
|
name: Upload PR Number
|
||||||
with:
|
with:
|
||||||
name: pr-number
|
name: pr-number
|
||||||
path: pr-number
|
path: pr-number
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v4
|
- uses: actions/upload-artifact@v3
|
||||||
name: Upload Results
|
name: Upload Results
|
||||||
with:
|
with:
|
||||||
name: ecosystem-result
|
name: ecosystem-result
|
||||||
path: ecosystem-result
|
path: ecosystem-result
|
||||||
|
|
||||||
cargo-shear:
|
cargo-udeps:
|
||||||
name: "cargo shear"
|
name: "cargo udeps"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: cargo-bins/cargo-binstall@main
|
- name: "Install nightly Rust toolchain"
|
||||||
- run: cargo binstall --no-confirm cargo-shear
|
# Only pinned to make caching work, update freely
|
||||||
- run: cargo shear
|
run: rustup toolchain install nightly-2023-10-15
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
- name: "Install cargo-udeps"
|
||||||
|
uses: taiki-e/install-action@cargo-udeps
|
||||||
|
- name: "Run cargo-udeps"
|
||||||
|
run: cargo +nightly-2023-10-15 udeps
|
||||||
|
|
||||||
python-package:
|
python-package:
|
||||||
name: "python package"
|
name: "python package"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -434,10 +360,9 @@ jobs:
|
|||||||
pre-commit:
|
pre-commit:
|
||||||
name: "pre-commit"
|
name: "pre-commit"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 10
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -446,7 +371,7 @@ jobs:
|
|||||||
- name: "Install pre-commit"
|
- name: "Install pre-commit"
|
||||||
run: pip install pre-commit
|
run: pip install pre-commit
|
||||||
- name: "Cache pre-commit"
|
- name: "Cache pre-commit"
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v3
|
||||||
with:
|
with:
|
||||||
path: ~/.cache/pre-commit
|
path: ~/.cache/pre-commit
|
||||||
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
@@ -463,15 +388,14 @@ jobs:
|
|||||||
docs:
|
docs:
|
||||||
name: "mkdocs"
|
name: "mkdocs"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 10
|
|
||||||
env:
|
env:
|
||||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
- name: "Add SSH key"
|
- name: "Add SSH key"
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||||
uses: webfactory/ssh-agent@v0.9.0
|
uses: webfactory/ssh-agent@v0.8.0
|
||||||
with:
|
with:
|
||||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -501,7 +425,6 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
|
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
|
||||||
timeout-minutes: 10
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -518,13 +441,12 @@ jobs:
|
|||||||
check-ruff-lsp:
|
check-ruff-lsp:
|
||||||
name: "test ruff-lsp"
|
name: "test ruff-lsp"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 5
|
|
||||||
needs:
|
needs:
|
||||||
- cargo-test-linux
|
- cargo-test-linux
|
||||||
- determine_changes
|
- determine_changes
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: extractions/setup-just@v2
|
- uses: extractions/setup-just@v1
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
@@ -533,11 +455,11 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
repository: "astral-sh/ruff-lsp"
|
repository: "astral-sh/ruff-lsp"
|
||||||
|
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
- uses: actions/download-artifact@v4
|
- uses: actions/download-artifact@v3
|
||||||
name: Download development ruff binary
|
name: Download development ruff binary
|
||||||
id: ruff-target
|
id: ruff-target
|
||||||
with:
|
with:
|
||||||
@@ -561,8 +483,7 @@ jobs:
|
|||||||
benchmarks:
|
benchmarks:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
steps:
|
||||||
- name: "Checkout Branch"
|
- name: "Checkout Branch"
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|||||||
72
.github/workflows/daily_fuzz.yaml
vendored
72
.github/workflows/daily_fuzz.yaml
vendored
@@ -1,72 +0,0 @@
|
|||||||
name: Daily parser fuzz
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
schedule:
|
|
||||||
- cron: "0 0 * * *"
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- ".github/workflows/daily_fuzz.yaml"
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
env:
|
|
||||||
CARGO_INCREMENTAL: 0
|
|
||||||
CARGO_NET_RETRY: 10
|
|
||||||
CARGO_TERM_COLOR: always
|
|
||||||
RUSTUP_MAX_RETRIES: 10
|
|
||||||
PACKAGE_NAME: ruff
|
|
||||||
FORCE_COLOR: 1
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
fuzz:
|
|
||||||
name: Fuzz
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
timeout-minutes: 20
|
|
||||||
# Don't run the cron job on forks:
|
|
||||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: "3.12"
|
|
||||||
- name: Install uv
|
|
||||||
run: curl -LsSf https://astral.sh/uv/install.sh | sh
|
|
||||||
- name: Install Python requirements
|
|
||||||
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
|
|
||||||
- name: "Install Rust toolchain"
|
|
||||||
run: rustup show
|
|
||||||
- name: "Install mold"
|
|
||||||
uses: rui314/setup-mold@v1
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
- name: Build ruff
|
|
||||||
# A debug build means the script runs slower once it gets started,
|
|
||||||
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
|
|
||||||
run: cargo build --locked
|
|
||||||
- name: Fuzz
|
|
||||||
run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff
|
|
||||||
|
|
||||||
create-issue-on-failure:
|
|
||||||
name: Create an issue if the daily fuzz surfaced any bugs
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: fuzz
|
|
||||||
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.fuzz.result == 'failure' }}
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
steps:
|
|
||||||
- uses: actions/github-script@v7
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
script: |
|
|
||||||
await github.rest.issues.create({
|
|
||||||
owner: "astral-sh",
|
|
||||||
repo: "ruff",
|
|
||||||
title: `Daily parser fuzz failed on ${new Date().toDateString()}`,
|
|
||||||
body: "Runs listed here: https://github.com/astral-sh/ruff/actions/workflows/daily_fuzz.yml",
|
|
||||||
labels: ["bug", "parser", "fuzzer"],
|
|
||||||
})
|
|
||||||
6
.github/workflows/docs.yaml
vendored
6
.github/workflows/docs.yaml
vendored
@@ -20,10 +20,10 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.ref }}
|
ref: ${{ inputs.ref }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
- name: "Add SSH key"
|
- name: "Add SSH key"
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||||
uses: webfactory/ssh-agent@v0.9.0
|
uses: webfactory/ssh-agent@v0.8.0
|
||||||
with:
|
with:
|
||||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -47,7 +47,7 @@ jobs:
|
|||||||
run: mkdocs build --strict -f mkdocs.public.yml
|
run: mkdocs build --strict -f mkdocs.public.yml
|
||||||
- name: "Deploy to Cloudflare Pages"
|
- name: "Deploy to Cloudflare Pages"
|
||||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||||
uses: cloudflare/wrangler-action@v3.5.0
|
uses: cloudflare/wrangler-action@v3.3.2
|
||||||
with:
|
with:
|
||||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||||
|
|||||||
247
.github/workflows/flake8-to-ruff.yaml
vendored
Normal file
247
.github/workflows/flake8-to-ruff.yaml
vendored
Normal file
@@ -0,0 +1,247 @@
|
|||||||
|
name: "[flake8-to-ruff] Release"
|
||||||
|
|
||||||
|
on: workflow_dispatch
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
env:
|
||||||
|
PACKAGE_NAME: flake8-to-ruff
|
||||||
|
CRATE_NAME: flake8_to_ruff
|
||||||
|
PYTHON_VERSION: "3.11"
|
||||||
|
CARGO_INCREMENTAL: 0
|
||||||
|
CARGO_NET_RETRY: 10
|
||||||
|
CARGO_TERM_COLOR: always
|
||||||
|
RUSTUP_MAX_RETRIES: 10
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
macos-x86_64:
|
||||||
|
runs-on: macos-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Install Rust toolchain"
|
||||||
|
run: rustup show
|
||||||
|
- name: "Build wheels - x86_64"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: x86_64
|
||||||
|
args: --release --out dist --sdist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel - x86_64"
|
||||||
|
run: |
|
||||||
|
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
macos-universal:
|
||||||
|
runs-on: macos-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Install Rust toolchain"
|
||||||
|
run: rustup show
|
||||||
|
- name: "Build wheels - universal2"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
args: --release --target universal2-apple-darwin --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel - universal2"
|
||||||
|
run: |
|
||||||
|
pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
windows:
|
||||||
|
runs-on: windows-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target: [x64, x86]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: ${{ matrix.target }}
|
||||||
|
- name: "Install Rust toolchain"
|
||||||
|
run: rustup show
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel"
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
python -m pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
linux:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target: [x86_64, i686]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
manylinux: auto
|
||||||
|
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel"
|
||||||
|
if: matrix.target == 'x86_64'
|
||||||
|
run: |
|
||||||
|
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
linux-cross:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target: [aarch64, armv7, s390x, ppc64le, ppc64]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
manylinux: auto
|
||||||
|
args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
|
if: matrix.target != 'ppc64'
|
||||||
|
name: Install built wheel
|
||||||
|
with:
|
||||||
|
arch: ${{ matrix.target }}
|
||||||
|
distro: ubuntu20.04
|
||||||
|
githubToken: ${{ github.token }}
|
||||||
|
install: |
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y --no-install-recommends python3 python3-pip
|
||||||
|
pip3 install -U pip
|
||||||
|
run: |
|
||||||
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
musllinux:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target:
|
||||||
|
- x86_64-unknown-linux-musl
|
||||||
|
- i686-unknown-linux-musl
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
manylinux: musllinux_1_2
|
||||||
|
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel"
|
||||||
|
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||||
|
uses: addnab/docker-run-action@v3
|
||||||
|
with:
|
||||||
|
image: alpine:latest
|
||||||
|
options: -v ${{ github.workspace }}:/io -w /io
|
||||||
|
run: |
|
||||||
|
apk add py3-pip
|
||||||
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
musllinux-cross:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
platform:
|
||||||
|
- target: aarch64-unknown-linux-musl
|
||||||
|
arch: aarch64
|
||||||
|
- target: armv7-unknown-linux-musleabihf
|
||||||
|
arch: armv7
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.platform.target }}
|
||||||
|
manylinux: musllinux_1_2
|
||||||
|
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
|
name: Install built wheel
|
||||||
|
with:
|
||||||
|
arch: ${{ matrix.platform.arch }}
|
||||||
|
distro: alpine_latest
|
||||||
|
githubToken: ${{ github.token }}
|
||||||
|
install: |
|
||||||
|
apk add py3-pip
|
||||||
|
run: |
|
||||||
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
release:
|
||||||
|
name: Release
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- macos-universal
|
||||||
|
- macos-x86_64
|
||||||
|
- windows
|
||||||
|
- linux
|
||||||
|
- linux-cross
|
||||||
|
- musllinux
|
||||||
|
- musllinux-cross
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
- name: "Publish to PyPi"
|
||||||
|
env:
|
||||||
|
TWINE_USERNAME: __token__
|
||||||
|
TWINE_PASSWORD: ${{ secrets.FLAKE8_TO_RUFF_TOKEN }}
|
||||||
|
run: |
|
||||||
|
pip install --upgrade twine
|
||||||
|
twine upload --skip-existing *
|
||||||
2
.github/workflows/playground.yaml
vendored
2
.github/workflows/playground.yaml
vendored
@@ -40,7 +40,7 @@ jobs:
|
|||||||
working-directory: playground
|
working-directory: playground
|
||||||
- name: "Deploy to Cloudflare Pages"
|
- name: "Deploy to Cloudflare Pages"
|
||||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||||
uses: cloudflare/wrangler-action@v3.5.0
|
uses: cloudflare/wrangler-action@v3.3.2
|
||||||
with:
|
with:
|
||||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||||
|
|||||||
8
.github/workflows/pr-comment.yaml
vendored
8
.github/workflows/pr-comment.yaml
vendored
@@ -17,7 +17,7 @@ jobs:
|
|||||||
comment:
|
comment:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: dawidd6/action-download-artifact@v3
|
- uses: dawidd6/action-download-artifact@v2
|
||||||
name: Download pull request number
|
name: Download pull request number
|
||||||
with:
|
with:
|
||||||
name: pr-number
|
name: pr-number
|
||||||
@@ -32,7 +32,7 @@ jobs:
|
|||||||
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
|
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- uses: dawidd6/action-download-artifact@v3
|
- uses: dawidd6/action-download-artifact@v2
|
||||||
name: "Download ecosystem results"
|
name: "Download ecosystem results"
|
||||||
id: download-ecosystem-result
|
id: download-ecosystem-result
|
||||||
if: steps.pr-number.outputs.pr-number
|
if: steps.pr-number.outputs.pr-number
|
||||||
@@ -61,7 +61,7 @@ jobs:
|
|||||||
echo 'EOF' >> $GITHUB_OUTPUT
|
echo 'EOF' >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Find existing comment
|
- name: Find existing comment
|
||||||
uses: peter-evans/find-comment@v3
|
uses: peter-evans/find-comment@v2
|
||||||
if: steps.generate-comment.outcome == 'success'
|
if: steps.generate-comment.outcome == 'success'
|
||||||
id: find-comment
|
id: find-comment
|
||||||
with:
|
with:
|
||||||
@@ -71,7 +71,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Create or update comment
|
- name: Create or update comment
|
||||||
if: steps.find-comment.outcome == 'success'
|
if: steps.find-comment.outcome == 'success'
|
||||||
uses: peter-evans/create-or-update-comment@v4
|
uses: peter-evans/create-or-update-comment@v3
|
||||||
with:
|
with:
|
||||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||||
|
|||||||
138
.github/workflows/release.yaml
vendored
138
.github/workflows/release.yaml
vendored
@@ -36,7 +36,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
@@ -52,18 +52,18 @@ jobs:
|
|||||||
ruff --help
|
ruff --help
|
||||||
python -m ruff --help
|
python -m ruff --help
|
||||||
- name: "Upload sdist"
|
- name: "Upload sdist"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-sdist
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
|
|
||||||
macos-x86_64:
|
macos-x86_64:
|
||||||
runs-on: macos-12
|
runs-on: macos-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -73,16 +73,16 @@ jobs:
|
|||||||
uses: PyO3/maturin-action@v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: x86_64
|
target: x86_64
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- name: "Test wheel - x86_64"
|
- name: "Test wheel - x86_64"
|
||||||
run: |
|
run: |
|
||||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||||
ruff --help
|
ruff --help
|
||||||
python -m ruff --help
|
python -m ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-macos-x86_64
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
@@ -90,39 +90,38 @@ jobs:
|
|||||||
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
|
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: binaries-macos-x86_64
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.tar.gz
|
*.tar.gz
|
||||||
*.sha256
|
*.sha256
|
||||||
|
|
||||||
macos-aarch64:
|
macos-universal:
|
||||||
runs-on: macos-14
|
runs-on: macos-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: arm64
|
architecture: x64
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
run: python scripts/transform_readme.py --target pypi
|
run: python scripts/transform_readme.py --target pypi
|
||||||
- name: "Build wheels - aarch64"
|
- name: "Build wheels - universal2"
|
||||||
uses: PyO3/maturin-action@v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: aarch64
|
args: --release --target universal2-apple-darwin --out dist
|
||||||
args: --release --locked --out dist
|
- name: "Test wheel - universal2"
|
||||||
- name: "Test wheel - aarch64"
|
|
||||||
run: |
|
run: |
|
||||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
|
||||||
ruff --help
|
ruff --help
|
||||||
python -m ruff --help
|
python -m ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-aarch64-apple-darwin
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
@@ -130,9 +129,9 @@ jobs:
|
|||||||
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
|
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: binaries-aarch64-apple-darwin
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.tar.gz
|
*.tar.gz
|
||||||
*.sha256
|
*.sha256
|
||||||
@@ -152,7 +151,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: ${{ matrix.platform.arch }}
|
architecture: ${{ matrix.platform.arch }}
|
||||||
@@ -162,7 +161,7 @@ jobs:
|
|||||||
uses: PyO3/maturin-action@v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: ${{ matrix.platform.target }}
|
target: ${{ matrix.platform.target }}
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- name: "Test wheel"
|
- name: "Test wheel"
|
||||||
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
||||||
shell: bash
|
shell: bash
|
||||||
@@ -171,9 +170,9 @@ jobs:
|
|||||||
ruff --help
|
ruff --help
|
||||||
python -m ruff --help
|
python -m ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.platform.target }}
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
shell: bash
|
shell: bash
|
||||||
@@ -182,9 +181,9 @@ jobs:
|
|||||||
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
||||||
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: binaries-${{ matrix.platform.target }}
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.zip
|
*.zip
|
||||||
*.sha256
|
*.sha256
|
||||||
@@ -200,7 +199,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -211,7 +210,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
target: ${{ matrix.target }}
|
target: ${{ matrix.target }}
|
||||||
manylinux: auto
|
manylinux: auto
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- name: "Test wheel"
|
- name: "Test wheel"
|
||||||
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
||||||
run: |
|
run: |
|
||||||
@@ -219,9 +218,9 @@ jobs:
|
|||||||
ruff --help
|
ruff --help
|
||||||
python -m ruff --help
|
python -m ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.target }}
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
@@ -229,9 +228,9 @@ jobs:
|
|||||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: binaries-${{ matrix.target }}
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.tar.gz
|
*.tar.gz
|
||||||
*.sha256
|
*.sha256
|
||||||
@@ -252,18 +251,14 @@ jobs:
|
|||||||
arch: s390x
|
arch: s390x
|
||||||
- target: powerpc64le-unknown-linux-gnu
|
- target: powerpc64le-unknown-linux-gnu
|
||||||
arch: ppc64le
|
arch: ppc64le
|
||||||
# see https://github.com/astral-sh/ruff/issues/10073
|
|
||||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
|
||||||
- target: powerpc64-unknown-linux-gnu
|
- target: powerpc64-unknown-linux-gnu
|
||||||
arch: ppc64
|
arch: ppc64
|
||||||
# see https://github.com/astral-sh/ruff/issues/10073
|
|
||||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
@@ -274,7 +269,7 @@ jobs:
|
|||||||
target: ${{ matrix.platform.target }}
|
target: ${{ matrix.platform.target }}
|
||||||
manylinux: auto
|
manylinux: auto
|
||||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- uses: uraimo/run-on-arch-action@v2
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
if: matrix.platform.arch != 'ppc64'
|
if: matrix.platform.arch != 'ppc64'
|
||||||
name: Test wheel
|
name: Test wheel
|
||||||
@@ -290,9 +285,9 @@ jobs:
|
|||||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
ruff --help
|
ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.platform.target }}
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
@@ -300,9 +295,9 @@ jobs:
|
|||||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: binaries-${{ matrix.platform.target }}
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.tar.gz
|
*.tar.gz
|
||||||
*.sha256
|
*.sha256
|
||||||
@@ -318,7 +313,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -329,7 +324,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
target: ${{ matrix.target }}
|
target: ${{ matrix.target }}
|
||||||
manylinux: musllinux_1_2
|
manylinux: musllinux_1_2
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- name: "Test wheel"
|
- name: "Test wheel"
|
||||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||||
uses: addnab/docker-run-action@v3
|
uses: addnab/docker-run-action@v3
|
||||||
@@ -337,14 +332,14 @@ jobs:
|
|||||||
image: alpine:latest
|
image: alpine:latest
|
||||||
options: -v ${{ github.workspace }}:/io -w /io
|
options: -v ${{ github.workspace }}:/io -w /io
|
||||||
run: |
|
run: |
|
||||||
apk add python3
|
apk add py3-pip
|
||||||
python -m venv .venv
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
|
||||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
ruff --help
|
||||||
.venv/bin/ruff check --help
|
python -m ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.target }}
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
@@ -352,9 +347,9 @@ jobs:
|
|||||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: binaries-${{ matrix.target }}
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.tar.gz
|
*.tar.gz
|
||||||
*.sha256
|
*.sha256
|
||||||
@@ -374,7 +369,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.sha }}
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
@@ -384,7 +379,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
target: ${{ matrix.platform.target }}
|
target: ${{ matrix.platform.target }}
|
||||||
manylinux: musllinux_1_2
|
manylinux: musllinux_1_2
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||||
- uses: uraimo/run-on-arch-action@v2
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
name: Test wheel
|
name: Test wheel
|
||||||
@@ -393,15 +388,14 @@ jobs:
|
|||||||
distro: alpine_latest
|
distro: alpine_latest
|
||||||
githubToken: ${{ github.token }}
|
githubToken: ${{ github.token }}
|
||||||
install: |
|
install: |
|
||||||
apk add python3
|
apk add py3-pip
|
||||||
run: |
|
run: |
|
||||||
python -m venv .venv
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
ruff check --help
|
||||||
.venv/bin/ruff check --help
|
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.platform.target }}
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
@@ -409,9 +403,9 @@ jobs:
|
|||||||
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: binaries-${{ matrix.platform.target }}
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.tar.gz
|
*.tar.gz
|
||||||
*.sha256
|
*.sha256
|
||||||
@@ -452,7 +446,7 @@ jobs:
|
|||||||
name: Upload to PyPI
|
name: Upload to PyPI
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs:
|
needs:
|
||||||
- macos-aarch64
|
- macos-universal
|
||||||
- macos-x86_64
|
- macos-x86_64
|
||||||
- windows
|
- windows
|
||||||
- linux
|
- linux
|
||||||
@@ -468,11 +462,10 @@ jobs:
|
|||||||
# For pypi trusted publishing
|
# For pypi trusted publishing
|
||||||
id-token: write
|
id-token: write
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/download-artifact@v4
|
- uses: actions/download-artifact@v3
|
||||||
with:
|
with:
|
||||||
pattern: wheels-*
|
name: wheels
|
||||||
path: wheels
|
path: wheels
|
||||||
merge-multiple: true
|
|
||||||
- name: Publish to PyPi
|
- name: Publish to PyPi
|
||||||
uses: pypa/gh-action-pypi-publish@release/v1
|
uses: pypa/gh-action-pypi-publish@release/v1
|
||||||
with:
|
with:
|
||||||
@@ -512,13 +505,12 @@ jobs:
|
|||||||
# For GitHub release publishing
|
# For GitHub release publishing
|
||||||
contents: write
|
contents: write
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/download-artifact@v4
|
- uses: actions/download-artifact@v3
|
||||||
with:
|
with:
|
||||||
pattern: binaries-*
|
name: binaries
|
||||||
path: binaries
|
path: binaries
|
||||||
merge-multiple: true
|
|
||||||
- name: "Publish to GitHub"
|
- name: "Publish to GitHub"
|
||||||
uses: softprops/action-gh-release@v2
|
uses: softprops/action-gh-release@v1
|
||||||
with:
|
with:
|
||||||
draft: true
|
draft: true
|
||||||
files: binaries/*
|
files: binaries/*
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -92,7 +92,6 @@ coverage.xml
|
|||||||
.hypothesis/
|
.hypothesis/
|
||||||
.pytest_cache/
|
.pytest_cache/
|
||||||
cover/
|
cover/
|
||||||
repos/
|
|
||||||
|
|
||||||
# Translations
|
# Translations
|
||||||
*.mo
|
*.mo
|
||||||
|
|||||||
@@ -17,4 +17,4 @@ MD013: false
|
|||||||
# MD024/no-duplicate-heading
|
# MD024/no-duplicate-heading
|
||||||
MD024:
|
MD024:
|
||||||
# Allow when nested under different parents e.g. CHANGELOG.md
|
# Allow when nested under different parents e.g. CHANGELOG.md
|
||||||
siblings_only: true
|
allow_different_nesting: true
|
||||||
|
|||||||
@@ -2,10 +2,9 @@ fail_fast: true
|
|||||||
|
|
||||||
exclude: |
|
exclude: |
|
||||||
(?x)^(
|
(?x)^(
|
||||||
crates/red_knot/vendor/.*|
|
|
||||||
crates/ruff_linter/resources/.*|
|
crates/ruff_linter/resources/.*|
|
||||||
crates/ruff_linter/src/rules/.*/snapshots/.*|
|
crates/ruff_linter/src/rules/.*/snapshots/.*|
|
||||||
crates/ruff/resources/.*|
|
crates/ruff_cli/resources/.*|
|
||||||
crates/ruff_python_formatter/resources/.*|
|
crates/ruff_python_formatter/resources/.*|
|
||||||
crates/ruff_python_formatter/tests/snapshots/.*|
|
crates/ruff_python_formatter/tests/snapshots/.*|
|
||||||
crates/ruff_python_resolver/resources/.*|
|
crates/ruff_python_resolver/resources/.*|
|
||||||
@@ -14,7 +13,7 @@ exclude: |
|
|||||||
|
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/abravalheri/validate-pyproject
|
- repo: https://github.com/abravalheri/validate-pyproject
|
||||||
rev: v0.17
|
rev: v0.15
|
||||||
hooks:
|
hooks:
|
||||||
- id: validate-pyproject
|
- id: validate-pyproject
|
||||||
|
|
||||||
@@ -32,7 +31,7 @@ repos:
|
|||||||
)$
|
)$
|
||||||
|
|
||||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||||
rev: v0.40.0
|
rev: v0.37.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: markdownlint-fix
|
- id: markdownlint-fix
|
||||||
exclude: |
|
exclude: |
|
||||||
@@ -42,7 +41,7 @@ repos:
|
|||||||
)$
|
)$
|
||||||
|
|
||||||
- repo: https://github.com/crate-ci/typos
|
- repo: https://github.com/crate-ci/typos
|
||||||
rev: v1.21.0
|
rev: v1.16.22
|
||||||
hooks:
|
hooks:
|
||||||
- id: typos
|
- id: typos
|
||||||
|
|
||||||
@@ -56,7 +55,7 @@ repos:
|
|||||||
pass_filenames: false # This makes it a lot faster
|
pass_filenames: false # This makes it a lot faster
|
||||||
|
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
rev: v0.4.4
|
rev: v0.1.4
|
||||||
hooks:
|
hooks:
|
||||||
- id: ruff-format
|
- id: ruff-format
|
||||||
- id: ruff
|
- id: ruff
|
||||||
@@ -71,7 +70,7 @@ repos:
|
|||||||
|
|
||||||
# Prettier
|
# Prettier
|
||||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||||
rev: v3.1.0
|
rev: v3.0.3
|
||||||
hooks:
|
hooks:
|
||||||
- id: prettier
|
- id: prettier
|
||||||
types: [yaml]
|
types: [yaml]
|
||||||
|
|||||||
@@ -1,93 +1,5 @@
|
|||||||
# Breaking Changes
|
# Breaking Changes
|
||||||
|
|
||||||
## 0.3.0
|
|
||||||
|
|
||||||
### Ruff 2024.2 style
|
|
||||||
|
|
||||||
The formatter now formats code according to the Ruff 2024.2 style guide. Read the [changelog](./CHANGELOG.md#030) for a detailed list of stabilized style changes.
|
|
||||||
|
|
||||||
### `isort`: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))
|
|
||||||
|
|
||||||
Previously, Ruff used one or two blank lines (or the number configured by `isort.lines-after-imports`) after imports in typing stub files (`.pyi` files).
|
|
||||||
The [typing style guide for stubs](https://typing.readthedocs.io/en/latest/source/stubs.html#style-guide) recommends using at most 1 blank line for grouping.
|
|
||||||
As of this release, `isort` now always uses one blank line after imports in stub files, the same as the formatter.
|
|
||||||
|
|
||||||
### `build` is no longer excluded by default ([#10093](https://github.com/astral-sh/ruff/pull/10093))
|
|
||||||
|
|
||||||
Ruff maintains a list of directories and files that are excluded by default. This list now consists of the following patterns:
|
|
||||||
|
|
||||||
- `.bzr`
|
|
||||||
- `.direnv`
|
|
||||||
- `.eggs`
|
|
||||||
- `.git`
|
|
||||||
- `.git-rewrite`
|
|
||||||
- `.hg`
|
|
||||||
- `.ipynb_checkpoints`
|
|
||||||
- `.mypy_cache`
|
|
||||||
- `.nox`
|
|
||||||
- `.pants.d`
|
|
||||||
- `.pyenv`
|
|
||||||
- `.pytest_cache`
|
|
||||||
- `.pytype`
|
|
||||||
- `.ruff_cache`
|
|
||||||
- `.svn`
|
|
||||||
- `.tox`
|
|
||||||
- `.venv`
|
|
||||||
- `.vscode`
|
|
||||||
- `__pypackages__`
|
|
||||||
- `_build`
|
|
||||||
- `buck-out`
|
|
||||||
- `dist`
|
|
||||||
- `node_modules`
|
|
||||||
- `site-packages`
|
|
||||||
- `venv`
|
|
||||||
|
|
||||||
Previously, the `build` directory was included in this list. However, the `build` directory tends to be a not-unpopular directory
|
|
||||||
name, and excluding it by default caused confusion. Ruff now no longer excludes `build` except if it is excluded by a `.gitignore` file
|
|
||||||
or because it is listed in `extend-exclude`.
|
|
||||||
|
|
||||||
### `--format` is no longer a valid `rule` or `linter` command option
|
|
||||||
|
|
||||||
Previously, `ruff rule` and `ruff linter` accepted the `--format <FORMAT>` option as an alias for `--output-format`. Ruff no longer
|
|
||||||
supports this alias. Please use `ruff rule --output-format <FORMAT>` and `ruff linter --output-format <FORMAT>` instead.
|
|
||||||
|
|
||||||
## 0.1.9
|
|
||||||
|
|
||||||
### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))
|
|
||||||
|
|
||||||
Ruff maintains a list of default exclusions, which now consists of the following patterns:
|
|
||||||
|
|
||||||
- `.bzr`
|
|
||||||
- `.direnv`
|
|
||||||
- `.eggs`
|
|
||||||
- `.git-rewrite`
|
|
||||||
- `.git`
|
|
||||||
- `.hg`
|
|
||||||
- `.ipynb_checkpoints`
|
|
||||||
- `.mypy_cache`
|
|
||||||
- `.nox`
|
|
||||||
- `.pants.d`
|
|
||||||
- `.pyenv`
|
|
||||||
- `.pytest_cache`
|
|
||||||
- `.pytype`
|
|
||||||
- `.ruff_cache`
|
|
||||||
- `.svn`
|
|
||||||
- `.tox`
|
|
||||||
- `.venv`
|
|
||||||
- `.vscode`
|
|
||||||
- `__pypackages__`
|
|
||||||
- `_build`
|
|
||||||
- `buck-out`
|
|
||||||
- `build`
|
|
||||||
- `dist`
|
|
||||||
- `node_modules`
|
|
||||||
- `site-packages`
|
|
||||||
- `venv`
|
|
||||||
|
|
||||||
Previously, the `site-packages` directory was not excluded by default. While `site-packages` tends
|
|
||||||
to be excluded anyway by virtue of the `.venv` exclusion, this may not be the case when using Ruff
|
|
||||||
from VS Code outside a virtual environment.
|
|
||||||
|
|
||||||
## 0.1.0
|
## 0.1.0
|
||||||
|
|
||||||
### The deprecated `format` setting has been removed
|
### The deprecated `format` setting has been removed
|
||||||
|
|||||||
1117
CHANGELOG.md
1117
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
127
CONTRIBUTING.md
127
CONTRIBUTING.md
@@ -26,25 +26,30 @@ Welcome! We're happy to have you here. Thank you in advance for your contributio
|
|||||||
- [`cargo dev`](#cargo-dev)
|
- [`cargo dev`](#cargo-dev)
|
||||||
- [Subsystems](#subsystems)
|
- [Subsystems](#subsystems)
|
||||||
- [Compilation Pipeline](#compilation-pipeline)
|
- [Compilation Pipeline](#compilation-pipeline)
|
||||||
- [Import Categorization](#import-categorization)
|
|
||||||
- [Project root](#project-root)
|
|
||||||
- [Package root](#package-root)
|
|
||||||
- [Import categorization](#import-categorization-1)
|
|
||||||
|
|
||||||
## The Basics
|
## The Basics
|
||||||
|
|
||||||
Ruff welcomes contributions in the form of pull requests.
|
Ruff welcomes contributions in the form of Pull Requests.
|
||||||
|
|
||||||
For small changes (e.g., bug fixes), feel free to submit a PR.
|
For small changes (e.g., bug fixes), feel free to submit a PR.
|
||||||
|
|
||||||
For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
|
For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
|
||||||
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
|
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
|
||||||
You can also join us on [Discord](https://discord.com/invite/astral-sh) to discuss your idea with the
|
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5) to discuss your idea with the
|
||||||
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
|
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
|
||||||
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
|
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
|
||||||
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
|
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
|
||||||
that are ready for contributions.
|
that are ready for contributions.
|
||||||
|
|
||||||
|
If you're looking for a place to start, we recommend implementing a new lint rule (see:
|
||||||
|
[_Adding a new lint rule_](#example-adding-a-new-lint-rule), which will allow you to learn from and
|
||||||
|
pattern-match against the examples in the existing codebase. Many lint rules are inspired by
|
||||||
|
existing Python plugins, which can be used as a reference implementation.
|
||||||
|
|
||||||
|
As a concrete example: consider taking on one of the rules from the [`flake8-pyi`](https://github.com/astral-sh/ruff/issues/848)
|
||||||
|
plugin, and looking to the originating [Python source](https://github.com/PyCQA/flake8-pyi) for
|
||||||
|
guidance.
|
||||||
|
|
||||||
If you have suggestions on how we might improve the contributing documentation, [let us know](https://github.com/astral-sh/ruff/discussions/5693)!
|
If you have suggestions on how we might improve the contributing documentation, [let us know](https://github.com/astral-sh/ruff/discussions/5693)!
|
||||||
|
|
||||||
### Prerequisites
|
### Prerequisites
|
||||||
@@ -58,7 +63,7 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
|
|||||||
cargo install cargo-insta
|
cargo install cargo-insta
|
||||||
```
|
```
|
||||||
|
|
||||||
And you'll need pre-commit to run some validation checks:
|
and pre-commit to run some validation checks:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
|
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
|
||||||
@@ -71,22 +76,12 @@ when making a commit:
|
|||||||
pre-commit install
|
pre-commit install
|
||||||
```
|
```
|
||||||
|
|
||||||
We recommend [nextest](https://nexte.st/) to run Ruff's test suite (via `cargo nextest run`),
|
|
||||||
though it's not strictly necessary:
|
|
||||||
|
|
||||||
```shell
|
|
||||||
cargo install cargo-nextest --locked
|
|
||||||
```
|
|
||||||
|
|
||||||
Throughout this guide, any usages of `cargo test` can be replaced with `cargo nextest run`,
|
|
||||||
if you choose to install `nextest`.
|
|
||||||
|
|
||||||
### Development
|
### Development
|
||||||
|
|
||||||
After cloning the repository, run Ruff locally from the repository root with:
|
After cloning the repository, run Ruff locally from the repository root with:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo run -p ruff -- check /path/to/file.py --no-cache
|
cargo run -p ruff_cli -- check /path/to/file.py --no-cache
|
||||||
```
|
```
|
||||||
|
|
||||||
Prior to opening a pull request, ensure that your code has been auto-formatted,
|
Prior to opening a pull request, ensure that your code has been auto-formatted,
|
||||||
@@ -98,7 +93,7 @@ RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
|
|||||||
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
|
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
|
||||||
```
|
```
|
||||||
|
|
||||||
These checks will run on GitHub Actions when you open your pull request, but running them locally
|
These checks will run on GitHub Actions when you open your Pull Request, but running them locally
|
||||||
will save you time and expedite the merge process.
|
will save you time and expedite the merge process.
|
||||||
|
|
||||||
Note that many code changes also require updating the snapshot tests, which is done interactively
|
Note that many code changes also require updating the snapshot tests, which is done interactively
|
||||||
@@ -108,14 +103,7 @@ after running `cargo test` like so:
|
|||||||
cargo insta review
|
cargo insta review
|
||||||
```
|
```
|
||||||
|
|
||||||
If your pull request relates to a specific lint rule, include the category and rule code in the
|
Your Pull Request will be reviewed by a maintainer, which may involve a few rounds of iteration
|
||||||
title, as in the following examples:
|
|
||||||
|
|
||||||
- \[`flake8-bugbear`\] Avoid false positive for usage after `continue` (`B031`)
|
|
||||||
- \[`flake8-simplify`\] Detect implicit `else` cases in `needless-bool` (`SIM103`)
|
|
||||||
- \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)
|
|
||||||
|
|
||||||
Your pull request will be reviewed by a maintainer, which may involve a few rounds of iteration
|
|
||||||
prior to merging.
|
prior to merging.
|
||||||
|
|
||||||
### Project Structure
|
### Project Structure
|
||||||
@@ -123,8 +111,8 @@ prior to merging.
|
|||||||
Ruff is structured as a monorepo with a [flat crate structure](https://matklad.github.io/2021/08/22/large-rust-workspaces.html),
|
Ruff is structured as a monorepo with a [flat crate structure](https://matklad.github.io/2021/08/22/large-rust-workspaces.html),
|
||||||
such that all crates are contained in a flat `crates` directory.
|
such that all crates are contained in a flat `crates` directory.
|
||||||
|
|
||||||
The vast majority of the code, including all lint rules, lives in the `ruff_linter` crate (located
|
The vast majority of the code, including all lint rules, lives in the `ruff` crate (located at
|
||||||
at `crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
|
`crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
|
||||||
|
|
||||||
At the time of writing, the repository includes the following crates:
|
At the time of writing, the repository includes the following crates:
|
||||||
|
|
||||||
@@ -132,7 +120,7 @@ At the time of writing, the repository includes the following crates:
|
|||||||
If you're working on a rule, this is the crate for you.
|
If you're working on a rule, this is the crate for you.
|
||||||
- `crates/ruff_benchmark`: binary crate for running micro-benchmarks.
|
- `crates/ruff_benchmark`: binary crate for running micro-benchmarks.
|
||||||
- `crates/ruff_cache`: library crate for caching lint results.
|
- `crates/ruff_cache`: library crate for caching lint results.
|
||||||
- `crates/ruff`: binary crate containing Ruff's command-line interface.
|
- `crates/ruff_cli`: binary crate containing Ruff's command-line interface.
|
||||||
- `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g.,
|
- `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g.,
|
||||||
`cargo dev generate-all`), see the [`cargo dev`](#cargo-dev) section below.
|
`cargo dev generate-all`), see the [`cargo dev`](#cargo-dev) section below.
|
||||||
- `crates/ruff_diagnostics`: library crate for the rule-independent abstractions in the lint
|
- `crates/ruff_diagnostics`: library crate for the rule-independent abstractions in the lint
|
||||||
@@ -197,14 +185,11 @@ and calling out to lint rule analyzer functions as it goes.
|
|||||||
If you need to inspect the AST, you can run `cargo dev print-ast` with a Python file. Grep
|
If you need to inspect the AST, you can run `cargo dev print-ast` with a Python file. Grep
|
||||||
for the `Diagnostic::new` invocations to understand how other, similar rules are implemented.
|
for the `Diagnostic::new` invocations to understand how other, similar rules are implemented.
|
||||||
|
|
||||||
Once you're satisfied with your code, add tests for your rule
|
Once you're satisfied with your code, add tests for your rule. See [rule testing](#rule-testing-fixtures-and-snapshots)
|
||||||
(see: [rule testing](#rule-testing-fixtures-and-snapshots)), and regenerate the documentation and
|
for more details.
|
||||||
associated assets (like our JSON Schema) with `cargo dev generate-all`.
|
|
||||||
|
|
||||||
Finally, submit a pull request, and include the category, rule name, and rule code in the title, as
|
Finally, regenerate the documentation and other generated assets (like our JSON Schema) with:
|
||||||
in:
|
`cargo dev generate-all`.
|
||||||
|
|
||||||
> \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)
|
|
||||||
|
|
||||||
#### Rule naming convention
|
#### Rule naming convention
|
||||||
|
|
||||||
@@ -246,7 +231,7 @@ Once you've completed the code for the rule itself, you can define tests with th
|
|||||||
For example, if you're adding a new rule named `E402`, you would run:
|
For example, if you're adding a new rule named `E402`, you would run:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo run -p ruff -- check crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py --no-cache --preview --select E402
|
cargo run -p ruff_cli -- check crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402
|
||||||
```
|
```
|
||||||
|
|
||||||
**Note:** Only a subset of rules are enabled by default. When testing a new rule, ensure that
|
**Note:** Only a subset of rules are enabled by default. When testing a new rule, ensure that
|
||||||
@@ -267,7 +252,7 @@ Once you've completed the code for the rule itself, you can define tests with th
|
|||||||
|
|
||||||
Ruff's user-facing settings live in a few different places.
|
Ruff's user-facing settings live in a few different places.
|
||||||
|
|
||||||
First, the command-line options are defined via the `Args` struct in `crates/ruff/src/args.rs`.
|
First, the command-line options are defined via the `Args` struct in `crates/ruff_cli/src/args.rs`.
|
||||||
|
|
||||||
Second, the `pyproject.toml` options are defined in `crates/ruff_workspace/src/options.rs` (via the
|
Second, the `pyproject.toml` options are defined in `crates/ruff_workspace/src/options.rs` (via the
|
||||||
`Options` struct), `crates/ruff_workspace/src/configuration.rs` (via the `Configuration` struct),
|
`Options` struct), `crates/ruff_workspace/src/configuration.rs` (via the `Configuration` struct),
|
||||||
@@ -317,7 +302,7 @@ To preview any changes to the documentation locally:
|
|||||||
```
|
```
|
||||||
|
|
||||||
The documentation should then be available locally at
|
The documentation should then be available locally at
|
||||||
[http://127.0.0.1:8000/ruff/](http://127.0.0.1:8000/ruff/).
|
[http://127.0.0.1:8000/docs/](http://127.0.0.1:8000/docs/).
|
||||||
|
|
||||||
## Release Process
|
## Release Process
|
||||||
|
|
||||||
@@ -330,29 +315,27 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
|||||||
|
|
||||||
### Creating a new release
|
### Creating a new release
|
||||||
|
|
||||||
1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
|
We use an experimental in-house tool for managing releases.
|
||||||
1. Run `./scripts/release/bump.sh`; this command will:
|
|
||||||
- Generate a temporary virtual environment with `rooster`
|
1. Install `rooster`: `pip install git+https://github.com/zanieb/rooster@main`
|
||||||
|
1. Run `rooster release`; this command will:
|
||||||
- Generate a changelog entry in `CHANGELOG.md`
|
- Generate a changelog entry in `CHANGELOG.md`
|
||||||
- Update versions in `pyproject.toml` and `Cargo.toml`
|
- Update versions in `pyproject.toml` and `Cargo.toml`
|
||||||
- Update references to versions in the `README.md` and documentation
|
- Update references to versions in the `README.md` and documentation
|
||||||
- Display contributors for the release
|
|
||||||
1. The changelog should then be editorialized for consistency
|
1. The changelog should then be editorialized for consistency
|
||||||
- Often labels will be missing from pull requests they will need to be manually organized into the proper section
|
- Often labels will be missing from pull requests they will need to be manually organized into the proper section
|
||||||
- Changes should be edited to be user-facing descriptions, avoiding internal details
|
- Changes should be edited to be user-facing descriptions, avoiding internal details
|
||||||
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
|
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
|
||||||
1. Run `cargo check`. This should update the lock file with new versions.
|
|
||||||
1. Create a pull request with the changelog and version updates
|
1. Create a pull request with the changelog and version updates
|
||||||
1. Merge the PR
|
1. Merge the PR
|
||||||
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yaml) with:
|
1. Run the release workflow with the version number (without starting `v`) as input. Make sure
|
||||||
- The new version number (without starting `v`)
|
main has your merged PR as last commit
|
||||||
- The commit hash of the merged release pull request on `main`
|
|
||||||
1. The release workflow will do the following:
|
1. The release workflow will do the following:
|
||||||
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
|
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
|
||||||
uploaded anything, you can restart after pushing a fix.
|
uploaded anything, you can restart after pushing a fix.
|
||||||
1. Upload to PyPI.
|
1. Upload to PyPI.
|
||||||
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
|
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
|
||||||
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
|
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/charliermarsh/ruff/issues/4468)).
|
||||||
1. Attach artifacts to draft GitHub release
|
1. Attach artifacts to draft GitHub release
|
||||||
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
|
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
|
||||||
downstream jobs manually if needed.
|
downstream jobs manually if needed.
|
||||||
@@ -360,11 +343,8 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
|||||||
1. Open the draft release in the GitHub release section
|
1. Open the draft release in the GitHub release section
|
||||||
1. Copy the changelog for the release into the GitHub release
|
1. Copy the changelog for the release into the GitHub release
|
||||||
- See previous releases for formatting of section headers
|
- See previous releases for formatting of section headers
|
||||||
1. Append the contributors from the `bump.sh` script
|
1. Generate the contributor list with `rooster contributors` and add to the release notes
|
||||||
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
|
1. If needed, [update the schemastore](https://github.com/charliermarsh/ruff/blob/main/scripts/update_schemastore.py)
|
||||||
1. One can determine if an update is needed when
|
|
||||||
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
|
|
||||||
1. Once run successfully, you should follow the link in the output to create a PR.
|
|
||||||
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
|
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
|
||||||
|
|
||||||
## Ecosystem CI
|
## Ecosystem CI
|
||||||
@@ -385,14 +365,9 @@ See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/pyt
|
|||||||
We have several ways of benchmarking and profiling Ruff:
|
We have several ways of benchmarking and profiling Ruff:
|
||||||
|
|
||||||
- Our main performance benchmark comparing Ruff with other tools on the CPython codebase
|
- Our main performance benchmark comparing Ruff with other tools on the CPython codebase
|
||||||
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
|
- Microbenchmarks which the linter or the formatter on individual files. There run on pull requests.
|
||||||
- Profiling the linter on either the microbenchmarks or entire projects
|
- Profiling the linter on either the microbenchmarks or entire projects
|
||||||
|
|
||||||
> \[!NOTE\]
|
|
||||||
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
|
|
||||||
> applications, like web browsers). You may also want to switch your CPU to a "performance"
|
|
||||||
> mode, if it exists, especially when benchmarking short-lived processes.
|
|
||||||
|
|
||||||
### CPython Benchmark
|
### CPython Benchmark
|
||||||
|
|
||||||
First, clone [CPython](https://github.com/python/cpython). It's a large and diverse Python codebase,
|
First, clone [CPython](https://github.com/python/cpython). It's a large and diverse Python codebase,
|
||||||
@@ -538,10 +513,10 @@ if the benchmark improved/regressed compared to that baseline.
|
|||||||
|
|
||||||
```shell
|
```shell
|
||||||
# Run once on your "baseline" code
|
# Run once on your "baseline" code
|
||||||
cargo bench -p ruff_benchmark -- --save-baseline=main
|
cargo benchmark --save-baseline=main
|
||||||
|
|
||||||
# Then iterate with
|
# Then iterate with
|
||||||
cargo bench -p ruff_benchmark -- --baseline=main
|
cargo benchmark --baseline=main
|
||||||
```
|
```
|
||||||
|
|
||||||
#### PR Summary
|
#### PR Summary
|
||||||
@@ -551,10 +526,10 @@ This is useful to illustrate the improvements of a PR.
|
|||||||
|
|
||||||
```shell
|
```shell
|
||||||
# On main
|
# On main
|
||||||
cargo bench -p ruff_benchmark -- --save-baseline=main
|
cargo benchmark --save-baseline=main
|
||||||
|
|
||||||
# After applying your changes
|
# After applying your changes
|
||||||
cargo bench -p ruff_benchmark -- --save-baseline=pr
|
cargo benchmark --save-baseline=pr
|
||||||
|
|
||||||
critcmp main pr
|
critcmp main pr
|
||||||
```
|
```
|
||||||
@@ -567,10 +542,10 @@ cargo install critcmp
|
|||||||
|
|
||||||
#### Tips
|
#### Tips
|
||||||
|
|
||||||
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
|
- Use `cargo benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark linter/pydantic`
|
||||||
to only run the lexer benchmarks.
|
to only run the pydantic tests.
|
||||||
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
|
- Use `cargo benchmark --quiet` for a more cleaned up output (without statistical relevance)
|
||||||
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
|
- Use `cargo benchmark --quick` to get faster results (more prone to noise)
|
||||||
|
|
||||||
### Profiling Projects
|
### Profiling Projects
|
||||||
|
|
||||||
@@ -581,10 +556,10 @@ examples.
|
|||||||
|
|
||||||
#### Linux
|
#### Linux
|
||||||
|
|
||||||
Install `perf` and build `ruff_benchmark` with the `profiling` profile and then run it with perf
|
Install `perf` and build `ruff_benchmark` with the `release-debug` profile and then run it with perf
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo bench -p ruff_benchmark --no-run --profile=profiling && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=profiling -- --profile-time=1
|
cargo bench -p ruff_benchmark --no-run --profile=release-debug && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=release-debug -- --profile-time=1
|
||||||
```
|
```
|
||||||
|
|
||||||
You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to
|
You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to
|
||||||
@@ -592,8 +567,8 @@ gather enough samples for a good flamegraph (change the 999, the sample rate, an
|
|||||||
of checks, to your liking)
|
of checks, to your liking)
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo build --bin ruff_dev --profile=profiling
|
cargo build --bin ruff_dev --profile=release-debug
|
||||||
perf record -g -F 999 target/profiling/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
|
perf record -g -F 999 target/release-debug/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
|
||||||
```
|
```
|
||||||
|
|
||||||
Then convert the recorded profile
|
Then convert the recorded profile
|
||||||
@@ -623,7 +598,7 @@ cargo install cargo-instruments
|
|||||||
Then run the profiler with
|
Then run the profiler with
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo instruments -t time --bench linter --profile profiling -p ruff_benchmark -- --profile-time=1
|
cargo instruments -t time --bench linter --profile release-debug -p ruff_benchmark -- --profile-time=1
|
||||||
```
|
```
|
||||||
|
|
||||||
- `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc`
|
- `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc`
|
||||||
@@ -638,7 +613,7 @@ Otherwise, follow the instructions from the linux section.
|
|||||||
utils with it:
|
utils with it:
|
||||||
|
|
||||||
- `cargo dev print-ast <file>`: Print the AST of a python file using the
|
- `cargo dev print-ast <file>`: Print the AST of a python file using the
|
||||||
[RustPython parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser) that is
|
[RustPython parser](https://github.com/astral-sh/RustPython-Parser/tree/main/parser) that is
|
||||||
mainly used in Ruff. For `if True: pass # comment`, you can see the syntax tree, the byte offsets
|
mainly used in Ruff. For `if True: pass # comment`, you can see the syntax tree, the byte offsets
|
||||||
for start and stop of each node and also how the `:` token, the comment and whitespace are not
|
for start and stop of each node and also how the `:` token, the comment and whitespace are not
|
||||||
represented anymore:
|
represented anymore:
|
||||||
@@ -814,8 +789,8 @@ To understand Ruff's import categorization system, we first need to define two c
|
|||||||
"project root".)
|
"project root".)
|
||||||
- "Package root": The top-most directory defining the Python package that includes a given Python
|
- "Package root": The top-most directory defining the Python package that includes a given Python
|
||||||
file. To find the package root for a given Python file, traverse up its parent directories until
|
file. To find the package root for a given Python file, traverse up its parent directories until
|
||||||
you reach a parent directory that doesn't contain an `__init__.py` file (and isn't in a subtree
|
you reach a parent directory that doesn't contain an `__init__.py` file (and isn't marked as
|
||||||
marked as a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
|
a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
|
||||||
just before that, i.e., the first directory in the package.
|
just before that, i.e., the first directory in the package.
|
||||||
|
|
||||||
For example, given:
|
For example, given:
|
||||||
|
|||||||
1930
Cargo.lock
generated
1930
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
139
Cargo.toml
139
Cargo.toml
@@ -9,114 +9,51 @@ homepage = "https://docs.astral.sh/ruff"
|
|||||||
documentation = "https://docs.astral.sh/ruff"
|
documentation = "https://docs.astral.sh/ruff"
|
||||||
repository = "https://github.com/astral-sh/ruff"
|
repository = "https://github.com/astral-sh/ruff"
|
||||||
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
||||||
license = "MIT"
|
license = "MIT2"
|
||||||
|
|
||||||
[workspace.dependencies]
|
[workspace.dependencies]
|
||||||
aho-corasick = { version = "1.1.3" }
|
anyhow = { version = "1.0.69" }
|
||||||
annotate-snippets = { version = "0.9.2", features = ["color"] }
|
bitflags = { version = "2.4.1" }
|
||||||
anyhow = { version = "1.0.80" }
|
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
|
||||||
argfile = { version = "0.2.0" }
|
clap = { version = "4.4.7", features = ["derive"] }
|
||||||
bincode = { version = "1.3.3" }
|
colored = { version = "2.0.0" }
|
||||||
bitflags = { version = "2.5.0" }
|
filetime = { version = "0.2.20" }
|
||||||
bstr = { version = "1.9.1" }
|
|
||||||
cachedir = { version = "0.3.1" }
|
|
||||||
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
|
|
||||||
clap = { version = "4.5.3", features = ["derive"] }
|
|
||||||
clap_complete_command = { version = "0.5.1" }
|
|
||||||
clearscreen = { version = "3.0.0" }
|
|
||||||
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
|
|
||||||
colored = { version = "2.1.0" }
|
|
||||||
console_error_panic_hook = { version = "0.1.7" }
|
|
||||||
console_log = { version = "1.0.0" }
|
|
||||||
countme = { version = "3.0.1" }
|
|
||||||
criterion = { version = "0.5.1", default-features = false }
|
|
||||||
crossbeam = { version = "0.8.4" }
|
|
||||||
dashmap = { version = "5.5.3" }
|
|
||||||
dirs = { version = "5.0.0" }
|
|
||||||
drop_bomb = { version = "0.1.5" }
|
|
||||||
env_logger = { version = "0.11.0" }
|
|
||||||
fern = { version = "0.6.1" }
|
|
||||||
filetime = { version = "0.2.23" }
|
|
||||||
glob = { version = "0.3.1" }
|
glob = { version = "0.3.1" }
|
||||||
globset = { version = "0.4.14" }
|
globset = { version = "0.4.14" }
|
||||||
hashbrown = "0.14.3"
|
ignore = { version = "0.4.20" }
|
||||||
hexf-parse = { version = "0.2.1" }
|
insta = { version = "1.34.0", feature = ["filters", "glob"] }
|
||||||
ignore = { version = "0.4.22" }
|
is-macro = { version = "0.3.0" }
|
||||||
imara-diff = { version = "0.1.5" }
|
itertools = { version = "0.11.0" }
|
||||||
imperative = { version = "1.0.4" }
|
|
||||||
indexmap = { version = "2.2.6" }
|
|
||||||
indicatif = { version = "0.17.8" }
|
|
||||||
indoc = { version = "2.0.4" }
|
|
||||||
insta = { version = "1.35.1", feature = ["filters", "glob"] }
|
|
||||||
insta-cmd = { version = "0.6.0" }
|
|
||||||
is-macro = { version = "0.3.5" }
|
|
||||||
is-wsl = { version = "0.4.0" }
|
|
||||||
itertools = { version = "0.12.1" }
|
|
||||||
js-sys = { version = "0.3.69" }
|
|
||||||
jod-thread = { version = "0.1.2" }
|
|
||||||
lexical-parse-float = { version = "0.8.0", features = ["format"] }
|
|
||||||
libc = { version = "0.2.153" }
|
|
||||||
libcst = { version = "1.1.0", default-features = false }
|
libcst = { version = "1.1.0", default-features = false }
|
||||||
log = { version = "0.4.17" }
|
log = { version = "0.4.17" }
|
||||||
lsp-server = { version = "0.7.6" }
|
memchr = { version = "2.6.4" }
|
||||||
lsp-types = { version = "0.95.0", features = ["proposed"] }
|
once_cell = { version = "1.17.1" }
|
||||||
matchit = { version = "0.8.1" }
|
|
||||||
memchr = { version = "2.7.1" }
|
|
||||||
mimalloc = { version = "0.1.39" }
|
|
||||||
natord = { version = "1.0.9" }
|
|
||||||
notify = { version = "6.1.1" }
|
|
||||||
once_cell = { version = "1.19.0" }
|
|
||||||
path-absolutize = { version = "3.1.1" }
|
path-absolutize = { version = "3.1.1" }
|
||||||
path-slash = { version = "0.2.1" }
|
proc-macro2 = { version = "1.0.70" }
|
||||||
pathdiff = { version = "0.2.1" }
|
|
||||||
parking_lot = "0.12.1"
|
|
||||||
pep440_rs = { version = "0.6.0", features = ["serde"] }
|
|
||||||
pretty_assertions = "1.3.0"
|
|
||||||
proc-macro2 = { version = "1.0.79" }
|
|
||||||
pyproject-toml = { version = "0.9.0" }
|
|
||||||
quick-junit = { version = "0.4.0" }
|
|
||||||
quote = { version = "1.0.23" }
|
quote = { version = "1.0.23" }
|
||||||
rand = { version = "0.8.5" }
|
|
||||||
rayon = { version = "1.10.0" }
|
|
||||||
regex = { version = "1.10.2" }
|
regex = { version = "1.10.2" }
|
||||||
result-like = { version = "0.5.0" }
|
|
||||||
rustc-hash = { version = "1.1.0" }
|
rustc-hash = { version = "1.1.0" }
|
||||||
schemars = { version = "0.8.16" }
|
schemars = { version = "0.8.16" }
|
||||||
seahash = { version = "4.1.0" }
|
serde = { version = "1.0.190", features = ["derive"] }
|
||||||
serde = { version = "1.0.197", features = ["derive"] }
|
serde_json = { version = "1.0.108" }
|
||||||
serde-wasm-bindgen = { version = "0.6.4" }
|
|
||||||
serde_json = { version = "1.0.113" }
|
|
||||||
serde_test = { version = "1.0.152" }
|
|
||||||
serde_with = { version = "3.6.0", default-features = false, features = ["macros"] }
|
|
||||||
shellexpand = { version = "3.0.0" }
|
shellexpand = { version = "3.0.0" }
|
||||||
similar = { version = "2.4.0", features = ["inline"] }
|
similar = { version = "2.3.0", features = ["inline"] }
|
||||||
smallvec = { version = "1.13.2" }
|
smallvec = { version = "1.11.2" }
|
||||||
static_assertions = "1.1.0"
|
static_assertions = "1.1.0"
|
||||||
strum = { version = "0.26.0", features = ["strum_macros"] }
|
strum = { version = "0.25.0", features = ["strum_macros"] }
|
||||||
strum_macros = { version = "0.26.0" }
|
strum_macros = { version = "0.25.3" }
|
||||||
syn = { version = "2.0.55" }
|
syn = { version = "2.0.39" }
|
||||||
tempfile = { version = "3.9.0" }
|
test-case = { version = "3.2.1" }
|
||||||
test-case = { version = "3.3.1" }
|
thiserror = { version = "1.0.50" }
|
||||||
thiserror = { version = "1.0.58" }
|
toml = { version = "0.7.8" }
|
||||||
tikv-jemallocator = { version = "0.5.0" }
|
|
||||||
toml = { version = "0.8.11" }
|
|
||||||
tracing = { version = "0.1.40" }
|
tracing = { version = "0.1.40" }
|
||||||
tracing-indicatif = { version = "0.3.6" }
|
tracing-indicatif = { version = "0.3.4" }
|
||||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
||||||
tracing-tree = { version = "0.3.0" }
|
|
||||||
typed-arena = { version = "2.0.2" }
|
|
||||||
unic-ucd-category = { version = "0.9" }
|
|
||||||
unicode-ident = { version = "1.0.12" }
|
unicode-ident = { version = "1.0.12" }
|
||||||
|
unicode_names2 = { version = "1.2.0" }
|
||||||
unicode-width = { version = "0.1.11" }
|
unicode-width = { version = "0.1.11" }
|
||||||
unicode_names2 = { version = "1.2.2" }
|
|
||||||
unicode-normalization = { version = "0.1.23" }
|
|
||||||
ureq = { version = "2.9.6" }
|
|
||||||
url = { version = "2.5.0" }
|
|
||||||
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
|
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
|
||||||
walkdir = { version = "2.3.2" }
|
wsl = { version = "0.1.0" }
|
||||||
wasm-bindgen = { version = "0.2.92" }
|
|
||||||
wasm-bindgen-test = { version = "0.3.42" }
|
|
||||||
wild = { version = "2" }
|
|
||||||
|
|
||||||
[workspace.lints.rust]
|
[workspace.lints.rust]
|
||||||
unsafe_code = "warn"
|
unsafe_code = "warn"
|
||||||
@@ -129,7 +66,6 @@ char_lit_as_u8 = "allow"
|
|||||||
collapsible_else_if = "allow"
|
collapsible_else_if = "allow"
|
||||||
collapsible_if = "allow"
|
collapsible_if = "allow"
|
||||||
implicit_hasher = "allow"
|
implicit_hasher = "allow"
|
||||||
map_unwrap_or = "allow"
|
|
||||||
match_same_arms = "allow"
|
match_same_arms = "allow"
|
||||||
missing_errors_doc = "allow"
|
missing_errors_doc = "allow"
|
||||||
missing_panics_doc = "allow"
|
missing_panics_doc = "allow"
|
||||||
@@ -152,20 +88,7 @@ rc_mutex = "warn"
|
|||||||
rest_pat_in_fully_bound_structs = "warn"
|
rest_pat_in_fully_bound_structs = "warn"
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
# Note that we set these explicitly, and these values
|
lto = "fat"
|
||||||
# were chosen based on a trade-off between compile times
|
|
||||||
# and runtime performance[1].
|
|
||||||
#
|
|
||||||
# [1]: https://github.com/astral-sh/ruff/pull/9031
|
|
||||||
lto = "thin"
|
|
||||||
codegen-units = 16
|
|
||||||
|
|
||||||
# Some crates don't change as much but benefit more from
|
|
||||||
# more expensive optimization passes, so we selectively
|
|
||||||
# decrease codegen-units in some cases.
|
|
||||||
[profile.release.package.ruff_python_parser]
|
|
||||||
codegen-units = 1
|
|
||||||
[profile.release.package.ruff_python_ast]
|
|
||||||
codegen-units = 1
|
codegen-units = 1
|
||||||
|
|
||||||
[profile.dev.package.insta]
|
[profile.dev.package.insta]
|
||||||
@@ -179,8 +102,8 @@ opt-level = 3
|
|||||||
[profile.dev.package.ruff_python_parser]
|
[profile.dev.package.ruff_python_parser]
|
||||||
opt-level = 1
|
opt-level = 1
|
||||||
|
|
||||||
# Use the `--profile profiling` flag to show symbols in release mode.
|
# Use the `--profile release-debug` flag to show symbols in release mode.
|
||||||
# e.g. `cargo build --profile profiling`
|
# e.g. `cargo build --profile release-debug`
|
||||||
[profile.profiling]
|
[profile.release-debug]
|
||||||
inherits = "release"
|
inherits = "release"
|
||||||
debug = 1
|
debug = 1
|
||||||
|
|||||||
66
README.md
66
README.md
@@ -4,12 +4,11 @@
|
|||||||
|
|
||||||
[](https://github.com/astral-sh/ruff)
|
[](https://github.com/astral-sh/ruff)
|
||||||
[](https://pypi.python.org/pypi/ruff)
|
[](https://pypi.python.org/pypi/ruff)
|
||||||
[](https://github.com/astral-sh/ruff/blob/main/LICENSE)
|
[](https://pypi.python.org/pypi/ruff)
|
||||||
[](https://pypi.python.org/pypi/ruff)
|
[](https://pypi.python.org/pypi/ruff)
|
||||||
[](https://github.com/astral-sh/ruff/actions)
|
[](https://github.com/astral-sh/ruff/actions)
|
||||||
[](https://discord.com/invite/astral-sh)
|
|
||||||
|
|
||||||
[**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
|
[**Discord**](https://discord.gg/c9MhzV8aU5) | [**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
|
||||||
|
|
||||||
An extremely fast Python linter and code formatter, written in Rust.
|
An extremely fast Python linter and code formatter, written in Rust.
|
||||||
|
|
||||||
@@ -32,7 +31,7 @@ An extremely fast Python linter and code formatter, written in Rust.
|
|||||||
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
|
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
|
||||||
- 📦 Built-in caching, to avoid re-analyzing unchanged files
|
- 📦 Built-in caching, to avoid re-analyzing unchanged files
|
||||||
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
|
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
|
||||||
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
|
- 📏 Over [700 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
|
||||||
of popular Flake8 plugins, like flake8-bugbear
|
of popular Flake8 plugins, like flake8-bugbear
|
||||||
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
|
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
|
||||||
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
|
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
|
||||||
@@ -50,7 +49,6 @@ times faster than any individual tool.
|
|||||||
Ruff is extremely actively developed and used in major open-source projects like:
|
Ruff is extremely actively developed and used in major open-source projects like:
|
||||||
|
|
||||||
- [Apache Airflow](https://github.com/apache/airflow)
|
- [Apache Airflow](https://github.com/apache/airflow)
|
||||||
- [Apache Superset](https://github.com/apache/superset)
|
|
||||||
- [FastAPI](https://github.com/tiangolo/fastapi)
|
- [FastAPI](https://github.com/tiangolo/fastapi)
|
||||||
- [Hugging Face](https://github.com/huggingface/transformers)
|
- [Hugging Face](https://github.com/huggingface/transformers)
|
||||||
- [Pandas](https://github.com/pandas-dev/pandas)
|
- [Pandas](https://github.com/pandas-dev/pandas)
|
||||||
@@ -130,7 +128,7 @@ and with [a variety of other package managers](https://docs.astral.sh/ruff/insta
|
|||||||
To run Ruff as a linter, try any of the following:
|
To run Ruff as a linter, try any of the following:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
ruff check # Lint all files in the current directory (and any subdirectories).
|
ruff check . # Lint all files in the current directory (and any subdirectories).
|
||||||
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories).
|
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories).
|
||||||
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`.
|
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`.
|
||||||
ruff check path/to/code/to/file.py # Lint `file.py`.
|
ruff check path/to/code/to/file.py # Lint `file.py`.
|
||||||
@@ -140,7 +138,7 @@ ruff check @arguments.txt # Lint using an input file, treating its con
|
|||||||
Or, to run Ruff as a formatter:
|
Or, to run Ruff as a formatter:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
ruff format # Format all files in the current directory (and any subdirectories).
|
ruff format . # Format all files in the current directory (and any subdirectories).
|
||||||
ruff format path/to/code/ # Format all files in `/path/to/code` (and any subdirectories).
|
ruff format path/to/code/ # Format all files in `/path/to/code` (and any subdirectories).
|
||||||
ruff format path/to/code/*.py # Format all `.py` files in `/path/to/code`.
|
ruff format path/to/code/*.py # Format all `.py` files in `/path/to/code`.
|
||||||
ruff format path/to/code/to/file.py # Format `file.py`.
|
ruff format path/to/code/to/file.py # Format `file.py`.
|
||||||
@@ -152,7 +150,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
|||||||
```yaml
|
```yaml
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
# Ruff version.
|
# Ruff version.
|
||||||
rev: v0.4.4
|
rev: v0.1.7
|
||||||
hooks:
|
hooks:
|
||||||
# Run the linter.
|
# Run the linter.
|
||||||
- id: ruff
|
- id: ruff
|
||||||
@@ -174,7 +172,7 @@ jobs:
|
|||||||
ruff:
|
ruff:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- uses: chartboost/ruff-action@v1
|
- uses: chartboost/ruff-action@v1
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -184,9 +182,10 @@ Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml`
|
|||||||
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
|
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
|
||||||
for a complete list of all configuration options).
|
for a complete list of all configuration options).
|
||||||
|
|
||||||
If left unspecified, Ruff's default configuration is equivalent to the following `ruff.toml` file:
|
If left unspecified, Ruff's default configuration is equivalent to:
|
||||||
|
|
||||||
```toml
|
```toml
|
||||||
|
[tool.ruff]
|
||||||
# Exclude a variety of commonly ignored directories.
|
# Exclude a variety of commonly ignored directories.
|
||||||
exclude = [
|
exclude = [
|
||||||
".bzr",
|
".bzr",
|
||||||
@@ -195,25 +194,20 @@ exclude = [
|
|||||||
".git",
|
".git",
|
||||||
".git-rewrite",
|
".git-rewrite",
|
||||||
".hg",
|
".hg",
|
||||||
".ipynb_checkpoints",
|
|
||||||
".mypy_cache",
|
".mypy_cache",
|
||||||
".nox",
|
".nox",
|
||||||
".pants.d",
|
".pants.d",
|
||||||
".pyenv",
|
|
||||||
".pytest_cache",
|
|
||||||
".pytype",
|
".pytype",
|
||||||
".ruff_cache",
|
".ruff_cache",
|
||||||
".svn",
|
".svn",
|
||||||
".tox",
|
".tox",
|
||||||
".venv",
|
".venv",
|
||||||
".vscode",
|
|
||||||
"__pypackages__",
|
"__pypackages__",
|
||||||
"_build",
|
"_build",
|
||||||
"buck-out",
|
"buck-out",
|
||||||
"build",
|
"build",
|
||||||
"dist",
|
"dist",
|
||||||
"node_modules",
|
"node_modules",
|
||||||
"site-packages",
|
|
||||||
"venv",
|
"venv",
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -224,7 +218,7 @@ indent-width = 4
|
|||||||
# Assume Python 3.8
|
# Assume Python 3.8
|
||||||
target-version = "py38"
|
target-version = "py38"
|
||||||
|
|
||||||
[lint]
|
[tool.ruff.lint]
|
||||||
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
||||||
select = ["E4", "E7", "E9", "F"]
|
select = ["E4", "E7", "E9", "F"]
|
||||||
ignore = []
|
ignore = []
|
||||||
@@ -236,7 +230,7 @@ unfixable = []
|
|||||||
# Allow unused variables when underscore-prefixed.
|
# Allow unused variables when underscore-prefixed.
|
||||||
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
|
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
|
||||||
|
|
||||||
[format]
|
[tool.ruff.format]
|
||||||
# Like Black, use double quotes for strings.
|
# Like Black, use double quotes for strings.
|
||||||
quote-style = "double"
|
quote-style = "double"
|
||||||
|
|
||||||
@@ -250,20 +244,11 @@ skip-magic-trailing-comma = false
|
|||||||
line-ending = "auto"
|
line-ending = "auto"
|
||||||
```
|
```
|
||||||
|
|
||||||
Note that, in a `pyproject.toml`, each section header should be prefixed with `tool.ruff`. For
|
Some configuration options can be provided via the command-line, such as those related to
|
||||||
example, `[lint]` should be replaced with `[tool.ruff.lint]`.
|
rule enablement and disablement, file discovery, and logging level:
|
||||||
|
|
||||||
Some configuration options can be provided via dedicated command-line arguments, such as those
|
|
||||||
related to rule enablement and disablement, file discovery, and logging level:
|
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
ruff check --select F401 --select F403 --quiet
|
ruff check path/to/code/ --select F401 --select F403 --quiet
|
||||||
```
|
|
||||||
|
|
||||||
The remaining configuration options can be provided through a catch-all `--config` argument:
|
|
||||||
|
|
||||||
```shell
|
|
||||||
ruff check --config "lint.per-file-ignores = {'some_file.py' = ['F841']}"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
|
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
|
||||||
@@ -273,7 +258,7 @@ for more on the linting and formatting commands, respectively.
|
|||||||
|
|
||||||
<!-- Begin section: Rules -->
|
<!-- Begin section: Rules -->
|
||||||
|
|
||||||
**Ruff supports over 800 lint rules**, many of which are inspired by popular tools like Flake8,
|
**Ruff supports over 700 lint rules**, many of which are inspired by popular tools like Flake8,
|
||||||
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
|
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
|
||||||
Rust as a first-party feature.
|
Rust as a first-party feature.
|
||||||
|
|
||||||
@@ -351,14 +336,14 @@ For a complete enumeration of the supported rules, see [_Rules_](https://docs.as
|
|||||||
Contributions are welcome and highly appreciated. To get started, check out the
|
Contributions are welcome and highly appreciated. To get started, check out the
|
||||||
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
|
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
|
||||||
|
|
||||||
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
|
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||||
|
|
||||||
## Support
|
## Support
|
||||||
|
|
||||||
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
|
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
|
||||||
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
|
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
|
||||||
|
|
||||||
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
|
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||||
|
|
||||||
## Acknowledgements
|
## Acknowledgements
|
||||||
|
|
||||||
@@ -388,7 +373,6 @@ Ruff is released under the MIT license.
|
|||||||
|
|
||||||
Ruff is used by a number of major open-source projects and companies, including:
|
Ruff is used by a number of major open-source projects and companies, including:
|
||||||
|
|
||||||
- [Albumentations](https://github.com/albumentations-team/albumentations)
|
|
||||||
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
|
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
|
||||||
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
|
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
|
||||||
- [Apache Airflow](https://github.com/apache/airflow)
|
- [Apache Airflow](https://github.com/apache/airflow)
|
||||||
@@ -397,7 +381,6 @@ Ruff is used by a number of major open-source projects and companies, including:
|
|||||||
- Benchling ([Refac](https://github.com/benchling/refac))
|
- Benchling ([Refac](https://github.com/benchling/refac))
|
||||||
- [Bokeh](https://github.com/bokeh/bokeh)
|
- [Bokeh](https://github.com/bokeh/bokeh)
|
||||||
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
||||||
- CERN ([Indico](https://getindico.io/))
|
|
||||||
- [DVC](https://github.com/iterative/dvc)
|
- [DVC](https://github.com/iterative/dvc)
|
||||||
- [Dagger](https://github.com/dagger/dagger)
|
- [Dagger](https://github.com/dagger/dagger)
|
||||||
- [Dagster](https://github.com/dagster-io/dagster)
|
- [Dagster](https://github.com/dagster-io/dagster)
|
||||||
@@ -413,9 +396,7 @@ Ruff is used by a number of major open-source projects and companies, including:
|
|||||||
[Diffusers](https://github.com/huggingface/diffusers))
|
[Diffusers](https://github.com/huggingface/diffusers))
|
||||||
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
||||||
- [Ibis](https://github.com/ibis-project/ibis)
|
- [Ibis](https://github.com/ibis-project/ibis)
|
||||||
- [ivy](https://github.com/unifyai/ivy)
|
|
||||||
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||||
- [Kraken Tech](https://kraken.tech/)
|
|
||||||
- [LangChain](https://github.com/hwchase17/langchain)
|
- [LangChain](https://github.com/hwchase17/langchain)
|
||||||
- [Litestar](https://litestar.dev/)
|
- [Litestar](https://litestar.dev/)
|
||||||
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
|
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
|
||||||
@@ -425,14 +406,12 @@ Ruff is used by a number of major open-source projects and companies, including:
|
|||||||
- Microsoft ([Semantic Kernel](https://github.com/microsoft/semantic-kernel),
|
- Microsoft ([Semantic Kernel](https://github.com/microsoft/semantic-kernel),
|
||||||
[ONNX Runtime](https://github.com/microsoft/onnxruntime),
|
[ONNX Runtime](https://github.com/microsoft/onnxruntime),
|
||||||
[LightGBM](https://github.com/microsoft/LightGBM))
|
[LightGBM](https://github.com/microsoft/LightGBM))
|
||||||
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python))
|
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python-sdk))
|
||||||
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
|
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
|
||||||
- [Mypy](https://github.com/python/mypy)
|
- [Mypy](https://github.com/python/mypy)
|
||||||
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
|
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
|
||||||
- [Neon](https://github.com/neondatabase/neon)
|
- [Neon](https://github.com/neondatabase/neon)
|
||||||
- [Nokia](https://nokia.com/)
|
|
||||||
- [NoneBot](https://github.com/nonebot/nonebot2)
|
- [NoneBot](https://github.com/nonebot/nonebot2)
|
||||||
- [NumPyro](https://github.com/pyro-ppl/numpyro)
|
|
||||||
- [ONNX](https://github.com/onnx/onnx)
|
- [ONNX](https://github.com/onnx/onnx)
|
||||||
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
||||||
- [PDM](https://github.com/pdm-project/pdm)
|
- [PDM](https://github.com/pdm-project/pdm)
|
||||||
@@ -444,13 +423,10 @@ Ruff is used by a number of major open-source projects and companies, including:
|
|||||||
- [PostHog](https://github.com/PostHog/posthog)
|
- [PostHog](https://github.com/PostHog/posthog)
|
||||||
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
|
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
|
||||||
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
|
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
|
||||||
- [PyMC](https://github.com/pymc-devs/pymc/)
|
|
||||||
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
|
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
|
||||||
- [pytest](https://github.com/pytest-dev/pytest)
|
|
||||||
- [PyTorch](https://github.com/pytorch/pytorch)
|
- [PyTorch](https://github.com/pytorch/pytorch)
|
||||||
- [Pydantic](https://github.com/pydantic/pydantic)
|
- [Pydantic](https://github.com/pydantic/pydantic)
|
||||||
- [Pylint](https://github.com/PyCQA/pylint)
|
- [Pylint](https://github.com/PyCQA/pylint)
|
||||||
- [PyVista](https://github.com/pyvista/pyvista)
|
|
||||||
- [Reflex](https://github.com/reflex-dev/reflex)
|
- [Reflex](https://github.com/reflex-dev/reflex)
|
||||||
- [River](https://github.com/online-ml/river)
|
- [River](https://github.com/online-ml/river)
|
||||||
- [Rippling](https://rippling.com)
|
- [Rippling](https://rippling.com)
|
||||||
@@ -477,7 +453,7 @@ Ruff is used by a number of major open-source projects and companies, including:
|
|||||||
|
|
||||||
### Show Your Support
|
### Show Your Support
|
||||||
|
|
||||||
If you're using Ruff, consider adding the Ruff badge to your project's `README.md`:
|
If you're using Ruff, consider adding the Ruff badge to project's `README.md`:
|
||||||
|
|
||||||
```md
|
```md
|
||||||
[](https://github.com/astral-sh/ruff)
|
[](https://github.com/astral-sh/ruff)
|
||||||
@@ -499,10 +475,10 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
|
|||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
|
MIT
|
||||||
|
|
||||||
<div align="center">
|
<div align="center">
|
||||||
<a target="_blank" href="https://astral.sh" style="background:none">
|
<a target="_blank" href="https://astral.sh" style="background:none">
|
||||||
<img src="https://raw.githubusercontent.com/astral-sh/ruff/main/assets/svg/Astral.svg" alt="Made by Astral">
|
<img src="https://raw.githubusercontent.com/astral-sh/ruff/main/assets/svg/Astral.svg">
|
||||||
</a>
|
</a>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
11
_typos.toml
11
_typos.toml
@@ -1,20 +1,11 @@
|
|||||||
[files]
|
[files]
|
||||||
# https://github.com/crate-ci/typos/issues/868
|
# https://github.com/crate-ci/typos/issues/868
|
||||||
extend-exclude = ["crates/red_knot/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
|
extend-exclude = ["**/resources/**/*", "**/snapshots/**/*"]
|
||||||
|
|
||||||
[default.extend-words]
|
[default.extend-words]
|
||||||
"arange" = "arange" # e.g. `numpy.arange`
|
|
||||||
hel = "hel"
|
hel = "hel"
|
||||||
whos = "whos"
|
whos = "whos"
|
||||||
spawnve = "spawnve"
|
spawnve = "spawnve"
|
||||||
ned = "ned"
|
ned = "ned"
|
||||||
pn = "pn" # `import panel as pd` is a thing
|
|
||||||
poit = "poit"
|
poit = "poit"
|
||||||
BA = "BA" # acronym for "Bad Allowed", used in testing.
|
BA = "BA" # acronym for "Bad Allowed", used in testing.
|
||||||
jod = "jod" # e.g., `jod-thread`
|
|
||||||
|
|
||||||
[default]
|
|
||||||
extend-ignore-re = [
|
|
||||||
# Line ignore with trailing "spellchecker:disable-line"
|
|
||||||
"(?Rm)^.*#\\s*spellchecker:disable-line$"
|
|
||||||
]
|
|
||||||
|
|||||||
14
clippy.toml
14
clippy.toml
@@ -1,13 +1,7 @@
|
|||||||
doc-valid-idents = [
|
doc-valid-idents = [
|
||||||
"..",
|
|
||||||
"CodeQL",
|
|
||||||
"FastAPI",
|
|
||||||
"IPython",
|
|
||||||
"LangChain",
|
|
||||||
"LibCST",
|
|
||||||
"McCabe",
|
|
||||||
"NumPy",
|
|
||||||
"SCREAMING_SNAKE_CASE",
|
|
||||||
"SQLAlchemy",
|
|
||||||
"StackOverflow",
|
"StackOverflow",
|
||||||
|
"CodeQL",
|
||||||
|
"IPython",
|
||||||
|
"NumPy",
|
||||||
|
"..",
|
||||||
]
|
]
|
||||||
|
|||||||
39
crates/flake8_to_ruff/Cargo.toml
Normal file
39
crates/flake8_to_ruff/Cargo.toml
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
[package]
|
||||||
|
name = "flake8-to-ruff"
|
||||||
|
version = "0.1.7"
|
||||||
|
description = """
|
||||||
|
Convert Flake8 configuration files to Ruff configuration files.
|
||||||
|
"""
|
||||||
|
authors = { workspace = true }
|
||||||
|
edition = { workspace = true }
|
||||||
|
rust-version = { workspace = true }
|
||||||
|
homepage = { workspace = true }
|
||||||
|
documentation = { workspace = true }
|
||||||
|
repository = { workspace = true }
|
||||||
|
license = { workspace = true }
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
ruff_linter = { path = "../ruff_linter", default-features = false }
|
||||||
|
ruff_workspace = { path = "../ruff_workspace" }
|
||||||
|
|
||||||
|
anyhow = { workspace = true }
|
||||||
|
clap = { workspace = true }
|
||||||
|
colored = { workspace = true }
|
||||||
|
configparser = { version = "3.0.3" }
|
||||||
|
itertools = { workspace = true }
|
||||||
|
log = { workspace = true }
|
||||||
|
once_cell = { workspace = true }
|
||||||
|
pep440_rs = { version = "0.3.12", features = ["serde"] }
|
||||||
|
regex = { workspace = true }
|
||||||
|
rustc-hash = { workspace = true }
|
||||||
|
serde = { workspace = true }
|
||||||
|
serde_json = { workspace = true }
|
||||||
|
strum = { workspace = true }
|
||||||
|
strum_macros = { workspace = true }
|
||||||
|
toml = { workspace = true }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
pretty_assertions = "1.3.0"
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
99
crates/flake8_to_ruff/README.md
Normal file
99
crates/flake8_to_ruff/README.md
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
# flake8-to-ruff
|
||||||
|
|
||||||
|
Convert existing Flake8 configuration files (`setup.cfg`, `tox.ini`, or `.flake8`) for use with
|
||||||
|
[Ruff](https://github.com/astral-sh/ruff).
|
||||||
|
|
||||||
|
Generates a Ruff-compatible `pyproject.toml` section.
|
||||||
|
|
||||||
|
## Installation and Usage
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
|
||||||
|
Available as [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) on PyPI:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pip install flake8-to-ruff
|
||||||
|
```
|
||||||
|
|
||||||
|
### Usage
|
||||||
|
|
||||||
|
To run `flake8-to-ruff`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
flake8-to-ruff path/to/setup.cfg
|
||||||
|
flake8-to-ruff path/to/tox.ini
|
||||||
|
flake8-to-ruff path/to/.flake8
|
||||||
|
```
|
||||||
|
|
||||||
|
`flake8-to-ruff` will print the relevant `pyproject.toml` sections to standard output, like so:
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[tool.ruff]
|
||||||
|
exclude = [
|
||||||
|
'.svn',
|
||||||
|
'CVS',
|
||||||
|
'.bzr',
|
||||||
|
'.hg',
|
||||||
|
'.git',
|
||||||
|
'__pycache__',
|
||||||
|
'.tox',
|
||||||
|
'.idea',
|
||||||
|
'.mypy_cache',
|
||||||
|
'.venv',
|
||||||
|
'node_modules',
|
||||||
|
'_state_machine.py',
|
||||||
|
'test_fstring.py',
|
||||||
|
'bad_coding2.py',
|
||||||
|
'badsyntax_*.py',
|
||||||
|
]
|
||||||
|
select = [
|
||||||
|
'A',
|
||||||
|
'E',
|
||||||
|
'F',
|
||||||
|
'Q',
|
||||||
|
]
|
||||||
|
ignore = []
|
||||||
|
|
||||||
|
[tool.ruff.flake8-quotes]
|
||||||
|
inline-quotes = 'single'
|
||||||
|
|
||||||
|
[tool.ruff.pep8-naming]
|
||||||
|
ignore-names = [
|
||||||
|
'foo',
|
||||||
|
'bar',
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Plugins
|
||||||
|
|
||||||
|
`flake8-to-ruff` will attempt to infer any activated plugins based on the settings provided in your
|
||||||
|
configuration file.
|
||||||
|
|
||||||
|
For example, if your `.flake8` file includes a `docstring-convention` property, `flake8-to-ruff`
|
||||||
|
will enable the appropriate [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
|
||||||
|
checks.
|
||||||
|
|
||||||
|
Alternatively, you can manually specify plugins on the command-line:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
|
||||||
|
```
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
1. Ruff only supports a subset of the Flake configuration options. `flake8-to-ruff` will warn on and
|
||||||
|
ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
|
||||||
|
configuration options that don't exist in Flake8.)
|
||||||
|
1. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
|
||||||
|
codes from unsupported plugins. (See the
|
||||||
|
[documentation](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8) for the complete
|
||||||
|
list of supported plugins.)
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
Contributions are welcome and hugely appreciated. To get started, check out the
|
||||||
|
[contributing guidelines](https://github.com/astral-sh/ruff/blob/main/CONTRIBUTING.md).
|
||||||
65
crates/flake8_to_ruff/examples/cryptography/pyproject.toml
Normal file
65
crates/flake8_to_ruff/examples/cryptography/pyproject.toml
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
[build-system]
|
||||||
|
requires = [
|
||||||
|
# The minimum setuptools version is specific to the PEP 517 backend,
|
||||||
|
# and may be stricter than the version required in `setup.cfg`
|
||||||
|
"setuptools>=40.6.0,!=60.9.0",
|
||||||
|
"wheel",
|
||||||
|
# Must be kept in sync with the `install_requirements` in `setup.cfg`
|
||||||
|
"cffi>=1.12; platform_python_implementation != 'PyPy'",
|
||||||
|
"setuptools-rust>=0.11.4",
|
||||||
|
]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[tool.black]
|
||||||
|
line-length = 79
|
||||||
|
target-version = ["py36"]
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
addopts = "-r s --capture=no --strict-markers --benchmark-disable"
|
||||||
|
markers = [
|
||||||
|
"skip_fips: this test is not executed in FIPS mode",
|
||||||
|
"supported: parametrized test requiring only_if and skip_message",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
show_error_codes = true
|
||||||
|
check_untyped_defs = true
|
||||||
|
no_implicit_reexport = true
|
||||||
|
warn_redundant_casts = true
|
||||||
|
warn_unused_ignores = true
|
||||||
|
warn_unused_configs = true
|
||||||
|
strict_equality = true
|
||||||
|
|
||||||
|
[[tool.mypy.overrides]]
|
||||||
|
module = [
|
||||||
|
"pretend"
|
||||||
|
]
|
||||||
|
ignore_missing_imports = true
|
||||||
|
|
||||||
|
[tool.coverage.run]
|
||||||
|
branch = true
|
||||||
|
relative_files = true
|
||||||
|
source = [
|
||||||
|
"cryptography",
|
||||||
|
"tests/",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.coverage.paths]
|
||||||
|
source = [
|
||||||
|
"src/cryptography",
|
||||||
|
"*.tox/*/lib*/python*/site-packages/cryptography",
|
||||||
|
"*.tox\\*\\Lib\\site-packages\\cryptography",
|
||||||
|
"*.tox/pypy/site-packages/cryptography",
|
||||||
|
]
|
||||||
|
tests =[
|
||||||
|
"tests/",
|
||||||
|
"*tests\\",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.coverage.report]
|
||||||
|
exclude_lines = [
|
||||||
|
"@abc.abstractmethod",
|
||||||
|
"@abc.abstractproperty",
|
||||||
|
"@typing.overload",
|
||||||
|
"if typing.TYPE_CHECKING",
|
||||||
|
]
|
||||||
91
crates/flake8_to_ruff/examples/cryptography/setup.cfg
Normal file
91
crates/flake8_to_ruff/examples/cryptography/setup.cfg
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
[metadata]
|
||||||
|
name = cryptography
|
||||||
|
version = attr: cryptography.__version__
|
||||||
|
description = cryptography is a package which provides cryptographic recipes and primitives to Python developers.
|
||||||
|
long_description = file: README.rst
|
||||||
|
long_description_content_type = text/x-rst
|
||||||
|
license = BSD-3-Clause OR Apache-2.0
|
||||||
|
url = https://github.com/pyca/cryptography
|
||||||
|
author = The Python Cryptographic Authority and individual contributors
|
||||||
|
author_email = cryptography-dev@python.org
|
||||||
|
project_urls =
|
||||||
|
Documentation=https://cryptography.io/
|
||||||
|
Source=https://github.com/pyca/cryptography/
|
||||||
|
Issues=https://github.com/pyca/cryptography/issues
|
||||||
|
Changelog=https://cryptography.io/en/latest/changelog/
|
||||||
|
classifiers =
|
||||||
|
Development Status :: 5 - Production/Stable
|
||||||
|
Intended Audience :: Developers
|
||||||
|
License :: OSI Approved :: Apache Software License
|
||||||
|
License :: OSI Approved :: BSD License
|
||||||
|
Natural Language :: English
|
||||||
|
Operating System :: MacOS :: MacOS X
|
||||||
|
Operating System :: POSIX
|
||||||
|
Operating System :: POSIX :: BSD
|
||||||
|
Operating System :: POSIX :: Linux
|
||||||
|
Operating System :: Microsoft :: Windows
|
||||||
|
Programming Language :: Python
|
||||||
|
Programming Language :: Python :: 3
|
||||||
|
Programming Language :: Python :: 3 :: Only
|
||||||
|
Programming Language :: Python :: 3.6
|
||||||
|
Programming Language :: Python :: 3.7
|
||||||
|
Programming Language :: Python :: 3.8
|
||||||
|
Programming Language :: Python :: 3.9
|
||||||
|
Programming Language :: Python :: 3.10
|
||||||
|
Programming Language :: Python :: 3.11
|
||||||
|
Programming Language :: Python :: Implementation :: CPython
|
||||||
|
Programming Language :: Python :: Implementation :: PyPy
|
||||||
|
Topic :: Security :: Cryptography
|
||||||
|
|
||||||
|
[options]
|
||||||
|
python_requires = >=3.6
|
||||||
|
include_package_data = True
|
||||||
|
zip_safe = False
|
||||||
|
package_dir =
|
||||||
|
=src
|
||||||
|
packages = find:
|
||||||
|
# `install_requires` must be kept in sync with `pyproject.toml`
|
||||||
|
install_requires =
|
||||||
|
cffi >=1.12
|
||||||
|
|
||||||
|
[options.packages.find]
|
||||||
|
where = src
|
||||||
|
exclude =
|
||||||
|
_cffi_src
|
||||||
|
_cffi_src.*
|
||||||
|
|
||||||
|
[options.extras_require]
|
||||||
|
test =
|
||||||
|
pytest>=6.2.0
|
||||||
|
pytest-benchmark
|
||||||
|
pytest-cov
|
||||||
|
pytest-subtests
|
||||||
|
pytest-xdist
|
||||||
|
pretend
|
||||||
|
iso8601
|
||||||
|
pytz
|
||||||
|
hypothesis>=1.11.4,!=3.79.2
|
||||||
|
docs =
|
||||||
|
sphinx >= 1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0
|
||||||
|
sphinx_rtd_theme
|
||||||
|
docstest =
|
||||||
|
pyenchant >= 1.6.11
|
||||||
|
twine >= 1.12.0
|
||||||
|
sphinxcontrib-spelling >= 4.0.1
|
||||||
|
sdist =
|
||||||
|
setuptools_rust >= 0.11.4
|
||||||
|
pep8test =
|
||||||
|
black
|
||||||
|
flake8
|
||||||
|
flake8-import-order
|
||||||
|
pep8-naming
|
||||||
|
# This extra is for OpenSSH private keys that use bcrypt KDF
|
||||||
|
# Versions: v3.1.3 - ignore_few_rounds, v3.1.5 - abi3
|
||||||
|
ssh =
|
||||||
|
bcrypt >= 3.1.5
|
||||||
|
|
||||||
|
[flake8]
|
||||||
|
ignore = E203,E211,W503,W504,N818
|
||||||
|
exclude = .tox,*.egg,.git,_build,.hypothesis
|
||||||
|
select = E,W,F,N,I
|
||||||
|
application-import-names = cryptography,cryptography_vectors,tests
|
||||||
19
crates/flake8_to_ruff/examples/jupyterhub.ini
Normal file
19
crates/flake8_to_ruff/examples/jupyterhub.ini
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
[flake8]
|
||||||
|
# Ignore style and complexity
|
||||||
|
# E: style errors
|
||||||
|
# W: style warnings
|
||||||
|
# C: complexity
|
||||||
|
# D: docstring warnings (unused pydocstyle extension)
|
||||||
|
# F841: local variable assigned but never used
|
||||||
|
ignore = E, C, W, D, F841
|
||||||
|
builtins = c, get_config
|
||||||
|
exclude =
|
||||||
|
.cache,
|
||||||
|
.github,
|
||||||
|
docs,
|
||||||
|
jupyterhub/alembic*,
|
||||||
|
onbuild,
|
||||||
|
scripts,
|
||||||
|
share,
|
||||||
|
tools,
|
||||||
|
setup.py
|
||||||
43
crates/flake8_to_ruff/examples/manim.ini
Normal file
43
crates/flake8_to_ruff/examples/manim.ini
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
[flake8]
|
||||||
|
# Exclude the grpc generated code
|
||||||
|
exclude = ./manim/grpc/gen/*
|
||||||
|
max-complexity = 15
|
||||||
|
max-line-length = 88
|
||||||
|
statistics = True
|
||||||
|
# Prevents some flake8-rst-docstrings errors
|
||||||
|
rst-roles = attr,class,func,meth,mod,obj,ref,doc,exc
|
||||||
|
rst-directives = manim, SEEALSO, seealso
|
||||||
|
docstring-convention=numpy
|
||||||
|
|
||||||
|
select = A,A00,B,B9,C4,C90,D,E,F,F,PT,RST,SIM,W
|
||||||
|
|
||||||
|
# General Compatibility
|
||||||
|
extend-ignore = E203, W503, D202, D212, D213, D404
|
||||||
|
|
||||||
|
# Misc
|
||||||
|
F401, F403, F405, F841, E501, E731, E402, F811, F821,
|
||||||
|
|
||||||
|
# Plug-in: flake8-builtins
|
||||||
|
A001, A002, A003,
|
||||||
|
|
||||||
|
# Plug-in: flake8-bugbear
|
||||||
|
B006, B007, B008, B009, B010, B903, B950,
|
||||||
|
|
||||||
|
# Plug-in: flake8-simplify
|
||||||
|
SIM105, SIM106, SIM119,
|
||||||
|
|
||||||
|
# Plug-in: flake8-comprehensions
|
||||||
|
C901
|
||||||
|
|
||||||
|
# Plug-in: flake8-pytest-style
|
||||||
|
PT001, PT004, PT006, PT011, PT018, PT022, PT023,
|
||||||
|
|
||||||
|
# Plug-in: flake8-docstrings
|
||||||
|
D100, D101, D102, D103, D104, D105, D106, D107,
|
||||||
|
D200, D202, D204, D205, D209,
|
||||||
|
D301,
|
||||||
|
D400, D401, D402, D403, D405, D406, D407, D409, D411, D412, D414,
|
||||||
|
|
||||||
|
# Plug-in: flake8-rst-docstrings
|
||||||
|
RST201, RST203, RST210, RST212, RST213, RST215,
|
||||||
|
RST301, RST303,
|
||||||
36
crates/flake8_to_ruff/examples/poetry.ini
Normal file
36
crates/flake8_to_ruff/examples/poetry.ini
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
[flake8]
|
||||||
|
min_python_version = 3.7.0
|
||||||
|
max-line-length = 88
|
||||||
|
ban-relative-imports = true
|
||||||
|
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
|
||||||
|
format-greedy = 1
|
||||||
|
inline-quotes = double
|
||||||
|
enable-extensions = TC, TC1
|
||||||
|
type-checking-strict = true
|
||||||
|
eradicate-whitelist-extend = ^-.*;
|
||||||
|
extend-ignore =
|
||||||
|
# E203: Whitespace before ':' (pycqa/pycodestyle#373)
|
||||||
|
E203,
|
||||||
|
# SIM106: Handle error-cases first
|
||||||
|
SIM106,
|
||||||
|
# ANN101: Missing type annotation for self in method
|
||||||
|
ANN101,
|
||||||
|
# ANN102: Missing type annotation for cls in classmethod
|
||||||
|
ANN102,
|
||||||
|
# PIE781: assign-and-return
|
||||||
|
PIE781,
|
||||||
|
# PIE798 no-unnecessary-class: Consider using a module for namespacing instead
|
||||||
|
PIE798,
|
||||||
|
per-file-ignores =
|
||||||
|
# TC002: Move third-party import '...' into a type-checking block
|
||||||
|
__init__.py:TC002,
|
||||||
|
# ANN201: Missing return type annotation for public function
|
||||||
|
tests/test_*:ANN201
|
||||||
|
tests/**/test_*:ANN201
|
||||||
|
extend-exclude =
|
||||||
|
# Frozen and not subject to change in this repo:
|
||||||
|
get-poetry.py,
|
||||||
|
install-poetry.py,
|
||||||
|
# External to the project's coding standards:
|
||||||
|
tests/fixtures/*,
|
||||||
|
tests/**/fixtures/*,
|
||||||
19
crates/flake8_to_ruff/examples/python-discord.ini
Normal file
19
crates/flake8_to_ruff/examples/python-discord.ini
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
[flake8]
|
||||||
|
max-line-length=120
|
||||||
|
docstring-convention=all
|
||||||
|
import-order-style=pycharm
|
||||||
|
application_import_names=bot,tests
|
||||||
|
exclude=.cache,.venv,.git,constants.py
|
||||||
|
extend-ignore=
|
||||||
|
B311,W503,E226,S311,T000,E731
|
||||||
|
# Missing Docstrings
|
||||||
|
D100,D104,D105,D107,
|
||||||
|
# Docstring Whitespace
|
||||||
|
D203,D212,D214,D215,
|
||||||
|
# Docstring Quotes
|
||||||
|
D301,D302,
|
||||||
|
# Docstring Content
|
||||||
|
D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
|
||||||
|
# Type Annotations
|
||||||
|
ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
|
||||||
|
per-file-ignores=tests/*:D,ANN
|
||||||
6
crates/flake8_to_ruff/examples/requests.ini
Normal file
6
crates/flake8_to_ruff/examples/requests.ini
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
[flake8]
|
||||||
|
ignore = E203, E501, W503
|
||||||
|
per-file-ignores =
|
||||||
|
requests/__init__.py:E402, F401
|
||||||
|
requests/compat.py:E402, F401
|
||||||
|
tests/compat.py:F401
|
||||||
34
crates/flake8_to_ruff/pyproject.toml
Normal file
34
crates/flake8_to_ruff/pyproject.toml
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
[project]
|
||||||
|
name = "flake8-to-ruff"
|
||||||
|
keywords = ["automation", "flake8", "pycodestyle", "pyflakes", "pylint", "clippy"]
|
||||||
|
classifiers = [
|
||||||
|
"Development Status :: 3 - Alpha",
|
||||||
|
"Environment :: Console",
|
||||||
|
"Intended Audience :: Developers",
|
||||||
|
"License :: OSI Approved :: MIT License",
|
||||||
|
"Operating System :: OS Independent",
|
||||||
|
"Programming Language :: Python",
|
||||||
|
"Programming Language :: Python :: 3.7",
|
||||||
|
"Programming Language :: Python :: 3.8",
|
||||||
|
"Programming Language :: Python :: 3.9",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
|
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||||
|
"Topic :: Software Development :: Quality Assurance",
|
||||||
|
]
|
||||||
|
author = "Charlie Marsh"
|
||||||
|
author_email = "charlie.r.marsh@gmail.com"
|
||||||
|
description = "Convert existing Flake8 configuration to Ruff."
|
||||||
|
requires-python = ">=3.7"
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
repository = "https://github.com/astral-sh/ruff#subdirectory=crates/flake8_to_ruff"
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["maturin>=1.0,<2.0"]
|
||||||
|
build-backend = "maturin"
|
||||||
|
|
||||||
|
[tool.maturin]
|
||||||
|
bindings = "bin"
|
||||||
|
strip = true
|
||||||
13
crates/flake8_to_ruff/src/black.rs
Normal file
13
crates/flake8_to_ruff/src/black.rs
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
//! Extract Black configuration settings from a pyproject.toml.
|
||||||
|
|
||||||
|
use ruff_linter::line_width::LineLength;
|
||||||
|
use ruff_linter::settings::types::PythonVersion;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
|
||||||
|
pub(crate) struct Black {
|
||||||
|
#[serde(alias = "line-length", alias = "line_length")]
|
||||||
|
pub(crate) line_length: Option<LineLength>,
|
||||||
|
#[serde(alias = "target-version", alias = "target_version")]
|
||||||
|
pub(crate) target_version: Option<Vec<PythonVersion>>,
|
||||||
|
}
|
||||||
687
crates/flake8_to_ruff/src/converter.rs
Normal file
687
crates/flake8_to_ruff/src/converter.rs
Normal file
@@ -0,0 +1,687 @@
|
|||||||
|
use std::collections::{HashMap, HashSet};
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use itertools::Itertools;
|
||||||
|
|
||||||
|
use ruff_linter::line_width::LineLength;
|
||||||
|
use ruff_linter::registry::Linter;
|
||||||
|
use ruff_linter::rule_selector::RuleSelector;
|
||||||
|
use ruff_linter::rules::flake8_pytest_style::types::{
|
||||||
|
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
|
||||||
|
};
|
||||||
|
use ruff_linter::rules::flake8_quotes::settings::Quote;
|
||||||
|
use ruff_linter::rules::flake8_tidy_imports::settings::Strictness;
|
||||||
|
use ruff_linter::rules::pydocstyle::settings::Convention;
|
||||||
|
use ruff_linter::settings::types::PythonVersion;
|
||||||
|
use ruff_linter::settings::DEFAULT_SELECTORS;
|
||||||
|
use ruff_linter::warn_user;
|
||||||
|
use ruff_workspace::options::{
|
||||||
|
Flake8AnnotationsOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, Flake8ErrMsgOptions,
|
||||||
|
Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, LintCommonOptions,
|
||||||
|
LintOptions, McCabeOptions, Options, Pep8NamingOptions, PydocstyleOptions,
|
||||||
|
};
|
||||||
|
use ruff_workspace::pyproject::Pyproject;
|
||||||
|
|
||||||
|
use super::external_config::ExternalConfig;
|
||||||
|
use super::plugin::Plugin;
|
||||||
|
use super::{parser, plugin};
|
||||||
|
|
||||||
|
pub(crate) fn convert(
|
||||||
|
config: &HashMap<String, HashMap<String, Option<String>>>,
|
||||||
|
external_config: &ExternalConfig,
|
||||||
|
plugins: Option<Vec<Plugin>>,
|
||||||
|
) -> Pyproject {
|
||||||
|
// Extract the Flake8 section.
|
||||||
|
let flake8 = config
|
||||||
|
.get("flake8")
|
||||||
|
.expect("Unable to find flake8 section in INI file");
|
||||||
|
|
||||||
|
// Extract all referenced rule code prefixes, to power plugin inference.
|
||||||
|
let mut referenced_codes: HashSet<RuleSelector> = HashSet::default();
|
||||||
|
for (key, value) in flake8 {
|
||||||
|
if let Some(value) = value {
|
||||||
|
match key.as_str() {
|
||||||
|
"select" | "ignore" | "extend-select" | "extend_select" | "extend-ignore"
|
||||||
|
| "extend_ignore" => {
|
||||||
|
referenced_codes.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||||
|
}
|
||||||
|
"per-file-ignores" | "per_file_ignores" => {
|
||||||
|
if let Ok(per_file_ignores) =
|
||||||
|
parser::parse_files_to_codes_mapping(value.as_ref())
|
||||||
|
{
|
||||||
|
for (_, codes) in parser::collect_per_file_ignores(per_file_ignores) {
|
||||||
|
referenced_codes.extend(codes);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Infer plugins, if not provided.
|
||||||
|
let plugins = plugins.unwrap_or_else(|| {
|
||||||
|
let from_options = plugin::infer_plugins_from_options(flake8);
|
||||||
|
if !from_options.is_empty() {
|
||||||
|
#[allow(clippy::print_stderr)]
|
||||||
|
{
|
||||||
|
eprintln!("Inferred plugins from settings: {from_options:#?}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let from_codes = plugin::infer_plugins_from_codes(&referenced_codes);
|
||||||
|
if !from_codes.is_empty() {
|
||||||
|
#[allow(clippy::print_stderr)]
|
||||||
|
{
|
||||||
|
eprintln!("Inferred plugins from referenced codes: {from_codes:#?}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
from_options.into_iter().chain(from_codes).collect()
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check if the user has specified a `select`. If not, we'll add our own
|
||||||
|
// default `select`, and populate it based on user plugins.
|
||||||
|
let mut select = flake8
|
||||||
|
.get("select")
|
||||||
|
.and_then(|value| {
|
||||||
|
value
|
||||||
|
.as_ref()
|
||||||
|
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
|
||||||
|
})
|
||||||
|
.unwrap_or_else(|| resolve_select(&plugins));
|
||||||
|
let mut ignore: HashSet<RuleSelector> = flake8
|
||||||
|
.get("ignore")
|
||||||
|
.and_then(|value| {
|
||||||
|
value
|
||||||
|
.as_ref()
|
||||||
|
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
|
||||||
|
})
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
// Parse each supported option.
|
||||||
|
let mut options = Options::default();
|
||||||
|
let mut lint_options = LintCommonOptions::default();
|
||||||
|
let mut flake8_annotations = Flake8AnnotationsOptions::default();
|
||||||
|
let mut flake8_bugbear = Flake8BugbearOptions::default();
|
||||||
|
let mut flake8_builtins = Flake8BuiltinsOptions::default();
|
||||||
|
let mut flake8_errmsg = Flake8ErrMsgOptions::default();
|
||||||
|
let mut flake8_pytest_style = Flake8PytestStyleOptions::default();
|
||||||
|
let mut flake8_quotes = Flake8QuotesOptions::default();
|
||||||
|
let mut flake8_tidy_imports = Flake8TidyImportsOptions::default();
|
||||||
|
let mut mccabe = McCabeOptions::default();
|
||||||
|
let mut pep8_naming = Pep8NamingOptions::default();
|
||||||
|
let mut pydocstyle = PydocstyleOptions::default();
|
||||||
|
for (key, value) in flake8 {
|
||||||
|
if let Some(value) = value {
|
||||||
|
match key.as_str() {
|
||||||
|
// flake8
|
||||||
|
"builtins" => {
|
||||||
|
options.builtins = Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"max-line-length" | "max_line_length" => match LineLength::from_str(value) {
|
||||||
|
Ok(line_length) => options.line_length = Some(line_length),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"select" => {
|
||||||
|
// No-op (handled above).
|
||||||
|
select.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||||
|
}
|
||||||
|
"ignore" => {
|
||||||
|
// No-op (handled above).
|
||||||
|
}
|
||||||
|
"extend-select" | "extend_select" => {
|
||||||
|
// Unlike Flake8, use a single explicit `select`.
|
||||||
|
select.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||||
|
}
|
||||||
|
"extend-ignore" | "extend_ignore" => {
|
||||||
|
// Unlike Flake8, use a single explicit `ignore`.
|
||||||
|
ignore.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||||
|
}
|
||||||
|
"exclude" => {
|
||||||
|
options.exclude = Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"extend-exclude" | "extend_exclude" => {
|
||||||
|
options.extend_exclude = Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"per-file-ignores" | "per_file_ignores" => {
|
||||||
|
match parser::parse_files_to_codes_mapping(value.as_ref()) {
|
||||||
|
Ok(per_file_ignores) => {
|
||||||
|
lint_options.per_file_ignores =
|
||||||
|
Some(parser::collect_per_file_ignores(per_file_ignores));
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// flake8-bugbear
|
||||||
|
"extend-immutable-calls" | "extend_immutable_calls" => {
|
||||||
|
flake8_bugbear.extend_immutable_calls =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
// flake8-builtins
|
||||||
|
"builtins-ignorelist" | "builtins_ignorelist" => {
|
||||||
|
flake8_builtins.builtins_ignorelist =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
// flake8-annotations
|
||||||
|
"suppress-none-returning" | "suppress_none_returning" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_annotations.suppress_none_returning = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"suppress-dummy-args" | "suppress_dummy_args" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_annotations.suppress_dummy_args = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"mypy-init-return" | "mypy_init_return" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_annotations.mypy_init_return = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"allow-star-arg-any" | "allow_star_arg_any" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_annotations.allow_star_arg_any = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// flake8-quotes
|
||||||
|
"quotes" | "inline-quotes" | "inline_quotes" => match value.trim() {
|
||||||
|
"'" | "single" => flake8_quotes.inline_quotes = Some(Quote::Single),
|
||||||
|
"\"" | "double" => flake8_quotes.inline_quotes = Some(Quote::Double),
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"multiline-quotes" | "multiline_quotes" => match value.trim() {
|
||||||
|
"'" | "single" => flake8_quotes.multiline_quotes = Some(Quote::Single),
|
||||||
|
"\"" | "double" => flake8_quotes.multiline_quotes = Some(Quote::Double),
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"docstring-quotes" | "docstring_quotes" => match value.trim() {
|
||||||
|
"'" | "single" => flake8_quotes.docstring_quotes = Some(Quote::Single),
|
||||||
|
"\"" | "double" => flake8_quotes.docstring_quotes = Some(Quote::Double),
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"avoid-escape" | "avoid_escape" => match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_quotes.avoid_escape = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// pep8-naming
|
||||||
|
"ignore-names" | "ignore_names" => {
|
||||||
|
pep8_naming.ignore_names = Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"classmethod-decorators" | "classmethod_decorators" => {
|
||||||
|
pep8_naming.classmethod_decorators =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"staticmethod-decorators" | "staticmethod_decorators" => {
|
||||||
|
pep8_naming.staticmethod_decorators =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
// flake8-tidy-imports
|
||||||
|
"ban-relative-imports" | "ban_relative_imports" => match value.trim() {
|
||||||
|
"true" => flake8_tidy_imports.ban_relative_imports = Some(Strictness::All),
|
||||||
|
"parents" => {
|
||||||
|
flake8_tidy_imports.ban_relative_imports = Some(Strictness::Parents);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// flake8-docstrings
|
||||||
|
"docstring-convention" => match value.trim() {
|
||||||
|
"google" => pydocstyle.convention = Some(Convention::Google),
|
||||||
|
"numpy" => pydocstyle.convention = Some(Convention::Numpy),
|
||||||
|
"pep257" => pydocstyle.convention = Some(Convention::Pep257),
|
||||||
|
"all" => pydocstyle.convention = None,
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// mccabe
|
||||||
|
"max-complexity" | "max_complexity" => match value.parse::<usize>() {
|
||||||
|
Ok(max_complexity) => mccabe.max_complexity = Some(max_complexity),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// flake8-errmsg
|
||||||
|
"errmsg-max-string-length" | "errmsg_max_string_length" => {
|
||||||
|
match value.parse::<usize>() {
|
||||||
|
Ok(max_string_length) => {
|
||||||
|
flake8_errmsg.max_string_length = Some(max_string_length);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// flake8-pytest-style
|
||||||
|
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_pytest_style.fixture_parentheses = Some(!bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
|
||||||
|
match value.trim() {
|
||||||
|
"csv" => {
|
||||||
|
flake8_pytest_style.parametrize_names_type =
|
||||||
|
Some(ParametrizeNameType::Csv);
|
||||||
|
}
|
||||||
|
"tuple" => {
|
||||||
|
flake8_pytest_style.parametrize_names_type =
|
||||||
|
Some(ParametrizeNameType::Tuple);
|
||||||
|
}
|
||||||
|
"list" => {
|
||||||
|
flake8_pytest_style.parametrize_names_type =
|
||||||
|
Some(ParametrizeNameType::List);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
|
||||||
|
match value.trim() {
|
||||||
|
"tuple" => {
|
||||||
|
flake8_pytest_style.parametrize_values_type =
|
||||||
|
Some(ParametrizeValuesType::Tuple);
|
||||||
|
}
|
||||||
|
"list" => {
|
||||||
|
flake8_pytest_style.parametrize_values_type =
|
||||||
|
Some(ParametrizeValuesType::List);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
|
||||||
|
match value.trim() {
|
||||||
|
"tuple" => {
|
||||||
|
flake8_pytest_style.parametrize_values_row_type =
|
||||||
|
Some(ParametrizeValuesRowType::Tuple);
|
||||||
|
}
|
||||||
|
"list" => {
|
||||||
|
flake8_pytest_style.parametrize_values_row_type =
|
||||||
|
Some(ParametrizeValuesRowType::List);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
|
||||||
|
flake8_pytest_style.raises_require_match_for =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_pytest_style.mark_parentheses = Some(!bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Unknown
|
||||||
|
_ => {
|
||||||
|
warn_user!("Skipping unsupported property: {}", key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deduplicate and sort.
|
||||||
|
lint_options.select = Some(
|
||||||
|
select
|
||||||
|
.into_iter()
|
||||||
|
.sorted_by_key(RuleSelector::prefix_and_code)
|
||||||
|
.collect(),
|
||||||
|
);
|
||||||
|
lint_options.ignore = Some(
|
||||||
|
ignore
|
||||||
|
.into_iter()
|
||||||
|
.sorted_by_key(RuleSelector::prefix_and_code)
|
||||||
|
.collect(),
|
||||||
|
);
|
||||||
|
if flake8_annotations != Flake8AnnotationsOptions::default() {
|
||||||
|
lint_options.flake8_annotations = Some(flake8_annotations);
|
||||||
|
}
|
||||||
|
if flake8_bugbear != Flake8BugbearOptions::default() {
|
||||||
|
lint_options.flake8_bugbear = Some(flake8_bugbear);
|
||||||
|
}
|
||||||
|
if flake8_builtins != Flake8BuiltinsOptions::default() {
|
||||||
|
lint_options.flake8_builtins = Some(flake8_builtins);
|
||||||
|
}
|
||||||
|
if flake8_errmsg != Flake8ErrMsgOptions::default() {
|
||||||
|
lint_options.flake8_errmsg = Some(flake8_errmsg);
|
||||||
|
}
|
||||||
|
if flake8_pytest_style != Flake8PytestStyleOptions::default() {
|
||||||
|
lint_options.flake8_pytest_style = Some(flake8_pytest_style);
|
||||||
|
}
|
||||||
|
if flake8_quotes != Flake8QuotesOptions::default() {
|
||||||
|
lint_options.flake8_quotes = Some(flake8_quotes);
|
||||||
|
}
|
||||||
|
if flake8_tidy_imports != Flake8TidyImportsOptions::default() {
|
||||||
|
lint_options.flake8_tidy_imports = Some(flake8_tidy_imports);
|
||||||
|
}
|
||||||
|
if mccabe != McCabeOptions::default() {
|
||||||
|
lint_options.mccabe = Some(mccabe);
|
||||||
|
}
|
||||||
|
if pep8_naming != Pep8NamingOptions::default() {
|
||||||
|
lint_options.pep8_naming = Some(pep8_naming);
|
||||||
|
}
|
||||||
|
if pydocstyle != PydocstyleOptions::default() {
|
||||||
|
lint_options.pydocstyle = Some(pydocstyle);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract any settings from the existing `pyproject.toml`.
|
||||||
|
if let Some(black) = &external_config.black {
|
||||||
|
if let Some(line_length) = &black.line_length {
|
||||||
|
options.line_length = Some(*line_length);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(target_version) = &black.target_version {
|
||||||
|
if let Some(target_version) = target_version.iter().min() {
|
||||||
|
options.target_version = Some(*target_version);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(isort) = &external_config.isort {
|
||||||
|
if let Some(src_paths) = &isort.src_paths {
|
||||||
|
match options.src.as_mut() {
|
||||||
|
Some(src) => {
|
||||||
|
src.extend_from_slice(src_paths);
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
options.src = Some(src_paths.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(project) = &external_config.project {
|
||||||
|
if let Some(requires_python) = &project.requires_python {
|
||||||
|
if options.target_version.is_none() {
|
||||||
|
options.target_version =
|
||||||
|
PythonVersion::get_minimum_supported_version(requires_python);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if lint_options != LintCommonOptions::default() {
|
||||||
|
options.lint = Some(LintOptions {
|
||||||
|
common: lint_options,
|
||||||
|
..LintOptions::default()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the pyproject.toml.
|
||||||
|
Pyproject::new(options)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Resolve the set of enabled `RuleSelector` values for the given
|
||||||
|
/// plugins.
|
||||||
|
fn resolve_select(plugins: &[Plugin]) -> HashSet<RuleSelector> {
|
||||||
|
let mut select: HashSet<_> = DEFAULT_SELECTORS.iter().cloned().collect();
|
||||||
|
select.extend(plugins.iter().map(|p| Linter::from(p).into()));
|
||||||
|
select
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use std::str::FromStr;

    use anyhow::Result;
    use itertools::Itertools;
    use pep440_rs::VersionSpecifiers;

    use pretty_assertions::assert_eq;
    use ruff_linter::line_width::LineLength;
    use ruff_linter::registry::Linter;
    use ruff_linter::rule_selector::RuleSelector;
    use ruff_linter::rules::flake8_quotes;
    use ruff_linter::rules::pydocstyle::settings::Convention;
    use ruff_linter::settings::types::PythonVersion;
    use ruff_workspace::options::{
        Flake8QuotesOptions, LintCommonOptions, LintOptions, Options, PydocstyleOptions,
    };
    use ruff_workspace::pyproject::Pyproject;

    use crate::converter::DEFAULT_SELECTORS;
    use crate::pep621::Project;
    use crate::ExternalConfig;

    use super::super::plugin::Plugin;
    use super::convert;

    /// Build the `LintCommonOptions` that `convert` emits for an empty
    /// Flake8 config: the default selectors plus any plugin-derived
    /// selectors, sorted by prefix-and-code, with an empty `ignore` list.
    fn lint_default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> LintCommonOptions {
        LintCommonOptions {
            ignore: Some(vec![]),
            select: Some(
                DEFAULT_SELECTORS
                    .iter()
                    .cloned()
                    .chain(plugins)
                    .sorted_by_key(RuleSelector::prefix_and_code)
                    .collect(),
            ),
            ..LintCommonOptions::default()
        }
    }

    // An empty `[flake8]` section converts to the default lint options.
    #[test]
    fn it_converts_empty() {
        let actual = convert(
            &HashMap::from([("flake8".to_string(), HashMap::default())]),
            &ExternalConfig::default(),
            None,
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    // Dash-separated keys (e.g. `max-line-length`) are recognized.
    #[test]
    fn it_converts_dashes() {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("max-line-length".to_string(), Some("100".to_string()))]),
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            line_length: Some(LineLength::try_from(100).unwrap()),
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    // Underscore-separated keys (e.g. `max_line_length`) are recognized too.
    #[test]
    fn it_converts_underscores() {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("max_line_length".to_string(), Some("100".to_string()))]),
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            line_length: Some(LineLength::try_from(100).unwrap()),
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    // Unparseable values (e.g. a non-numeric line length) are skipped, not fatal.
    #[test]
    fn it_ignores_parse_errors() {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("max_line_length".to_string(), Some("abc".to_string()))]),
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    // Plugin-specific settings (here: flake8-quotes) map onto Ruff's options.
    #[test]
    fn it_converts_plugin_options() {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: LintCommonOptions {
                    flake8_quotes: Some(Flake8QuotesOptions {
                        inline_quotes: Some(flake8_quotes::settings::Quote::Single),
                        multiline_quotes: None,
                        docstring_quotes: None,
                        avoid_escape: None,
                    }),
                    ..lint_default_options([])
                },
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    // `docstring-convention` maps to the pydocstyle convention when the
    // flake8-docstrings plugin is enabled.
    #[test]
    fn it_converts_docstring_conventions() {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([(
                    "docstring-convention".to_string(),
                    Some("numpy".to_string()),
                )]),
            )]),
            &ExternalConfig::default(),
            Some(vec![Plugin::Flake8Docstrings]),
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: LintCommonOptions {
                    pydocstyle: Some(PydocstyleOptions {
                        convention: Some(Convention::Numpy),
                        ignore_decorators: None,
                        property_decorators: None,
                    }),
                    ..lint_default_options([Linter::Pydocstyle.into()])
                },
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    // When no plugin list is supplied, plugins are inferred from the
    // settings that are present (here: flake8-quotes).
    #[test]
    fn it_infers_plugins_if_omitted() {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
            )]),
            &ExternalConfig::default(),
            None,
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: LintCommonOptions {
                    flake8_quotes: Some(Flake8QuotesOptions {
                        inline_quotes: Some(flake8_quotes::settings::Quote::Single),
                        multiline_quotes: None,
                        docstring_quotes: None,
                        avoid_escape: None,
                    }),
                    ..lint_default_options([Linter::Flake8Quotes.into()])
                },
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    // `project.requires-python` yields the minimum supported target version.
    #[test]
    fn it_converts_project_requires_python() -> Result<()> {
        let actual = convert(
            &HashMap::from([("flake8".to_string(), HashMap::default())]),
            &ExternalConfig {
                project: Some(&Project {
                    requires_python: Some(VersionSpecifiers::from_str(">=3.8.16, <3.11")?),
                }),
                ..ExternalConfig::default()
            },
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            target_version: Some(PythonVersion::Py38),
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);

        Ok(())
    }
}
|
||||||
10
crates/flake8_to_ruff/src/external_config.rs
Normal file
10
crates/flake8_to_ruff/src/external_config.rs
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
use super::black::Black;
|
||||||
|
use super::isort::Isort;
|
||||||
|
use super::pep621::Project;
|
||||||
|
|
||||||
|
/// Configuration gathered from sources other than the Flake8 INI file
/// (sections of an adjacent `pyproject.toml`), used to refine the
/// generated Ruff settings. All fields are optional borrows.
#[derive(Default)]
pub(crate) struct ExternalConfig<'a> {
    // `[tool.black]` settings (line length, target versions).
    pub(crate) black: Option<&'a Black>,
    // `[tool.isort]` settings (source paths).
    pub(crate) isort: Option<&'a Isort>,
    // PEP 621 `[project]` metadata (requires-python).
    pub(crate) project: Option<&'a Project>,
}
|
||||||
10
crates/flake8_to_ruff/src/isort.rs
Normal file
10
crates/flake8_to_ruff/src/isort.rs
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
//! Extract isort configuration settings from a pyproject.toml.
|
||||||
|
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// The [isort configuration](https://pycqa.github.io/isort/docs/configuration/config_files.html).
///
/// Only the subset that maps onto Ruff settings is deserialized;
/// both the dashed and underscored key spellings are accepted.
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Isort {
    #[serde(alias = "src-paths", alias = "src_paths")]
    pub(crate) src_paths: Option<Vec<String>>,
}
|
||||||
80
crates/flake8_to_ruff/src/main.rs
Normal file
80
crates/flake8_to_ruff/src/main.rs
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.
|
||||||
|
|
||||||
|
mod black;
|
||||||
|
mod converter;
|
||||||
|
mod external_config;
|
||||||
|
mod isort;
|
||||||
|
mod parser;
|
||||||
|
mod pep621;
|
||||||
|
mod plugin;
|
||||||
|
mod pyproject;
|
||||||
|
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use clap::Parser;
|
||||||
|
use configparser::ini::Ini;
|
||||||
|
|
||||||
|
use crate::converter::convert;
|
||||||
|
use crate::external_config::ExternalConfig;
|
||||||
|
use crate::plugin::Plugin;
|
||||||
|
use crate::pyproject::parse;
|
||||||
|
use ruff_linter::logging::{set_up_logging, LogLevel};
|
||||||
|
|
||||||
|
// Command-line interface for the flake8-to-ruff converter. Field doc
// comments double as clap help text, so per-field documentation is kept
// verbatim and extra notes use plain `//` comments (invisible to clap).
#[derive(Parser)]
#[command(
    about = "Convert existing Flake8 configuration to Ruff.",
    long_about = None
)]
struct Args {
    /// Path to the Flake8 configuration file (e.g., `setup.cfg`, `tox.ini`, or
    /// `.flake8`).
    #[arg(required = true)]
    file: PathBuf,
    /// Optional path to a `pyproject.toml` file, used to ensure compatibility
    /// with Black.
    #[arg(long)]
    pyproject: Option<PathBuf>,
    /// List of plugins to enable.
    // Comma-delimited, e.g. `--plugin flake8-bugbear,flake8-quotes`.
    #[arg(long, value_delimiter = ',')]
    plugin: Option<Vec<Plugin>>,
}
|
||||||
|
|
||||||
|
fn main() -> Result<()> {
|
||||||
|
set_up_logging(&LogLevel::Default)?;
|
||||||
|
|
||||||
|
let args = Args::parse();
|
||||||
|
|
||||||
|
// Read the INI file.
|
||||||
|
let mut ini = Ini::new_cs();
|
||||||
|
ini.set_multiline(true);
|
||||||
|
let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;
|
||||||
|
|
||||||
|
// Read the pyproject.toml file.
|
||||||
|
let pyproject = args.pyproject.map(parse).transpose()?;
|
||||||
|
let external_config = pyproject
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|pyproject| pyproject.tool.as_ref())
|
||||||
|
.map(|tool| ExternalConfig {
|
||||||
|
black: tool.black.as_ref(),
|
||||||
|
isort: tool.isort.as_ref(),
|
||||||
|
..Default::default()
|
||||||
|
})
|
||||||
|
.unwrap_or_default();
|
||||||
|
let external_config = ExternalConfig {
|
||||||
|
project: pyproject
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|pyproject| pyproject.project.as_ref()),
|
||||||
|
..external_config
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create Ruff's pyproject.toml section.
|
||||||
|
let pyproject = convert(&config, &external_config, args.plugin);
|
||||||
|
|
||||||
|
#[allow(clippy::print_stdout)]
|
||||||
|
{
|
||||||
|
println!("{}", toml::to_string_pretty(&pyproject)?);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
391
crates/flake8_to_ruff/src/parser.rs
Normal file
391
crates/flake8_to_ruff/src/parser.rs
Normal file
@@ -0,0 +1,391 @@
|
|||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use anyhow::{bail, Result};
|
||||||
|
use once_cell::sync::Lazy;
|
||||||
|
use regex::Regex;
|
||||||
|
use rustc_hash::FxHashMap;
|
||||||
|
|
||||||
|
use ruff_linter::settings::types::PatternPrefixPair;
|
||||||
|
use ruff_linter::{warn_user, RuleSelector};
|
||||||
|
|
||||||
|
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
|
||||||
|
|
||||||
|
/// Parse a comma-separated list of `RuleSelector` values (e.g.,
|
||||||
|
/// "F401,E501").
|
||||||
|
pub(crate) fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
|
||||||
|
let mut codes: Vec<RuleSelector> = vec![];
|
||||||
|
for code in COMMA_SEPARATED_LIST_RE.split(value) {
|
||||||
|
let code = code.trim();
|
||||||
|
if code.is_empty() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if let Ok(code) = RuleSelector::from_str(code) {
|
||||||
|
codes.push(code);
|
||||||
|
} else {
|
||||||
|
warn_user!("Unsupported prefix code: {code}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
codes
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a comma-separated list of strings (e.g., "__init__.py,__main__.py").
|
||||||
|
pub(crate) fn parse_strings(value: &str) -> Vec<String> {
|
||||||
|
COMMA_SEPARATED_LIST_RE
|
||||||
|
.split(value)
|
||||||
|
.map(str::trim)
|
||||||
|
.filter(|part| !part.is_empty())
|
||||||
|
.map(String::from)
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a boolean.
|
||||||
|
pub(crate) fn parse_bool(value: &str) -> Result<bool> {
|
||||||
|
match value.trim() {
|
||||||
|
"true" => Ok(true),
|
||||||
|
"false" => Ok(false),
|
||||||
|
_ => bail!("Unexpected boolean value: {value}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A single lexeme from the 'files-to-codes' mapping, pairing the token
/// class with its (trimmed) source text.
#[derive(Debug)]
struct Token {
    token_name: TokenType,
    src: String,
}
|
||||||
|
|
||||||
|
/// Token classes recognized by `tokenize_files_to_codes_mapping`.
#[derive(Debug, Copy, Clone)]
enum TokenType {
    // A rule code like `F401` (uppercase letters + optional digits).
    Code,
    // A filename or glob pattern (anything without space, colon, or comma).
    File,
    // The `:` separating filenames from codes.
    Colon,
    Comma,
    // A run of whitespace.
    Ws,
    // Synthetic end-of-input marker, always appended last.
    Eof,
}
|
||||||
|
|
||||||
|
/// Parser state for one `filenames: codes` group of the
/// 'files-to-codes' mapping.
struct State {
    // Whether a separator (comma/whitespace/colon) was just consumed,
    // i.e. a new filename or code token may legally start here.
    seen_sep: bool,
    // Whether the `:` has been consumed, switching from collecting
    // filenames to collecting codes.
    seen_colon: bool,
    filenames: Vec<String>,
    codes: Vec<String>,
}
|
||||||
|
|
||||||
|
impl State {
    /// Fresh state at the start of a group; `seen_sep` starts `true`
    /// so the first filename token is accepted.
    const fn new() -> Self {
        Self {
            seen_sep: true,
            seen_colon: false,
            filenames: vec![],
            codes: vec![],
        }
    }

    /// Generate the list of `StrRuleCodePair` pairs for the current
    /// state: the cross product of every parsed code with every
    /// filename. Unrecognized codes are warned about and skipped.
    fn parse(&self) -> Vec<PatternPrefixPair> {
        let mut codes: Vec<PatternPrefixPair> = vec![];
        for code in &self.codes {
            if let Ok(code) = RuleSelector::from_str(code) {
                for filename in &self.filenames {
                    codes.push(PatternPrefixPair {
                        pattern: filename.clone(),
                        prefix: code.clone(),
                    });
                }
            } else {
                warn_user!("Unsupported prefix code: {code}");
            }
        }
        codes
    }
}
|
||||||
|
|
||||||
|
/// Tokenize the raw 'files-to-codes' mapping.
|
||||||
|
fn tokenize_files_to_codes_mapping(value: &str) -> Vec<Token> {
|
||||||
|
let mut tokens = vec![];
|
||||||
|
let mut i = 0;
|
||||||
|
while i < value.len() {
|
||||||
|
for (token_re, token_name) in [
|
||||||
|
(
|
||||||
|
Regex::new(r"([A-Z]+[0-9]*)(?:$|\s|,)").unwrap(),
|
||||||
|
TokenType::Code,
|
||||||
|
),
|
||||||
|
(Regex::new(r"([^\s:,]+)").unwrap(), TokenType::File),
|
||||||
|
(Regex::new(r"(\s*:\s*)").unwrap(), TokenType::Colon),
|
||||||
|
(Regex::new(r"(\s*,\s*)").unwrap(), TokenType::Comma),
|
||||||
|
(Regex::new(r"(\s+)").unwrap(), TokenType::Ws),
|
||||||
|
] {
|
||||||
|
if let Some(cap) = token_re.captures(&value[i..]) {
|
||||||
|
let mat = cap.get(1).unwrap();
|
||||||
|
if mat.start() == 0 {
|
||||||
|
tokens.push(Token {
|
||||||
|
token_name,
|
||||||
|
src: mat.as_str().trim().to_string(),
|
||||||
|
});
|
||||||
|
i += mat.end();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tokens.push(Token {
|
||||||
|
token_name: TokenType::Eof,
|
||||||
|
src: String::new(),
|
||||||
|
});
|
||||||
|
tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a 'files-to-codes' mapping, mimicking Flake8's internal logic.
/// See: <https://github.com/PyCQA/flake8/blob/7dfe99616fc2f07c0017df2ba5fa884158f3ea8a/src/flake8/utils.py#L45>
pub(crate) fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
    if value.trim().is_empty() {
        return Ok(vec![]);
    }
    let mut codes: Vec<PatternPrefixPair> = vec![];
    let mut state = State::new();
    for token in tokenize_files_to_codes_mapping(value) {
        if matches!(token.token_name, TokenType::Comma | TokenType::Ws) {
            // Separators are legal anywhere; just note that one was seen.
            state.seen_sep = true;
        } else if !state.seen_colon {
            // Before the colon: collecting filenames.
            if matches!(token.token_name, TokenType::Colon) {
                state.seen_colon = true;
                state.seen_sep = true;
            } else if state.seen_sep && matches!(token.token_name, TokenType::File) {
                state.filenames.push(token.src);
                state.seen_sep = false;
            } else {
                bail!("Unexpected token: {:?}", token.token_name);
            }
        } else {
            // After the colon: collecting codes for the current filenames.
            if matches!(token.token_name, TokenType::Eof) {
                // End of input: flush the accumulated group.
                codes.extend(state.parse());
                state = State::new();
            } else if state.seen_sep && matches!(token.token_name, TokenType::Code) {
                state.codes.push(token.src);
                state.seen_sep = false;
            } else if state.seen_sep && matches!(token.token_name, TokenType::File) {
                // A filename here starts the next group: flush the
                // current one and begin again with this filename.
                codes.extend(state.parse());
                state = State::new();
                state.filenames.push(token.src);
                state.seen_sep = false;
            } else {
                bail!("Unexpected token: {:?}", token.token_name);
            }
        }
    }
    Ok(codes)
}
|
||||||
|
|
||||||
|
/// Collect a list of `PatternPrefixPair` structs as a `BTreeMap`.
|
||||||
|
pub(crate) fn collect_per_file_ignores(
|
||||||
|
pairs: Vec<PatternPrefixPair>,
|
||||||
|
) -> FxHashMap<String, Vec<RuleSelector>> {
|
||||||
|
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
|
||||||
|
for pair in pairs {
|
||||||
|
per_file_ignores
|
||||||
|
.entry(pair.pattern)
|
||||||
|
.or_default()
|
||||||
|
.push(pair.prefix);
|
||||||
|
}
|
||||||
|
per_file_ignores
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use anyhow::Result;

    use ruff_linter::codes;
    use ruff_linter::registry::Linter;
    use ruff_linter::settings::types::PatternPrefixPair;
    use ruff_linter::RuleSelector;

    use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};

    // Empty and whitespace-only input yield no selectors; trailing commas
    // and surrounding spaces are tolerated.
    #[test]
    fn it_parses_prefix_codes() {
        let actual = parse_prefix_codes("");
        let expected: Vec<RuleSelector> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes(" ");
        let expected: Vec<RuleSelector> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401");
        let expected = vec![codes::Pyflakes::_401.into()];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401,");
        let expected = vec![codes::Pyflakes::_401.into()];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401,E501");
        let expected = vec![
            codes::Pyflakes::_401.into(),
            codes::Pycodestyle::E501.into(),
        ];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401, E501");
        let expected = vec![
            codes::Pyflakes::_401.into(),
            codes::Pycodestyle::E501.into(),
        ];
        assert_eq!(actual, expected);
    }

    // Same tolerance rules for plain string lists.
    #[test]
    fn it_parses_strings() {
        let actual = parse_strings("");
        let expected: Vec<String> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_strings(" ");
        let expected: Vec<String> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py");
        let expected = vec!["__init__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py,");
        let expected = vec!["__init__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py,__main__.py");
        let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py, __main__.py");
        let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
        assert_eq!(actual, expected);
    }

    // Real-world `per-file-ignores` blocks taken from popular projects.
    #[test]
    fn it_parse_files_to_codes_mapping() -> Result<()> {
        let actual = parse_files_to_codes_mapping("")?;
        let expected: Vec<PatternPrefixPair> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_files_to_codes_mapping(" ")?;
        let expected: Vec<PatternPrefixPair> = vec![];
        assert_eq!(actual, expected);

        // Ex) locust
        // Note: the `*.pyi: E302,E704` entries are absent from `expected`
        // because those codes are unsupported and get warned-and-dropped.
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
                locust/test/*: F841
                examples/*: F841
                *.pyi: E302,E704"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "locust/test/*".to_string(),
                prefix: codes::Pyflakes::_841.into(),
            },
            PatternPrefixPair {
                pattern: "examples/*".to_string(),
                prefix: codes::Pyflakes::_841.into(),
            },
        ];
        assert_eq!(actual, expected);

        // Ex) celery — multiple comma-separated filenames sharing one
        // linter-prefix code (`D`), with a trailing comma.
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
                t/*,setup.py,examples/*,docs/*,extra/*:
                    D,"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "t/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "setup.py".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "examples/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "docs/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "extra/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
        ];
        assert_eq!(actual, expected);

        // Ex) scrapy — one file per line, some with multiple codes
        // (which expand to one pair per code).
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
                scrapy/__init__.py:E402
                scrapy/core/downloader/handlers/http.py:F401
                scrapy/http/__init__.py:F401
                scrapy/linkextractors/__init__.py:E402,F401
                scrapy/selector/__init__.py:F401
                scrapy/spiders/__init__.py:E402,F401
                scrapy/utils/url.py:F403,F405
                tests/test_loader.py:E741"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "scrapy/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/http/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/linkextractors/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/linkextractors/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/selector/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/spiders/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/spiders/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/utils/url.py".to_string(),
                prefix: codes::Pyflakes::_403.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/utils/url.py".to_string(),
                prefix: codes::Pyflakes::_405.into(),
            },
            PatternPrefixPair {
                pattern: "tests/test_loader.py".to_string(),
                prefix: codes::Pycodestyle::E741.into(),
            },
        ];
        assert_eq!(actual, expected);

        Ok(())
    }
}
|
||||||
10
crates/flake8_to_ruff/src/pep621.rs
Normal file
10
crates/flake8_to_ruff/src/pep621.rs
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
//! Extract PEP 621 configuration settings from a pyproject.toml.
|
||||||
|
|
||||||
|
use pep440_rs::VersionSpecifiers;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// The PEP 621 `[project]` table, reduced to the single field this tool
/// consumes: `requires-python`, used to pick a target Python version.
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Project {
    #[serde(alias = "requires-python", alias = "requires_python")]
    pub(crate) requires_python: Option<VersionSpecifiers>,
}
|
||||||
368
crates/flake8_to_ruff/src/plugin.rs
Normal file
368
crates/flake8_to_ruff/src/plugin.rs
Normal file
@@ -0,0 +1,368 @@
|
|||||||
|
use std::collections::{BTreeSet, HashMap, HashSet};
|
||||||
|
use std::fmt;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use anyhow::anyhow;
|
||||||
|
use ruff_linter::registry::Linter;
|
||||||
|
use ruff_linter::rule_selector::PreviewOptions;
|
||||||
|
use ruff_linter::RuleSelector;
|
||||||
|
|
||||||
|
/// A Flake8 plugin whose behavior Ruff re-implements. Plugins can be
/// selected explicitly on the command line or inferred from the settings
/// present in the Flake8 configuration; each maps onto a Ruff `Linter`.
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Plugin {
    Flake82020,
    Flake8Annotations,
    Flake8Bandit,
    Flake8BlindExcept,
    Flake8BooleanTrap,
    Flake8Bugbear,
    Flake8Builtins,
    Flake8Commas,
    Flake8Comprehensions,
    Flake8Datetimez,
    Flake8Debugger,
    Flake8Docstrings,
    Flake8Eradicate,
    Flake8ErrMsg,
    Flake8Executable,
    Flake8ImplicitStrConcat,
    Flake8ImportConventions,
    Flake8NoPep420,
    Flake8Pie,
    Flake8Print,
    Flake8PytestStyle,
    Flake8Quotes,
    Flake8Return,
    Flake8Simplify,
    Flake8TidyImports,
    Flake8TypeChecking,
    Flake8UnusedArguments,
    Flake8UsePathlib,
    McCabe,
    PEP8Naming,
    PandasVet,
    Pyupgrade,
    Tryceratops,
}
|
||||||
|
|
||||||
|
impl FromStr for Plugin {
|
||||||
|
type Err = anyhow::Error;
|
||||||
|
|
||||||
|
fn from_str(string: &str) -> Result<Self, Self::Err> {
|
||||||
|
match string {
|
||||||
|
"flake8-2020" => Ok(Plugin::Flake82020),
|
||||||
|
"flake8-annotations" => Ok(Plugin::Flake8Annotations),
|
||||||
|
"flake8-bandit" => Ok(Plugin::Flake8Bandit),
|
||||||
|
"flake8-blind-except" => Ok(Plugin::Flake8BlindExcept),
|
||||||
|
"flake8-boolean-trap" => Ok(Plugin::Flake8BooleanTrap),
|
||||||
|
"flake8-bugbear" => Ok(Plugin::Flake8Bugbear),
|
||||||
|
"flake8-builtins" => Ok(Plugin::Flake8Builtins),
|
||||||
|
"flake8-commas" => Ok(Plugin::Flake8Commas),
|
||||||
|
"flake8-comprehensions" => Ok(Plugin::Flake8Comprehensions),
|
||||||
|
"flake8-datetimez" => Ok(Plugin::Flake8Datetimez),
|
||||||
|
"flake8-debugger" => Ok(Plugin::Flake8Debugger),
|
||||||
|
"flake8-docstrings" => Ok(Plugin::Flake8Docstrings),
|
||||||
|
"flake8-eradicate" => Ok(Plugin::Flake8Eradicate),
|
||||||
|
"flake8-errmsg" => Ok(Plugin::Flake8ErrMsg),
|
||||||
|
"flake8-executable" => Ok(Plugin::Flake8Executable),
|
||||||
|
"flake8-implicit-str-concat" => Ok(Plugin::Flake8ImplicitStrConcat),
|
||||||
|
"flake8-import-conventions" => Ok(Plugin::Flake8ImportConventions),
|
||||||
|
"flake8-no-pep420" => Ok(Plugin::Flake8NoPep420),
|
||||||
|
"flake8-pie" => Ok(Plugin::Flake8Pie),
|
||||||
|
"flake8-print" => Ok(Plugin::Flake8Print),
|
||||||
|
"flake8-pytest-style" => Ok(Plugin::Flake8PytestStyle),
|
||||||
|
"flake8-quotes" => Ok(Plugin::Flake8Quotes),
|
||||||
|
"flake8-return" => Ok(Plugin::Flake8Return),
|
||||||
|
"flake8-simplify" => Ok(Plugin::Flake8Simplify),
|
||||||
|
"flake8-tidy-imports" => Ok(Plugin::Flake8TidyImports),
|
||||||
|
"flake8-type-checking" => Ok(Plugin::Flake8TypeChecking),
|
||||||
|
"flake8-unused-arguments" => Ok(Plugin::Flake8UnusedArguments),
|
||||||
|
"flake8-use-pathlib" => Ok(Plugin::Flake8UsePathlib),
|
||||||
|
"mccabe" => Ok(Plugin::McCabe),
|
||||||
|
"pep8-naming" => Ok(Plugin::PEP8Naming),
|
||||||
|
"pandas-vet" => Ok(Plugin::PandasVet),
|
||||||
|
"pyupgrade" => Ok(Plugin::Pyupgrade),
|
||||||
|
"tryceratops" => Ok(Plugin::Tryceratops),
|
||||||
|
_ => Err(anyhow!("Unknown plugin: {string}")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Debug for Plugin {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"{}",
|
||||||
|
match self {
|
||||||
|
Plugin::Flake82020 => "flake8-2020",
|
||||||
|
Plugin::Flake8Annotations => "flake8-annotations",
|
||||||
|
Plugin::Flake8Bandit => "flake8-bandit",
|
||||||
|
Plugin::Flake8BlindExcept => "flake8-blind-except",
|
||||||
|
Plugin::Flake8BooleanTrap => "flake8-boolean-trap",
|
||||||
|
Plugin::Flake8Bugbear => "flake8-bugbear",
|
||||||
|
Plugin::Flake8Builtins => "flake8-builtins",
|
||||||
|
Plugin::Flake8Commas => "flake8-commas",
|
||||||
|
Plugin::Flake8Comprehensions => "flake8-comprehensions",
|
||||||
|
Plugin::Flake8Datetimez => "flake8-datetimez",
|
||||||
|
Plugin::Flake8Debugger => "flake8-debugger",
|
||||||
|
Plugin::Flake8Docstrings => "flake8-docstrings",
|
||||||
|
Plugin::Flake8Eradicate => "flake8-eradicate",
|
||||||
|
Plugin::Flake8ErrMsg => "flake8-errmsg",
|
||||||
|
Plugin::Flake8Executable => "flake8-executable",
|
||||||
|
Plugin::Flake8ImplicitStrConcat => "flake8-implicit-str-concat",
|
||||||
|
Plugin::Flake8ImportConventions => "flake8-import-conventions",
|
||||||
|
Plugin::Flake8NoPep420 => "flake8-no-pep420",
|
||||||
|
Plugin::Flake8Pie => "flake8-pie",
|
||||||
|
Plugin::Flake8Print => "flake8-print",
|
||||||
|
Plugin::Flake8PytestStyle => "flake8-pytest-style",
|
||||||
|
Plugin::Flake8Quotes => "flake8-quotes",
|
||||||
|
Plugin::Flake8Return => "flake8-return",
|
||||||
|
Plugin::Flake8Simplify => "flake8-simplify",
|
||||||
|
Plugin::Flake8TidyImports => "flake8-tidy-imports",
|
||||||
|
Plugin::Flake8TypeChecking => "flake8-type-checking",
|
||||||
|
Plugin::Flake8UnusedArguments => "flake8-unused-arguments",
|
||||||
|
Plugin::Flake8UsePathlib => "flake8-use-pathlib",
|
||||||
|
Plugin::McCabe => "mccabe",
|
||||||
|
Plugin::PEP8Naming => "pep8-naming",
|
||||||
|
Plugin::PandasVet => "pandas-vet",
|
||||||
|
Plugin::Pyupgrade => "pyupgrade",
|
||||||
|
Plugin::Tryceratops => "tryceratops",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&Plugin> for Linter {
|
||||||
|
fn from(plugin: &Plugin) -> Self {
|
||||||
|
match plugin {
|
||||||
|
Plugin::Flake82020 => Linter::Flake82020,
|
||||||
|
Plugin::Flake8Annotations => Linter::Flake8Annotations,
|
||||||
|
Plugin::Flake8Bandit => Linter::Flake8Bandit,
|
||||||
|
Plugin::Flake8BlindExcept => Linter::Flake8BlindExcept,
|
||||||
|
Plugin::Flake8BooleanTrap => Linter::Flake8BooleanTrap,
|
||||||
|
Plugin::Flake8Bugbear => Linter::Flake8Bugbear,
|
||||||
|
Plugin::Flake8Builtins => Linter::Flake8Builtins,
|
||||||
|
Plugin::Flake8Commas => Linter::Flake8Commas,
|
||||||
|
Plugin::Flake8Comprehensions => Linter::Flake8Comprehensions,
|
||||||
|
Plugin::Flake8Datetimez => Linter::Flake8Datetimez,
|
||||||
|
Plugin::Flake8Debugger => Linter::Flake8Debugger,
|
||||||
|
Plugin::Flake8Docstrings => Linter::Pydocstyle,
|
||||||
|
Plugin::Flake8Eradicate => Linter::Eradicate,
|
||||||
|
Plugin::Flake8ErrMsg => Linter::Flake8ErrMsg,
|
||||||
|
Plugin::Flake8Executable => Linter::Flake8Executable,
|
||||||
|
Plugin::Flake8ImplicitStrConcat => Linter::Flake8ImplicitStrConcat,
|
||||||
|
Plugin::Flake8ImportConventions => Linter::Flake8ImportConventions,
|
||||||
|
Plugin::Flake8NoPep420 => Linter::Flake8NoPep420,
|
||||||
|
Plugin::Flake8Pie => Linter::Flake8Pie,
|
||||||
|
Plugin::Flake8Print => Linter::Flake8Print,
|
||||||
|
Plugin::Flake8PytestStyle => Linter::Flake8PytestStyle,
|
||||||
|
Plugin::Flake8Quotes => Linter::Flake8Quotes,
|
||||||
|
Plugin::Flake8Return => Linter::Flake8Return,
|
||||||
|
Plugin::Flake8Simplify => Linter::Flake8Simplify,
|
||||||
|
Plugin::Flake8TidyImports => Linter::Flake8TidyImports,
|
||||||
|
Plugin::Flake8TypeChecking => Linter::Flake8TypeChecking,
|
||||||
|
Plugin::Flake8UnusedArguments => Linter::Flake8UnusedArguments,
|
||||||
|
Plugin::Flake8UsePathlib => Linter::Flake8UsePathlib,
|
||||||
|
Plugin::McCabe => Linter::McCabe,
|
||||||
|
Plugin::PEP8Naming => Linter::PEP8Naming,
|
||||||
|
Plugin::PandasVet => Linter::PandasVet,
|
||||||
|
Plugin::Pyupgrade => Linter::Pyupgrade,
|
||||||
|
Plugin::Tryceratops => Linter::Tryceratops,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Infer the enabled plugins based on user-provided options.
|
||||||
|
///
|
||||||
|
/// For example, if the user specified a `mypy-init-return` setting, we should
|
||||||
|
/// infer that `flake8-annotations` is active.
|
||||||
|
pub(crate) fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> Vec<Plugin> {
|
||||||
|
let mut plugins = BTreeSet::new();
|
||||||
|
for key in flake8.keys() {
|
||||||
|
match key.as_str() {
|
||||||
|
// flake8-annotations
|
||||||
|
"suppress-none-returning" | "suppress_none_returning" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"suppress-dummy-args" | "suppress_dummy_args" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"allow-untyped-defs" | "allow_untyped_defs" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"allow-untyped-nested" | "allow_untyped_nested" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"mypy-init-return" | "mypy_init_return" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"dispatch-decorators" | "dispatch_decorators" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"overload-decorators" | "overload_decorators" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"allow-star-arg-any" | "allow_star_arg_any" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
// flake8-bugbear
|
||||||
|
"extend-immutable-calls" | "extend_immutable_calls" => {
|
||||||
|
plugins.insert(Plugin::Flake8Bugbear);
|
||||||
|
}
|
||||||
|
// flake8-builtins
|
||||||
|
"builtins-ignorelist" | "builtins_ignorelist" => {
|
||||||
|
plugins.insert(Plugin::Flake8Builtins);
|
||||||
|
}
|
||||||
|
// flake8-docstrings
|
||||||
|
"docstring-convention" | "docstring_convention" => {
|
||||||
|
plugins.insert(Plugin::Flake8Docstrings);
|
||||||
|
}
|
||||||
|
// flake8-eradicate
|
||||||
|
"eradicate-aggressive" | "eradicate_aggressive" => {
|
||||||
|
plugins.insert(Plugin::Flake8Eradicate);
|
||||||
|
}
|
||||||
|
"eradicate-whitelist" | "eradicate_whitelist" => {
|
||||||
|
plugins.insert(Plugin::Flake8Eradicate);
|
||||||
|
}
|
||||||
|
"eradicate-whitelist-extend" | "eradicate_whitelist_extend" => {
|
||||||
|
plugins.insert(Plugin::Flake8Eradicate);
|
||||||
|
}
|
||||||
|
// flake8-pytest-style
|
||||||
|
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
// flake8-quotes
|
||||||
|
"quotes" | "inline-quotes" | "inline_quotes" => {
|
||||||
|
plugins.insert(Plugin::Flake8Quotes);
|
||||||
|
}
|
||||||
|
"multiline-quotes" | "multiline_quotes" => {
|
||||||
|
plugins.insert(Plugin::Flake8Quotes);
|
||||||
|
}
|
||||||
|
"docstring-quotes" | "docstring_quotes" => {
|
||||||
|
plugins.insert(Plugin::Flake8Quotes);
|
||||||
|
}
|
||||||
|
"avoid-escape" | "avoid_escape" => {
|
||||||
|
plugins.insert(Plugin::Flake8Quotes);
|
||||||
|
}
|
||||||
|
// flake8-tidy-imports
|
||||||
|
"ban-relative-imports" | "ban_relative_imports" => {
|
||||||
|
plugins.insert(Plugin::Flake8TidyImports);
|
||||||
|
}
|
||||||
|
"banned-modules" | "banned_modules" => {
|
||||||
|
plugins.insert(Plugin::Flake8TidyImports);
|
||||||
|
}
|
||||||
|
// mccabe
|
||||||
|
"max-complexity" | "max_complexity" => {
|
||||||
|
plugins.insert(Plugin::McCabe);
|
||||||
|
}
|
||||||
|
// pep8-naming
|
||||||
|
"ignore-names" | "ignore_names" => {
|
||||||
|
plugins.insert(Plugin::PEP8Naming);
|
||||||
|
}
|
||||||
|
"classmethod-decorators" | "classmethod_decorators" => {
|
||||||
|
plugins.insert(Plugin::PEP8Naming);
|
||||||
|
}
|
||||||
|
"staticmethod-decorators" | "staticmethod_decorators" => {
|
||||||
|
plugins.insert(Plugin::PEP8Naming);
|
||||||
|
}
|
||||||
|
"max-string-length" | "max_string_length" => {
|
||||||
|
plugins.insert(Plugin::Flake8ErrMsg);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Vec::from_iter(plugins)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Infer the enabled plugins based on the referenced prefixes.
|
||||||
|
///
|
||||||
|
/// For example, if the user ignores `ANN101`, we should infer that
|
||||||
|
/// `flake8-annotations` is active.
|
||||||
|
pub(crate) fn infer_plugins_from_codes(selectors: &HashSet<RuleSelector>) -> Vec<Plugin> {
|
||||||
|
// Ignore cases in which we've knowingly changed rule prefixes.
|
||||||
|
[
|
||||||
|
Plugin::Flake82020,
|
||||||
|
Plugin::Flake8Annotations,
|
||||||
|
Plugin::Flake8Bandit,
|
||||||
|
// Plugin::Flake8BlindExcept,
|
||||||
|
Plugin::Flake8BooleanTrap,
|
||||||
|
Plugin::Flake8Bugbear,
|
||||||
|
Plugin::Flake8Builtins,
|
||||||
|
// Plugin::Flake8Commas,
|
||||||
|
Plugin::Flake8Comprehensions,
|
||||||
|
Plugin::Flake8Datetimez,
|
||||||
|
Plugin::Flake8Debugger,
|
||||||
|
Plugin::Flake8Docstrings,
|
||||||
|
// Plugin::Flake8Eradicate,
|
||||||
|
Plugin::Flake8ErrMsg,
|
||||||
|
Plugin::Flake8Executable,
|
||||||
|
Plugin::Flake8ImplicitStrConcat,
|
||||||
|
// Plugin::Flake8ImportConventions,
|
||||||
|
Plugin::Flake8NoPep420,
|
||||||
|
Plugin::Flake8Pie,
|
||||||
|
Plugin::Flake8Print,
|
||||||
|
Plugin::Flake8PytestStyle,
|
||||||
|
Plugin::Flake8Quotes,
|
||||||
|
Plugin::Flake8Return,
|
||||||
|
Plugin::Flake8Simplify,
|
||||||
|
// Plugin::Flake8TidyImports,
|
||||||
|
// Plugin::Flake8TypeChecking,
|
||||||
|
Plugin::Flake8UnusedArguments,
|
||||||
|
// Plugin::Flake8UsePathlib,
|
||||||
|
Plugin::McCabe,
|
||||||
|
Plugin::PEP8Naming,
|
||||||
|
Plugin::PandasVet,
|
||||||
|
Plugin::Tryceratops,
|
||||||
|
]
|
||||||
|
.into_iter()
|
||||||
|
.filter(|plugin| {
|
||||||
|
for selector in selectors {
|
||||||
|
if selector
|
||||||
|
.rules(&PreviewOptions::default())
|
||||||
|
.any(|rule| Linter::from(plugin).rules().any(|r| r == rule))
|
||||||
|
{
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
false
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use super::{infer_plugins_from_options, Plugin};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn it_infers_plugins() {
|
||||||
|
let actual = infer_plugins_from_options(&HashMap::from([(
|
||||||
|
"inline-quotes".to_string(),
|
||||||
|
Some("single".to_string()),
|
||||||
|
)]));
|
||||||
|
let expected = vec![Plugin::Flake8Quotes];
|
||||||
|
assert_eq!(actual, expected);
|
||||||
|
|
||||||
|
let actual = infer_plugins_from_options(&HashMap::from([(
|
||||||
|
"staticmethod-decorators".to_string(),
|
||||||
|
Some("[]".to_string()),
|
||||||
|
)]));
|
||||||
|
let expected = vec![Plugin::PEP8Naming];
|
||||||
|
assert_eq!(actual, expected);
|
||||||
|
}
|
||||||
|
}
|
||||||
26
crates/flake8_to_ruff/src/pyproject.rs
Normal file
26
crates/flake8_to_ruff/src/pyproject.rs
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use super::black::Black;
|
||||||
|
use super::isort::Isort;
|
||||||
|
use super::pep621::Project;
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub(crate) struct Tools {
|
||||||
|
pub(crate) black: Option<Black>,
|
||||||
|
pub(crate) isort: Option<Isort>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub(crate) struct Pyproject {
|
||||||
|
pub(crate) tool: Option<Tools>,
|
||||||
|
pub(crate) project: Option<Project>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
|
||||||
|
let contents = std::fs::read_to_string(path)?;
|
||||||
|
let pyproject = toml::from_str::<Pyproject>(&contents)?;
|
||||||
|
Ok(pyproject)
|
||||||
|
}
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "red_knot"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition.workspace = true
|
|
||||||
rust-version.workspace = true
|
|
||||||
homepage.workspace = true
|
|
||||||
documentation.workspace = true
|
|
||||||
repository.workspace = true
|
|
||||||
authors.workspace = true
|
|
||||||
license.workspace = true
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
ruff_python_parser = { path = "../ruff_python_parser" }
|
|
||||||
ruff_python_ast = { path = "../ruff_python_ast" }
|
|
||||||
ruff_text_size = { path = "../ruff_text_size" }
|
|
||||||
ruff_index = { path = "../ruff_index" }
|
|
||||||
ruff_notebook = { path = "../ruff_notebook" }
|
|
||||||
|
|
||||||
anyhow = { workspace = true }
|
|
||||||
bitflags = { workspace = true }
|
|
||||||
crossbeam = { workspace = true }
|
|
||||||
ctrlc = { version = "3.4.4" }
|
|
||||||
dashmap = { workspace = true }
|
|
||||||
hashbrown = { workspace = true }
|
|
||||||
indexmap = { workspace = true }
|
|
||||||
notify = { workspace = true }
|
|
||||||
parking_lot = { workspace = true }
|
|
||||||
rayon = { workspace = true }
|
|
||||||
rustc-hash = { workspace = true }
|
|
||||||
smol_str = { version = "0.2.1" }
|
|
||||||
tracing = { workspace = true }
|
|
||||||
tracing-subscriber = { workspace = true }
|
|
||||||
tracing-tree = { workspace = true }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
textwrap = { version = "0.16.1" }
|
|
||||||
tempfile = { workspace = true }
|
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
# Red Knot
|
|
||||||
|
|
||||||
The Red Knot crate contains code working towards multifile analysis, type inference and, ultimately, type-checking. It's very much a work in progress for now.
|
|
||||||
|
|
||||||
## Vendored types for the stdlib
|
|
||||||
|
|
||||||
Red Knot vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot/vendor/typeshed`. The file `crates/red_knot/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
|
|
||||||
|
|
||||||
Updating the vendored stubs is currently done manually. On a Unix machine, follow the following steps (if you have a typeshed clone in a `typeshed` directory, and a Ruff clone in a `ruff` directory):
|
|
||||||
|
|
||||||
```shell
|
|
||||||
rm -rf ruff/crates/red_knot/vendor/typeshed
|
|
||||||
mkdir ruff/crates/red_knot/vendor/typeshed
|
|
||||||
cp typeshed/README.md ruff/crates/red_knot/vendor/typeshed
|
|
||||||
cp typeshed/LICENSE ruff/crates/red_knot/vendor/typeshed
|
|
||||||
cp -r typeshed/stdlib ruff/crates/red_knot/vendor/typeshed/stdlib
|
|
||||||
git -C typeshed rev-parse HEAD > ruff/crates/red_knot/vendor/typeshed/source_commit.txt
|
|
||||||
```
|
|
||||||
@@ -1,415 +0,0 @@
|
|||||||
use std::any::type_name;
|
|
||||||
use std::fmt::{Debug, Formatter};
|
|
||||||
use std::hash::{Hash, Hasher};
|
|
||||||
use std::marker::PhantomData;
|
|
||||||
|
|
||||||
use rustc_hash::FxHashMap;
|
|
||||||
|
|
||||||
use ruff_index::{Idx, IndexVec};
|
|
||||||
use ruff_python_ast::visitor::preorder;
|
|
||||||
use ruff_python_ast::visitor::preorder::{PreorderVisitor, TraversalSignal};
|
|
||||||
use ruff_python_ast::{
|
|
||||||
AnyNodeRef, AstNode, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule,
|
|
||||||
NodeKind, Parameter, Stmt, StmtAnnAssign, StmtAssign, StmtAugAssign, StmtClassDef,
|
|
||||||
StmtFunctionDef, StmtGlobal, StmtImport, StmtImportFrom, StmtNonlocal, StmtTypeAlias,
|
|
||||||
TypeParam, TypeParamParamSpec, TypeParamTypeVar, TypeParamTypeVarTuple, WithItem,
|
|
||||||
};
|
|
||||||
use ruff_text_size::{Ranged, TextRange};
|
|
||||||
|
|
||||||
/// A type agnostic ID that uniquely identifies an AST node in a file.
|
|
||||||
#[ruff_index::newtype_index]
|
|
||||||
pub struct AstId;
|
|
||||||
|
|
||||||
/// A typed ID that uniquely identifies an AST node in a file.
|
|
||||||
///
|
|
||||||
/// This is different from [`AstId`] in that it is a combination of ID and the type of the node the ID identifies.
|
|
||||||
/// Typing the ID prevents mixing IDs of different node types and allows to restrict the API to only accept
|
|
||||||
/// nodes for which an ID has been created (not all AST nodes get an ID).
|
|
||||||
pub struct TypedAstId<N: HasAstId> {
|
|
||||||
erased: AstId,
|
|
||||||
_marker: PhantomData<fn() -> N>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<N: HasAstId> TypedAstId<N> {
|
|
||||||
/// Upcasts this ID from a more specific node type to a more general node type.
|
|
||||||
pub fn upcast<M: HasAstId>(self) -> TypedAstId<M>
|
|
||||||
where
|
|
||||||
N: Into<M>,
|
|
||||||
{
|
|
||||||
TypedAstId {
|
|
||||||
erased: self.erased,
|
|
||||||
_marker: PhantomData,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<N: HasAstId> Copy for TypedAstId<N> {}
|
|
||||||
impl<N: HasAstId> Clone for TypedAstId<N> {
|
|
||||||
fn clone(&self) -> Self {
|
|
||||||
*self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<N: HasAstId> PartialEq for TypedAstId<N> {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.erased == other.erased
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<N: HasAstId> Eq for TypedAstId<N> {}
|
|
||||||
impl<N: HasAstId> Hash for TypedAstId<N> {
|
|
||||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
|
||||||
self.erased.hash(state);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<N: HasAstId> Debug for TypedAstId<N> {
|
|
||||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
|
||||||
f.debug_tuple("TypedAstId")
|
|
||||||
.field(&self.erased)
|
|
||||||
.field(&type_name::<N>())
|
|
||||||
.finish()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct AstIds {
|
|
||||||
ids: IndexVec<AstId, NodeKey>,
|
|
||||||
reverse: FxHashMap<NodeKey, AstId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AstIds {
|
|
||||||
// TODO rust analyzer doesn't allocate an ID for every node. It only allocates ids for
|
|
||||||
// nodes with a corresponding HIR element, that is nodes that are definitions.
|
|
||||||
pub fn from_module(module: &ModModule) -> Self {
|
|
||||||
let mut visitor = AstIdsVisitor::default();
|
|
||||||
|
|
||||||
// TODO: visit_module?
|
|
||||||
// Make sure we visit the root
|
|
||||||
visitor.create_id(module);
|
|
||||||
visitor.visit_body(&module.body);
|
|
||||||
|
|
||||||
while let Some(deferred) = visitor.deferred.pop() {
|
|
||||||
match deferred {
|
|
||||||
DeferredNode::FunctionDefinition(def) => {
|
|
||||||
def.visit_preorder(&mut visitor);
|
|
||||||
}
|
|
||||||
DeferredNode::ClassDefinition(def) => def.visit_preorder(&mut visitor),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
AstIds {
|
|
||||||
ids: visitor.ids,
|
|
||||||
reverse: visitor.reverse,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the ID to the root node.
|
|
||||||
pub fn root(&self) -> NodeKey {
|
|
||||||
self.ids[AstId::new(0)]
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the [`TypedAstId`] for a node.
|
|
||||||
pub fn ast_id<N: HasAstId>(&self, node: &N) -> TypedAstId<N> {
|
|
||||||
let key = node.syntax_node_key();
|
|
||||||
TypedAstId {
|
|
||||||
erased: self.reverse.get(&key).copied().unwrap(),
|
|
||||||
_marker: PhantomData,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the [`TypedAstId`] for the node identified with the given [`TypedNodeKey`].
|
|
||||||
pub fn ast_id_for_key<N: HasAstId>(&self, node: &TypedNodeKey<N>) -> TypedAstId<N> {
|
|
||||||
let ast_id = self.ast_id_for_node_key(node.inner);
|
|
||||||
|
|
||||||
TypedAstId {
|
|
||||||
erased: ast_id,
|
|
||||||
_marker: PhantomData,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the untyped [`AstId`] for the node identified by the given `node` key.
|
|
||||||
pub fn ast_id_for_node_key(&self, node: NodeKey) -> AstId {
|
|
||||||
self.reverse
|
|
||||||
.get(&node)
|
|
||||||
.copied()
|
|
||||||
.expect("Can't find node in AstIds map.")
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the [`TypedNodeKey`] for the node identified by the given [`TypedAstId`].
|
|
||||||
pub fn key<N: HasAstId>(&self, id: TypedAstId<N>) -> TypedNodeKey<N> {
|
|
||||||
let syntax_key = self.ids[id.erased];
|
|
||||||
|
|
||||||
TypedNodeKey::new(syntax_key).unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn node_key<H: HasAstId>(&self, id: TypedAstId<H>) -> NodeKey {
|
|
||||||
self.ids[id.erased]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Debug for AstIds {
|
|
||||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
|
||||||
let mut map = f.debug_map();
|
|
||||||
for (key, value) in self.ids.iter_enumerated() {
|
|
||||||
map.entry(&key, &value);
|
|
||||||
}
|
|
||||||
|
|
||||||
map.finish()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq for AstIds {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.ids == other.ids
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Eq for AstIds {}
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
struct AstIdsVisitor<'a> {
|
|
||||||
ids: IndexVec<AstId, NodeKey>,
|
|
||||||
reverse: FxHashMap<NodeKey, AstId>,
|
|
||||||
deferred: Vec<DeferredNode<'a>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> AstIdsVisitor<'a> {
|
|
||||||
fn create_id<A: HasAstId>(&mut self, node: &A) {
|
|
||||||
let node_key = node.syntax_node_key();
|
|
||||||
|
|
||||||
let id = self.ids.push(node_key);
|
|
||||||
self.reverse.insert(node_key, id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> PreorderVisitor<'a> for AstIdsVisitor<'a> {
|
|
||||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
|
||||||
match stmt {
|
|
||||||
Stmt::FunctionDef(def) => {
|
|
||||||
self.create_id(def);
|
|
||||||
self.deferred.push(DeferredNode::FunctionDefinition(def));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
// TODO defer visiting the assignment body, type alias parameters etc?
|
|
||||||
Stmt::ClassDef(def) => {
|
|
||||||
self.create_id(def);
|
|
||||||
self.deferred.push(DeferredNode::ClassDefinition(def));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
Stmt::Expr(_) => {
|
|
||||||
// Skip
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
Stmt::Return(_) => {}
|
|
||||||
Stmt::Delete(_) => {}
|
|
||||||
Stmt::Assign(assignment) => self.create_id(assignment),
|
|
||||||
Stmt::AugAssign(assignment) => {
|
|
||||||
self.create_id(assignment);
|
|
||||||
}
|
|
||||||
Stmt::AnnAssign(assignment) => self.create_id(assignment),
|
|
||||||
Stmt::TypeAlias(assignment) => self.create_id(assignment),
|
|
||||||
Stmt::For(_) => {}
|
|
||||||
Stmt::While(_) => {}
|
|
||||||
Stmt::If(_) => {}
|
|
||||||
Stmt::With(_) => {}
|
|
||||||
Stmt::Match(_) => {}
|
|
||||||
Stmt::Raise(_) => {}
|
|
||||||
Stmt::Try(_) => {}
|
|
||||||
Stmt::Assert(_) => {}
|
|
||||||
Stmt::Import(import) => self.create_id(import),
|
|
||||||
Stmt::ImportFrom(import_from) => self.create_id(import_from),
|
|
||||||
Stmt::Global(global) => self.create_id(global),
|
|
||||||
Stmt::Nonlocal(non_local) => self.create_id(non_local),
|
|
||||||
Stmt::Pass(_) => {}
|
|
||||||
Stmt::Break(_) => {}
|
|
||||||
Stmt::Continue(_) => {}
|
|
||||||
Stmt::IpyEscapeCommand(_) => {}
|
|
||||||
}
|
|
||||||
|
|
||||||
preorder::walk_stmt(self, stmt);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_expr(&mut self, _expr: &'a Expr) {}
|
|
||||||
|
|
||||||
fn visit_parameter(&mut self, parameter: &'a Parameter) {
|
|
||||||
self.create_id(parameter);
|
|
||||||
preorder::walk_parameter(self, parameter);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler) {
|
|
||||||
match except_handler {
|
|
||||||
ExceptHandler::ExceptHandler(except_handler) => {
|
|
||||||
self.create_id(except_handler);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
preorder::walk_except_handler(self, except_handler);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_with_item(&mut self, with_item: &'a WithItem) {
|
|
||||||
self.create_id(with_item);
|
|
||||||
preorder::walk_with_item(self, with_item);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_match_case(&mut self, match_case: &'a MatchCase) {
|
|
||||||
self.create_id(match_case);
|
|
||||||
preorder::walk_match_case(self, match_case);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_type_param(&mut self, type_param: &'a TypeParam) {
|
|
||||||
self.create_id(type_param);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
enum DeferredNode<'a> {
|
|
||||||
FunctionDefinition(&'a StmtFunctionDef),
|
|
||||||
ClassDefinition(&'a StmtClassDef),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
|
||||||
pub struct TypedNodeKey<N: AstNode> {
|
|
||||||
/// The type erased node key.
|
|
||||||
inner: NodeKey,
|
|
||||||
_marker: PhantomData<fn() -> N>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<N: AstNode> TypedNodeKey<N> {
|
|
||||||
pub fn from_node(node: &N) -> Self {
|
|
||||||
let inner = NodeKey {
|
|
||||||
kind: node.as_any_node_ref().kind(),
|
|
||||||
range: node.range(),
|
|
||||||
};
|
|
||||||
Self {
|
|
||||||
inner,
|
|
||||||
_marker: PhantomData,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn new(node_key: NodeKey) -> Option<Self> {
|
|
||||||
N::can_cast(node_key.kind).then_some(TypedNodeKey {
|
|
||||||
inner: node_key,
|
|
||||||
_marker: PhantomData,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<N::Ref<'a>> {
|
|
||||||
let node_ref = self.inner.resolve(root)?;
|
|
||||||
|
|
||||||
Some(N::cast_ref(node_ref).unwrap())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn resolve_unwrap<'a>(&self, root: AnyNodeRef<'a>) -> N::Ref<'a> {
|
|
||||||
self.resolve(root).expect("node should resolve")
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn erased(&self) -> &NodeKey {
|
|
||||||
&self.inner
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
struct FindNodeKeyVisitor<'a> {
|
|
||||||
key: NodeKey,
|
|
||||||
result: Option<AnyNodeRef<'a>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> PreorderVisitor<'a> for FindNodeKeyVisitor<'a> {
|
|
||||||
fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal {
|
|
||||||
if self.result.is_some() {
|
|
||||||
return TraversalSignal::Skip;
|
|
||||||
}
|
|
||||||
|
|
||||||
if node.range() == self.key.range && node.kind() == self.key.kind {
|
|
||||||
self.result = Some(node);
|
|
||||||
TraversalSignal::Skip
|
|
||||||
} else if node.range().contains_range(self.key.range) {
|
|
||||||
TraversalSignal::Traverse
|
|
||||||
} else {
|
|
||||||
TraversalSignal::Skip
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_body(&mut self, body: &'a [Stmt]) {
|
|
||||||
// TODO it would be more efficient to use binary search instead of linear
|
|
||||||
for stmt in body {
|
|
||||||
if stmt.range().start() > self.key.range.end() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
self.visit_stmt(stmt);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO an alternative to this is to have a `NodeId` on each node (in increasing order depending on the position).
|
|
||||||
// This would allow to reduce the size of this to a u32.
|
|
||||||
// What would be nice if we could use an `Arc::weak_ref` here but that only works if we use
|
|
||||||
// `Arc` internally
|
|
||||||
// TODO: Implement the logic to resolve a node, given a db (and the correct file).
|
|
||||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
|
||||||
pub struct NodeKey {
|
|
||||||
kind: NodeKind,
|
|
||||||
range: TextRange,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl NodeKey {
|
|
||||||
pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<AnyNodeRef<'a>> {
|
|
||||||
// We need to do a binary search here. Only traverse into a node if the range is withint the node
|
|
||||||
let mut visitor = FindNodeKeyVisitor {
|
|
||||||
key: *self,
|
|
||||||
result: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
if visitor.enter_node(root) == TraversalSignal::Traverse {
|
|
||||||
root.visit_preorder(&mut visitor);
|
|
||||||
}
|
|
||||||
|
|
||||||
visitor.result
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Marker trait implemented by AST nodes for which we extract the `AstId`.
|
|
||||||
pub trait HasAstId: AstNode {
|
|
||||||
fn node_key(&self) -> TypedNodeKey<Self>
|
|
||||||
where
|
|
||||||
Self: Sized,
|
|
||||||
{
|
|
||||||
TypedNodeKey {
|
|
||||||
inner: self.syntax_node_key(),
|
|
||||||
_marker: PhantomData,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn syntax_node_key(&self) -> NodeKey {
|
|
||||||
NodeKey {
|
|
||||||
kind: self.as_any_node_ref().kind(),
|
|
||||||
range: self.range(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasAstId for StmtFunctionDef {}
|
|
||||||
impl HasAstId for StmtClassDef {}
|
|
||||||
impl HasAstId for StmtAnnAssign {}
|
|
||||||
impl HasAstId for StmtAugAssign {}
|
|
||||||
impl HasAstId for StmtAssign {}
|
|
||||||
impl HasAstId for StmtTypeAlias {}
|
|
||||||
|
|
||||||
impl HasAstId for ModModule {}
|
|
||||||
|
|
||||||
impl HasAstId for StmtImport {}
|
|
||||||
|
|
||||||
impl HasAstId for StmtImportFrom {}
|
|
||||||
|
|
||||||
impl HasAstId for Parameter {}
|
|
||||||
|
|
||||||
impl HasAstId for TypeParam {}
|
|
||||||
impl HasAstId for Stmt {}
|
|
||||||
impl HasAstId for TypeParamTypeVar {}
|
|
||||||
impl HasAstId for TypeParamTypeVarTuple {}
|
|
||||||
impl HasAstId for TypeParamParamSpec {}
|
|
||||||
impl HasAstId for StmtGlobal {}
|
|
||||||
impl HasAstId for StmtNonlocal {}
|
|
||||||
|
|
||||||
impl HasAstId for ExceptHandlerExceptHandler {}
|
|
||||||
impl HasAstId for WithItem {}
|
|
||||||
impl HasAstId for MatchCase {}
|
|
||||||
@@ -1,165 +0,0 @@
|
|||||||
use std::fmt::Formatter;
|
|
||||||
use std::hash::Hash;
|
|
||||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
|
||||||
|
|
||||||
use crate::db::QueryResult;
|
|
||||||
use dashmap::mapref::entry::Entry;
|
|
||||||
|
|
||||||
use crate::FxDashMap;
|
|
||||||
|
|
||||||
/// Simple key value cache that locks on a per-key level.
|
|
||||||
pub struct KeyValueCache<K, V> {
|
|
||||||
map: FxDashMap<K, V>,
|
|
||||||
statistics: CacheStatistics,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<K, V> KeyValueCache<K, V>
|
|
||||||
where
|
|
||||||
K: Eq + Hash + Clone,
|
|
||||||
V: Clone,
|
|
||||||
{
|
|
||||||
pub fn try_get(&self, key: &K) -> Option<V> {
|
|
||||||
if let Some(existing) = self.map.get(key) {
|
|
||||||
self.statistics.hit();
|
|
||||||
Some(existing.clone())
|
|
||||||
} else {
|
|
||||||
self.statistics.miss();
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get<F>(&self, key: &K, compute: F) -> QueryResult<V>
|
|
||||||
where
|
|
||||||
F: FnOnce(&K) -> QueryResult<V>,
|
|
||||||
{
|
|
||||||
Ok(match self.map.entry(key.clone()) {
|
|
||||||
Entry::Occupied(cached) => {
|
|
||||||
self.statistics.hit();
|
|
||||||
|
|
||||||
cached.get().clone()
|
|
||||||
}
|
|
||||||
Entry::Vacant(vacant) => {
|
|
||||||
self.statistics.miss();
|
|
||||||
|
|
||||||
let value = compute(key)?;
|
|
||||||
vacant.insert(value.clone());
|
|
||||||
value
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set(&mut self, key: K, value: V) {
|
|
||||||
self.map.insert(key, value);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn remove(&mut self, key: &K) -> Option<V> {
|
|
||||||
self.map.remove(key).map(|(_, value)| value)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn clear(&mut self) {
|
|
||||||
self.map.clear();
|
|
||||||
self.map.shrink_to_fit();
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn statistics(&self) -> Option<Statistics> {
|
|
||||||
self.statistics.to_statistics()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<K, V> Default for KeyValueCache<K, V>
|
|
||||||
where
|
|
||||||
K: Eq + Hash,
|
|
||||||
V: Clone,
|
|
||||||
{
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
map: FxDashMap::default(),
|
|
||||||
statistics: CacheStatistics::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<K, V> std::fmt::Debug for KeyValueCache<K, V>
|
|
||||||
where
|
|
||||||
K: std::fmt::Debug + Eq + Hash,
|
|
||||||
V: std::fmt::Debug,
|
|
||||||
{
|
|
||||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
|
||||||
let mut debug = f.debug_map();
|
|
||||||
|
|
||||||
for entry in &self.map {
|
|
||||||
debug.entry(&entry.value(), &entry.key());
|
|
||||||
}
|
|
||||||
|
|
||||||
debug.finish()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
|
||||||
pub struct Statistics {
|
|
||||||
pub hits: usize,
|
|
||||||
pub misses: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Statistics {
|
|
||||||
#[allow(clippy::cast_precision_loss)]
|
|
||||||
pub fn hit_rate(&self) -> Option<f64> {
|
|
||||||
if self.hits + self.misses == 0 {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
Some((self.hits as f64) / (self.hits + self.misses) as f64)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(debug_assertions)]
|
|
||||||
pub type CacheStatistics = DebugStatistics;
|
|
||||||
|
|
||||||
#[cfg(not(debug_assertions))]
|
|
||||||
pub type CacheStatistics = ReleaseStatistics;
|
|
||||||
|
|
||||||
pub trait StatisticsRecorder {
|
|
||||||
fn hit(&self);
|
|
||||||
fn miss(&self);
|
|
||||||
fn to_statistics(&self) -> Option<Statistics>;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default)]
|
|
||||||
pub struct DebugStatistics {
|
|
||||||
hits: AtomicUsize,
|
|
||||||
misses: AtomicUsize,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl StatisticsRecorder for DebugStatistics {
|
|
||||||
// TODO figure out appropriate Ordering
|
|
||||||
fn hit(&self) {
|
|
||||||
self.hits.fetch_add(1, Ordering::SeqCst);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn miss(&self) {
|
|
||||||
self.misses.fetch_add(1, Ordering::SeqCst);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn to_statistics(&self) -> Option<Statistics> {
|
|
||||||
let hits = self.hits.load(Ordering::SeqCst);
|
|
||||||
let misses = self.misses.load(Ordering::SeqCst);
|
|
||||||
|
|
||||||
Some(Statistics { hits, misses })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default)]
|
|
||||||
pub struct ReleaseStatistics;
|
|
||||||
|
|
||||||
impl StatisticsRecorder for ReleaseStatistics {
|
|
||||||
#[inline]
|
|
||||||
fn hit(&self) {}
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn miss(&self) {}
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn to_statistics(&self) -> Option<Statistics> {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
use std::sync::atomic::AtomicBool;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Default)]
|
|
||||||
pub struct CancellationTokenSource {
|
|
||||||
signal: Arc<AtomicBool>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl CancellationTokenSource {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
signal: Arc::new(AtomicBool::new(false)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip_all)]
|
|
||||||
pub fn cancel(&self) {
|
|
||||||
self.signal.store(true, std::sync::atomic::Ordering::SeqCst);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_cancelled(&self) -> bool {
|
|
||||||
self.signal.load(std::sync::atomic::Ordering::SeqCst)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn token(&self) -> CancellationToken {
|
|
||||||
CancellationToken {
|
|
||||||
signal: self.signal.clone(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct CancellationToken {
|
|
||||||
signal: Arc<AtomicBool>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl CancellationToken {
|
|
||||||
/// Returns `true` if cancellation has been requested.
|
|
||||||
pub fn is_cancelled(&self) -> bool {
|
|
||||||
self.signal.load(std::sync::atomic::Ordering::SeqCst)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,248 +0,0 @@
|
|||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
pub use jars::{HasJar, HasJars};
|
|
||||||
pub use query::{QueryError, QueryResult};
|
|
||||||
pub use runtime::DbRuntime;
|
|
||||||
pub use storage::JarsStorage;
|
|
||||||
|
|
||||||
use crate::files::FileId;
|
|
||||||
use crate::lint::{LintSemanticStorage, LintSyntaxStorage};
|
|
||||||
use crate::module::ModuleResolver;
|
|
||||||
use crate::parse::ParsedStorage;
|
|
||||||
use crate::source::SourceStorage;
|
|
||||||
use crate::symbols::SymbolTablesStorage;
|
|
||||||
use crate::types::TypeStore;
|
|
||||||
|
|
||||||
mod jars;
|
|
||||||
mod query;
|
|
||||||
mod runtime;
|
|
||||||
mod storage;
|
|
||||||
|
|
||||||
pub trait Database {
|
|
||||||
/// Returns a reference to the runtime of the current worker.
|
|
||||||
fn runtime(&self) -> &DbRuntime;
|
|
||||||
|
|
||||||
/// Returns a mutable reference to the runtime. Only one worker can hold a mutable reference to the runtime.
|
|
||||||
fn runtime_mut(&mut self) -> &mut DbRuntime;
|
|
||||||
|
|
||||||
/// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
|
|
||||||
fn cancelled(&self) -> QueryResult<()> {
|
|
||||||
self.runtime().cancelled()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns `true` if the queries have been cancelled.
|
|
||||||
fn is_cancelled(&self) -> bool {
|
|
||||||
self.runtime().is_cancelled()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Database that supports running queries from multiple threads.
|
|
||||||
pub trait ParallelDatabase: Database + Send {
|
|
||||||
/// Creates a snapshot of the database state that can be used to query the database in another thread.
|
|
||||||
///
|
|
||||||
/// The snapshot is a read-only view of the database but query results are shared between threads.
|
|
||||||
/// All queries will be automatically cancelled when applying any mutations (calling [`HasJars::jars_mut`])
|
|
||||||
/// to the database (not the snapshot, because they're readonly).
|
|
||||||
///
|
|
||||||
/// ## Creating a snapshot
|
|
||||||
///
|
|
||||||
/// Creating a snapshot of the database's jars is cheap but creating a snapshot of
|
|
||||||
/// other state stored on the database might require deep-cloning data. That's why you should
|
|
||||||
/// avoid creating snapshots in a hot function (e.g. don't create a snapshot for each file, instead
|
|
||||||
/// create a snapshot when scheduling the check of an entire program).
|
|
||||||
///
|
|
||||||
/// ## Salsa compatibility
|
|
||||||
/// Salsa prohibits creating a snapshot while running a local query (it's fine if other workers run a query) [[source](https://github.com/salsa-rs/salsa/issues/80)].
|
|
||||||
/// We should avoid creating snapshots while running a query because we might want to adopt Salsa in the future (if we can figure out persistent caching).
|
|
||||||
/// Unfortunately, the infrastructure doesn't provide an automated way of knowing when a query is run, that's
|
|
||||||
/// why we have to "enforce" this constraint manually.
|
|
||||||
#[must_use]
|
|
||||||
fn snapshot(&self) -> Snapshot<Self>;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait DbWithJar<Jar>: Database + HasJar<Jar> {}
|
|
||||||
|
|
||||||
/// Readonly snapshot of a database.
|
|
||||||
///
|
|
||||||
/// ## Dead locks
|
|
||||||
/// A snapshot should always be dropped as soon as it is no longer necessary to run queries.
|
|
||||||
/// Storing the snapshot without running a query or periodically checking if cancellation was requested
|
|
||||||
/// can lead to deadlocks because mutating the [`Database`] requires cancels all pending queries
|
|
||||||
/// and waiting for all [`Snapshot`]s to be dropped.
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Snapshot<DB: ?Sized>
|
|
||||||
where
|
|
||||||
DB: ParallelDatabase,
|
|
||||||
{
|
|
||||||
db: DB,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<DB> Snapshot<DB>
|
|
||||||
where
|
|
||||||
DB: ParallelDatabase,
|
|
||||||
{
|
|
||||||
pub fn new(db: DB) -> Self {
|
|
||||||
Snapshot { db }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<DB> std::ops::Deref for Snapshot<DB>
|
|
||||||
where
|
|
||||||
DB: ParallelDatabase,
|
|
||||||
{
|
|
||||||
type Target = DB;
|
|
||||||
|
|
||||||
fn deref(&self) -> &DB {
|
|
||||||
&self.db
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait Upcast<T: ?Sized> {
|
|
||||||
fn upcast(&self) -> &T;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Red knot specific databases code.
|
|
||||||
|
|
||||||
pub trait SourceDb: DbWithJar<SourceJar> {
|
|
||||||
// queries
|
|
||||||
fn file_id(&self, path: &std::path::Path) -> FileId;
|
|
||||||
|
|
||||||
fn file_path(&self, file_id: FileId) -> Arc<std::path::Path>;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait SemanticDb: SourceDb + DbWithJar<SemanticJar> + Upcast<dyn SourceDb> {}
|
|
||||||
|
|
||||||
pub trait LintDb: SemanticDb + DbWithJar<LintJar> + Upcast<dyn SemanticDb> {}
|
|
||||||
|
|
||||||
pub trait Db: LintDb + Upcast<dyn LintDb> {}
|
|
||||||
|
|
||||||
#[derive(Debug, Default)]
|
|
||||||
pub struct SourceJar {
|
|
||||||
pub sources: SourceStorage,
|
|
||||||
pub parsed: ParsedStorage,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default)]
|
|
||||||
pub struct SemanticJar {
|
|
||||||
pub module_resolver: ModuleResolver,
|
|
||||||
pub symbol_tables: SymbolTablesStorage,
|
|
||||||
pub type_store: TypeStore,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default)]
|
|
||||||
pub struct LintJar {
|
|
||||||
pub lint_syntax: LintSyntaxStorage,
|
|
||||||
pub lint_semantic: LintSemanticStorage,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
pub(crate) mod tests {
|
|
||||||
use std::path::Path;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use crate::db::{
|
|
||||||
Database, DbRuntime, DbWithJar, HasJar, HasJars, JarsStorage, LintDb, LintJar, QueryResult,
|
|
||||||
SourceDb, SourceJar, Upcast,
|
|
||||||
};
|
|
||||||
use crate::files::{FileId, Files};
|
|
||||||
|
|
||||||
use super::{SemanticDb, SemanticJar};
|
|
||||||
|
|
||||||
// This can be a partial database used in a single crate for testing.
|
|
||||||
// It would hold fewer data than the full database.
|
|
||||||
#[derive(Debug, Default)]
|
|
||||||
pub(crate) struct TestDb {
|
|
||||||
files: Files,
|
|
||||||
jars: JarsStorage<Self>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasJar<SourceJar> for TestDb {
|
|
||||||
fn jar(&self) -> QueryResult<&SourceJar> {
|
|
||||||
Ok(&self.jars()?.0)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn jar_mut(&mut self) -> &mut SourceJar {
|
|
||||||
&mut self.jars_mut().0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasJar<SemanticJar> for TestDb {
|
|
||||||
fn jar(&self) -> QueryResult<&SemanticJar> {
|
|
||||||
Ok(&self.jars()?.1)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn jar_mut(&mut self) -> &mut SemanticJar {
|
|
||||||
&mut self.jars_mut().1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasJar<LintJar> for TestDb {
|
|
||||||
fn jar(&self) -> QueryResult<&LintJar> {
|
|
||||||
Ok(&self.jars()?.2)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn jar_mut(&mut self) -> &mut LintJar {
|
|
||||||
&mut self.jars_mut().2
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SourceDb for TestDb {
|
|
||||||
fn file_id(&self, path: &Path) -> FileId {
|
|
||||||
self.files.intern(path)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn file_path(&self, file_id: FileId) -> Arc<Path> {
|
|
||||||
self.files.path(file_id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DbWithJar<SourceJar> for TestDb {}
|
|
||||||
|
|
||||||
impl Upcast<dyn SourceDb> for TestDb {
|
|
||||||
fn upcast(&self) -> &(dyn SourceDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SemanticDb for TestDb {}
|
|
||||||
|
|
||||||
impl DbWithJar<SemanticJar> for TestDb {}
|
|
||||||
|
|
||||||
impl Upcast<dyn SemanticDb> for TestDb {
|
|
||||||
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl LintDb for TestDb {}
|
|
||||||
|
|
||||||
impl Upcast<dyn LintDb> for TestDb {
|
|
||||||
fn upcast(&self) -> &(dyn LintDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DbWithJar<LintJar> for TestDb {}
|
|
||||||
|
|
||||||
impl HasJars for TestDb {
|
|
||||||
type Jars = (SourceJar, SemanticJar, LintJar);
|
|
||||||
|
|
||||||
fn jars(&self) -> QueryResult<&Self::Jars> {
|
|
||||||
self.jars.jars()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn jars_mut(&mut self) -> &mut Self::Jars {
|
|
||||||
self.jars.jars_mut()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Database for TestDb {
|
|
||||||
fn runtime(&self) -> &DbRuntime {
|
|
||||||
self.jars.runtime()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn runtime_mut(&mut self) -> &mut DbRuntime {
|
|
||||||
self.jars.runtime_mut()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
use crate::db::query::QueryResult;
|
|
||||||
|
|
||||||
/// Gives access to a specific jar in the database.
|
|
||||||
///
|
|
||||||
/// Nope, the terminology isn't borrowed from Java but from Salsa <https://salsa-rs.github.io/salsa/>,
|
|
||||||
/// which is an analogy to storing the salsa in different jars.
|
|
||||||
///
|
|
||||||
/// The basic idea is that each crate can define its own jar and the jars can be combined to a single
|
|
||||||
/// database in the top level crate. Each crate also defines its own `Database` trait. The combination of
|
|
||||||
/// `Database` trait and the jar allows to write queries in isolation without having to know how they get composed at the upper levels.
|
|
||||||
///
|
|
||||||
/// Salsa further defines a `HasIngredient` trait which slices the jar to a specific storage (e.g. a specific cache).
|
|
||||||
/// We don't need this just yet because we write our queries by hand. We may want a similar trait if we decide
|
|
||||||
/// to use a macro to generate the queries.
|
|
||||||
pub trait HasJar<T> {
|
|
||||||
/// Gives a read-only reference to the jar.
|
|
||||||
fn jar(&self) -> QueryResult<&T>;
|
|
||||||
|
|
||||||
/// Gives a mutable reference to the jar.
|
|
||||||
fn jar_mut(&mut self) -> &mut T;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Gives access to the jars in a database.
|
|
||||||
pub trait HasJars {
|
|
||||||
/// A type storing the jars.
|
|
||||||
///
|
|
||||||
/// Most commonly, this is a tuple where each jar is a tuple element.
|
|
||||||
type Jars: Default;
|
|
||||||
|
|
||||||
/// Gives access to the underlying jars but tests if the queries have been cancelled.
|
|
||||||
///
|
|
||||||
/// Returns `Err(QueryError::Cancelled)` if the queries have been cancelled.
|
|
||||||
fn jars(&self) -> QueryResult<&Self::Jars>;
|
|
||||||
|
|
||||||
/// Gives mutable access to the underlying jars.
|
|
||||||
fn jars_mut(&mut self) -> &mut Self::Jars;
|
|
||||||
}
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
use std::fmt::{Display, Formatter};
|
|
||||||
|
|
||||||
/// Reason why a db query operation failed.
|
|
||||||
#[derive(Debug, Clone, Copy)]
|
|
||||||
pub enum QueryError {
|
|
||||||
/// The query was cancelled because the DB was mutated or the query was cancelled by the host (e.g. on a file change or when pressing CTRL+C).
|
|
||||||
Cancelled,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Display for QueryError {
|
|
||||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
|
||||||
match self {
|
|
||||||
QueryError::Cancelled => f.write_str("query was cancelled"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::error::Error for QueryError {}
|
|
||||||
|
|
||||||
pub type QueryResult<T> = Result<T, QueryError>;
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
use crate::cancellation::CancellationTokenSource;
|
|
||||||
use crate::db::{QueryError, QueryResult};
|
|
||||||
|
|
||||||
/// Holds the jar agnostic state of the database.
|
|
||||||
#[derive(Debug, Default)]
|
|
||||||
pub struct DbRuntime {
|
|
||||||
/// The cancellation token source used to signal other works that the queries should be aborted and
|
|
||||||
/// exit at the next possible point.
|
|
||||||
cancellation_token: CancellationTokenSource,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DbRuntime {
|
|
||||||
pub(super) fn snapshot(&self) -> Self {
|
|
||||||
Self {
|
|
||||||
cancellation_token: self.cancellation_token.clone(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Cancels the pending queries of other workers. The current worker cannot have any pending
|
|
||||||
/// queries because we're holding a mutable reference to the runtime.
|
|
||||||
pub(super) fn cancel_other_workers(&mut self) {
|
|
||||||
self.cancellation_token.cancel();
|
|
||||||
// Set a new cancellation token so that we're in a non-cancelled state again when running the next
|
|
||||||
// query.
|
|
||||||
self.cancellation_token = CancellationTokenSource::default();
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
|
|
||||||
pub(super) fn cancelled(&self) -> QueryResult<()> {
|
|
||||||
if self.cancellation_token.is_cancelled() {
|
|
||||||
Err(QueryError::Cancelled)
|
|
||||||
} else {
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns `true` if the queries have been cancelled.
|
|
||||||
pub(super) fn is_cancelled(&self) -> bool {
|
|
||||||
self.cancellation_token.is_cancelled()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,117 +0,0 @@
|
|||||||
use std::fmt::Formatter;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use crossbeam::sync::WaitGroup;
|
|
||||||
|
|
||||||
use crate::db::query::QueryResult;
|
|
||||||
use crate::db::runtime::DbRuntime;
|
|
||||||
use crate::db::{HasJars, ParallelDatabase};
|
|
||||||
|
|
||||||
/// Stores the jars of a database and the state for each worker.
|
|
||||||
///
|
|
||||||
/// Today, all state is shared across all workers, but it may be desired to store data per worker in the future.
|
|
||||||
pub struct JarsStorage<T>
|
|
||||||
where
|
|
||||||
T: HasJars + Sized,
|
|
||||||
{
|
|
||||||
// It's important that `jars_wait_group` is declared after `jars` to ensure that `jars` is dropped first.
|
|
||||||
// See https://doc.rust-lang.org/reference/destructors.html
|
|
||||||
/// Stores the jars of the database.
|
|
||||||
jars: Arc<T::Jars>,
|
|
||||||
|
|
||||||
/// Used to count the references to `jars`. Allows implementing `jars_mut` without requiring to clone `jars`.
|
|
||||||
jars_wait_group: WaitGroup,
|
|
||||||
|
|
||||||
/// The data agnostic state.
|
|
||||||
runtime: DbRuntime,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<Db> JarsStorage<Db>
|
|
||||||
where
|
|
||||||
Db: HasJars,
|
|
||||||
{
|
|
||||||
pub(super) fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
jars: Arc::new(Db::Jars::default()),
|
|
||||||
jars_wait_group: WaitGroup::default(),
|
|
||||||
runtime: DbRuntime::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a snapshot of the jars.
|
|
||||||
///
|
|
||||||
/// Creating the snapshot is cheap because it doesn't clone the jars, it only increments a ref counter.
|
|
||||||
#[must_use]
|
|
||||||
pub fn snapshot(&self) -> JarsStorage<Db>
|
|
||||||
where
|
|
||||||
Db: ParallelDatabase,
|
|
||||||
{
|
|
||||||
Self {
|
|
||||||
jars: self.jars.clone(),
|
|
||||||
jars_wait_group: self.jars_wait_group.clone(),
|
|
||||||
runtime: self.runtime.snapshot(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn jars(&self) -> QueryResult<&Db::Jars> {
|
|
||||||
self.runtime.cancelled()?;
|
|
||||||
Ok(&self.jars)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns a mutable reference to the jars without cloning their content.
|
|
||||||
///
|
|
||||||
/// The method cancels any pending queries of other works and waits for them to complete so that
|
|
||||||
/// this instance is the only instance holding a reference to the jars.
|
|
||||||
pub(crate) fn jars_mut(&mut self) -> &mut Db::Jars {
|
|
||||||
// We have a mutable ref here, so no more workers can be spawned between calling this function and taking the mut ref below.
|
|
||||||
self.cancel_other_workers();
|
|
||||||
|
|
||||||
// Now all other references to `self.jars` should have been released. We can now safely return a mutable reference
|
|
||||||
// to the Arc's content.
|
|
||||||
let jars =
|
|
||||||
Arc::get_mut(&mut self.jars).expect("All references to jars should have been released");
|
|
||||||
|
|
||||||
jars
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn runtime(&self) -> &DbRuntime {
|
|
||||||
&self.runtime
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn runtime_mut(&mut self) -> &mut DbRuntime {
|
|
||||||
// Note: This method may need to use a similar trick to `jars_mut` if `DbRuntime` is ever to store data that is shared between workers.
|
|
||||||
&mut self.runtime
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self))]
|
|
||||||
fn cancel_other_workers(&mut self) {
|
|
||||||
self.runtime.cancel_other_workers();
|
|
||||||
|
|
||||||
// Wait for all other works to complete.
|
|
||||||
let existing_wait = std::mem::take(&mut self.jars_wait_group);
|
|
||||||
existing_wait.wait();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<Db> Default for JarsStorage<Db>
|
|
||||||
where
|
|
||||||
Db: HasJars,
|
|
||||||
{
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::new()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> std::fmt::Debug for JarsStorage<T>
|
|
||||||
where
|
|
||||||
T: HasJars,
|
|
||||||
<T as HasJars>::Jars: std::fmt::Debug,
|
|
||||||
{
|
|
||||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
|
||||||
f.debug_struct("SharedStorage")
|
|
||||||
.field("jars", &self.jars)
|
|
||||||
.field("jars_wait_group", &self.jars_wait_group)
|
|
||||||
.field("runtime", &self.runtime)
|
|
||||||
.finish()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,180 +0,0 @@
|
|||||||
use std::fmt::{Debug, Formatter};
|
|
||||||
use std::hash::{Hash, Hasher};
|
|
||||||
use std::path::Path;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use hashbrown::hash_map::RawEntryMut;
|
|
||||||
use parking_lot::RwLock;
|
|
||||||
use rustc_hash::FxHasher;
|
|
||||||
|
|
||||||
use ruff_index::{newtype_index, IndexVec};
|
|
||||||
|
|
||||||
type Map<K, V> = hashbrown::HashMap<K, V, ()>;
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
pub struct FileId;
|
|
||||||
|
|
||||||
// TODO we'll need a higher level virtual file system abstraction that allows testing if a file exists
|
|
||||||
// or retrieving its content (ideally lazily and in a way that the memory can be retained later)
|
|
||||||
// I suspect that we'll end up with a FileSystem trait and our own Path abstraction.
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct Files {
|
|
||||||
inner: Arc<RwLock<FilesInner>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Files {
|
|
||||||
#[tracing::instrument(level = "debug", skip(self))]
|
|
||||||
pub fn intern(&self, path: &Path) -> FileId {
|
|
||||||
self.inner.write().intern(path)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn try_get(&self, path: &Path) -> Option<FileId> {
|
|
||||||
self.inner.read().try_get(path)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", skip(self))]
|
|
||||||
pub fn path(&self, id: FileId) -> Arc<Path> {
|
|
||||||
self.inner.read().path(id)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Snapshots files for a new database snapshot.
|
|
||||||
///
|
|
||||||
/// This method should not be used outside a database snapshot.
|
|
||||||
#[must_use]
|
|
||||||
pub fn snapshot(&self) -> Files {
|
|
||||||
Files {
|
|
||||||
inner: self.inner.clone(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Debug for Files {
|
|
||||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
|
||||||
let files = self.inner.read();
|
|
||||||
let mut debug = f.debug_map();
|
|
||||||
for item in files.iter() {
|
|
||||||
debug.entry(&item.0, &item.1);
|
|
||||||
}
|
|
||||||
|
|
||||||
debug.finish()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq for Files {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.inner.read().eq(&other.inner.read())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Eq for Files {}
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
struct FilesInner {
|
|
||||||
by_path: Map<FileId, ()>,
|
|
||||||
// TODO should we use a map here to reclaim the space for removed files?
|
|
||||||
// TODO I think we should use our own path abstraction here to avoid having to normalize paths
|
|
||||||
// and dealing with non-utf paths everywhere.
|
|
||||||
by_id: IndexVec<FileId, Arc<Path>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FilesInner {
|
|
||||||
/// Inserts the path and returns a new id for it or returns the id if it is an existing path.
|
|
||||||
// TODO should this accept Path or PathBuf?
|
|
||||||
pub(crate) fn intern(&mut self, path: &Path) -> FileId {
|
|
||||||
let hash = FilesInner::hash_path(path);
|
|
||||||
|
|
||||||
let entry = self
|
|
||||||
.by_path
|
|
||||||
.raw_entry_mut()
|
|
||||||
.from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path);
|
|
||||||
|
|
||||||
match entry {
|
|
||||||
RawEntryMut::Occupied(entry) => *entry.key(),
|
|
||||||
RawEntryMut::Vacant(entry) => {
|
|
||||||
let id = self.by_id.push(Arc::from(path));
|
|
||||||
entry.insert_with_hasher(hash, id, (), |file| {
|
|
||||||
FilesInner::hash_path(&self.by_id[*file])
|
|
||||||
});
|
|
||||||
id
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn hash_path(path: &Path) -> u64 {
|
|
||||||
let mut hasher = FxHasher::default();
|
|
||||||
path.hash(&mut hasher);
|
|
||||||
hasher.finish()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn try_get(&self, path: &Path) -> Option<FileId> {
|
|
||||||
let mut hasher = FxHasher::default();
|
|
||||||
path.hash(&mut hasher);
|
|
||||||
let hash = hasher.finish();
|
|
||||||
|
|
||||||
Some(
|
|
||||||
*self
|
|
||||||
.by_path
|
|
||||||
.raw_entry()
|
|
||||||
.from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path)?
|
|
||||||
.0,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the path for the file with the given id.
|
|
||||||
pub(crate) fn path(&self, id: FileId) -> Arc<Path> {
|
|
||||||
self.by_id[id].clone()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn iter(&self) -> impl Iterator<Item = (FileId, Arc<Path>)> + '_ {
|
|
||||||
self.by_path.keys().map(|id| (*id, self.by_id[*id].clone()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq for FilesInner {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.by_id == other.by_id
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Eq for FilesInner {}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn insert_path_twice_same_id() {
|
|
||||||
let files = Files::default();
|
|
||||||
let path = PathBuf::from("foo/bar");
|
|
||||||
let id1 = files.intern(&path);
|
|
||||||
let id2 = files.intern(&path);
|
|
||||||
assert_eq!(id1, id2);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn insert_different_paths_different_ids() {
|
|
||||||
let files = Files::default();
|
|
||||||
let path1 = PathBuf::from("foo/bar");
|
|
||||||
let path2 = PathBuf::from("foo/bar/baz");
|
|
||||||
let id1 = files.intern(&path1);
|
|
||||||
let id2 = files.intern(&path2);
|
|
||||||
assert_ne!(id1, id2);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn four_files() {
|
|
||||||
let files = Files::default();
|
|
||||||
let foo_path = PathBuf::from("foo");
|
|
||||||
let foo_id = files.intern(&foo_path);
|
|
||||||
let bar_path = PathBuf::from("bar");
|
|
||||||
files.intern(&bar_path);
|
|
||||||
let baz_path = PathBuf::from("baz");
|
|
||||||
files.intern(&baz_path);
|
|
||||||
let qux_path = PathBuf::from("qux");
|
|
||||||
files.intern(&qux_path);
|
|
||||||
|
|
||||||
let foo_id_2 = files.try_get(&foo_path).expect("foo_path to be found");
|
|
||||||
assert_eq!(foo_id_2, foo_id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
//! Key observations
|
|
||||||
//!
|
|
||||||
//! The HIR (High-Level Intermediate Representation) avoids allocations to large extends by:
|
|
||||||
//! * Using an arena per node type
|
|
||||||
//! * using ids and id ranges to reference items.
|
|
||||||
//!
|
|
||||||
//! Using separate arena per node type has the advantage that the IDs are relatively stable, because
|
|
||||||
//! they only change when a node of the same kind has been added or removed. (What's unclear is if that matters or if
|
|
||||||
//! it still triggers a re-compute because the AST-id in the node has changed).
|
|
||||||
//!
|
|
||||||
//! The HIR does not store all details. It mainly stores the *public* interface. There's a reference
|
|
||||||
//! back to the AST node to get more details.
|
|
||||||
//!
|
|
||||||
//!
|
|
||||||
|
|
||||||
use crate::ast_ids::{HasAstId, TypedAstId};
|
|
||||||
use crate::files::FileId;
|
|
||||||
use std::fmt::Formatter;
|
|
||||||
use std::hash::{Hash, Hasher};
|
|
||||||
|
|
||||||
pub struct HirAstId<N: HasAstId> {
|
|
||||||
file_id: FileId,
|
|
||||||
node_id: TypedAstId<N>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<N: HasAstId> Copy for HirAstId<N> {}
|
|
||||||
impl<N: HasAstId> Clone for HirAstId<N> {
|
|
||||||
fn clone(&self) -> Self {
|
|
||||||
*self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<N: HasAstId> PartialEq for HirAstId<N> {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.file_id == other.file_id && self.node_id == other.node_id
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<N: HasAstId> Eq for HirAstId<N> {}
|
|
||||||
|
|
||||||
impl<N: HasAstId> std::fmt::Debug for HirAstId<N> {
|
|
||||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
|
||||||
f.debug_struct("HirAstId")
|
|
||||||
.field("file_id", &self.file_id)
|
|
||||||
.field("node_id", &self.node_id)
|
|
||||||
.finish()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<N: HasAstId> Hash for HirAstId<N> {
|
|
||||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
|
||||||
self.file_id.hash(state);
|
|
||||||
self.node_id.hash(state);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<N: HasAstId> HirAstId<N> {
|
|
||||||
pub fn upcast<M: HasAstId>(self) -> HirAstId<M>
|
|
||||||
where
|
|
||||||
N: Into<M>,
|
|
||||||
{
|
|
||||||
HirAstId {
|
|
||||||
file_id: self.file_id,
|
|
||||||
node_id: self.node_id.upcast(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,556 +0,0 @@
|
|||||||
use std::ops::{Index, Range};
|
|
||||||
|
|
||||||
use ruff_index::{newtype_index, IndexVec};
|
|
||||||
use ruff_python_ast::visitor::preorder;
|
|
||||||
use ruff_python_ast::visitor::preorder::PreorderVisitor;
|
|
||||||
use ruff_python_ast::{
|
|
||||||
Decorator, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule, Stmt,
|
|
||||||
StmtAnnAssign, StmtAssign, StmtClassDef, StmtFunctionDef, StmtGlobal, StmtImport,
|
|
||||||
StmtImportFrom, StmtNonlocal, StmtTypeAlias, TypeParam, TypeParamParamSpec, TypeParamTypeVar,
|
|
||||||
TypeParamTypeVarTuple, WithItem,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::ast_ids::{AstIds, HasAstId};
|
|
||||||
use crate::files::FileId;
|
|
||||||
use crate::hir::HirAstId;
|
|
||||||
use crate::Name;
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
pub struct FunctionId;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub struct Function {
|
|
||||||
ast_id: HirAstId<StmtFunctionDef>,
|
|
||||||
name: Name,
|
|
||||||
parameters: Range<ParameterId>,
|
|
||||||
type_parameters: Range<TypeParameterId>, // TODO: type_parameters, return expression, decorators
|
|
||||||
}
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
pub struct ParameterId;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub struct Parameter {
|
|
||||||
kind: ParameterKind,
|
|
||||||
name: Name,
|
|
||||||
default: Option<()>, // TODO use expression HIR
|
|
||||||
ast_id: HirAstId<ruff_python_ast::Parameter>,
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO or should `Parameter` be an enum?
|
|
||||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
|
||||||
pub enum ParameterKind {
|
|
||||||
PositionalOnly,
|
|
||||||
Arguments,
|
|
||||||
Vararg,
|
|
||||||
KeywordOnly,
|
|
||||||
Kwarg,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
pub struct ClassId;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub struct Class {
|
|
||||||
name: Name,
|
|
||||||
ast_id: HirAstId<StmtClassDef>,
|
|
||||||
// TODO type parameters, inheritance, decorators, members
|
|
||||||
}
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
pub struct AssignmentId;
|
|
||||||
|
|
||||||
// This can have more than one name...
|
|
||||||
// but that means we can't implement `name()` on `ModuleItem`.
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub struct Assignment {
|
|
||||||
// TODO: Handle multiple names / targets
|
|
||||||
name: Name,
|
|
||||||
ast_id: HirAstId<StmtAssign>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub struct AnnotatedAssignment {
|
|
||||||
name: Name,
|
|
||||||
ast_id: HirAstId<StmtAnnAssign>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
pub struct AnnotatedAssignmentId;
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
pub struct TypeAliasId;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub struct TypeAlias {
|
|
||||||
name: Name,
|
|
||||||
ast_id: HirAstId<StmtTypeAlias>,
|
|
||||||
parameters: Range<TypeParameterId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
pub struct TypeParameterId;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub enum TypeParameter {
|
|
||||||
TypeVar(TypeParameterTypeVar),
|
|
||||||
ParamSpec(TypeParameterParamSpec),
|
|
||||||
TypeVarTuple(TypeParameterTypeVarTuple),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TypeParameter {
|
|
||||||
pub fn ast_id(&self) -> HirAstId<TypeParam> {
|
|
||||||
match self {
|
|
||||||
TypeParameter::TypeVar(type_var) => type_var.ast_id.upcast(),
|
|
||||||
TypeParameter::ParamSpec(param_spec) => param_spec.ast_id.upcast(),
|
|
||||||
TypeParameter::TypeVarTuple(type_var_tuple) => type_var_tuple.ast_id.upcast(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub struct TypeParameterTypeVar {
|
|
||||||
name: Name,
|
|
||||||
ast_id: HirAstId<TypeParamTypeVar>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub struct TypeParameterParamSpec {
|
|
||||||
name: Name,
|
|
||||||
ast_id: HirAstId<TypeParamParamSpec>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub struct TypeParameterTypeVarTuple {
|
|
||||||
name: Name,
|
|
||||||
ast_id: HirAstId<TypeParamTypeVarTuple>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
pub struct GlobalId;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub struct Global {
|
|
||||||
// TODO track names
|
|
||||||
ast_id: HirAstId<StmtGlobal>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
pub struct NonLocalId;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub struct NonLocal {
|
|
||||||
// TODO track names
|
|
||||||
ast_id: HirAstId<StmtNonlocal>,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub enum DefinitionId {
|
|
||||||
Function(FunctionId),
|
|
||||||
Parameter(ParameterId),
|
|
||||||
Class(ClassId),
|
|
||||||
Assignment(AssignmentId),
|
|
||||||
AnnotatedAssignment(AnnotatedAssignmentId),
|
|
||||||
Global(GlobalId),
|
|
||||||
NonLocal(NonLocalId),
|
|
||||||
TypeParameter(TypeParameterId),
|
|
||||||
TypeAlias(TypeAlias),
|
|
||||||
}
|
|
||||||
|
|
||||||
pub enum DefinitionItem {
|
|
||||||
Function(Function),
|
|
||||||
Parameter(Parameter),
|
|
||||||
Class(Class),
|
|
||||||
Assignment(Assignment),
|
|
||||||
AnnotatedAssignment(AnnotatedAssignment),
|
|
||||||
Global(Global),
|
|
||||||
NonLocal(NonLocal),
|
|
||||||
TypeParameter(TypeParameter),
|
|
||||||
TypeAlias(TypeAlias),
|
|
||||||
}
|
|
||||||
|
|
||||||
// The closest is rust-analyzers item-tree. It only represents "Items" which make the public interface of a module
|
|
||||||
// (it excludes any other statement or expressions). rust-analyzer uses it as the main input to the name resolution
|
|
||||||
// algorithm
|
|
||||||
// > It is the input to the name resolution algorithm, as well as to the queries defined in `adt.rs`,
|
|
||||||
// > `data.rs`, and most things in `attr.rs`.
|
|
||||||
//
|
|
||||||
// > One important purpose of this layer is to provide an "invalidation barrier" for incremental
|
|
||||||
// > computations: when typing inside an item body, the `ItemTree` of the modified file is typically
|
|
||||||
// > unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`).
|
|
||||||
//
|
|
||||||
// I haven't fully figured this out but I think that this composes the "public" interface of a module?
|
|
||||||
// But maybe that's too optimistic.
|
|
||||||
//
|
|
||||||
//
|
|
||||||
#[derive(Debug, Clone, Default, Eq, PartialEq)]
|
|
||||||
pub struct Definitions {
|
|
||||||
functions: IndexVec<FunctionId, Function>,
|
|
||||||
parameters: IndexVec<ParameterId, Parameter>,
|
|
||||||
classes: IndexVec<ClassId, Class>,
|
|
||||||
assignments: IndexVec<AssignmentId, Assignment>,
|
|
||||||
annotated_assignments: IndexVec<AnnotatedAssignmentId, AnnotatedAssignment>,
|
|
||||||
type_aliases: IndexVec<TypeAliasId, TypeAlias>,
|
|
||||||
type_parameters: IndexVec<TypeParameterId, TypeParameter>,
|
|
||||||
globals: IndexVec<GlobalId, Global>,
|
|
||||||
non_locals: IndexVec<NonLocalId, NonLocal>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Definitions {
|
|
||||||
pub fn from_module(module: &ModModule, ast_ids: &AstIds, file_id: FileId) -> Self {
|
|
||||||
let mut visitor = DefinitionsVisitor {
|
|
||||||
definitions: Definitions::default(),
|
|
||||||
ast_ids,
|
|
||||||
file_id,
|
|
||||||
};
|
|
||||||
|
|
||||||
visitor.visit_body(&module.body);
|
|
||||||
|
|
||||||
visitor.definitions
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Index<FunctionId> for Definitions {
|
|
||||||
type Output = Function;
|
|
||||||
|
|
||||||
fn index(&self, index: FunctionId) -> &Self::Output {
|
|
||||||
&self.functions[index]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Index<ParameterId> for Definitions {
|
|
||||||
type Output = Parameter;
|
|
||||||
|
|
||||||
fn index(&self, index: ParameterId) -> &Self::Output {
|
|
||||||
&self.parameters[index]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Index<ClassId> for Definitions {
|
|
||||||
type Output = Class;
|
|
||||||
|
|
||||||
fn index(&self, index: ClassId) -> &Self::Output {
|
|
||||||
&self.classes[index]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Index<AssignmentId> for Definitions {
|
|
||||||
type Output = Assignment;
|
|
||||||
|
|
||||||
fn index(&self, index: AssignmentId) -> &Self::Output {
|
|
||||||
&self.assignments[index]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Index<AnnotatedAssignmentId> for Definitions {
|
|
||||||
type Output = AnnotatedAssignment;
|
|
||||||
|
|
||||||
fn index(&self, index: AnnotatedAssignmentId) -> &Self::Output {
|
|
||||||
&self.annotated_assignments[index]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Index<TypeAliasId> for Definitions {
|
|
||||||
type Output = TypeAlias;
|
|
||||||
|
|
||||||
fn index(&self, index: TypeAliasId) -> &Self::Output {
|
|
||||||
&self.type_aliases[index]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Index<GlobalId> for Definitions {
|
|
||||||
type Output = Global;
|
|
||||||
|
|
||||||
fn index(&self, index: GlobalId) -> &Self::Output {
|
|
||||||
&self.globals[index]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Index<NonLocalId> for Definitions {
|
|
||||||
type Output = NonLocal;
|
|
||||||
|
|
||||||
fn index(&self, index: NonLocalId) -> &Self::Output {
|
|
||||||
&self.non_locals[index]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Index<TypeParameterId> for Definitions {
|
|
||||||
type Output = TypeParameter;
|
|
||||||
|
|
||||||
fn index(&self, index: TypeParameterId) -> &Self::Output {
|
|
||||||
&self.type_parameters[index]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
struct DefinitionsVisitor<'a> {
|
|
||||||
definitions: Definitions,
|
|
||||||
ast_ids: &'a AstIds,
|
|
||||||
file_id: FileId,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DefinitionsVisitor<'_> {
|
|
||||||
fn ast_id<N: HasAstId>(&self, node: &N) -> HirAstId<N> {
|
|
||||||
HirAstId {
|
|
||||||
file_id: self.file_id,
|
|
||||||
node_id: self.ast_ids.ast_id(node),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_function_def(&mut self, function: &StmtFunctionDef) -> FunctionId {
|
|
||||||
let name = Name::new(&function.name);
|
|
||||||
|
|
||||||
let first_type_parameter_id = self.definitions.type_parameters.next_index();
|
|
||||||
let mut last_type_parameter_id = first_type_parameter_id;
|
|
||||||
|
|
||||||
if let Some(type_params) = &function.type_params {
|
|
||||||
for parameter in &type_params.type_params {
|
|
||||||
let id = self.lower_type_parameter(parameter);
|
|
||||||
last_type_parameter_id = id;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let parameters = self.lower_parameters(&function.parameters);
|
|
||||||
|
|
||||||
self.definitions.functions.push(Function {
|
|
||||||
name,
|
|
||||||
ast_id: self.ast_id(function),
|
|
||||||
parameters,
|
|
||||||
type_parameters: first_type_parameter_id..last_type_parameter_id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_parameters(&mut self, parameters: &ruff_python_ast::Parameters) -> Range<ParameterId> {
|
|
||||||
let first_parameter_id = self.definitions.parameters.next_index();
|
|
||||||
let mut last_parameter_id = first_parameter_id;
|
|
||||||
|
|
||||||
for parameter in ¶meters.posonlyargs {
|
|
||||||
last_parameter_id = self.definitions.parameters.push(Parameter {
|
|
||||||
kind: ParameterKind::PositionalOnly,
|
|
||||||
name: Name::new(¶meter.parameter.name),
|
|
||||||
default: None,
|
|
||||||
ast_id: self.ast_id(¶meter.parameter),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(vararg) = ¶meters.vararg {
|
|
||||||
last_parameter_id = self.definitions.parameters.push(Parameter {
|
|
||||||
kind: ParameterKind::Vararg,
|
|
||||||
name: Name::new(&vararg.name),
|
|
||||||
default: None,
|
|
||||||
ast_id: self.ast_id(vararg),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
for parameter in ¶meters.kwonlyargs {
|
|
||||||
last_parameter_id = self.definitions.parameters.push(Parameter {
|
|
||||||
kind: ParameterKind::KeywordOnly,
|
|
||||||
name: Name::new(¶meter.parameter.name),
|
|
||||||
default: None,
|
|
||||||
ast_id: self.ast_id(¶meter.parameter),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(kwarg) = ¶meters.kwarg {
|
|
||||||
last_parameter_id = self.definitions.parameters.push(Parameter {
|
|
||||||
kind: ParameterKind::KeywordOnly,
|
|
||||||
name: Name::new(&kwarg.name),
|
|
||||||
default: None,
|
|
||||||
ast_id: self.ast_id(kwarg),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
first_parameter_id..last_parameter_id
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_class_def(&mut self, class: &StmtClassDef) -> ClassId {
|
|
||||||
let name = Name::new(&class.name);
|
|
||||||
|
|
||||||
self.definitions.classes.push(Class {
|
|
||||||
name,
|
|
||||||
ast_id: self.ast_id(class),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_assignment(&mut self, assignment: &StmtAssign) {
|
|
||||||
// FIXME handle multiple names
|
|
||||||
if let Some(Expr::Name(name)) = assignment.targets.first() {
|
|
||||||
self.definitions.assignments.push(Assignment {
|
|
||||||
name: Name::new(&name.id),
|
|
||||||
ast_id: self.ast_id(assignment),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_annotated_assignment(&mut self, annotated_assignment: &StmtAnnAssign) {
|
|
||||||
if let Expr::Name(name) = &*annotated_assignment.target {
|
|
||||||
self.definitions
|
|
||||||
.annotated_assignments
|
|
||||||
.push(AnnotatedAssignment {
|
|
||||||
name: Name::new(&name.id),
|
|
||||||
ast_id: self.ast_id(annotated_assignment),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_type_alias(&mut self, type_alias: &StmtTypeAlias) {
|
|
||||||
if let Expr::Name(name) = &*type_alias.name {
|
|
||||||
let name = Name::new(&name.id);
|
|
||||||
|
|
||||||
let lower_parameters_id = self.definitions.type_parameters.next_index();
|
|
||||||
let mut last_parameter_id = lower_parameters_id;
|
|
||||||
|
|
||||||
if let Some(type_params) = &type_alias.type_params {
|
|
||||||
for type_parameter in &type_params.type_params {
|
|
||||||
let id = self.lower_type_parameter(type_parameter);
|
|
||||||
last_parameter_id = id;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
self.definitions.type_aliases.push(TypeAlias {
|
|
||||||
name,
|
|
||||||
ast_id: self.ast_id(type_alias),
|
|
||||||
parameters: lower_parameters_id..last_parameter_id,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_type_parameter(&mut self, type_parameter: &TypeParam) -> TypeParameterId {
|
|
||||||
match type_parameter {
|
|
||||||
TypeParam::TypeVar(type_var) => {
|
|
||||||
self.definitions
|
|
||||||
.type_parameters
|
|
||||||
.push(TypeParameter::TypeVar(TypeParameterTypeVar {
|
|
||||||
name: Name::new(&type_var.name),
|
|
||||||
ast_id: self.ast_id(type_var),
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
TypeParam::ParamSpec(param_spec) => {
|
|
||||||
self.definitions
|
|
||||||
.type_parameters
|
|
||||||
.push(TypeParameter::ParamSpec(TypeParameterParamSpec {
|
|
||||||
name: Name::new(¶m_spec.name),
|
|
||||||
ast_id: self.ast_id(param_spec),
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
TypeParam::TypeVarTuple(type_var_tuple) => {
|
|
||||||
self.definitions
|
|
||||||
.type_parameters
|
|
||||||
.push(TypeParameter::TypeVarTuple(TypeParameterTypeVarTuple {
|
|
||||||
name: Name::new(&type_var_tuple.name),
|
|
||||||
ast_id: self.ast_id(type_var_tuple),
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_import(&mut self, _import: &StmtImport) {
|
|
||||||
// TODO
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_import_from(&mut self, _import_from: &StmtImportFrom) {
|
|
||||||
// TODO
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_global(&mut self, global: &StmtGlobal) -> GlobalId {
|
|
||||||
self.definitions.globals.push(Global {
|
|
||||||
ast_id: self.ast_id(global),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_non_local(&mut self, non_local: &StmtNonlocal) -> NonLocalId {
|
|
||||||
self.definitions.non_locals.push(NonLocal {
|
|
||||||
ast_id: self.ast_id(non_local),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_except_handler(&mut self, _except_handler: &ExceptHandlerExceptHandler) {
|
|
||||||
// TODO
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_with_item(&mut self, _with_item: &WithItem) {
|
|
||||||
// TODO
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lower_match_case(&mut self, _match_case: &MatchCase) {
|
|
||||||
// TODO
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PreorderVisitor<'_> for DefinitionsVisitor<'_> {
|
|
||||||
fn visit_stmt(&mut self, stmt: &Stmt) {
|
|
||||||
match stmt {
|
|
||||||
// Definition statements
|
|
||||||
Stmt::FunctionDef(definition) => {
|
|
||||||
self.lower_function_def(definition);
|
|
||||||
self.visit_body(&definition.body);
|
|
||||||
}
|
|
||||||
Stmt::ClassDef(definition) => {
|
|
||||||
self.lower_class_def(definition);
|
|
||||||
self.visit_body(&definition.body);
|
|
||||||
}
|
|
||||||
Stmt::Assign(assignment) => {
|
|
||||||
self.lower_assignment(assignment);
|
|
||||||
}
|
|
||||||
Stmt::AnnAssign(annotated_assignment) => {
|
|
||||||
self.lower_annotated_assignment(annotated_assignment);
|
|
||||||
}
|
|
||||||
Stmt::TypeAlias(type_alias) => {
|
|
||||||
self.lower_type_alias(type_alias);
|
|
||||||
}
|
|
||||||
|
|
||||||
Stmt::Import(import) => self.lower_import(import),
|
|
||||||
Stmt::ImportFrom(import_from) => self.lower_import_from(import_from),
|
|
||||||
Stmt::Global(global) => {
|
|
||||||
self.lower_global(global);
|
|
||||||
}
|
|
||||||
Stmt::Nonlocal(non_local) => {
|
|
||||||
self.lower_non_local(non_local);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Visit the compound statement bodies because they can contain other definitions.
|
|
||||||
Stmt::For(_)
|
|
||||||
| Stmt::While(_)
|
|
||||||
| Stmt::If(_)
|
|
||||||
| Stmt::With(_)
|
|
||||||
| Stmt::Match(_)
|
|
||||||
| Stmt::Try(_) => {
|
|
||||||
preorder::walk_stmt(self, stmt);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Skip over simple statements because they can't contain any other definitions.
|
|
||||||
Stmt::Return(_)
|
|
||||||
| Stmt::Delete(_)
|
|
||||||
| Stmt::AugAssign(_)
|
|
||||||
| Stmt::Raise(_)
|
|
||||||
| Stmt::Assert(_)
|
|
||||||
| Stmt::Expr(_)
|
|
||||||
| Stmt::Pass(_)
|
|
||||||
| Stmt::Break(_)
|
|
||||||
| Stmt::Continue(_)
|
|
||||||
| Stmt::IpyEscapeCommand(_) => {
|
|
||||||
// No op
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_expr(&mut self, _: &'_ Expr) {}
|
|
||||||
|
|
||||||
fn visit_decorator(&mut self, _decorator: &'_ Decorator) {}
|
|
||||||
|
|
||||||
fn visit_except_handler(&mut self, except_handler: &'_ ExceptHandler) {
|
|
||||||
match except_handler {
|
|
||||||
ExceptHandler::ExceptHandler(except_handler) => {
|
|
||||||
self.lower_except_handler(except_handler);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_with_item(&mut self, with_item: &'_ WithItem) {
|
|
||||||
self.lower_with_item(with_item);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_match_case(&mut self, match_case: &'_ MatchCase) {
|
|
||||||
self.lower_match_case(match_case);
|
|
||||||
self.visit_body(&match_case.body);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,109 +0,0 @@
|
|||||||
use std::fmt::Formatter;
|
|
||||||
use std::hash::BuildHasherDefault;
|
|
||||||
use std::ops::Deref;
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
|
|
||||||
use rustc_hash::{FxHashSet, FxHasher};
|
|
||||||
|
|
||||||
use crate::files::FileId;
|
|
||||||
|
|
||||||
pub mod ast_ids;
|
|
||||||
pub mod cache;
|
|
||||||
pub mod cancellation;
|
|
||||||
pub mod db;
|
|
||||||
pub mod files;
|
|
||||||
pub mod hir;
|
|
||||||
pub mod lint;
|
|
||||||
pub mod module;
|
|
||||||
mod parse;
|
|
||||||
pub mod program;
|
|
||||||
pub mod source;
|
|
||||||
mod symbols;
|
|
||||||
mod types;
|
|
||||||
pub mod watch;
|
|
||||||
|
|
||||||
pub(crate) type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
|
|
||||||
#[allow(unused)]
|
|
||||||
pub(crate) type FxDashSet<V> = dashmap::DashSet<V, BuildHasherDefault<FxHasher>>;
|
|
||||||
pub(crate) type FxIndexSet<V> = indexmap::set::IndexSet<V, BuildHasherDefault<FxHasher>>;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct Workspace {
|
|
||||||
/// TODO this should be a resolved path. We should probably use a newtype wrapper that guarantees that
|
|
||||||
/// PATH is a UTF-8 path and is normalized.
|
|
||||||
root: PathBuf,
|
|
||||||
/// The files that are open in the workspace.
|
|
||||||
///
|
|
||||||
/// * Editor: The files that are actively being edited in the editor (the user has a tab open with the file).
|
|
||||||
/// * CLI: The resolved files passed as arguments to the CLI.
|
|
||||||
open_files: FxHashSet<FileId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Workspace {
|
|
||||||
pub fn new(root: PathBuf) -> Self {
|
|
||||||
Self {
|
|
||||||
root,
|
|
||||||
open_files: FxHashSet::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn root(&self) -> &Path {
|
|
||||||
self.root.as_path()
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO having the content in workspace feels wrong.
|
|
||||||
pub fn open_file(&mut self, file_id: FileId) {
|
|
||||||
self.open_files.insert(file_id);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn close_file(&mut self, file_id: FileId) {
|
|
||||||
self.open_files.remove(&file_id);
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO introduce an `OpenFile` type instead of using an anonymous tuple.
|
|
||||||
pub fn open_files(&self) -> impl Iterator<Item = FileId> + '_ {
|
|
||||||
self.open_files.iter().copied()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_file_open(&self, file_id: FileId) -> bool {
|
|
||||||
self.open_files.contains(&file_id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
|
|
||||||
pub struct Name(smol_str::SmolStr);
|
|
||||||
|
|
||||||
impl Name {
|
|
||||||
#[inline]
|
|
||||||
pub fn new(name: &str) -> Self {
|
|
||||||
Self(smol_str::SmolStr::new(name))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn as_str(&self) -> &str {
|
|
||||||
self.0.as_str()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for Name {
|
|
||||||
type Target = str;
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
self.as_str()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> From<T> for Name
|
|
||||||
where
|
|
||||||
T: Into<smol_str::SmolStr>,
|
|
||||||
{
|
|
||||||
fn from(value: T) -> Self {
|
|
||||||
Self(value.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for Name {
|
|
||||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
|
||||||
f.write_str(self.as_str())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,321 +0,0 @@
|
|||||||
use std::cell::RefCell;
|
|
||||||
use std::ops::{Deref, DerefMut};
|
|
||||||
use std::sync::Arc;
|
|
||||||
use std::time::Duration;
|
|
||||||
|
|
||||||
use ruff_python_ast::visitor::Visitor;
|
|
||||||
use ruff_python_ast::{ModModule, StringLiteral};
|
|
||||||
|
|
||||||
use crate::cache::KeyValueCache;
|
|
||||||
use crate::db::{LintDb, LintJar, QueryResult};
|
|
||||||
use crate::files::FileId;
|
|
||||||
use crate::module::ModuleName;
|
|
||||||
use crate::parse::{parse, Parsed};
|
|
||||||
use crate::source::{source_text, Source};
|
|
||||||
use crate::symbols::{
|
|
||||||
resolve_global_symbol, symbol_table, Definition, GlobalSymbolId, SymbolId, SymbolTable,
|
|
||||||
};
|
|
||||||
use crate::types::{infer_definition_type, infer_symbol_type, Type};
|
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", skip(db))]
|
|
||||||
pub(crate) fn lint_syntax(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
|
|
||||||
let lint_jar: &LintJar = db.jar()?;
|
|
||||||
let storage = &lint_jar.lint_syntax;
|
|
||||||
|
|
||||||
#[allow(clippy::print_stdout)]
|
|
||||||
if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
|
|
||||||
for i in 0..10 {
|
|
||||||
db.cancelled()?;
|
|
||||||
println!("RED_KNOT_SLOW_LINT is set, sleeping for {i}/10 seconds");
|
|
||||||
std::thread::sleep(Duration::from_secs(1));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
storage.get(&file_id, |file_id| {
|
|
||||||
let mut diagnostics = Vec::new();
|
|
||||||
|
|
||||||
let source = source_text(db.upcast(), *file_id)?;
|
|
||||||
lint_lines(source.text(), &mut diagnostics);
|
|
||||||
|
|
||||||
let parsed = parse(db.upcast(), *file_id)?;
|
|
||||||
|
|
||||||
if parsed.errors().is_empty() {
|
|
||||||
let ast = parsed.ast();
|
|
||||||
|
|
||||||
let mut visitor = SyntaxLintVisitor {
|
|
||||||
diagnostics,
|
|
||||||
source: source.text(),
|
|
||||||
};
|
|
||||||
visitor.visit_body(&ast.body);
|
|
||||||
diagnostics = visitor.diagnostics;
|
|
||||||
} else {
|
|
||||||
diagnostics.extend(parsed.errors().iter().map(std::string::ToString::to_string));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Diagnostics::from(diagnostics))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
|
|
||||||
for (line_number, line) in source.lines().enumerate() {
|
|
||||||
if line.len() < 88 {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let char_count = line.chars().count();
|
|
||||||
if char_count > 88 {
|
|
||||||
diagnostics.push(format!(
|
|
||||||
"Line {} is too long ({} characters)",
|
|
||||||
line_number + 1,
|
|
||||||
char_count
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", skip(db))]
|
|
||||||
pub(crate) fn lint_semantic(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
|
|
||||||
let lint_jar: &LintJar = db.jar()?;
|
|
||||||
let storage = &lint_jar.lint_semantic;
|
|
||||||
|
|
||||||
storage.get(&file_id, |file_id| {
|
|
||||||
let source = source_text(db.upcast(), *file_id)?;
|
|
||||||
let parsed = parse(db.upcast(), *file_id)?;
|
|
||||||
let symbols = symbol_table(db.upcast(), *file_id)?;
|
|
||||||
|
|
||||||
let context = SemanticLintContext {
|
|
||||||
file_id: *file_id,
|
|
||||||
source,
|
|
||||||
parsed,
|
|
||||||
symbols,
|
|
||||||
db,
|
|
||||||
diagnostics: RefCell::new(Vec::new()),
|
|
||||||
};
|
|
||||||
|
|
||||||
lint_unresolved_imports(&context)?;
|
|
||||||
lint_bad_overrides(&context)?;
|
|
||||||
|
|
||||||
Ok(Diagnostics::from(context.diagnostics.take()))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lint_unresolved_imports(context: &SemanticLintContext) -> QueryResult<()> {
|
|
||||||
// TODO: Consider iterating over the dependencies (imports) only instead of all definitions.
|
|
||||||
for (symbol, definition) in context.symbols().all_definitions() {
|
|
||||||
match definition {
|
|
||||||
Definition::Import(import) => {
|
|
||||||
let ty = context.infer_symbol_type(symbol)?;
|
|
||||||
|
|
||||||
if ty.is_unknown() {
|
|
||||||
context.push_diagnostic(format!("Unresolved module {}", import.module));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Definition::ImportFrom(import) => {
|
|
||||||
let ty = context.infer_symbol_type(symbol)?;
|
|
||||||
|
|
||||||
if ty.is_unknown() {
|
|
||||||
let module_name = import.module().map(Deref::deref).unwrap_or_default();
|
|
||||||
let message = if import.level() > 0 {
|
|
||||||
format!(
|
|
||||||
"Unresolved relative import '{}' from {}{}",
|
|
||||||
import.name(),
|
|
||||||
".".repeat(import.level() as usize),
|
|
||||||
module_name
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
format!(
|
|
||||||
"Unresolved import '{}' from '{}'",
|
|
||||||
import.name(),
|
|
||||||
module_name
|
|
||||||
)
|
|
||||||
};
|
|
||||||
|
|
||||||
context.push_diagnostic(message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lint_bad_overrides(context: &SemanticLintContext) -> QueryResult<()> {
|
|
||||||
// TODO we should have a special marker on the real typing module (from typeshed) so if you
|
|
||||||
// have your own "typing" module in your project, we don't consider it THE typing module (and
|
|
||||||
// same for other stdlib modules that our lint rules care about)
|
|
||||||
let Some(typing_override) =
|
|
||||||
resolve_global_symbol(context.db.upcast(), ModuleName::new("typing"), "override")?
|
|
||||||
else {
|
|
||||||
// TODO once we bundle typeshed, this should be unreachable!()
|
|
||||||
return Ok(());
|
|
||||||
};
|
|
||||||
|
|
||||||
// TODO we should maybe index definitions by type instead of iterating all, or else iterate all
|
|
||||||
// just once, match, and branch to all lint rules that care about a type of definition
|
|
||||||
for (symbol, definition) in context.symbols().all_definitions() {
|
|
||||||
if !matches!(definition, Definition::FunctionDef(_)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
let ty = infer_definition_type(
|
|
||||||
context.db.upcast(),
|
|
||||||
GlobalSymbolId {
|
|
||||||
file_id: context.file_id,
|
|
||||||
symbol_id: symbol,
|
|
||||||
},
|
|
||||||
definition.clone(),
|
|
||||||
)?;
|
|
||||||
let Type::Function(func) = ty else {
|
|
||||||
unreachable!("type of a FunctionDef should always be a Function");
|
|
||||||
};
|
|
||||||
let Some(class) = func.get_containing_class(context.db.upcast())? else {
|
|
||||||
// not a method of a class
|
|
||||||
continue;
|
|
||||||
};
|
|
||||||
if func.has_decorator(context.db.upcast(), typing_override)? {
|
|
||||||
let method_name = func.name(context.db.upcast())?;
|
|
||||||
if class
|
|
||||||
.get_super_class_member(context.db.upcast(), &method_name)?
|
|
||||||
.is_none()
|
|
||||||
{
|
|
||||||
// TODO should have a qualname() method to support nested classes
|
|
||||||
context.push_diagnostic(
|
|
||||||
format!(
|
|
||||||
"Method {}.{} is decorated with `typing.override` but does not override any base class method",
|
|
||||||
class.name(context.db.upcast())?,
|
|
||||||
method_name,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct SemanticLintContext<'a> {
|
|
||||||
file_id: FileId,
|
|
||||||
source: Source,
|
|
||||||
parsed: Parsed,
|
|
||||||
symbols: Arc<SymbolTable>,
|
|
||||||
db: &'a dyn LintDb,
|
|
||||||
diagnostics: RefCell<Vec<String>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> SemanticLintContext<'a> {
|
|
||||||
pub fn source_text(&self) -> &str {
|
|
||||||
self.source.text()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn file_id(&self) -> FileId {
|
|
||||||
self.file_id
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn ast(&self) -> &ModModule {
|
|
||||||
self.parsed.ast()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn symbols(&self) -> &SymbolTable {
|
|
||||||
&self.symbols
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn infer_symbol_type(&self, symbol_id: SymbolId) -> QueryResult<Type> {
|
|
||||||
infer_symbol_type(
|
|
||||||
self.db.upcast(),
|
|
||||||
GlobalSymbolId {
|
|
||||||
file_id: self.file_id,
|
|
||||||
symbol_id,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn push_diagnostic(&self, diagnostic: String) {
|
|
||||||
self.diagnostics.borrow_mut().push(diagnostic);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn extend_diagnostics(&mut self, diagnostics: impl IntoIterator<Item = String>) {
|
|
||||||
self.diagnostics.get_mut().extend(diagnostics);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
struct SyntaxLintVisitor<'a> {
|
|
||||||
diagnostics: Vec<String>,
|
|
||||||
source: &'a str,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Visitor<'_> for SyntaxLintVisitor<'_> {
|
|
||||||
fn visit_string_literal(&mut self, string_literal: &'_ StringLiteral) {
|
|
||||||
// A very naive implementation of use double quotes
|
|
||||||
let text = &self.source[string_literal.range];
|
|
||||||
|
|
||||||
if text.starts_with('\'') {
|
|
||||||
self.diagnostics
|
|
||||||
.push("Use double quotes for strings".to_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub enum Diagnostics {
|
|
||||||
Empty,
|
|
||||||
List(Arc<Vec<String>>),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Diagnostics {
|
|
||||||
pub fn as_slice(&self) -> &[String] {
|
|
||||||
match self {
|
|
||||||
Diagnostics::Empty => &[],
|
|
||||||
Diagnostics::List(list) => list.as_slice(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for Diagnostics {
|
|
||||||
type Target = [String];
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
self.as_slice()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<Vec<String>> for Diagnostics {
|
|
||||||
fn from(value: Vec<String>) -> Self {
|
|
||||||
if value.is_empty() {
|
|
||||||
Diagnostics::Empty
|
|
||||||
} else {
|
|
||||||
Diagnostics::List(Arc::new(value))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Default, Debug)]
|
|
||||||
pub struct LintSyntaxStorage(KeyValueCache<FileId, Diagnostics>);
|
|
||||||
|
|
||||||
impl Deref for LintSyntaxStorage {
|
|
||||||
type Target = KeyValueCache<FileId, Diagnostics>;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DerefMut for LintSyntaxStorage {
|
|
||||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
|
||||||
&mut self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Default, Debug)]
|
|
||||||
pub struct LintSemanticStorage(KeyValueCache<FileId, Diagnostics>);
|
|
||||||
|
|
||||||
impl Deref for LintSemanticStorage {
|
|
||||||
type Target = KeyValueCache<FileId, Diagnostics>;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DerefMut for LintSemanticStorage {
|
|
||||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
|
||||||
&mut self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,359 +0,0 @@
|
|||||||
#![allow(clippy::dbg_macro)]
|
|
||||||
|
|
||||||
use std::path::Path;
|
|
||||||
use std::sync::Mutex;
|
|
||||||
|
|
||||||
use crossbeam::channel as crossbeam_channel;
|
|
||||||
use tracing::subscriber::Interest;
|
|
||||||
use tracing::{Level, Metadata};
|
|
||||||
use tracing_subscriber::filter::LevelFilter;
|
|
||||||
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
|
|
||||||
use tracing_subscriber::{Layer, Registry};
|
|
||||||
use tracing_tree::time::Uptime;
|
|
||||||
|
|
||||||
use red_knot::db::{HasJar, ParallelDatabase, QueryError, SourceDb, SourceJar};
|
|
||||||
use red_knot::module::{set_module_search_paths, ModuleSearchPath, ModuleSearchPathKind};
|
|
||||||
use red_knot::program::check::ExecutionMode;
|
|
||||||
use red_knot::program::{FileWatcherChange, Program};
|
|
||||||
use red_knot::watch::FileWatcher;
|
|
||||||
use red_knot::Workspace;
|
|
||||||
|
|
||||||
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
|
|
||||||
fn main() -> anyhow::Result<()> {
|
|
||||||
setup_tracing();
|
|
||||||
|
|
||||||
let arguments: Vec<_> = std::env::args().collect();
|
|
||||||
|
|
||||||
if arguments.len() < 2 {
|
|
||||||
eprintln!("Usage: red_knot <path>");
|
|
||||||
return Err(anyhow::anyhow!("Invalid arguments"));
|
|
||||||
}
|
|
||||||
|
|
||||||
let entry_point = Path::new(&arguments[1]);
|
|
||||||
|
|
||||||
if !entry_point.exists() {
|
|
||||||
eprintln!("The entry point does not exist.");
|
|
||||||
return Err(anyhow::anyhow!("Invalid arguments"));
|
|
||||||
}
|
|
||||||
|
|
||||||
if !entry_point.is_file() {
|
|
||||||
eprintln!("The entry point is not a file.");
|
|
||||||
return Err(anyhow::anyhow!("Invalid arguments"));
|
|
||||||
}
|
|
||||||
|
|
||||||
let workspace_folder = entry_point.parent().unwrap();
|
|
||||||
let workspace = Workspace::new(workspace_folder.to_path_buf());
|
|
||||||
|
|
||||||
let workspace_search_path = ModuleSearchPath::new(
|
|
||||||
workspace.root().to_path_buf(),
|
|
||||||
ModuleSearchPathKind::FirstParty,
|
|
||||||
);
|
|
||||||
let mut program = Program::new(workspace);
|
|
||||||
set_module_search_paths(&mut program, vec![workspace_search_path]);
|
|
||||||
|
|
||||||
let entry_id = program.file_id(entry_point);
|
|
||||||
program.workspace_mut().open_file(entry_id);
|
|
||||||
|
|
||||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new();
|
|
||||||
|
|
||||||
// Listen to Ctrl+C and abort the watch mode.
|
|
||||||
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
|
|
||||||
ctrlc::set_handler(move || {
|
|
||||||
let mut lock = main_loop_cancellation_token.lock().unwrap();
|
|
||||||
|
|
||||||
if let Some(token) = lock.take() {
|
|
||||||
token.stop();
|
|
||||||
}
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let file_changes_notifier = main_loop.file_changes_notifier();
|
|
||||||
|
|
||||||
// Watch for file changes and re-trigger the analysis.
|
|
||||||
let mut file_watcher = FileWatcher::new(move |changes| {
|
|
||||||
file_changes_notifier.notify(changes);
|
|
||||||
})?;
|
|
||||||
|
|
||||||
file_watcher.watch_folder(workspace_folder)?;
|
|
||||||
|
|
||||||
main_loop.run(&mut program);
|
|
||||||
|
|
||||||
let source_jar: &SourceJar = program.jar().unwrap();
|
|
||||||
|
|
||||||
dbg!(source_jar.parsed.statistics());
|
|
||||||
dbg!(source_jar.sources.statistics());
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
struct MainLoop {
|
|
||||||
orchestrator_sender: crossbeam_channel::Sender<OrchestratorMessage>,
|
|
||||||
main_loop_receiver: crossbeam_channel::Receiver<MainLoopMessage>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl MainLoop {
|
|
||||||
fn new() -> (Self, MainLoopCancellationToken) {
|
|
||||||
let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
|
|
||||||
let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);
|
|
||||||
|
|
||||||
let mut orchestrator = Orchestrator {
|
|
||||||
receiver: orchestrator_receiver,
|
|
||||||
sender: main_loop_sender.clone(),
|
|
||||||
revision: 0,
|
|
||||||
};
|
|
||||||
|
|
||||||
std::thread::spawn(move || {
|
|
||||||
orchestrator.run();
|
|
||||||
});
|
|
||||||
|
|
||||||
(
|
|
||||||
Self {
|
|
||||||
orchestrator_sender,
|
|
||||||
main_loop_receiver,
|
|
||||||
},
|
|
||||||
MainLoopCancellationToken {
|
|
||||||
sender: main_loop_sender,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn file_changes_notifier(&self) -> FileChangesNotifier {
|
|
||||||
FileChangesNotifier {
|
|
||||||
sender: self.orchestrator_sender.clone(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(self, program: &mut Program) {
|
|
||||||
self.orchestrator_sender
|
|
||||||
.send(OrchestratorMessage::Run)
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
for message in &self.main_loop_receiver {
|
|
||||||
tracing::trace!("Main Loop: Tick");
|
|
||||||
|
|
||||||
match message {
|
|
||||||
MainLoopMessage::CheckProgram { revision } => {
|
|
||||||
let program = program.snapshot();
|
|
||||||
let sender = self.orchestrator_sender.clone();
|
|
||||||
|
|
||||||
// Spawn a new task that checks the program. This needs to be done in a separate thread
|
|
||||||
// to prevent blocking the main loop here.
|
|
||||||
rayon::spawn(move || match program.check(ExecutionMode::ThreadPool) {
|
|
||||||
Ok(result) => {
|
|
||||||
sender
|
|
||||||
.send(OrchestratorMessage::CheckProgramCompleted {
|
|
||||||
diagnostics: result,
|
|
||||||
revision,
|
|
||||||
})
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
Err(QueryError::Cancelled) => {}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
MainLoopMessage::ApplyChanges(changes) => {
|
|
||||||
// Automatically cancels any pending queries and waits for them to complete.
|
|
||||||
program.apply_changes(changes);
|
|
||||||
}
|
|
||||||
MainLoopMessage::CheckCompleted(diagnostics) => {
|
|
||||||
dbg!(diagnostics);
|
|
||||||
}
|
|
||||||
MainLoopMessage::Exit => {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Drop for MainLoop {
|
|
||||||
fn drop(&mut self) {
|
|
||||||
self.orchestrator_sender
|
|
||||||
.send(OrchestratorMessage::Shutdown)
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
struct FileChangesNotifier {
|
|
||||||
sender: crossbeam_channel::Sender<OrchestratorMessage>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FileChangesNotifier {
|
|
||||||
fn notify(&self, changes: Vec<FileWatcherChange>) {
|
|
||||||
self.sender
|
|
||||||
.send(OrchestratorMessage::FileChanges(changes))
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
struct MainLoopCancellationToken {
|
|
||||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl MainLoopCancellationToken {
|
|
||||||
fn stop(self) {
|
|
||||||
self.sender.send(MainLoopMessage::Exit).unwrap();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
struct Orchestrator {
|
|
||||||
/// Sends messages to the main loop.
|
|
||||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
|
||||||
/// Receives messages from the main loop.
|
|
||||||
receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
|
|
||||||
revision: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Orchestrator {
|
|
||||||
fn run(&mut self) {
|
|
||||||
while let Ok(message) = self.receiver.recv() {
|
|
||||||
match message {
|
|
||||||
OrchestratorMessage::Run => {
|
|
||||||
self.sender
|
|
||||||
.send(MainLoopMessage::CheckProgram {
|
|
||||||
revision: self.revision,
|
|
||||||
})
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
OrchestratorMessage::CheckProgramCompleted {
|
|
||||||
diagnostics,
|
|
||||||
revision,
|
|
||||||
} => {
|
|
||||||
// Only take the diagnostics if they are for the latest revision.
|
|
||||||
if self.revision == revision {
|
|
||||||
self.sender
|
|
||||||
.send(MainLoopMessage::CheckCompleted(diagnostics))
|
|
||||||
.unwrap();
|
|
||||||
} else {
|
|
||||||
tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
OrchestratorMessage::FileChanges(changes) => {
|
|
||||||
// Request cancellation, but wait until all analysis tasks have completed to
|
|
||||||
// avoid stale messages in the next main loop.
|
|
||||||
|
|
||||||
self.revision += 1;
|
|
||||||
self.debounce_changes(changes);
|
|
||||||
}
|
|
||||||
OrchestratorMessage::Shutdown => {
|
|
||||||
return self.shutdown();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
|
|
||||||
loop {
|
|
||||||
// Consume possibly incoming file change messages before running a new analysis, but don't wait for more than 100ms.
|
|
||||||
crossbeam_channel::select! {
|
|
||||||
recv(self.receiver) -> message => {
|
|
||||||
match message {
|
|
||||||
Ok(OrchestratorMessage::Shutdown) => {
|
|
||||||
return self.shutdown();
|
|
||||||
}
|
|
||||||
Ok(OrchestratorMessage::FileChanges(file_changes)) => {
|
|
||||||
changes.extend(file_changes);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(OrchestratorMessage::CheckProgramCompleted { .. })=> {
|
|
||||||
// disregard any outdated completion message.
|
|
||||||
}
|
|
||||||
Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),
|
|
||||||
|
|
||||||
Err(_) => {
|
|
||||||
// There are no more senders, no point in waiting for more messages
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
default(std::time::Duration::from_millis(10)) => {
|
|
||||||
// No more file changes after 10 ms, send the changes and schedule a new analysis
|
|
||||||
self.sender.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
|
|
||||||
self.sender.send(MainLoopMessage::CheckProgram { revision: self.revision}).unwrap();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(clippy::unused_self)]
|
|
||||||
fn shutdown(&self) {
|
|
||||||
tracing::trace!("Shutting down orchestrator.");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Message sent from the orchestrator to the main loop.
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum MainLoopMessage {
|
|
||||||
CheckProgram { revision: usize },
|
|
||||||
CheckCompleted(Vec<String>),
|
|
||||||
ApplyChanges(Vec<FileWatcherChange>),
|
|
||||||
Exit,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum OrchestratorMessage {
|
|
||||||
Run,
|
|
||||||
Shutdown,
|
|
||||||
|
|
||||||
CheckProgramCompleted {
|
|
||||||
diagnostics: Vec<String>,
|
|
||||||
revision: usize,
|
|
||||||
},
|
|
||||||
|
|
||||||
FileChanges(Vec<FileWatcherChange>),
|
|
||||||
}
|
|
||||||
|
|
||||||
fn setup_tracing() {
|
|
||||||
let subscriber = Registry::default().with(
|
|
||||||
tracing_tree::HierarchicalLayer::default()
|
|
||||||
.with_indent_lines(true)
|
|
||||||
.with_indent_amount(2)
|
|
||||||
.with_bracketed_fields(true)
|
|
||||||
.with_thread_ids(true)
|
|
||||||
.with_targets(true)
|
|
||||||
.with_writer(|| Box::new(std::io::stderr()))
|
|
||||||
.with_timer(Uptime::default())
|
|
||||||
.with_filter(LoggingFilter {
|
|
||||||
trace_level: Level::TRACE,
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
tracing::subscriber::set_global_default(subscriber).unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
struct LoggingFilter {
|
|
||||||
trace_level: Level,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl LoggingFilter {
|
|
||||||
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
|
|
||||||
let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
|
|
||||||
self.trace_level
|
|
||||||
} else {
|
|
||||||
Level::INFO
|
|
||||||
};
|
|
||||||
|
|
||||||
meta.level() <= &filter
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<S> Filter<S> for LoggingFilter {
|
|
||||||
fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
|
|
||||||
self.is_enabled(meta)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
|
|
||||||
if self.is_enabled(meta) {
|
|
||||||
Interest::always()
|
|
||||||
} else {
|
|
||||||
Interest::never()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn max_level_hint(&self) -> Option<LevelFilter> {
|
|
||||||
Some(LevelFilter::from_level(self.trace_level))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,93 +0,0 @@
|
|||||||
use std::ops::{Deref, DerefMut};
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use ruff_python_ast as ast;
|
|
||||||
use ruff_python_parser::{Mode, ParseError};
|
|
||||||
use ruff_text_size::{Ranged, TextRange};
|
|
||||||
|
|
||||||
use crate::cache::KeyValueCache;
|
|
||||||
use crate::db::{QueryResult, SourceDb};
|
|
||||||
use crate::files::FileId;
|
|
||||||
use crate::source::source_text;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct Parsed {
|
|
||||||
inner: Arc<ParsedInner>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq)]
|
|
||||||
struct ParsedInner {
|
|
||||||
ast: ast::ModModule,
|
|
||||||
errors: Vec<ParseError>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Parsed {
|
|
||||||
fn new(ast: ast::ModModule, errors: Vec<ParseError>) -> Self {
|
|
||||||
Self {
|
|
||||||
inner: Arc::new(ParsedInner { ast, errors }),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn from_text(text: &str) -> Self {
|
|
||||||
let result = ruff_python_parser::parse(text, Mode::Module);
|
|
||||||
|
|
||||||
let (module, errors) = match result {
|
|
||||||
Ok(ast::Mod::Module(module)) => (module, vec![]),
|
|
||||||
Ok(ast::Mod::Expression(expression)) => (
|
|
||||||
ast::ModModule {
|
|
||||||
range: expression.range(),
|
|
||||||
body: vec![ast::Stmt::Expr(ast::StmtExpr {
|
|
||||||
range: expression.range(),
|
|
||||||
value: expression.body,
|
|
||||||
})],
|
|
||||||
},
|
|
||||||
vec![],
|
|
||||||
),
|
|
||||||
Err(errors) => (
|
|
||||||
ast::ModModule {
|
|
||||||
range: TextRange::default(),
|
|
||||||
body: Vec::new(),
|
|
||||||
},
|
|
||||||
vec![errors],
|
|
||||||
),
|
|
||||||
};
|
|
||||||
|
|
||||||
Parsed::new(module, errors)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn ast(&self) -> &ast::ModModule {
|
|
||||||
&self.inner.ast
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn errors(&self) -> &[ParseError] {
|
|
||||||
&self.inner.errors
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", skip(db))]
|
|
||||||
pub(crate) fn parse(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Parsed> {
|
|
||||||
let jar = db.jar()?;
|
|
||||||
|
|
||||||
jar.parsed.get(&file_id, |file_id| {
|
|
||||||
let source = source_text(db, *file_id)?;
|
|
||||||
|
|
||||||
Ok(Parsed::from_text(source.text()))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default)]
|
|
||||||
pub struct ParsedStorage(KeyValueCache<FileId, Parsed>);
|
|
||||||
|
|
||||||
impl Deref for ParsedStorage {
|
|
||||||
type Target = KeyValueCache<FileId, Parsed>;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DerefMut for ParsedStorage {
|
|
||||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
|
||||||
&mut self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,413 +0,0 @@
|
|||||||
use rayon::{current_num_threads, yield_local};
|
|
||||||
use rustc_hash::FxHashSet;
|
|
||||||
|
|
||||||
use crate::db::{Database, QueryError, QueryResult};
|
|
||||||
use crate::files::FileId;
|
|
||||||
use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
|
|
||||||
use crate::module::{file_to_module, resolve_module};
|
|
||||||
use crate::program::Program;
|
|
||||||
use crate::symbols::{symbol_table, Dependency};
|
|
||||||
|
|
||||||
impl Program {
|
|
||||||
/// Checks all open files in the workspace and its dependencies.
|
|
||||||
#[tracing::instrument(level = "debug", skip_all)]
|
|
||||||
pub fn check(&self, mode: ExecutionMode) -> QueryResult<Vec<String>> {
|
|
||||||
self.cancelled()?;
|
|
||||||
|
|
||||||
let mut context = CheckContext::new(self);
|
|
||||||
|
|
||||||
match mode {
|
|
||||||
ExecutionMode::SingleThreaded => SingleThreadedExecutor.run(&mut context)?,
|
|
||||||
ExecutionMode::ThreadPool => ThreadPoolExecutor.run(&mut context)?,
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(context.finish())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", skip(self, context))]
|
|
||||||
fn check_file(&self, file: FileId, context: &CheckFileContext) -> QueryResult<Diagnostics> {
|
|
||||||
self.cancelled()?;
|
|
||||||
|
|
||||||
let symbol_table = symbol_table(self, file)?;
|
|
||||||
let dependencies = symbol_table.dependencies();
|
|
||||||
|
|
||||||
if !dependencies.is_empty() {
|
|
||||||
let module = file_to_module(self, file)?;
|
|
||||||
|
|
||||||
// TODO scheduling all dependencies here is wasteful if we don't infer any types on them
|
|
||||||
// but I think that's unlikely, so it is okay?
|
|
||||||
// Anyway, we need to figure out a way to retrieve the dependencies of a module
|
|
||||||
// from the persistent cache. So maybe it should be a separate query after all.
|
|
||||||
for dependency in dependencies {
|
|
||||||
let dependency_name = match dependency {
|
|
||||||
Dependency::Module(name) => Some(name.clone()),
|
|
||||||
Dependency::Relative { .. } => match &module {
|
|
||||||
Some(module) => module.resolve_dependency(self, dependency)?,
|
|
||||||
None => None,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Some(dependency_name) = dependency_name {
|
|
||||||
// TODO We may want to have a different check functions for non-first-party
|
|
||||||
// files because we only need to index them and not check them.
|
|
||||||
// Supporting non-first-party code also requires supporting typing stubs.
|
|
||||||
if let Some(dependency) = resolve_module(self, dependency_name)? {
|
|
||||||
if dependency.path(self)?.root().kind().is_first_party() {
|
|
||||||
context.schedule_dependency(dependency.path(self)?.file());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut diagnostics = Vec::new();
|
|
||||||
|
|
||||||
if self.workspace().is_file_open(file) {
|
|
||||||
diagnostics.extend_from_slice(&lint_syntax(self, file)?);
|
|
||||||
diagnostics.extend_from_slice(&lint_semantic(self, file)?);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Diagnostics::from(diagnostics))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
|
||||||
pub enum ExecutionMode {
|
|
||||||
SingleThreaded,
|
|
||||||
ThreadPool,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Context that stores state information about the entire check operation.
|
|
||||||
struct CheckContext<'a> {
|
|
||||||
/// IDs of the files that have been queued for checking.
|
|
||||||
///
|
|
||||||
/// Used to avoid queuing the same file twice.
|
|
||||||
scheduled_files: FxHashSet<FileId>,
|
|
||||||
|
|
||||||
/// Reference to the program that is checked.
|
|
||||||
program: &'a Program,
|
|
||||||
|
|
||||||
/// The aggregated diagnostics
|
|
||||||
diagnostics: Vec<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> CheckContext<'a> {
|
|
||||||
fn new(program: &'a Program) -> Self {
|
|
||||||
Self {
|
|
||||||
scheduled_files: FxHashSet::default(),
|
|
||||||
program,
|
|
||||||
diagnostics: Vec::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the tasks to check all open files in the workspace.
|
|
||||||
fn check_open_files(&mut self) -> Vec<CheckOpenFileTask> {
|
|
||||||
self.scheduled_files
|
|
||||||
.extend(self.program.workspace().open_files());
|
|
||||||
|
|
||||||
self.program
|
|
||||||
.workspace()
|
|
||||||
.open_files()
|
|
||||||
.map(|file_id| CheckOpenFileTask { file_id })
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the task to check a dependency.
|
|
||||||
fn check_dependency(&mut self, file_id: FileId) -> Option<CheckDependencyTask> {
|
|
||||||
if self.scheduled_files.insert(file_id) {
|
|
||||||
Some(CheckDependencyTask { file_id })
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Pushes the result for a single file check operation
|
|
||||||
fn push_diagnostics(&mut self, diagnostics: &Diagnostics) {
|
|
||||||
self.diagnostics.extend_from_slice(diagnostics);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns a reference to the program that is being checked.
|
|
||||||
fn program(&self) -> &'a Program {
|
|
||||||
self.program
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a task context that is used to check a single file.
|
|
||||||
fn task_context<'b, S>(&self, dependency_scheduler: &'b S) -> CheckTaskContext<'a, 'b, S>
|
|
||||||
where
|
|
||||||
S: ScheduleDependency,
|
|
||||||
{
|
|
||||||
CheckTaskContext {
|
|
||||||
program: self.program,
|
|
||||||
dependency_scheduler,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn finish(self) -> Vec<String> {
|
|
||||||
self.diagnostics
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Trait that abstracts away how a dependency of a file gets scheduled for checking.
|
|
||||||
trait ScheduleDependency {
|
|
||||||
/// Schedules the file with the given ID for checking.
|
|
||||||
fn schedule(&self, file_id: FileId);
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> ScheduleDependency for T
|
|
||||||
where
|
|
||||||
T: Fn(FileId),
|
|
||||||
{
|
|
||||||
fn schedule(&self, file_id: FileId) {
|
|
||||||
let f = self;
|
|
||||||
f(file_id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Context that is used to run a single file check task.
|
|
||||||
///
|
|
||||||
/// The task is generic over `S` because it is passed across thread boundaries and
|
|
||||||
/// we don't want to add the requirement that [`ScheduleDependency`] must be [`Send`].
|
|
||||||
struct CheckTaskContext<'a, 'scheduler, S>
|
|
||||||
where
|
|
||||||
S: ScheduleDependency,
|
|
||||||
{
|
|
||||||
dependency_scheduler: &'scheduler S,
|
|
||||||
program: &'a Program,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a, 'scheduler, S> CheckTaskContext<'a, 'scheduler, S>
|
|
||||||
where
|
|
||||||
S: ScheduleDependency,
|
|
||||||
{
|
|
||||||
fn as_file_context(&self) -> CheckFileContext<'scheduler> {
|
|
||||||
CheckFileContext {
|
|
||||||
dependency_scheduler: self.dependency_scheduler,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Context passed when checking a single file.
|
|
||||||
///
|
|
||||||
/// This is a trimmed down version of [`CheckTaskContext`] with the type parameter `S` erased
|
|
||||||
/// to avoid monomorphization of [`Program:check_file`].
|
|
||||||
struct CheckFileContext<'a> {
|
|
||||||
dependency_scheduler: &'a dyn ScheduleDependency,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> CheckFileContext<'a> {
|
|
||||||
fn schedule_dependency(&self, file_id: FileId) {
|
|
||||||
self.dependency_scheduler.schedule(file_id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum CheckFileTask {
|
|
||||||
OpenFile(CheckOpenFileTask),
|
|
||||||
Dependency(CheckDependencyTask),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl CheckFileTask {
|
|
||||||
/// Runs the task and returns the results for checking this file.
|
|
||||||
fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
|
|
||||||
where
|
|
||||||
S: ScheduleDependency,
|
|
||||||
{
|
|
||||||
match self {
|
|
||||||
Self::OpenFile(task) => task.run(context),
|
|
||||||
Self::Dependency(task) => task.run(context),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn file_id(&self) -> FileId {
|
|
||||||
match self {
|
|
||||||
CheckFileTask::OpenFile(task) => task.file_id,
|
|
||||||
CheckFileTask::Dependency(task) => task.file_id,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Task to check an open file.
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
struct CheckOpenFileTask {
|
|
||||||
file_id: FileId,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl CheckOpenFileTask {
|
|
||||||
fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
|
|
||||||
where
|
|
||||||
S: ScheduleDependency,
|
|
||||||
{
|
|
||||||
context
|
|
||||||
.program
|
|
||||||
.check_file(self.file_id, &context.as_file_context())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Task to check a dependency file.
|
|
||||||
#[derive(Debug)]
|
|
||||||
struct CheckDependencyTask {
|
|
||||||
file_id: FileId,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl CheckDependencyTask {
|
|
||||||
fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
|
|
||||||
where
|
|
||||||
S: ScheduleDependency,
|
|
||||||
{
|
|
||||||
context
|
|
||||||
.program
|
|
||||||
.check_file(self.file_id, &context.as_file_context())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Executor that schedules the checking of individual program files.
|
|
||||||
trait CheckExecutor {
|
|
||||||
fn run(self, context: &mut CheckContext) -> QueryResult<()>;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Executor that runs all check operations on the current thread.
|
|
||||||
///
|
|
||||||
/// The executor does not schedule dependencies for checking.
|
|
||||||
/// The main motivation for scheduling dependencies
|
|
||||||
/// in a multithreaded environment is to parse and index the dependencies concurrently.
|
|
||||||
/// However, that doesn't make sense in a single threaded environment, because the dependencies then compute
|
|
||||||
/// with checking the open files. Checking dependencies in a single threaded environment is more likely
|
|
||||||
/// to hurt performance because we end up analyzing files in their entirety, even if we only need to type check parts of them.
|
|
||||||
#[derive(Debug, Default)]
|
|
||||||
struct SingleThreadedExecutor;
|
|
||||||
|
|
||||||
impl CheckExecutor for SingleThreadedExecutor {
|
|
||||||
fn run(self, context: &mut CheckContext) -> QueryResult<()> {
|
|
||||||
let mut queue = context.check_open_files();
|
|
||||||
|
|
||||||
let noop_schedule_dependency = |_| {};
|
|
||||||
|
|
||||||
while let Some(file) = queue.pop() {
|
|
||||||
context.program().cancelled()?;
|
|
||||||
|
|
||||||
let task_context = context.task_context(&noop_schedule_dependency);
|
|
||||||
context.push_diagnostics(&file.run(&task_context)?);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Executor that runs the check operations on a thread pool.
|
|
||||||
///
|
|
||||||
/// The executor runs each check operation as its own task using a thread pool.
|
|
||||||
///
|
|
||||||
/// Other than [`SingleThreadedExecutor`], this executor schedules dependencies for checking. It
|
|
||||||
/// even schedules dependencies for checking when the thread pool size is 1 for a better debugging experience.
|
|
||||||
#[derive(Debug, Default)]
|
|
||||||
struct ThreadPoolExecutor;
|
|
||||||
|
|
||||||
impl CheckExecutor for ThreadPoolExecutor {
|
|
||||||
fn run(self, context: &mut CheckContext) -> QueryResult<()> {
|
|
||||||
let num_threads = current_num_threads();
|
|
||||||
let single_threaded = num_threads == 1;
|
|
||||||
let span = tracing::trace_span!("ThreadPoolExecutor::run", num_threads);
|
|
||||||
let _ = span.enter();
|
|
||||||
|
|
||||||
let mut queue: Vec<_> = context
|
|
||||||
.check_open_files()
|
|
||||||
.into_iter()
|
|
||||||
.map(CheckFileTask::OpenFile)
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let (sender, receiver) = if single_threaded {
|
|
||||||
// Use an unbounded queue for single threaded execution to prevent deadlocks
|
|
||||||
// when a single file schedules multiple dependencies.
|
|
||||||
crossbeam::channel::unbounded()
|
|
||||||
} else {
|
|
||||||
// Use a bounded queue to apply backpressure when the orchestration thread isn't able to keep
|
|
||||||
// up processing messages from the worker threads.
|
|
||||||
crossbeam::channel::bounded(num_threads)
|
|
||||||
};
|
|
||||||
|
|
||||||
let schedule_sender = sender.clone();
|
|
||||||
let schedule_dependency = move |file_id| {
|
|
||||||
schedule_sender
|
|
||||||
.send(ThreadPoolMessage::ScheduleDependency(file_id))
|
|
||||||
.unwrap();
|
|
||||||
};
|
|
||||||
|
|
||||||
let result = rayon::in_place_scope(|scope| {
|
|
||||||
let mut pending = 0usize;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
context.program().cancelled()?;
|
|
||||||
|
|
||||||
// 1. Try to get a queued message to ensure that we have always remaining space in the channel to prevent blocking the worker threads.
|
|
||||||
// 2. Try to process a queued file
|
|
||||||
// 3. If there's no queued file wait for the next incoming message.
|
|
||||||
// 4. Exit if there are no more messages and no senders.
|
|
||||||
let message = if let Ok(message) = receiver.try_recv() {
|
|
||||||
message
|
|
||||||
} else if let Some(task) = queue.pop() {
|
|
||||||
pending += 1;
|
|
||||||
|
|
||||||
let task_context = context.task_context(&schedule_dependency);
|
|
||||||
let sender = sender.clone();
|
|
||||||
let task_span = tracing::trace_span!(
|
|
||||||
parent: &span,
|
|
||||||
"CheckFileTask::run",
|
|
||||||
file_id = task.file_id().as_u32(),
|
|
||||||
);
|
|
||||||
|
|
||||||
scope.spawn(move |_| {
|
|
||||||
task_span.in_scope(|| match task.run(&task_context) {
|
|
||||||
Ok(result) => {
|
|
||||||
sender.send(ThreadPoolMessage::Completed(result)).unwrap();
|
|
||||||
}
|
|
||||||
Err(err) => sender.send(ThreadPoolMessage::Errored(err)).unwrap(),
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// If this is a single threaded rayon thread pool, yield the current thread
|
|
||||||
// or we never start processing the work items.
|
|
||||||
if single_threaded {
|
|
||||||
yield_local();
|
|
||||||
}
|
|
||||||
|
|
||||||
continue;
|
|
||||||
} else if let Ok(message) = receiver.recv() {
|
|
||||||
message
|
|
||||||
} else {
|
|
||||||
break;
|
|
||||||
};
|
|
||||||
|
|
||||||
match message {
|
|
||||||
ThreadPoolMessage::ScheduleDependency(dependency) => {
|
|
||||||
if let Some(task) = context.check_dependency(dependency) {
|
|
||||||
queue.push(CheckFileTask::Dependency(task));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ThreadPoolMessage::Completed(diagnostics) => {
|
|
||||||
context.push_diagnostics(&diagnostics);
|
|
||||||
pending -= 1;
|
|
||||||
|
|
||||||
if pending == 0 && queue.is_empty() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ThreadPoolMessage::Errored(err) => {
|
|
||||||
return Err(err);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
});
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum ThreadPoolMessage {
|
|
||||||
ScheduleDependency(FileId),
|
|
||||||
Completed(Diagnostics),
|
|
||||||
Errored(QueryError),
|
|
||||||
}
|
|
||||||
@@ -1,275 +0,0 @@
|
|||||||
use std::collections::hash_map::Entry;
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use rustc_hash::FxHashMap;
|
|
||||||
|
|
||||||
use crate::db::{
|
|
||||||
Database, Db, DbRuntime, DbWithJar, HasJar, HasJars, JarsStorage, LintDb, LintJar,
|
|
||||||
ParallelDatabase, QueryResult, SemanticDb, SemanticJar, Snapshot, SourceDb, SourceJar, Upcast,
|
|
||||||
};
|
|
||||||
use crate::files::{FileId, Files};
|
|
||||||
use crate::Workspace;
|
|
||||||
|
|
||||||
pub mod check;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Program {
|
|
||||||
jars: JarsStorage<Program>,
|
|
||||||
files: Files,
|
|
||||||
workspace: Workspace,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Program {
|
|
||||||
pub fn new(workspace: Workspace) -> Self {
|
|
||||||
Self {
|
|
||||||
jars: JarsStorage::default(),
|
|
||||||
files: Files::default(),
|
|
||||||
workspace,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn apply_changes<I>(&mut self, changes: I)
|
|
||||||
where
|
|
||||||
I: IntoIterator<Item = FileWatcherChange>,
|
|
||||||
{
|
|
||||||
let mut aggregated_changes = AggregatedChanges::default();
|
|
||||||
|
|
||||||
aggregated_changes.extend(changes.into_iter().map(|change| FileChange {
|
|
||||||
id: self.files.intern(&change.path),
|
|
||||||
kind: change.kind,
|
|
||||||
}));
|
|
||||||
|
|
||||||
let (source, semantic, lint) = self.jars_mut();
|
|
||||||
for change in aggregated_changes.iter() {
|
|
||||||
semantic.module_resolver.remove_module(change.id);
|
|
||||||
semantic.symbol_tables.remove(&change.id);
|
|
||||||
source.sources.remove(&change.id);
|
|
||||||
source.parsed.remove(&change.id);
|
|
||||||
// TODO: remove all dependent modules as well
|
|
||||||
semantic.type_store.remove_module(change.id);
|
|
||||||
lint.lint_syntax.remove(&change.id);
|
|
||||||
lint.lint_semantic.remove(&change.id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn files(&self) -> &Files {
|
|
||||||
&self.files
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn workspace(&self) -> &Workspace {
|
|
||||||
&self.workspace
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn workspace_mut(&mut self) -> &mut Workspace {
|
|
||||||
&mut self.workspace
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SourceDb for Program {
|
|
||||||
fn file_id(&self, path: &Path) -> FileId {
|
|
||||||
self.files.intern(path)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn file_path(&self, file_id: FileId) -> Arc<Path> {
|
|
||||||
self.files.path(file_id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DbWithJar<SourceJar> for Program {}
|
|
||||||
|
|
||||||
impl SemanticDb for Program {}
|
|
||||||
|
|
||||||
impl DbWithJar<SemanticJar> for Program {}
|
|
||||||
|
|
||||||
impl LintDb for Program {}
|
|
||||||
|
|
||||||
impl DbWithJar<LintJar> for Program {}
|
|
||||||
|
|
||||||
impl Upcast<dyn SemanticDb> for Program {
|
|
||||||
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Upcast<dyn SourceDb> for Program {
|
|
||||||
fn upcast(&self) -> &(dyn SourceDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Upcast<dyn LintDb> for Program {
|
|
||||||
fn upcast(&self) -> &(dyn LintDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Db for Program {}
|
|
||||||
|
|
||||||
impl Database for Program {
|
|
||||||
fn runtime(&self) -> &DbRuntime {
|
|
||||||
self.jars.runtime()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn runtime_mut(&mut self) -> &mut DbRuntime {
|
|
||||||
self.jars.runtime_mut()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ParallelDatabase for Program {
|
|
||||||
fn snapshot(&self) -> Snapshot<Self> {
|
|
||||||
Snapshot::new(Self {
|
|
||||||
jars: self.jars.snapshot(),
|
|
||||||
files: self.files.snapshot(),
|
|
||||||
workspace: self.workspace.clone(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasJars for Program {
|
|
||||||
type Jars = (SourceJar, SemanticJar, LintJar);
|
|
||||||
|
|
||||||
fn jars(&self) -> QueryResult<&Self::Jars> {
|
|
||||||
self.jars.jars()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn jars_mut(&mut self) -> &mut Self::Jars {
|
|
||||||
self.jars.jars_mut()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasJar<SourceJar> for Program {
|
|
||||||
fn jar(&self) -> QueryResult<&SourceJar> {
|
|
||||||
Ok(&self.jars()?.0)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn jar_mut(&mut self) -> &mut SourceJar {
|
|
||||||
&mut self.jars_mut().0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasJar<SemanticJar> for Program {
|
|
||||||
fn jar(&self) -> QueryResult<&SemanticJar> {
|
|
||||||
Ok(&self.jars()?.1)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn jar_mut(&mut self) -> &mut SemanticJar {
|
|
||||||
&mut self.jars_mut().1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasJar<LintJar> for Program {
|
|
||||||
fn jar(&self) -> QueryResult<&LintJar> {
|
|
||||||
Ok(&self.jars()?.2)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn jar_mut(&mut self) -> &mut LintJar {
|
|
||||||
&mut self.jars_mut().2
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct FileWatcherChange {
|
|
||||||
path: PathBuf,
|
|
||||||
kind: FileChangeKind,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FileWatcherChange {
|
|
||||||
pub fn new(path: PathBuf, kind: FileChangeKind) -> Self {
|
|
||||||
Self { path, kind }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug)]
|
|
||||||
struct FileChange {
|
|
||||||
id: FileId,
|
|
||||||
kind: FileChangeKind,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FileChange {
|
|
||||||
fn file_id(self) -> FileId {
|
|
||||||
self.id
|
|
||||||
}
|
|
||||||
|
|
||||||
fn kind(self) -> FileChangeKind {
|
|
||||||
self.kind
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
|
||||||
pub enum FileChangeKind {
|
|
||||||
Created,
|
|
||||||
Modified,
|
|
||||||
Deleted,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Default, Debug)]
|
|
||||||
struct AggregatedChanges {
|
|
||||||
changes: FxHashMap<FileId, FileChangeKind>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AggregatedChanges {
|
|
||||||
fn add(&mut self, change: FileChange) {
|
|
||||||
match self.changes.entry(change.file_id()) {
|
|
||||||
Entry::Occupied(mut entry) => {
|
|
||||||
let merged = entry.get_mut();
|
|
||||||
|
|
||||||
match (merged, change.kind()) {
|
|
||||||
(FileChangeKind::Created, FileChangeKind::Deleted) => {
|
|
||||||
// Deletion after creations means that ruff never saw the file.
|
|
||||||
entry.remove();
|
|
||||||
}
|
|
||||||
(FileChangeKind::Created, FileChangeKind::Modified) => {
|
|
||||||
// No-op, for ruff, modifying a file that it doesn't yet know that it exists is still considered a creation.
|
|
||||||
}
|
|
||||||
|
|
||||||
(FileChangeKind::Modified, FileChangeKind::Created) => {
|
|
||||||
// Uhh, that should probably not happen. Continue considering it a modification.
|
|
||||||
}
|
|
||||||
|
|
||||||
(FileChangeKind::Modified, FileChangeKind::Deleted) => {
|
|
||||||
*entry.get_mut() = FileChangeKind::Deleted;
|
|
||||||
}
|
|
||||||
|
|
||||||
(FileChangeKind::Deleted, FileChangeKind::Created) => {
|
|
||||||
*entry.get_mut() = FileChangeKind::Modified;
|
|
||||||
}
|
|
||||||
|
|
||||||
(FileChangeKind::Deleted, FileChangeKind::Modified) => {
|
|
||||||
// That's weird, but let's consider it a modification.
|
|
||||||
*entry.get_mut() = FileChangeKind::Modified;
|
|
||||||
}
|
|
||||||
|
|
||||||
(FileChangeKind::Created, FileChangeKind::Created)
|
|
||||||
| (FileChangeKind::Modified, FileChangeKind::Modified)
|
|
||||||
| (FileChangeKind::Deleted, FileChangeKind::Deleted) => {
|
|
||||||
// No-op transitions. Some of them should be impossible but we handle them anyway.
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Entry::Vacant(entry) => {
|
|
||||||
entry.insert(change.kind());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extend<I>(&mut self, changes: I)
|
|
||||||
where
|
|
||||||
I: IntoIterator<Item = FileChange>,
|
|
||||||
{
|
|
||||||
let iter = changes.into_iter();
|
|
||||||
let (lower, _) = iter.size_hint();
|
|
||||||
self.changes.reserve(lower);
|
|
||||||
|
|
||||||
for change in iter {
|
|
||||||
self.add(change);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn iter(&self) -> impl Iterator<Item = FileChange> + '_ {
|
|
||||||
self.changes.iter().map(|(id, kind)| FileChange {
|
|
||||||
id: *id,
|
|
||||||
kind: *kind,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,95 +0,0 @@
|
|||||||
use std::ops::{Deref, DerefMut};
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use ruff_notebook::Notebook;
|
|
||||||
use ruff_python_ast::PySourceType;
|
|
||||||
|
|
||||||
use crate::cache::KeyValueCache;
|
|
||||||
use crate::db::{QueryResult, SourceDb};
|
|
||||||
use crate::files::FileId;
|
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", skip(db))]
|
|
||||||
pub(crate) fn source_text(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Source> {
|
|
||||||
let jar = db.jar()?;
|
|
||||||
let sources = &jar.sources;
|
|
||||||
|
|
||||||
sources.get(&file_id, |file_id| {
|
|
||||||
let path = db.file_path(*file_id);
|
|
||||||
|
|
||||||
let source_text = std::fs::read_to_string(&path).unwrap_or_else(|err| {
|
|
||||||
tracing::error!("Failed to read file '{path:?}: {err}'. Falling back to empty text");
|
|
||||||
String::new()
|
|
||||||
});
|
|
||||||
|
|
||||||
let python_ty = PySourceType::from(&path);
|
|
||||||
|
|
||||||
let kind = match python_ty {
|
|
||||||
PySourceType::Python => {
|
|
||||||
SourceKind::Python(Arc::from(source_text))
|
|
||||||
}
|
|
||||||
PySourceType::Stub => SourceKind::Stub(Arc::from(source_text)),
|
|
||||||
PySourceType::Ipynb => {
|
|
||||||
let notebook = Notebook::from_source_code(&source_text).unwrap_or_else(|err| {
|
|
||||||
// TODO should this be changed to never fail?
|
|
||||||
// or should we instead add a diagnostic somewhere? But what would we return in this case?
|
|
||||||
tracing::error!(
|
|
||||||
"Failed to parse notebook '{path:?}: {err}'. Falling back to an empty notebook"
|
|
||||||
);
|
|
||||||
Notebook::from_source_code("").unwrap()
|
|
||||||
});
|
|
||||||
|
|
||||||
SourceKind::IpyNotebook(Arc::new(notebook))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(Source { kind })
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub enum SourceKind {
|
|
||||||
Python(Arc<str>),
|
|
||||||
Stub(Arc<str>),
|
|
||||||
IpyNotebook(Arc<Notebook>),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct Source {
|
|
||||||
kind: SourceKind,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Source {
|
|
||||||
pub fn python<T: Into<Arc<str>>>(source: T) -> Self {
|
|
||||||
Self {
|
|
||||||
kind: SourceKind::Python(source.into()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub fn kind(&self) -> &SourceKind {
|
|
||||||
&self.kind
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn text(&self) -> &str {
|
|
||||||
match &self.kind {
|
|
||||||
SourceKind::Python(text) => text,
|
|
||||||
SourceKind::Stub(text) => text,
|
|
||||||
SourceKind::IpyNotebook(notebook) => notebook.source_code(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default)]
|
|
||||||
pub struct SourceStorage(pub(crate) KeyValueCache<FileId, Source>);
|
|
||||||
|
|
||||||
impl Deref for SourceStorage {
|
|
||||||
type Target = KeyValueCache<FileId, Source>;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DerefMut for SourceStorage {
|
|
||||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
|
||||||
&mut self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,745 +0,0 @@
|
|||||||
#![allow(dead_code)]
|
|
||||||
use crate::ast_ids::NodeKey;
|
|
||||||
use crate::db::{QueryResult, SemanticDb, SemanticJar};
|
|
||||||
use crate::files::FileId;
|
|
||||||
use crate::symbols::{symbol_table, GlobalSymbolId, ScopeId, ScopeKind, SymbolId};
|
|
||||||
use crate::{FxDashMap, FxIndexSet, Name};
|
|
||||||
use ruff_index::{newtype_index, IndexVec};
|
|
||||||
use rustc_hash::FxHashMap;
|
|
||||||
|
|
||||||
pub(crate) mod infer;
|
|
||||||
|
|
||||||
pub(crate) use infer::{infer_definition_type, infer_symbol_type};
|
|
||||||
|
|
||||||
/// unique ID for a type
|
|
||||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
|
||||||
pub enum Type {
|
|
||||||
/// the dynamic or gradual type: a statically-unknown set of values
|
|
||||||
Any,
|
|
||||||
/// the empty set of values
|
|
||||||
Never,
|
|
||||||
/// unknown type (no annotation)
|
|
||||||
/// equivalent to Any, or to object in strict mode
|
|
||||||
Unknown,
|
|
||||||
/// name is not bound to any value
|
|
||||||
Unbound,
|
|
||||||
/// a specific function object
|
|
||||||
Function(FunctionTypeId),
|
|
||||||
/// a specific class object
|
|
||||||
Class(ClassTypeId),
|
|
||||||
/// the set of Python objects with the given class in their __class__'s method resolution order
|
|
||||||
Instance(ClassTypeId),
|
|
||||||
Union(UnionTypeId),
|
|
||||||
Intersection(IntersectionTypeId),
|
|
||||||
// TODO protocols, callable types, overloads, generics, type vars
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Type {
|
|
||||||
fn display<'a>(&'a self, store: &'a TypeStore) -> DisplayType<'a> {
|
|
||||||
DisplayType { ty: self, store }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn is_unbound(&self) -> bool {
|
|
||||||
matches!(self, Type::Unbound)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn is_unknown(&self) -> bool {
|
|
||||||
matches!(self, Type::Unknown)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<FunctionTypeId> for Type {
|
|
||||||
fn from(id: FunctionTypeId) -> Self {
|
|
||||||
Type::Function(id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<UnionTypeId> for Type {
|
|
||||||
fn from(id: UnionTypeId) -> Self {
|
|
||||||
Type::Union(id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<IntersectionTypeId> for Type {
|
|
||||||
fn from(id: IntersectionTypeId) -> Self {
|
|
||||||
Type::Intersection(id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: currently calling `get_function` et al and holding on to the `FunctionTypeRef` will lock a
|
|
||||||
// shard of this dashmap, for as long as you hold the reference. This may be a problem. We could
|
|
||||||
// switch to having all the arenas hold Arc, or we could see if we can split up ModuleTypeStore,
|
|
||||||
// and/or give it inner mutability and finer-grained internal locking.
|
|
||||||
#[derive(Debug, Default)]
|
|
||||||
pub struct TypeStore {
|
|
||||||
modules: FxDashMap<FileId, ModuleTypeStore>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TypeStore {
|
|
||||||
pub fn remove_module(&mut self, file_id: FileId) {
|
|
||||||
self.modules.remove(&file_id);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn cache_symbol_type(&self, symbol: GlobalSymbolId, ty: Type) {
|
|
||||||
self.add_or_get_module(symbol.file_id)
|
|
||||||
.symbol_types
|
|
||||||
.insert(symbol.symbol_id, ty);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn cache_node_type(&self, file_id: FileId, node_key: NodeKey, ty: Type) {
|
|
||||||
self.add_or_get_module(file_id)
|
|
||||||
.node_types
|
|
||||||
.insert(node_key, ty);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_cached_symbol_type(&self, symbol: GlobalSymbolId) -> Option<Type> {
|
|
||||||
self.try_get_module(symbol.file_id)?
|
|
||||||
.symbol_types
|
|
||||||
.get(&symbol.symbol_id)
|
|
||||||
.copied()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_cached_node_type(&self, file_id: FileId, node_key: &NodeKey) -> Option<Type> {
|
|
||||||
self.try_get_module(file_id)?
|
|
||||||
.node_types
|
|
||||||
.get(node_key)
|
|
||||||
.copied()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_or_get_module(&self, file_id: FileId) -> ModuleStoreRefMut {
|
|
||||||
self.modules
|
|
||||||
.entry(file_id)
|
|
||||||
.or_insert_with(|| ModuleTypeStore::new(file_id))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_module(&self, file_id: FileId) -> ModuleStoreRef {
|
|
||||||
self.try_get_module(file_id).expect("module should exist")
|
|
||||||
}
|
|
||||||
|
|
||||||
fn try_get_module(&self, file_id: FileId) -> Option<ModuleStoreRef> {
|
|
||||||
self.modules.get(&file_id)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_function(
|
|
||||||
&self,
|
|
||||||
file_id: FileId,
|
|
||||||
name: &str,
|
|
||||||
symbol_id: SymbolId,
|
|
||||||
scope_id: ScopeId,
|
|
||||||
decorators: Vec<Type>,
|
|
||||||
) -> FunctionTypeId {
|
|
||||||
self.add_or_get_module(file_id)
|
|
||||||
.add_function(name, symbol_id, scope_id, decorators)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_class(
|
|
||||||
&self,
|
|
||||||
file_id: FileId,
|
|
||||||
name: &str,
|
|
||||||
scope_id: ScopeId,
|
|
||||||
bases: Vec<Type>,
|
|
||||||
) -> ClassTypeId {
|
|
||||||
self.add_or_get_module(file_id)
|
|
||||||
.add_class(name, scope_id, bases)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_union(&mut self, file_id: FileId, elems: &[Type]) -> UnionTypeId {
|
|
||||||
self.add_or_get_module(file_id).add_union(elems)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_intersection(
|
|
||||||
&mut self,
|
|
||||||
file_id: FileId,
|
|
||||||
positive: &[Type],
|
|
||||||
negative: &[Type],
|
|
||||||
) -> IntersectionTypeId {
|
|
||||||
self.add_or_get_module(file_id)
|
|
||||||
.add_intersection(positive, negative)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_function(&self, id: FunctionTypeId) -> FunctionTypeRef {
|
|
||||||
FunctionTypeRef {
|
|
||||||
module_store: self.get_module(id.file_id),
|
|
||||||
function_id: id.func_id,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_class(&self, id: ClassTypeId) -> ClassTypeRef {
|
|
||||||
ClassTypeRef {
|
|
||||||
module_store: self.get_module(id.file_id),
|
|
||||||
class_id: id.class_id,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_union(&self, id: UnionTypeId) -> UnionTypeRef {
|
|
||||||
UnionTypeRef {
|
|
||||||
module_store: self.get_module(id.file_id),
|
|
||||||
union_id: id.union_id,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_intersection(&self, id: IntersectionTypeId) -> IntersectionTypeRef {
|
|
||||||
IntersectionTypeRef {
|
|
||||||
module_store: self.get_module(id.file_id),
|
|
||||||
intersection_id: id.intersection_id,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type ModuleStoreRef<'a> = dashmap::mapref::one::Ref<
|
|
||||||
'a,
|
|
||||||
FileId,
|
|
||||||
ModuleTypeStore,
|
|
||||||
std::hash::BuildHasherDefault<rustc_hash::FxHasher>,
|
|
||||||
>;
|
|
||||||
|
|
||||||
type ModuleStoreRefMut<'a> = dashmap::mapref::one::RefMut<
|
|
||||||
'a,
|
|
||||||
FileId,
|
|
||||||
ModuleTypeStore,
|
|
||||||
std::hash::BuildHasherDefault<rustc_hash::FxHasher>,
|
|
||||||
>;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct FunctionTypeRef<'a> {
|
|
||||||
module_store: ModuleStoreRef<'a>,
|
|
||||||
function_id: ModuleFunctionTypeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> std::ops::Deref for FunctionTypeRef<'a> {
|
|
||||||
type Target = FunctionType;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
self.module_store.get_function(self.function_id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct ClassTypeRef<'a> {
|
|
||||||
module_store: ModuleStoreRef<'a>,
|
|
||||||
class_id: ModuleClassTypeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> std::ops::Deref for ClassTypeRef<'a> {
|
|
||||||
type Target = ClassType;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
self.module_store.get_class(self.class_id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct UnionTypeRef<'a> {
|
|
||||||
module_store: ModuleStoreRef<'a>,
|
|
||||||
union_id: ModuleUnionTypeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> std::ops::Deref for UnionTypeRef<'a> {
|
|
||||||
type Target = UnionType;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
self.module_store.get_union(self.union_id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct IntersectionTypeRef<'a> {
|
|
||||||
module_store: ModuleStoreRef<'a>,
|
|
||||||
intersection_id: ModuleIntersectionTypeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> std::ops::Deref for IntersectionTypeRef<'a> {
|
|
||||||
type Target = IntersectionType;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
self.module_store.get_intersection(self.intersection_id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
|
|
||||||
pub struct FunctionTypeId {
|
|
||||||
file_id: FileId,
|
|
||||||
func_id: ModuleFunctionTypeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FunctionTypeId {
|
|
||||||
fn function(self, db: &dyn SemanticDb) -> QueryResult<FunctionTypeRef> {
|
|
||||||
let jar: &SemanticJar = db.jar()?;
|
|
||||||
Ok(jar.type_store.get_function(self))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn name(self, db: &dyn SemanticDb) -> QueryResult<Name> {
|
|
||||||
Ok(self.function(db)?.name().into())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn global_symbol(self, db: &dyn SemanticDb) -> QueryResult<GlobalSymbolId> {
|
|
||||||
Ok(GlobalSymbolId {
|
|
||||||
file_id: self.file(),
|
|
||||||
symbol_id: self.symbol(db)?,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn file(self) -> FileId {
|
|
||||||
self.file_id
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn symbol(self, db: &dyn SemanticDb) -> QueryResult<SymbolId> {
|
|
||||||
let FunctionType { symbol_id, .. } = *self.function(db)?;
|
|
||||||
Ok(symbol_id)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn get_containing_class(
|
|
||||||
self,
|
|
||||||
db: &dyn SemanticDb,
|
|
||||||
) -> QueryResult<Option<ClassTypeId>> {
|
|
||||||
let table = symbol_table(db, self.file_id)?;
|
|
||||||
let FunctionType { symbol_id, .. } = *self.function(db)?;
|
|
||||||
let scope_id = symbol_id.symbol(&table).scope_id();
|
|
||||||
let scope = scope_id.scope(&table);
|
|
||||||
if !matches!(scope.kind(), ScopeKind::Class) {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
let Some(def) = scope.definition() else {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
let Some(symbol_id) = scope.defining_symbol() else {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
let Type::Class(class) = infer_definition_type(
|
|
||||||
db,
|
|
||||||
GlobalSymbolId {
|
|
||||||
file_id: self.file_id,
|
|
||||||
symbol_id,
|
|
||||||
},
|
|
||||||
def,
|
|
||||||
)?
|
|
||||||
else {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
Ok(Some(class))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn has_decorator(
|
|
||||||
self,
|
|
||||||
db: &dyn SemanticDb,
|
|
||||||
decorator_symbol: GlobalSymbolId,
|
|
||||||
) -> QueryResult<bool> {
|
|
||||||
for deco_ty in self.function(db)?.decorators() {
|
|
||||||
let Type::Function(deco_func) = deco_ty else {
|
|
||||||
continue;
|
|
||||||
};
|
|
||||||
if deco_func.global_symbol(db)? == decorator_symbol {
|
|
||||||
return Ok(true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(false)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
|
|
||||||
pub struct ClassTypeId {
|
|
||||||
file_id: FileId,
|
|
||||||
class_id: ModuleClassTypeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ClassTypeId {
|
|
||||||
fn class(self, db: &dyn SemanticDb) -> QueryResult<ClassTypeRef> {
|
|
||||||
let jar: &SemanticJar = db.jar()?;
|
|
||||||
Ok(jar.type_store.get_class(self))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn name(self, db: &dyn SemanticDb) -> QueryResult<Name> {
|
|
||||||
Ok(self.class(db)?.name().into())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn get_super_class_member(
|
|
||||||
self,
|
|
||||||
db: &dyn SemanticDb,
|
|
||||||
name: &Name,
|
|
||||||
) -> QueryResult<Option<Type>> {
|
|
||||||
// TODO we should linearize the MRO instead of doing this recursively
|
|
||||||
let class = self.class(db)?;
|
|
||||||
for base in class.bases() {
|
|
||||||
if let Type::Class(base) = base {
|
|
||||||
if let Some(own_member) = base.get_own_class_member(db, name)? {
|
|
||||||
return Ok(Some(own_member));
|
|
||||||
}
|
|
||||||
if let Some(base_member) = base.get_super_class_member(db, name)? {
|
|
||||||
return Ok(Some(base_member));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(None)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_own_class_member(self, db: &dyn SemanticDb, name: &Name) -> QueryResult<Option<Type>> {
|
|
||||||
// TODO: this should distinguish instance-only members (e.g. `x: int`) and not return them
|
|
||||||
let ClassType { scope_id, .. } = *self.class(db)?;
|
|
||||||
let table = symbol_table(db, self.file_id)?;
|
|
||||||
if let Some(symbol_id) = table.symbol_id_by_name(scope_id, name) {
|
|
||||||
Ok(Some(infer_symbol_type(
|
|
||||||
db,
|
|
||||||
GlobalSymbolId {
|
|
||||||
file_id: self.file_id,
|
|
||||||
symbol_id,
|
|
||||||
},
|
|
||||||
)?))
|
|
||||||
} else {
|
|
||||||
Ok(None)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: get_own_instance_member, get_class_member, get_instance_member
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
|
|
||||||
pub struct UnionTypeId {
|
|
||||||
file_id: FileId,
|
|
||||||
union_id: ModuleUnionTypeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
|
|
||||||
pub struct IntersectionTypeId {
|
|
||||||
file_id: FileId,
|
|
||||||
intersection_id: ModuleIntersectionTypeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
struct ModuleFunctionTypeId;
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
struct ModuleClassTypeId;
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
struct ModuleUnionTypeId;
|
|
||||||
|
|
||||||
#[newtype_index]
|
|
||||||
struct ModuleIntersectionTypeId;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
struct ModuleTypeStore {
|
|
||||||
file_id: FileId,
|
|
||||||
/// arena of all function types defined in this module
|
|
||||||
functions: IndexVec<ModuleFunctionTypeId, FunctionType>,
|
|
||||||
/// arena of all class types defined in this module
|
|
||||||
classes: IndexVec<ModuleClassTypeId, ClassType>,
|
|
||||||
/// arenda of all union types created in this module
|
|
||||||
unions: IndexVec<ModuleUnionTypeId, UnionType>,
|
|
||||||
/// arena of all intersection types created in this module
|
|
||||||
intersections: IndexVec<ModuleIntersectionTypeId, IntersectionType>,
|
|
||||||
/// cached types of symbols in this module
|
|
||||||
symbol_types: FxHashMap<SymbolId, Type>,
|
|
||||||
/// cached types of AST nodes in this module
|
|
||||||
node_types: FxHashMap<NodeKey, Type>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ModuleTypeStore {
|
|
||||||
fn new(file_id: FileId) -> Self {
|
|
||||||
Self {
|
|
||||||
file_id,
|
|
||||||
functions: IndexVec::default(),
|
|
||||||
classes: IndexVec::default(),
|
|
||||||
unions: IndexVec::default(),
|
|
||||||
intersections: IndexVec::default(),
|
|
||||||
symbol_types: FxHashMap::default(),
|
|
||||||
node_types: FxHashMap::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_function(
|
|
||||||
&mut self,
|
|
||||||
name: &str,
|
|
||||||
symbol_id: SymbolId,
|
|
||||||
scope_id: ScopeId,
|
|
||||||
decorators: Vec<Type>,
|
|
||||||
) -> FunctionTypeId {
|
|
||||||
let func_id = self.functions.push(FunctionType {
|
|
||||||
name: Name::new(name),
|
|
||||||
symbol_id,
|
|
||||||
scope_id,
|
|
||||||
decorators,
|
|
||||||
});
|
|
||||||
FunctionTypeId {
|
|
||||||
file_id: self.file_id,
|
|
||||||
func_id,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_class(&mut self, name: &str, scope_id: ScopeId, bases: Vec<Type>) -> ClassTypeId {
|
|
||||||
let class_id = self.classes.push(ClassType {
|
|
||||||
name: Name::new(name),
|
|
||||||
scope_id,
|
|
||||||
// TODO: if no bases are given, that should imply [object]
|
|
||||||
bases,
|
|
||||||
});
|
|
||||||
ClassTypeId {
|
|
||||||
file_id: self.file_id,
|
|
||||||
class_id,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_union(&mut self, elems: &[Type]) -> UnionTypeId {
|
|
||||||
let union_id = self.unions.push(UnionType {
|
|
||||||
elements: elems.iter().copied().collect(),
|
|
||||||
});
|
|
||||||
UnionTypeId {
|
|
||||||
file_id: self.file_id,
|
|
||||||
union_id,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_intersection(&mut self, positive: &[Type], negative: &[Type]) -> IntersectionTypeId {
|
|
||||||
let intersection_id = self.intersections.push(IntersectionType {
|
|
||||||
positive: positive.iter().copied().collect(),
|
|
||||||
negative: negative.iter().copied().collect(),
|
|
||||||
});
|
|
||||||
IntersectionTypeId {
|
|
||||||
file_id: self.file_id,
|
|
||||||
intersection_id,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_function(&self, func_id: ModuleFunctionTypeId) -> &FunctionType {
|
|
||||||
&self.functions[func_id]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_class(&self, class_id: ModuleClassTypeId) -> &ClassType {
|
|
||||||
&self.classes[class_id]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_union(&self, union_id: ModuleUnionTypeId) -> &UnionType {
|
|
||||||
&self.unions[union_id]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_intersection(&self, intersection_id: ModuleIntersectionTypeId) -> &IntersectionType {
|
|
||||||
&self.intersections[intersection_id]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug)]
|
|
||||||
struct DisplayType<'a> {
|
|
||||||
ty: &'a Type,
|
|
||||||
store: &'a TypeStore,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for DisplayType<'_> {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
match self.ty {
|
|
||||||
Type::Any => f.write_str("Any"),
|
|
||||||
Type::Never => f.write_str("Never"),
|
|
||||||
Type::Unknown => f.write_str("Unknown"),
|
|
||||||
Type::Unbound => f.write_str("Unbound"),
|
|
||||||
// TODO functions and classes should display using a fully qualified name
|
|
||||||
Type::Class(class_id) => {
|
|
||||||
f.write_str("Literal[")?;
|
|
||||||
f.write_str(self.store.get_class(*class_id).name())?;
|
|
||||||
f.write_str("]")
|
|
||||||
}
|
|
||||||
Type::Instance(class_id) => f.write_str(self.store.get_class(*class_id).name()),
|
|
||||||
Type::Function(func_id) => f.write_str(self.store.get_function(*func_id).name()),
|
|
||||||
Type::Union(union_id) => self
|
|
||||||
.store
|
|
||||||
.get_module(union_id.file_id)
|
|
||||||
.get_union(union_id.union_id)
|
|
||||||
.display(f, self.store),
|
|
||||||
Type::Intersection(int_id) => self
|
|
||||||
.store
|
|
||||||
.get_module(int_id.file_id)
|
|
||||||
.get_intersection(int_id.intersection_id)
|
|
||||||
.display(f, self.store),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct ClassType {
|
|
||||||
/// Name of the class at definition
|
|
||||||
name: Name,
|
|
||||||
/// `ScopeId` of the class body
|
|
||||||
scope_id: ScopeId,
|
|
||||||
/// Types of all class bases
|
|
||||||
bases: Vec<Type>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ClassType {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
self.name.as_str()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bases(&self) -> &[Type] {
|
|
||||||
self.bases.as_slice()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct FunctionType {
|
|
||||||
/// name of the function at definition
|
|
||||||
name: Name,
|
|
||||||
/// symbol which this function is a definition of
|
|
||||||
symbol_id: SymbolId,
|
|
||||||
/// scope of this function's body
|
|
||||||
scope_id: ScopeId,
|
|
||||||
/// types of all decorators on this function
|
|
||||||
decorators: Vec<Type>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FunctionType {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
self.name.as_str()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn scope_id(&self) -> ScopeId {
|
|
||||||
self.scope_id
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn decorators(&self) -> &[Type] {
|
|
||||||
self.decorators.as_slice()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct UnionType {
|
|
||||||
// the union type includes values in any of these types
|
|
||||||
elements: FxIndexSet<Type>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl UnionType {
|
|
||||||
fn display(&self, f: &mut std::fmt::Formatter<'_>, store: &TypeStore) -> std::fmt::Result {
|
|
||||||
f.write_str("(")?;
|
|
||||||
let mut first = true;
|
|
||||||
for ty in &self.elements {
|
|
||||||
if !first {
|
|
||||||
f.write_str(" | ")?;
|
|
||||||
};
|
|
||||||
first = false;
|
|
||||||
write!(f, "{}", ty.display(store))?;
|
|
||||||
}
|
|
||||||
f.write_str(")")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Negation types aren't expressible in annotations, and are most likely to arise from type
|
|
||||||
// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
|
|
||||||
// directly in intersections rather than as a separate type. This sacrifices some efficiency in the
|
|
||||||
// case where a Not appears outside an intersection (unclear when that could even happen, but we'd
|
|
||||||
// have to represent it as a single-element intersection if it did) in exchange for better
|
|
||||||
// efficiency in the within-intersection case.
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct IntersectionType {
|
|
||||||
// the intersection type includes only values in all of these types
|
|
||||||
positive: FxIndexSet<Type>,
|
|
||||||
// the intersection type does not include any value in any of these types
|
|
||||||
negative: FxIndexSet<Type>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl IntersectionType {
|
|
||||||
fn display(&self, f: &mut std::fmt::Formatter<'_>, store: &TypeStore) -> std::fmt::Result {
|
|
||||||
f.write_str("(")?;
|
|
||||||
let mut first = true;
|
|
||||||
for (neg, ty) in self
|
|
||||||
.positive
|
|
||||||
.iter()
|
|
||||||
.map(|ty| (false, ty))
|
|
||||||
.chain(self.negative.iter().map(|ty| (true, ty)))
|
|
||||||
{
|
|
||||||
if !first {
|
|
||||||
f.write_str(" & ")?;
|
|
||||||
};
|
|
||||||
first = false;
|
|
||||||
if neg {
|
|
||||||
f.write_str("~")?;
|
|
||||||
};
|
|
||||||
write!(f, "{}", ty.display(store))?;
|
|
||||||
}
|
|
||||||
f.write_str(")")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use std::path::Path;

    use crate::files::Files;
    use crate::symbols::{SymbolFlags, SymbolTable};
    use crate::types::{Type, TypeStore};
    use crate::FxIndexSet;

    #[test]
    fn add_class() {
        let store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let class_id = store.add_class(file_id, "C", SymbolTable::root_scope_id(), Vec::new());
        assert_eq!(store.get_class(class_id).name(), "C");
        let instance = Type::Instance(class_id);
        assert_eq!(instance.display(&store).to_string(), "C");
    }

    #[test]
    fn add_function() {
        let store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let mut table = SymbolTable::new();
        let func_symbol = table.add_or_update_symbol(
            SymbolTable::root_scope_id(),
            "func",
            SymbolFlags::IS_DEFINED,
        );

        let func_id = store.add_function(
            file_id,
            "func",
            func_symbol,
            SymbolTable::root_scope_id(),
            vec![Type::Unknown],
        );
        assert_eq!(store.get_function(func_id).name(), "func");
        assert_eq!(store.get_function(func_id).decorators(), vec![Type::Unknown]);
        let function = Type::Function(func_id);
        assert_eq!(function.display(&store).to_string(), "func");
    }

    #[test]
    fn add_union() {
        let mut store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
        let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
        let elements = vec![Type::Instance(c1), Type::Instance(c2)];
        let union_id = store.add_union(file_id, &elements);
        assert_eq!(
            store.get_union(union_id).elements,
            elements.into_iter().collect::<FxIndexSet<_>>()
        );
        let union = Type::Union(union_id);
        assert_eq!(union.display(&store).to_string(), "(C1 | C2)");
    }

    #[test]
    fn add_intersection() {
        let mut store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
        let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
        let c3 = store.add_class(file_id, "C3", SymbolTable::root_scope_id(), Vec::new());
        let positive = vec![Type::Instance(c1), Type::Instance(c2)];
        let negative = vec![Type::Instance(c3)];
        let intersection_id = store.add_intersection(file_id, &positive, &negative);
        assert_eq!(
            store.get_intersection(intersection_id).positive,
            positive.into_iter().collect::<FxIndexSet<_>>()
        );
        assert_eq!(
            store.get_intersection(intersection_id).negative,
            negative.into_iter().collect::<FxIndexSet<_>>()
        );
        let intersection = Type::Intersection(intersection_id);
        assert_eq!(intersection.display(&store).to_string(), "(C1 & C2 & ~C3)");
    }
}
|
||||||
@@ -1,292 +0,0 @@
|
|||||||
#![allow(dead_code)]
|
|
||||||
|
|
||||||
use ruff_python_ast as ast;
|
|
||||||
use ruff_python_ast::AstNode;
|
|
||||||
|
|
||||||
use crate::db::{QueryResult, SemanticDb, SemanticJar};
|
|
||||||
|
|
||||||
use crate::module::ModuleName;
|
|
||||||
use crate::parse::parse;
|
|
||||||
use crate::symbols::{
|
|
||||||
resolve_global_symbol, symbol_table, Definition, GlobalSymbolId, ImportFromDefinition,
|
|
||||||
};
|
|
||||||
use crate::types::Type;
|
|
||||||
use crate::FileId;
|
|
||||||
|
|
||||||
// FIXME: Figure out proper dead-lock free synchronisation now that this takes `&db` instead of `&mut db`.
|
|
||||||
#[tracing::instrument(level = "trace", skip(db))]
|
|
||||||
pub fn infer_symbol_type(db: &dyn SemanticDb, symbol: GlobalSymbolId) -> QueryResult<Type> {
|
|
||||||
let symbols = symbol_table(db, symbol.file_id)?;
|
|
||||||
let defs = symbols.definitions(symbol.symbol_id);
|
|
||||||
let jar: &SemanticJar = db.jar()?;
|
|
||||||
|
|
||||||
if let Some(ty) = jar.type_store.get_cached_symbol_type(symbol) {
|
|
||||||
return Ok(ty);
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO handle multiple defs, conditional defs...
|
|
||||||
assert_eq!(defs.len(), 1);
|
|
||||||
|
|
||||||
let ty = infer_definition_type(db, symbol, defs[0].clone())?;
|
|
||||||
|
|
||||||
jar.type_store.cache_symbol_type(symbol, ty);
|
|
||||||
|
|
||||||
// TODO record dependencies
|
|
||||||
Ok(ty)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(db))]
|
|
||||||
pub fn infer_definition_type(
|
|
||||||
db: &dyn SemanticDb,
|
|
||||||
symbol: GlobalSymbolId,
|
|
||||||
definition: Definition,
|
|
||||||
) -> QueryResult<Type> {
|
|
||||||
let jar: &SemanticJar = db.jar()?;
|
|
||||||
let type_store = &jar.type_store;
|
|
||||||
let file_id = symbol.file_id;
|
|
||||||
|
|
||||||
match definition {
|
|
||||||
Definition::ImportFrom(ImportFromDefinition {
|
|
||||||
module,
|
|
||||||
name,
|
|
||||||
level,
|
|
||||||
}) => {
|
|
||||||
// TODO relative imports
|
|
||||||
assert!(matches!(level, 0));
|
|
||||||
let module_name = ModuleName::new(module.as_ref().expect("TODO relative imports"));
|
|
||||||
if let Some(remote_symbol) = resolve_global_symbol(db, module_name, &name)? {
|
|
||||||
infer_symbol_type(db, remote_symbol)
|
|
||||||
} else {
|
|
||||||
Ok(Type::Unknown)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Definition::ClassDef(node_key) => {
|
|
||||||
if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
|
|
||||||
Ok(ty)
|
|
||||||
} else {
|
|
||||||
let parsed = parse(db.upcast(), file_id)?;
|
|
||||||
let ast = parsed.ast();
|
|
||||||
let table = symbol_table(db, file_id)?;
|
|
||||||
let node = node_key.resolve_unwrap(ast.as_any_node_ref());
|
|
||||||
|
|
||||||
let mut bases = Vec::with_capacity(node.bases().len());
|
|
||||||
|
|
||||||
for base in node.bases() {
|
|
||||||
bases.push(infer_expr_type(db, file_id, base)?);
|
|
||||||
}
|
|
||||||
let scope_id = table.scope_id_for_node(node_key.erased());
|
|
||||||
let ty = Type::Class(type_store.add_class(file_id, &node.name.id, scope_id, bases));
|
|
||||||
type_store.cache_node_type(file_id, *node_key.erased(), ty);
|
|
||||||
Ok(ty)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Definition::FunctionDef(node_key) => {
|
|
||||||
if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
|
|
||||||
Ok(ty)
|
|
||||||
} else {
|
|
||||||
let parsed = parse(db.upcast(), file_id)?;
|
|
||||||
let ast = parsed.ast();
|
|
||||||
let table = symbol_table(db, file_id)?;
|
|
||||||
let node = node_key
|
|
||||||
.resolve(ast.as_any_node_ref())
|
|
||||||
.expect("node key should resolve");
|
|
||||||
|
|
||||||
let decorator_tys = node
|
|
||||||
.decorator_list
|
|
||||||
.iter()
|
|
||||||
.map(|decorator| infer_expr_type(db, file_id, &decorator.expression))
|
|
||||||
.collect::<QueryResult<_>>()?;
|
|
||||||
let scope_id = table.scope_id_for_node(node_key.erased());
|
|
||||||
let ty = type_store
|
|
||||||
.add_function(
|
|
||||||
file_id,
|
|
||||||
&node.name.id,
|
|
||||||
symbol.symbol_id,
|
|
||||||
scope_id,
|
|
||||||
decorator_tys,
|
|
||||||
)
|
|
||||||
.into();
|
|
||||||
type_store.cache_node_type(file_id, *node_key.erased(), ty);
|
|
||||||
Ok(ty)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Definition::Assignment(node_key) => {
|
|
||||||
let parsed = parse(db.upcast(), file_id)?;
|
|
||||||
let ast = parsed.ast();
|
|
||||||
let node = node_key.resolve_unwrap(ast.as_any_node_ref());
|
|
||||||
// TODO handle unpacking assignment correctly
|
|
||||||
infer_expr_type(db, file_id, &node.value)
|
|
||||||
}
|
|
||||||
_ => todo!("other kinds of definitions"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn infer_expr_type(db: &dyn SemanticDb, file_id: FileId, expr: &ast::Expr) -> QueryResult<Type> {
|
|
||||||
// TODO cache the resolution of the type on the node
|
|
||||||
let symbols = symbol_table(db, file_id)?;
|
|
||||||
match expr {
|
|
||||||
ast::Expr::Name(name) => {
|
|
||||||
// TODO look up in the correct scope, don't assume global
|
|
||||||
if let Some(symbol_id) = symbols.root_symbol_id_by_name(&name.id) {
|
|
||||||
infer_symbol_type(db, GlobalSymbolId { file_id, symbol_id })
|
|
||||||
} else {
|
|
||||||
Ok(Type::Unknown)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => todo!("full expression type resolution"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use crate::db::tests::TestDb;
    use crate::db::{HasJar, SemanticJar};
    use crate::module::{
        resolve_module, set_module_search_paths, ModuleName, ModuleSearchPath, ModuleSearchPathKind,
    };
    use crate::symbols::{symbol_table, GlobalSymbolId};
    use crate::types::{infer_symbol_type, Type};
    use crate::Name;

    // TODO with virtual filesystem we shouldn't have to write files to disk for these
    // tests

    struct TestCase {
        // held to keep the temp directory alive for the test's duration
        temp_dir: tempfile::TempDir,
        db: TestDb,

        src: ModuleSearchPath,
    }

    /// Set up a test database with a single first-party search path rooted in
    /// a fresh temporary directory.
    fn create_test() -> std::io::Result<TestCase> {
        let temp_dir = tempfile::tempdir()?;

        let src = temp_dir.path().join("src");
        std::fs::create_dir(&src)?;
        let src = ModuleSearchPath::new(src.canonicalize()?, ModuleSearchPathKind::FirstParty);

        let roots = vec![src.clone()];

        let mut db = TestDb::default();
        set_module_search_paths(&mut db, roots);

        Ok(TestCase { temp_dir, db, src })
    }

    /// Resolve `module_name`, look up `symbol_name` in its global scope, and
    /// infer that symbol's type.
    fn infer_global_symbol(
        db: &TestDb,
        module_name: &str,
        symbol_name: &str,
    ) -> anyhow::Result<Type> {
        let file_id = resolve_module(db, ModuleName::new(module_name))?
            .expect("module should be found")
            .path(db)?
            .file();
        let symbols = symbol_table(db, file_id)?;
        let symbol_id = symbols
            .root_symbol_id_by_name(symbol_name)
            .expect("symbol should be found");
        Ok(infer_symbol_type(db, GlobalSymbolId { file_id, symbol_id })?)
    }

    #[test]
    fn follow_import_to_class() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let a_path = case.src.path().join("a.py");
        let b_path = case.src.path().join("b.py");
        std::fs::write(a_path, "from b import C as D; E = D")?;
        std::fs::write(b_path, "class C: pass")?;

        let ty = infer_global_symbol(db, "a", "E")?;

        let jar = HasJar::<SemanticJar>::jar(db)?;
        assert!(matches!(ty, Type::Class(_)));
        assert_eq!(ty.display(&jar.type_store).to_string(), "Literal[C]");

        Ok(())
    }

    #[test]
    fn resolve_base_class_by_name() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let path = case.src.path().join("mod.py");
        std::fs::write(path, "class Base: pass\nclass Sub(Base): pass")?;

        let ty = infer_global_symbol(db, "mod", "Sub")?;
        let Type::Class(class_id) = ty else {
            panic!("Sub is not a Class")
        };

        let jar = HasJar::<SemanticJar>::jar(db)?;
        let base_names: Vec<_> = jar
            .type_store
            .get_class(class_id)
            .bases()
            .iter()
            .map(|base_ty| base_ty.display(&jar.type_store).to_string())
            .collect();

        assert_eq!(base_names, vec!["Literal[Base]"]);

        Ok(())
    }

    #[test]
    fn resolve_method() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let path = case.src.path().join("mod.py");
        std::fs::write(path, "class C:\n def f(self): pass")?;

        let ty = infer_global_symbol(db, "mod", "C")?;
        let Type::Class(class_id) = ty else {
            panic!("C is not a Class");
        };

        let member_ty = class_id
            .get_own_class_member(db, &Name::new("f"))
            .expect("C.f to resolve");
        let Some(Type::Function(func_id)) = member_ty else {
            panic!("C.f is not a Function");
        };

        let jar = HasJar::<SemanticJar>::jar(db)?;
        let function = jar.type_store.get_function(func_id);
        assert_eq!(function.name(), "f");

        Ok(())
    }
}
|
||||||
@@ -1,77 +0,0 @@
|
|||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use anyhow::Context;
|
|
||||||
use notify::event::{CreateKind, RemoveKind};
|
|
||||||
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
|
|
||||||
|
|
||||||
use crate::program::{FileChangeKind, FileWatcherChange};
|
|
||||||
|
|
||||||
pub struct FileWatcher {
|
|
||||||
watcher: RecommendedWatcher,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait EventHandler: Send + 'static {
|
|
||||||
fn handle(&self, changes: Vec<FileWatcherChange>);
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<F> EventHandler for F
|
|
||||||
where
|
|
||||||
F: Fn(Vec<FileWatcherChange>) + Send + 'static,
|
|
||||||
{
|
|
||||||
fn handle(&self, changes: Vec<FileWatcherChange>) {
|
|
||||||
let f = self;
|
|
||||||
f(changes);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FileWatcher {
|
|
||||||
pub fn new<E>(handler: E) -> anyhow::Result<Self>
|
|
||||||
where
|
|
||||||
E: EventHandler,
|
|
||||||
{
|
|
||||||
Self::from_handler(Box::new(handler))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
|
|
||||||
let watcher = recommended_watcher(move |changes: notify::Result<Event>| {
|
|
||||||
match changes {
|
|
||||||
Ok(event) => {
|
|
||||||
// TODO verify that this handles all events correctly
|
|
||||||
let change_kind = match event.kind {
|
|
||||||
EventKind::Create(CreateKind::File) => FileChangeKind::Created,
|
|
||||||
EventKind::Modify(_) => FileChangeKind::Modified,
|
|
||||||
EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
|
|
||||||
_ => {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut changes = Vec::new();
|
|
||||||
|
|
||||||
for path in event.paths {
|
|
||||||
if path.is_file() {
|
|
||||||
changes.push(FileWatcherChange::new(path, change_kind));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !changes.is_empty() {
|
|
||||||
handler.handle(changes);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// TODO proper error handling
|
|
||||||
Err(err) => {
|
|
||||||
panic!("Error: {err}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.context("Failed to create file watcher.")?;
|
|
||||||
|
|
||||||
Ok(Self { watcher })
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
|
|
||||||
self.watcher.watch(path, RecursiveMode::Recursive)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
237
crates/red_knot/vendor/typeshed/LICENSE
vendored
237
crates/red_knot/vendor/typeshed/LICENSE
vendored
@@ -1,237 +0,0 @@
|
|||||||
The "typeshed" project is licensed under the terms of the Apache license, as
|
|
||||||
reproduced below.
|
|
||||||
|
|
||||||
= = = = =
|
|
||||||
|
|
||||||
Apache License
|
|
||||||
Version 2.0, January 2004
|
|
||||||
http://www.apache.org/licenses/
|
|
||||||
|
|
||||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
|
||||||
|
|
||||||
1. Definitions.
|
|
||||||
|
|
||||||
"License" shall mean the terms and conditions for use, reproduction,
|
|
||||||
and distribution as defined by Sections 1 through 9 of this document.
|
|
||||||
|
|
||||||
"Licensor" shall mean the copyright owner or entity authorized by
|
|
||||||
the copyright owner that is granting the License.
|
|
||||||
|
|
||||||
"Legal Entity" shall mean the union of the acting entity and all
|
|
||||||
other entities that control, are controlled by, or are under common
|
|
||||||
control with that entity. For the purposes of this definition,
|
|
||||||
"control" means (i) the power, direct or indirect, to cause the
|
|
||||||
direction or management of such entity, whether by contract or
|
|
||||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
|
||||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
|
||||||
|
|
||||||
"You" (or "Your") shall mean an individual or Legal Entity
|
|
||||||
exercising permissions granted by this License.
|
|
||||||
|
|
||||||
"Source" form shall mean the preferred form for making modifications,
|
|
||||||
including but not limited to software source code, documentation
|
|
||||||
source, and configuration files.
|
|
||||||
|
|
||||||
"Object" form shall mean any form resulting from mechanical
|
|
||||||
transformation or translation of a Source form, including but
|
|
||||||
not limited to compiled object code, generated documentation,
|
|
||||||
and conversions to other media types.
|
|
||||||
|
|
||||||
"Work" shall mean the work of authorship, whether in Source or
|
|
||||||
Object form, made available under the License, as indicated by a
|
|
||||||
copyright notice that is included in or attached to the work
|
|
||||||
(an example is provided in the Appendix below).
|
|
||||||
|
|
||||||
"Derivative Works" shall mean any work, whether in Source or Object
|
|
||||||
form, that is based on (or derived from) the Work and for which the
|
|
||||||
editorial revisions, annotations, elaborations, or other modifications
|
|
||||||
represent, as a whole, an original work of authorship. For the purposes
|
|
||||||
of this License, Derivative Works shall not include works that remain
|
|
||||||
separable from, or merely link (or bind by name) to the interfaces of,
|
|
||||||
the Work and Derivative Works thereof.
|
|
||||||
|
|
||||||
"Contribution" shall mean any work of authorship, including
|
|
||||||
the original version of the Work and any modifications or additions
|
|
||||||
to that Work or Derivative Works thereof, that is intentionally
|
|
||||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
|
||||||
or by an individual or Legal Entity authorized to submit on behalf of
|
|
||||||
the copyright owner. For the purposes of this definition, "submitted"
|
|
||||||
means any form of electronic, verbal, or written communication sent
|
|
||||||
to the Licensor or its representatives, including but not limited to
|
|
||||||
communication on electronic mailing lists, source code control systems,
|
|
||||||
and issue tracking systems that are managed by, or on behalf of, the
|
|
||||||
Licensor for the purpose of discussing and improving the Work, but
|
|
||||||
excluding communication that is conspicuously marked or otherwise
|
|
||||||
designated in writing by the copyright owner as "Not a Contribution."
|
|
||||||
|
|
||||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
|
||||||
on behalf of whom a Contribution has been received by Licensor and
|
|
||||||
subsequently incorporated within the Work.
|
|
||||||
|
|
||||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
|
||||||
this License, each Contributor hereby grants to You a perpetual,
|
|
||||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
|
||||||
copyright license to reproduce, prepare Derivative Works of,
|
|
||||||
publicly display, publicly perform, sublicense, and distribute the
|
|
||||||
Work and such Derivative Works in Source or Object form.
|
|
||||||
|
|
||||||
3. Grant of Patent License. Subject to the terms and conditions of
|
|
||||||
this License, each Contributor hereby grants to You a perpetual,
|
|
||||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
|
||||||
(except as stated in this section) patent license to make, have made,
|
|
||||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
|
||||||
where such license applies only to those patent claims licensable
|
|
||||||
by such Contributor that are necessarily infringed by their
|
|
||||||
Contribution(s) alone or by combination of their Contribution(s)
|
|
||||||
with the Work to which such Contribution(s) was submitted. If You
|
|
||||||
institute patent litigation against any entity (including a
|
|
||||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
|
||||||
or a Contribution incorporated within the Work constitutes direct
|
|
||||||
or contributory patent infringement, then any patent licenses
|
|
||||||
granted to You under this License for that Work shall terminate
|
|
||||||
as of the date such litigation is filed.
|
|
||||||
|
|
||||||
4. Redistribution. You may reproduce and distribute copies of the
|
|
||||||
Work or Derivative Works thereof in any medium, with or without
|
|
||||||
modifications, and in Source or Object form, provided that You
|
|
||||||
meet the following conditions:
|
|
||||||
|
|
||||||
(a) You must give any other recipients of the Work or
|
|
||||||
Derivative Works a copy of this License; and
|
|
||||||
|
|
||||||
(b) You must cause any modified files to carry prominent notices
|
|
||||||
stating that You changed the files; and
|
|
||||||
|
|
||||||
(c) You must retain, in the Source form of any Derivative Works
|
|
||||||
that You distribute, all copyright, patent, trademark, and
|
|
||||||
attribution notices from the Source form of the Work,
|
|
||||||
excluding those notices that do not pertain to any part of
|
|
||||||
the Derivative Works; and
|
|
||||||
|
|
||||||
(d) If the Work includes a "NOTICE" text file as part of its
|
|
||||||
distribution, then any Derivative Works that You distribute must
|
|
||||||
include a readable copy of the attribution notices contained
|
|
||||||
within such NOTICE file, excluding those notices that do not
|
|
||||||
pertain to any part of the Derivative Works, in at least one
|
|
||||||
of the following places: within a NOTICE text file distributed
|
|
||||||
as part of the Derivative Works; within the Source form or
|
|
||||||
documentation, if provided along with the Derivative Works; or,
|
|
||||||
within a display generated by the Derivative Works, if and
|
|
||||||
wherever such third-party notices normally appear. The contents
|
|
||||||
of the NOTICE file are for informational purposes only and
|
|
||||||
do not modify the License. You may add Your own attribution
|
|
||||||
notices within Derivative Works that You distribute, alongside
|
|
||||||
or as an addendum to the NOTICE text from the Work, provided
|
|
||||||
that such additional attribution notices cannot be construed
|
|
||||||
as modifying the License.
|
|
||||||
|
|
||||||
You may add Your own copyright statement to Your modifications and
|
|
||||||
may provide additional or different license terms and conditions
|
|
||||||
for use, reproduction, or distribution of Your modifications, or
|
|
||||||
for any such Derivative Works as a whole, provided Your use,
|
|
||||||
reproduction, and distribution of the Work otherwise complies with
|
|
||||||
the conditions stated in this License.
|
|
||||||
|
|
||||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
|
||||||
any Contribution intentionally submitted for inclusion in the Work
|
|
||||||
by You to the Licensor shall be under the terms and conditions of
|
|
||||||
this License, without any additional terms or conditions.
|
|
||||||
Notwithstanding the above, nothing herein shall supersede or modify
|
|
||||||
the terms of any separate license agreement you may have executed
|
|
||||||
with Licensor regarding such Contributions.
|
|
||||||
|
|
||||||
6. Trademarks. This License does not grant permission to use the trade
|
|
||||||
names, trademarks, service marks, or product names of the Licensor,
|
|
||||||
except as required for reasonable and customary use in describing the
|
|
||||||
origin of the Work and reproducing the content of the NOTICE file.
|
|
||||||
|
|
||||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
|
||||||
agreed to in writing, Licensor provides the Work (and each
|
|
||||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
||||||
implied, including, without limitation, any warranties or conditions
|
|
||||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
|
||||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
|
||||||
appropriateness of using or redistributing the Work and assume any
|
|
||||||
risks associated with Your exercise of permissions under this License.
|
|
||||||
|
|
||||||
8. Limitation of Liability. In no event and under no legal theory,
|
|
||||||
whether in tort (including negligence), contract, or otherwise,
|
|
||||||
unless required by applicable law (such as deliberate and grossly
|
|
||||||
negligent acts) or agreed to in writing, shall any Contributor be
|
|
||||||
liable to You for damages, including any direct, indirect, special,
|
|
||||||
incidental, or consequential damages of any character arising as a
|
|
||||||
result of this License or out of the use or inability to use the
|
|
||||||
Work (including but not limited to damages for loss of goodwill,
|
|
||||||
work stoppage, computer failure or malfunction, or any and all
|
|
||||||
other commercial damages or losses), even if such Contributor
|
|
||||||
has been advised of the possibility of such damages.
|
|
||||||
|
|
||||||
9. Accepting Warranty or Additional Liability. While redistributing
|
|
||||||
the Work or Derivative Works thereof, You may choose to offer,
|
|
||||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
|
||||||
or other liability obligations and/or rights consistent with this
|
|
||||||
License. However, in accepting such obligations, You may act only
|
|
||||||
on Your own behalf and on Your sole responsibility, not on behalf
|
|
||||||
of any other Contributor, and only if You agree to indemnify,
|
|
||||||
defend, and hold each Contributor harmless for any liability
|
|
||||||
incurred by, or claims asserted against, such Contributor by reason
|
|
||||||
of your accepting any such warranty or additional liability.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
APPENDIX: How to apply the Apache License to your work.
|
|
||||||
|
|
||||||
To apply the Apache License to your work, attach the following
|
|
||||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
|
||||||
replaced with your own identifying information. (Don't include
|
|
||||||
the brackets!) The text should be enclosed in the appropriate
|
|
||||||
comment syntax for the file format. We also recommend that a
|
|
||||||
file or class name and description of purpose be included on the
|
|
||||||
same "printed page" as the copyright notice for easier
|
|
||||||
identification within third-party archives.
|
|
||||||
|
|
||||||
Copyright {yyyy} {name of copyright owner}
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
|
|
||||||
= = = = =
|
|
||||||
|
|
||||||
Parts of typeshed are licensed under different licenses (like the MIT
|
|
||||||
license), reproduced below.
|
|
||||||
|
|
||||||
= = = = =
|
|
||||||
|
|
||||||
The MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2015 Jukka Lehtosalo and contributors
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a
|
|
||||||
copy of this software and associated documentation files (the "Software"),
|
|
||||||
to deal in the Software without restriction, including without limitation
|
|
||||||
the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
|
||||||
and/or sell copies of the Software, and to permit persons to whom the
|
|
||||||
Software is furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
|
||||||
DEALINGS IN THE SOFTWARE.
|
|
||||||
|
|
||||||
= = = = =
|
|
||||||
124
crates/red_knot/vendor/typeshed/README.md
vendored
124
crates/red_knot/vendor/typeshed/README.md
vendored
@@ -1,124 +0,0 @@
|
|||||||
# typeshed
|
|
||||||
|
|
||||||
[](https://github.com/python/typeshed/actions/workflows/tests.yml)
|
|
||||||
[](https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
|
||||||
[](https://github.com/python/typeshed/blob/main/CONTRIBUTING.md)
|
|
||||||
|
|
||||||
## About
|
|
||||||
|
|
||||||
Typeshed contains external type annotations for the Python standard library
|
|
||||||
and Python builtins, as well as third party packages as contributed by
|
|
||||||
people external to those projects.
|
|
||||||
|
|
||||||
This data can e.g. be used for static analysis, type checking, type inference,
|
|
||||||
and autocompletion.
|
|
||||||
|
|
||||||
For information on how to use typeshed, read below. Information for
|
|
||||||
contributors can be found in [CONTRIBUTING.md](CONTRIBUTING.md). **Please read
|
|
||||||
it before submitting pull requests; do not report issues with annotations to
|
|
||||||
the project the stubs are for, but instead report them here to typeshed.**
|
|
||||||
|
|
||||||
Further documentation on stub files, typeshed, and Python's typing system in
|
|
||||||
general, can also be found at https://typing.readthedocs.io/en/latest/.
|
|
||||||
|
|
||||||
Typeshed supports Python versions 3.8 and up.
|
|
||||||
|
|
||||||
## Using
|
|
||||||
|
|
||||||
If you're just using a type checker ([mypy](https://github.com/python/mypy/),
|
|
||||||
[pyright](https://github.com/microsoft/pyright),
|
|
||||||
[pytype](https://github.com/google/pytype/), PyCharm, ...), as opposed to
|
|
||||||
developing it, you don't need to interact with the typeshed repo at
|
|
||||||
all: a copy of standard library part of typeshed is bundled with type checkers.
|
|
||||||
And type stubs for third party packages and modules you are using can
|
|
||||||
be installed from PyPI. For example, if you are using `html5lib` and `requests`,
|
|
||||||
you can install the type stubs using
|
|
||||||
|
|
||||||
```bash
|
|
||||||
$ pip install types-html5lib types-requests
|
|
||||||
```
|
|
||||||
|
|
||||||
These PyPI packages follow [PEP 561](http://www.python.org/dev/peps/pep-0561/)
|
|
||||||
and are automatically released (up to once a day) by
|
|
||||||
[typeshed internal machinery](https://github.com/typeshed-internal/stub_uploader).
|
|
||||||
|
|
||||||
Type checkers should be able to use these stub packages when installed. For more
|
|
||||||
details, see the documentation for your type checker.
|
|
||||||
|
|
||||||
### Package versioning for third-party stubs
|
|
||||||
|
|
||||||
Version numbers of third-party stub packages consist of at least four parts.
|
|
||||||
All parts of the stub version, except for the last part, correspond to the
|
|
||||||
version of the runtime package being stubbed. For example, if the `types-foo`
|
|
||||||
package has version `1.2.0.20240309`, this guarantees that the `types-foo` package
|
|
||||||
contains stubs targeted against `foo==1.2.*` and tested against the latest
|
|
||||||
version of `foo` matching that specifier. In this example, the final element
|
|
||||||
of the version number (20240309) indicates that the stub package was pushed on
|
|
||||||
March 9, 2024.
|
|
||||||
|
|
||||||
At typeshed, we try to keep breaking changes to a minimum. However, due to the
|
|
||||||
nature of stubs, any version bump can introduce changes that might make your
|
|
||||||
code fail to type check.
|
|
||||||
|
|
||||||
There are several strategies available for specifying the version of a stubs
|
|
||||||
package you're using, each with its own tradeoffs:
|
|
||||||
|
|
||||||
1. Use the same bounds that you use for the package being stubbed. For example,
|
|
||||||
if you use `requests>=2.30.0,<2.32`, you can use
|
|
||||||
`types-requests>=2.30.0,<2.32`. This ensures that the stubs are compatible
|
|
||||||
with the package you are using, but it carries a small risk of breaking
|
|
||||||
type checking due to changes in the stubs.
|
|
||||||
|
|
||||||
Another risk of this strategy is that stubs often lag behind
|
|
||||||
the package being stubbed. You might want to force the package being stubbed
|
|
||||||
to a certain minimum version because it fixes a critical bug, but if
|
|
||||||
correspondingly updated stubs have not been released, your type
|
|
||||||
checking results may not be fully accurate.
|
|
||||||
2. Pin the stubs to a known good version and update the pin from time to time
|
|
||||||
(either manually, or using a tool such as dependabot or renovate).
|
|
||||||
|
|
||||||
For example, if you use `types-requests==2.31.0.1`, you can have confidence
|
|
||||||
that upgrading dependencies will not break type checking. However, you will
|
|
||||||
miss out on improvements in the stubs that could potentially improve type
|
|
||||||
checking until you update the pin. This strategy also has the risk that the
|
|
||||||
stubs you are using might become incompatible with the package being stubbed.
|
|
||||||
3. Don't pin the stubs. This is the option that demands the least work from
|
|
||||||
you when it comes to updating version pins, and has the advantage that you
|
|
||||||
will automatically benefit from improved stubs whenever a new version of the
|
|
||||||
stubs package is released. However, it carries the risk that the stubs
|
|
||||||
become incompatible with the package being stubbed.
|
|
||||||
|
|
||||||
For example, if a new major version of the package is released, there's a
|
|
||||||
chance the stubs might be updated to reflect the new version of the runtime
|
|
||||||
package before you update the package being stubbed.
|
|
||||||
|
|
||||||
You can also switch between the different strategies as needed. For example,
|
|
||||||
you could default to strategy (1), but fall back to strategy (2) when
|
|
||||||
a problem arises that can't easily be fixed.
|
|
||||||
|
|
||||||
### The `_typeshed` package
|
|
||||||
|
|
||||||
typeshed includes a package `_typeshed` as part of the standard library.
|
|
||||||
This package and its submodules contain utility types, but are not
|
|
||||||
available at runtime. For more information about how to use this package,
|
|
||||||
[see the `stdlib/_typeshed` directory](https://github.com/python/typeshed/tree/main/stdlib/_typeshed).
|
|
||||||
|
|
||||||
## Discussion
|
|
||||||
|
|
||||||
If you've run into behavior in the type checker that suggests the type
|
|
||||||
stubs for a given library are incorrect or incomplete,
|
|
||||||
we want to hear from you!
|
|
||||||
|
|
||||||
Our main forum for discussion is the project's [GitHub issue
|
|
||||||
tracker](https://github.com/python/typeshed/issues). This is the right
|
|
||||||
place to start a discussion of any of the above or most any other
|
|
||||||
topic concerning the project.
|
|
||||||
|
|
||||||
If you have general questions about typing with Python, or you need
|
|
||||||
a review of your type annotations or stubs outside of typeshed, head over to
|
|
||||||
[our discussion forum](https://github.com/python/typing/discussions).
|
|
||||||
For less formal discussion, try the typing chat room on
|
|
||||||
[gitter.im](https://gitter.im/python/typing). Some typeshed maintainers
|
|
||||||
are almost always present; feel free to find us there and we're happy
|
|
||||||
to chat. Substantive technical discussion will be directed to the
|
|
||||||
issue tracker.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
2d33fe212221a05661c0db5215a91cf3d7b7f072
|
|
||||||
309
crates/red_knot/vendor/typeshed/stdlib/VERSIONS
vendored
309
crates/red_knot/vendor/typeshed/stdlib/VERSIONS
vendored
@@ -1,309 +0,0 @@
|
|||||||
# The structure of this file is as follows:
|
|
||||||
# - Blank lines and comments starting with `#` are ignored.
|
|
||||||
# - Lines contain the name of a module, followed by a colon,
|
|
||||||
# a space, and a version range (for example: `symbol: 3.0-3.9`).
|
|
||||||
#
|
|
||||||
# Version ranges may be of the form "X.Y-A.B" or "X.Y-". The
|
|
||||||
# first form means that a module was introduced in version X.Y and last
|
|
||||||
# available in version A.B. The second form means that the module was
|
|
||||||
# introduced in version X.Y and is still available in the latest
|
|
||||||
# version of Python.
|
|
||||||
#
|
|
||||||
# If a submodule is not listed separately, it has the same lifetime as
|
|
||||||
# its parent module.
|
|
||||||
#
|
|
||||||
# Python versions before 3.0 are ignored, so any module that was already
|
|
||||||
# present in 3.0 will have "3.0" as its minimum version. Version ranges
|
|
||||||
# for unsupported versions of Python 3 are generally accurate but we do
|
|
||||||
# not guarantee their correctness.
|
|
||||||
|
|
||||||
__future__: 3.0-
|
|
||||||
__main__: 3.0-
|
|
||||||
_ast: 3.0-
|
|
||||||
_bisect: 3.0-
|
|
||||||
_bootlocale: 3.4-3.9
|
|
||||||
_codecs: 3.0-
|
|
||||||
_collections_abc: 3.3-
|
|
||||||
_compat_pickle: 3.1-
|
|
||||||
_compression: 3.5-
|
|
||||||
_csv: 3.0-
|
|
||||||
_ctypes: 3.0-
|
|
||||||
_curses: 3.0-
|
|
||||||
_decimal: 3.3-
|
|
||||||
_dummy_thread: 3.0-3.8
|
|
||||||
_dummy_threading: 3.0-3.8
|
|
||||||
_heapq: 3.0-
|
|
||||||
_imp: 3.0-
|
|
||||||
_json: 3.0-
|
|
||||||
_locale: 3.0-
|
|
||||||
_lsprof: 3.0-
|
|
||||||
_markupbase: 3.0-
|
|
||||||
_msi: 3.0-
|
|
||||||
_operator: 3.4-
|
|
||||||
_osx_support: 3.0-
|
|
||||||
_posixsubprocess: 3.2-
|
|
||||||
_py_abc: 3.7-
|
|
||||||
_pydecimal: 3.5-
|
|
||||||
_random: 3.0-
|
|
||||||
_sitebuiltins: 3.4-
|
|
||||||
_socket: 3.0- # present in 3.0 at runtime, but not in typeshed
|
|
||||||
_stat: 3.4-
|
|
||||||
_thread: 3.0-
|
|
||||||
_threading_local: 3.0-
|
|
||||||
_tkinter: 3.0-
|
|
||||||
_tracemalloc: 3.4-
|
|
||||||
_typeshed: 3.0- # not present at runtime, only for type checking
|
|
||||||
_warnings: 3.0-
|
|
||||||
_weakref: 3.0-
|
|
||||||
_weakrefset: 3.0-
|
|
||||||
_winapi: 3.3-
|
|
||||||
abc: 3.0-
|
|
||||||
aifc: 3.0-3.12
|
|
||||||
antigravity: 3.0-
|
|
||||||
argparse: 3.0-
|
|
||||||
array: 3.0-
|
|
||||||
ast: 3.0-
|
|
||||||
asynchat: 3.0-3.11
|
|
||||||
asyncio: 3.4-
|
|
||||||
asyncio.mixins: 3.10-
|
|
||||||
asyncio.exceptions: 3.8-
|
|
||||||
asyncio.format_helpers: 3.7-
|
|
||||||
asyncio.runners: 3.7-
|
|
||||||
asyncio.staggered: 3.8-
|
|
||||||
asyncio.taskgroups: 3.11-
|
|
||||||
asyncio.threads: 3.9-
|
|
||||||
asyncio.timeouts: 3.11-
|
|
||||||
asyncio.trsock: 3.8-
|
|
||||||
asyncore: 3.0-3.11
|
|
||||||
atexit: 3.0-
|
|
||||||
audioop: 3.0-3.12
|
|
||||||
base64: 3.0-
|
|
||||||
bdb: 3.0-
|
|
||||||
binascii: 3.0-
|
|
||||||
binhex: 3.0-3.10
|
|
||||||
bisect: 3.0-
|
|
||||||
builtins: 3.0-
|
|
||||||
bz2: 3.0-
|
|
||||||
cProfile: 3.0-
|
|
||||||
calendar: 3.0-
|
|
||||||
cgi: 3.0-3.12
|
|
||||||
cgitb: 3.0-3.12
|
|
||||||
chunk: 3.0-3.12
|
|
||||||
cmath: 3.0-
|
|
||||||
cmd: 3.0-
|
|
||||||
code: 3.0-
|
|
||||||
codecs: 3.0-
|
|
||||||
codeop: 3.0-
|
|
||||||
collections: 3.0-
|
|
||||||
collections.abc: 3.3-
|
|
||||||
colorsys: 3.0-
|
|
||||||
compileall: 3.0-
|
|
||||||
concurrent: 3.2-
|
|
||||||
configparser: 3.0-
|
|
||||||
contextlib: 3.0-
|
|
||||||
contextvars: 3.7-
|
|
||||||
copy: 3.0-
|
|
||||||
copyreg: 3.0-
|
|
||||||
crypt: 3.0-3.12
|
|
||||||
csv: 3.0-
|
|
||||||
ctypes: 3.0-
|
|
||||||
curses: 3.0-
|
|
||||||
dataclasses: 3.7-
|
|
||||||
datetime: 3.0-
|
|
||||||
dbm: 3.0-
|
|
||||||
decimal: 3.0-
|
|
||||||
difflib: 3.0-
|
|
||||||
dis: 3.0-
|
|
||||||
distutils: 3.0-3.11
|
|
||||||
distutils.command.bdist_msi: 3.0-3.10
|
|
||||||
distutils.command.bdist_wininst: 3.0-3.9
|
|
||||||
doctest: 3.0-
|
|
||||||
dummy_threading: 3.0-3.8
|
|
||||||
email: 3.0-
|
|
||||||
encodings: 3.0-
|
|
||||||
ensurepip: 3.0-
|
|
||||||
enum: 3.4-
|
|
||||||
errno: 3.0-
|
|
||||||
faulthandler: 3.3-
|
|
||||||
fcntl: 3.0-
|
|
||||||
filecmp: 3.0-
|
|
||||||
fileinput: 3.0-
|
|
||||||
fnmatch: 3.0-
|
|
||||||
formatter: 3.0-3.9
|
|
||||||
fractions: 3.0-
|
|
||||||
ftplib: 3.0-
|
|
||||||
functools: 3.0-
|
|
||||||
gc: 3.0-
|
|
||||||
genericpath: 3.0-
|
|
||||||
getopt: 3.0-
|
|
||||||
getpass: 3.0-
|
|
||||||
gettext: 3.0-
|
|
||||||
glob: 3.0-
|
|
||||||
graphlib: 3.9-
|
|
||||||
grp: 3.0-
|
|
||||||
gzip: 3.0-
|
|
||||||
hashlib: 3.0-
|
|
||||||
heapq: 3.0-
|
|
||||||
hmac: 3.0-
|
|
||||||
html: 3.0-
|
|
||||||
http: 3.0-
|
|
||||||
imaplib: 3.0-
|
|
||||||
imghdr: 3.0-3.12
|
|
||||||
imp: 3.0-3.11
|
|
||||||
importlib: 3.0-
|
|
||||||
importlib._abc: 3.10-
|
|
||||||
importlib.metadata: 3.8-
|
|
||||||
importlib.metadata._meta: 3.10-
|
|
||||||
importlib.readers: 3.10-
|
|
||||||
importlib.resources: 3.7-
|
|
||||||
importlib.resources.abc: 3.11-
|
|
||||||
importlib.resources.readers: 3.11-
|
|
||||||
importlib.resources.simple: 3.11-
|
|
||||||
importlib.simple: 3.11-
|
|
||||||
inspect: 3.0-
|
|
||||||
io: 3.0-
|
|
||||||
ipaddress: 3.3-
|
|
||||||
itertools: 3.0-
|
|
||||||
json: 3.0-
|
|
||||||
keyword: 3.0-
|
|
||||||
lib2to3: 3.0-
|
|
||||||
linecache: 3.0-
|
|
||||||
locale: 3.0-
|
|
||||||
logging: 3.0-
|
|
||||||
lzma: 3.3-
|
|
||||||
mailbox: 3.0-
|
|
||||||
mailcap: 3.0-3.12
|
|
||||||
marshal: 3.0-
|
|
||||||
math: 3.0-
|
|
||||||
mimetypes: 3.0-
|
|
||||||
mmap: 3.0-
|
|
||||||
modulefinder: 3.0-
|
|
||||||
msilib: 3.0-3.12
|
|
||||||
msvcrt: 3.0-
|
|
||||||
multiprocessing: 3.0-
|
|
||||||
multiprocessing.resource_tracker: 3.8-
|
|
||||||
multiprocessing.shared_memory: 3.8-
|
|
||||||
netrc: 3.0-
|
|
||||||
nis: 3.0-3.12
|
|
||||||
nntplib: 3.0-3.12
|
|
||||||
nt: 3.0-
|
|
||||||
ntpath: 3.0-
|
|
||||||
nturl2path: 3.0-
|
|
||||||
numbers: 3.0-
|
|
||||||
opcode: 3.0-
|
|
||||||
operator: 3.0-
|
|
||||||
optparse: 3.0-
|
|
||||||
os: 3.0-
|
|
||||||
ossaudiodev: 3.0-3.12
|
|
||||||
parser: 3.0-3.9
|
|
||||||
pathlib: 3.4-
|
|
||||||
pdb: 3.0-
|
|
||||||
pickle: 3.0-
|
|
||||||
pickletools: 3.0-
|
|
||||||
pipes: 3.0-3.12
|
|
||||||
pkgutil: 3.0-
|
|
||||||
platform: 3.0-
|
|
||||||
plistlib: 3.0-
|
|
||||||
poplib: 3.0-
|
|
||||||
posix: 3.0-
|
|
||||||
posixpath: 3.0-
|
|
||||||
pprint: 3.0-
|
|
||||||
profile: 3.0-
|
|
||||||
pstats: 3.0-
|
|
||||||
pty: 3.0-
|
|
||||||
pwd: 3.0-
|
|
||||||
py_compile: 3.0-
|
|
||||||
pyclbr: 3.0-
|
|
||||||
pydoc: 3.0-
|
|
||||||
pydoc_data: 3.0-
|
|
||||||
pyexpat: 3.0-
|
|
||||||
queue: 3.0-
|
|
||||||
quopri: 3.0-
|
|
||||||
random: 3.0-
|
|
||||||
re: 3.0-
|
|
||||||
readline: 3.0-
|
|
||||||
reprlib: 3.0-
|
|
||||||
resource: 3.0-
|
|
||||||
rlcompleter: 3.0-
|
|
||||||
runpy: 3.0-
|
|
||||||
sched: 3.0-
|
|
||||||
secrets: 3.6-
|
|
||||||
select: 3.0-
|
|
||||||
selectors: 3.4-
|
|
||||||
shelve: 3.0-
|
|
||||||
shlex: 3.0-
|
|
||||||
shutil: 3.0-
|
|
||||||
signal: 3.0-
|
|
||||||
site: 3.0-
|
|
||||||
smtpd: 3.0-3.11
|
|
||||||
smtplib: 3.0-
|
|
||||||
sndhdr: 3.0-3.12
|
|
||||||
socket: 3.0-
|
|
||||||
socketserver: 3.0-
|
|
||||||
spwd: 3.0-3.12
|
|
||||||
sqlite3: 3.0-
|
|
||||||
sre_compile: 3.0-
|
|
||||||
sre_constants: 3.0-
|
|
||||||
sre_parse: 3.0-
|
|
||||||
ssl: 3.0-
|
|
||||||
stat: 3.0-
|
|
||||||
statistics: 3.4-
|
|
||||||
string: 3.0-
|
|
||||||
stringprep: 3.0-
|
|
||||||
struct: 3.0-
|
|
||||||
subprocess: 3.0-
|
|
||||||
sunau: 3.0-3.12
|
|
||||||
symbol: 3.0-3.9
|
|
||||||
symtable: 3.0-
|
|
||||||
sys: 3.0-
|
|
||||||
sys._monitoring: 3.12- # Doesn't actually exist. See comments in the stub.
|
|
||||||
sysconfig: 3.0-
|
|
||||||
syslog: 3.0-
|
|
||||||
tabnanny: 3.0-
|
|
||||||
tarfile: 3.0-
|
|
||||||
telnetlib: 3.0-3.12
|
|
||||||
tempfile: 3.0-
|
|
||||||
termios: 3.0-
|
|
||||||
textwrap: 3.0-
|
|
||||||
this: 3.0-
|
|
||||||
threading: 3.0-
|
|
||||||
time: 3.0-
|
|
||||||
timeit: 3.0-
|
|
||||||
tkinter: 3.0-
|
|
||||||
token: 3.0-
|
|
||||||
tokenize: 3.0-
|
|
||||||
tomllib: 3.11-
|
|
||||||
trace: 3.0-
|
|
||||||
traceback: 3.0-
|
|
||||||
tracemalloc: 3.4-
|
|
||||||
tty: 3.0-
|
|
||||||
turtle: 3.0-
|
|
||||||
types: 3.0-
|
|
||||||
typing: 3.5-
|
|
||||||
typing_extensions: 3.0-
|
|
||||||
unicodedata: 3.0-
|
|
||||||
unittest: 3.0-
|
|
||||||
unittest._log: 3.9-
|
|
||||||
unittest.async_case: 3.8-
|
|
||||||
urllib: 3.0-
|
|
||||||
uu: 3.0-3.12
|
|
||||||
uuid: 3.0-
|
|
||||||
venv: 3.3-
|
|
||||||
warnings: 3.0-
|
|
||||||
wave: 3.0-
|
|
||||||
weakref: 3.0-
|
|
||||||
webbrowser: 3.0-
|
|
||||||
winreg: 3.0-
|
|
||||||
winsound: 3.0-
|
|
||||||
wsgiref: 3.0-
|
|
||||||
wsgiref.types: 3.11-
|
|
||||||
xdrlib: 3.0-3.12
|
|
||||||
xml: 3.0-
|
|
||||||
xmlrpc: 3.0-
|
|
||||||
xxlimited: 3.2-
|
|
||||||
zipapp: 3.5-
|
|
||||||
zipfile: 3.0-
|
|
||||||
zipfile._path: 3.12-
|
|
||||||
zipimport: 3.0-
|
|
||||||
zlib: 3.0-
|
|
||||||
zoneinfo: 3.9-
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
from typing_extensions import TypeAlias
|
|
||||||
|
|
||||||
_VersionInfo: TypeAlias = tuple[int, int, int, str, int]
|
|
||||||
|
|
||||||
class _Feature:
|
|
||||||
def __init__(self, optionalRelease: _VersionInfo, mandatoryRelease: _VersionInfo | None, compiler_flag: int) -> None: ...
|
|
||||||
def getOptionalRelease(self) -> _VersionInfo: ...
|
|
||||||
def getMandatoryRelease(self) -> _VersionInfo | None: ...
|
|
||||||
compiler_flag: int
|
|
||||||
|
|
||||||
absolute_import: _Feature
|
|
||||||
division: _Feature
|
|
||||||
generators: _Feature
|
|
||||||
nested_scopes: _Feature
|
|
||||||
print_function: _Feature
|
|
||||||
unicode_literals: _Feature
|
|
||||||
with_statement: _Feature
|
|
||||||
barry_as_FLUFL: _Feature
|
|
||||||
generator_stop: _Feature
|
|
||||||
annotations: _Feature
|
|
||||||
|
|
||||||
all_feature_names: list[str] # undocumented
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"all_feature_names",
|
|
||||||
"absolute_import",
|
|
||||||
"division",
|
|
||||||
"generators",
|
|
||||||
"nested_scopes",
|
|
||||||
"print_function",
|
|
||||||
"unicode_literals",
|
|
||||||
"with_statement",
|
|
||||||
"barry_as_FLUFL",
|
|
||||||
"generator_stop",
|
|
||||||
"annotations",
|
|
||||||
]
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
from typing import Any
|
|
||||||
|
|
||||||
def __getattr__(name: str) -> Any: ...
|
|
||||||
591
crates/red_knot/vendor/typeshed/stdlib/_ast.pyi
vendored
591
crates/red_knot/vendor/typeshed/stdlib/_ast.pyi
vendored
@@ -1,591 +0,0 @@
|
|||||||
import sys
|
|
||||||
import typing_extensions
|
|
||||||
from typing import Any, ClassVar, Literal
|
|
||||||
|
|
||||||
PyCF_ONLY_AST: Literal[1024]
|
|
||||||
PyCF_TYPE_COMMENTS: Literal[4096]
|
|
||||||
PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192]
|
|
||||||
|
|
||||||
# Alias used for fields that must always be valid identifiers
|
|
||||||
# A string `x` counts as a valid identifier if both the following are True
|
|
||||||
# (1) `x.isidentifier()` evaluates to `True`
|
|
||||||
# (2) `keyword.iskeyword(x)` evaluates to `False`
|
|
||||||
_Identifier: typing_extensions.TypeAlias = str
|
|
||||||
|
|
||||||
class AST:
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ()
|
|
||||||
_attributes: ClassVar[tuple[str, ...]]
|
|
||||||
_fields: ClassVar[tuple[str, ...]]
|
|
||||||
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
|
|
||||||
# TODO: Not all nodes have all of the following attributes
|
|
||||||
lineno: int
|
|
||||||
col_offset: int
|
|
||||||
end_lineno: int | None
|
|
||||||
end_col_offset: int | None
|
|
||||||
type_comment: str | None
|
|
||||||
|
|
||||||
class mod(AST): ...
|
|
||||||
class type_ignore(AST): ...
|
|
||||||
|
|
||||||
class TypeIgnore(type_ignore):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("lineno", "tag")
|
|
||||||
tag: str
|
|
||||||
|
|
||||||
class FunctionType(mod):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("argtypes", "returns")
|
|
||||||
argtypes: list[expr]
|
|
||||||
returns: expr
|
|
||||||
|
|
||||||
class Module(mod):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("body", "type_ignores")
|
|
||||||
body: list[stmt]
|
|
||||||
type_ignores: list[TypeIgnore]
|
|
||||||
|
|
||||||
class Interactive(mod):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("body",)
|
|
||||||
body: list[stmt]
|
|
||||||
|
|
||||||
class Expression(mod):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("body",)
|
|
||||||
body: expr
|
|
||||||
|
|
||||||
class stmt(AST): ...
|
|
||||||
|
|
||||||
class FunctionDef(stmt):
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
|
|
||||||
elif sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment")
|
|
||||||
name: _Identifier
|
|
||||||
args: arguments
|
|
||||||
body: list[stmt]
|
|
||||||
decorator_list: list[expr]
|
|
||||||
returns: expr | None
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
type_params: list[type_param]
|
|
||||||
|
|
||||||
class AsyncFunctionDef(stmt):
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
|
|
||||||
elif sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment")
|
|
||||||
name: _Identifier
|
|
||||||
args: arguments
|
|
||||||
body: list[stmt]
|
|
||||||
decorator_list: list[expr]
|
|
||||||
returns: expr | None
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
type_params: list[type_param]
|
|
||||||
|
|
||||||
class ClassDef(stmt):
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
__match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params")
|
|
||||||
elif sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("name", "bases", "keywords", "body", "decorator_list")
|
|
||||||
name: _Identifier
|
|
||||||
bases: list[expr]
|
|
||||||
keywords: list[keyword]
|
|
||||||
body: list[stmt]
|
|
||||||
decorator_list: list[expr]
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
type_params: list[type_param]
|
|
||||||
|
|
||||||
class Return(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("value",)
|
|
||||||
value: expr | None
|
|
||||||
|
|
||||||
class Delete(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("targets",)
|
|
||||||
targets: list[expr]
|
|
||||||
|
|
||||||
class Assign(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("targets", "value", "type_comment")
|
|
||||||
targets: list[expr]
|
|
||||||
value: expr
|
|
||||||
|
|
||||||
class AugAssign(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("target", "op", "value")
|
|
||||||
target: Name | Attribute | Subscript
|
|
||||||
op: operator
|
|
||||||
value: expr
|
|
||||||
|
|
||||||
class AnnAssign(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("target", "annotation", "value", "simple")
|
|
||||||
target: Name | Attribute | Subscript
|
|
||||||
annotation: expr
|
|
||||||
value: expr | None
|
|
||||||
simple: int
|
|
||||||
|
|
||||||
class For(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
|
|
||||||
target: expr
|
|
||||||
iter: expr
|
|
||||||
body: list[stmt]
|
|
||||||
orelse: list[stmt]
|
|
||||||
|
|
||||||
class AsyncFor(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
|
|
||||||
target: expr
|
|
||||||
iter: expr
|
|
||||||
body: list[stmt]
|
|
||||||
orelse: list[stmt]
|
|
||||||
|
|
||||||
class While(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("test", "body", "orelse")
|
|
||||||
test: expr
|
|
||||||
body: list[stmt]
|
|
||||||
orelse: list[stmt]
|
|
||||||
|
|
||||||
class If(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("test", "body", "orelse")
|
|
||||||
test: expr
|
|
||||||
body: list[stmt]
|
|
||||||
orelse: list[stmt]
|
|
||||||
|
|
||||||
class With(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("items", "body", "type_comment")
|
|
||||||
items: list[withitem]
|
|
||||||
body: list[stmt]
|
|
||||||
|
|
||||||
class AsyncWith(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("items", "body", "type_comment")
|
|
||||||
items: list[withitem]
|
|
||||||
body: list[stmt]
|
|
||||||
|
|
||||||
class Raise(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("exc", "cause")
|
|
||||||
exc: expr | None
|
|
||||||
cause: expr | None
|
|
||||||
|
|
||||||
class Try(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("body", "handlers", "orelse", "finalbody")
|
|
||||||
body: list[stmt]
|
|
||||||
handlers: list[ExceptHandler]
|
|
||||||
orelse: list[stmt]
|
|
||||||
finalbody: list[stmt]
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 11):
|
|
||||||
class TryStar(stmt):
|
|
||||||
__match_args__ = ("body", "handlers", "orelse", "finalbody")
|
|
||||||
body: list[stmt]
|
|
||||||
handlers: list[ExceptHandler]
|
|
||||||
orelse: list[stmt]
|
|
||||||
finalbody: list[stmt]
|
|
||||||
|
|
||||||
class Assert(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("test", "msg")
|
|
||||||
test: expr
|
|
||||||
msg: expr | None
|
|
||||||
|
|
||||||
class Import(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("names",)
|
|
||||||
names: list[alias]
|
|
||||||
|
|
||||||
class ImportFrom(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("module", "names", "level")
|
|
||||||
module: str | None
|
|
||||||
names: list[alias]
|
|
||||||
level: int
|
|
||||||
|
|
||||||
class Global(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("names",)
|
|
||||||
names: list[_Identifier]
|
|
||||||
|
|
||||||
class Nonlocal(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("names",)
|
|
||||||
names: list[_Identifier]
|
|
||||||
|
|
||||||
class Expr(stmt):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("value",)
|
|
||||||
value: expr
|
|
||||||
|
|
||||||
class Pass(stmt): ...
|
|
||||||
class Break(stmt): ...
|
|
||||||
class Continue(stmt): ...
|
|
||||||
class expr(AST): ...
|
|
||||||
|
|
||||||
class BoolOp(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("op", "values")
|
|
||||||
op: boolop
|
|
||||||
values: list[expr]
|
|
||||||
|
|
||||||
class BinOp(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("left", "op", "right")
|
|
||||||
left: expr
|
|
||||||
op: operator
|
|
||||||
right: expr
|
|
||||||
|
|
||||||
class UnaryOp(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("op", "operand")
|
|
||||||
op: unaryop
|
|
||||||
operand: expr
|
|
||||||
|
|
||||||
class Lambda(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("args", "body")
|
|
||||||
args: arguments
|
|
||||||
body: expr
|
|
||||||
|
|
||||||
class IfExp(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("test", "body", "orelse")
|
|
||||||
test: expr
|
|
||||||
body: expr
|
|
||||||
orelse: expr
|
|
||||||
|
|
||||||
class Dict(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("keys", "values")
|
|
||||||
keys: list[expr | None]
|
|
||||||
values: list[expr]
|
|
||||||
|
|
||||||
class Set(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("elts",)
|
|
||||||
elts: list[expr]
|
|
||||||
|
|
||||||
class ListComp(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("elt", "generators")
|
|
||||||
elt: expr
|
|
||||||
generators: list[comprehension]
|
|
||||||
|
|
||||||
class SetComp(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("elt", "generators")
|
|
||||||
elt: expr
|
|
||||||
generators: list[comprehension]
|
|
||||||
|
|
||||||
class DictComp(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("key", "value", "generators")
|
|
||||||
key: expr
|
|
||||||
value: expr
|
|
||||||
generators: list[comprehension]
|
|
||||||
|
|
||||||
class GeneratorExp(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("elt", "generators")
|
|
||||||
elt: expr
|
|
||||||
generators: list[comprehension]
|
|
||||||
|
|
||||||
class Await(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("value",)
|
|
||||||
value: expr
|
|
||||||
|
|
||||||
class Yield(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("value",)
|
|
||||||
value: expr | None
|
|
||||||
|
|
||||||
class YieldFrom(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("value",)
|
|
||||||
value: expr
|
|
||||||
|
|
||||||
class Compare(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("left", "ops", "comparators")
|
|
||||||
left: expr
|
|
||||||
ops: list[cmpop]
|
|
||||||
comparators: list[expr]
|
|
||||||
|
|
||||||
class Call(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("func", "args", "keywords")
|
|
||||||
func: expr
|
|
||||||
args: list[expr]
|
|
||||||
keywords: list[keyword]
|
|
||||||
|
|
||||||
class FormattedValue(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("value", "conversion", "format_spec")
|
|
||||||
value: expr
|
|
||||||
conversion: int
|
|
||||||
format_spec: expr | None
|
|
||||||
|
|
||||||
class JoinedStr(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("values",)
|
|
||||||
values: list[expr]
|
|
||||||
|
|
||||||
class Constant(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("value", "kind")
|
|
||||||
value: Any # None, str, bytes, bool, int, float, complex, Ellipsis
|
|
||||||
kind: str | None
|
|
||||||
# Aliases for value, for backwards compatibility
|
|
||||||
s: Any
|
|
||||||
n: int | float | complex
|
|
||||||
|
|
||||||
class NamedExpr(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("target", "value")
|
|
||||||
target: Name
|
|
||||||
value: expr
|
|
||||||
|
|
||||||
class Attribute(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("value", "attr", "ctx")
|
|
||||||
value: expr
|
|
||||||
attr: _Identifier
|
|
||||||
ctx: expr_context
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 9):
|
|
||||||
_Slice: typing_extensions.TypeAlias = expr
|
|
||||||
else:
|
|
||||||
class slice(AST): ...
|
|
||||||
_Slice: typing_extensions.TypeAlias = slice
|
|
||||||
|
|
||||||
class Slice(_Slice):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("lower", "upper", "step")
|
|
||||||
lower: expr | None
|
|
||||||
upper: expr | None
|
|
||||||
step: expr | None
|
|
||||||
|
|
||||||
if sys.version_info < (3, 9):
|
|
||||||
class ExtSlice(slice):
|
|
||||||
dims: list[slice]
|
|
||||||
|
|
||||||
class Index(slice):
|
|
||||||
value: expr
|
|
||||||
|
|
||||||
class Subscript(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("value", "slice", "ctx")
|
|
||||||
value: expr
|
|
||||||
slice: _Slice
|
|
||||||
ctx: expr_context
|
|
||||||
|
|
||||||
class Starred(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("value", "ctx")
|
|
||||||
value: expr
|
|
||||||
ctx: expr_context
|
|
||||||
|
|
||||||
class Name(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("id", "ctx")
|
|
||||||
id: _Identifier
|
|
||||||
ctx: expr_context
|
|
||||||
|
|
||||||
class List(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("elts", "ctx")
|
|
||||||
elts: list[expr]
|
|
||||||
ctx: expr_context
|
|
||||||
|
|
||||||
class Tuple(expr):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("elts", "ctx")
|
|
||||||
elts: list[expr]
|
|
||||||
ctx: expr_context
|
|
||||||
if sys.version_info >= (3, 9):
|
|
||||||
dims: list[expr]
|
|
||||||
|
|
||||||
class expr_context(AST): ...
|
|
||||||
|
|
||||||
if sys.version_info < (3, 9):
|
|
||||||
class AugLoad(expr_context): ...
|
|
||||||
class AugStore(expr_context): ...
|
|
||||||
class Param(expr_context): ...
|
|
||||||
|
|
||||||
class Suite(mod):
|
|
||||||
body: list[stmt]
|
|
||||||
|
|
||||||
class Del(expr_context): ...
|
|
||||||
class Load(expr_context): ...
|
|
||||||
class Store(expr_context): ...
|
|
||||||
class boolop(AST): ...
|
|
||||||
class And(boolop): ...
|
|
||||||
class Or(boolop): ...
|
|
||||||
class operator(AST): ...
|
|
||||||
class Add(operator): ...
|
|
||||||
class BitAnd(operator): ...
|
|
||||||
class BitOr(operator): ...
|
|
||||||
class BitXor(operator): ...
|
|
||||||
class Div(operator): ...
|
|
||||||
class FloorDiv(operator): ...
|
|
||||||
class LShift(operator): ...
|
|
||||||
class Mod(operator): ...
|
|
||||||
class Mult(operator): ...
|
|
||||||
class MatMult(operator): ...
|
|
||||||
class Pow(operator): ...
|
|
||||||
class RShift(operator): ...
|
|
||||||
class Sub(operator): ...
|
|
||||||
class unaryop(AST): ...
|
|
||||||
class Invert(unaryop): ...
|
|
||||||
class Not(unaryop): ...
|
|
||||||
class UAdd(unaryop): ...
|
|
||||||
class USub(unaryop): ...
|
|
||||||
class cmpop(AST): ...
|
|
||||||
class Eq(cmpop): ...
|
|
||||||
class Gt(cmpop): ...
|
|
||||||
class GtE(cmpop): ...
|
|
||||||
class In(cmpop): ...
|
|
||||||
class Is(cmpop): ...
|
|
||||||
class IsNot(cmpop): ...
|
|
||||||
class Lt(cmpop): ...
|
|
||||||
class LtE(cmpop): ...
|
|
||||||
class NotEq(cmpop): ...
|
|
||||||
class NotIn(cmpop): ...
|
|
||||||
|
|
||||||
class comprehension(AST):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("target", "iter", "ifs", "is_async")
|
|
||||||
target: expr
|
|
||||||
iter: expr
|
|
||||||
ifs: list[expr]
|
|
||||||
is_async: int
|
|
||||||
|
|
||||||
class excepthandler(AST): ...
|
|
||||||
|
|
||||||
class ExceptHandler(excepthandler):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("type", "name", "body")
|
|
||||||
type: expr | None
|
|
||||||
name: _Identifier | None
|
|
||||||
body: list[stmt]
|
|
||||||
|
|
||||||
class arguments(AST):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults")
|
|
||||||
posonlyargs: list[arg]
|
|
||||||
args: list[arg]
|
|
||||||
vararg: arg | None
|
|
||||||
kwonlyargs: list[arg]
|
|
||||||
kw_defaults: list[expr | None]
|
|
||||||
kwarg: arg | None
|
|
||||||
defaults: list[expr]
|
|
||||||
|
|
||||||
class arg(AST):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("arg", "annotation", "type_comment")
|
|
||||||
arg: _Identifier
|
|
||||||
annotation: expr | None
|
|
||||||
|
|
||||||
class keyword(AST):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("arg", "value")
|
|
||||||
arg: _Identifier | None
|
|
||||||
value: expr
|
|
||||||
|
|
||||||
class alias(AST):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("name", "asname")
|
|
||||||
name: str
|
|
||||||
asname: _Identifier | None
|
|
||||||
|
|
||||||
class withitem(AST):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__ = ("context_expr", "optional_vars")
|
|
||||||
context_expr: expr
|
|
||||||
optional_vars: expr | None
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
class Match(stmt):
|
|
||||||
__match_args__ = ("subject", "cases")
|
|
||||||
subject: expr
|
|
||||||
cases: list[match_case]
|
|
||||||
|
|
||||||
class pattern(AST): ...
|
|
||||||
# Without the alias, Pyright complains variables named pattern are recursively defined
|
|
||||||
_Pattern: typing_extensions.TypeAlias = pattern
|
|
||||||
|
|
||||||
class match_case(AST):
|
|
||||||
__match_args__ = ("pattern", "guard", "body")
|
|
||||||
pattern: _Pattern
|
|
||||||
guard: expr | None
|
|
||||||
body: list[stmt]
|
|
||||||
|
|
||||||
class MatchValue(pattern):
|
|
||||||
__match_args__ = ("value",)
|
|
||||||
value: expr
|
|
||||||
|
|
||||||
class MatchSingleton(pattern):
|
|
||||||
__match_args__ = ("value",)
|
|
||||||
value: Literal[True, False] | None
|
|
||||||
|
|
||||||
class MatchSequence(pattern):
|
|
||||||
__match_args__ = ("patterns",)
|
|
||||||
patterns: list[pattern]
|
|
||||||
|
|
||||||
class MatchStar(pattern):
|
|
||||||
__match_args__ = ("name",)
|
|
||||||
name: _Identifier | None
|
|
||||||
|
|
||||||
class MatchMapping(pattern):
|
|
||||||
__match_args__ = ("keys", "patterns", "rest")
|
|
||||||
keys: list[expr]
|
|
||||||
patterns: list[pattern]
|
|
||||||
rest: _Identifier | None
|
|
||||||
|
|
||||||
class MatchClass(pattern):
|
|
||||||
__match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns")
|
|
||||||
cls: expr
|
|
||||||
patterns: list[pattern]
|
|
||||||
kwd_attrs: list[_Identifier]
|
|
||||||
kwd_patterns: list[pattern]
|
|
||||||
|
|
||||||
class MatchAs(pattern):
|
|
||||||
__match_args__ = ("pattern", "name")
|
|
||||||
pattern: _Pattern | None
|
|
||||||
name: _Identifier | None
|
|
||||||
|
|
||||||
class MatchOr(pattern):
|
|
||||||
__match_args__ = ("patterns",)
|
|
||||||
patterns: list[pattern]
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
class type_param(AST):
|
|
||||||
end_lineno: int
|
|
||||||
end_col_offset: int
|
|
||||||
|
|
||||||
class TypeVar(type_param):
|
|
||||||
__match_args__ = ("name", "bound")
|
|
||||||
name: _Identifier
|
|
||||||
bound: expr | None
|
|
||||||
|
|
||||||
class ParamSpec(type_param):
|
|
||||||
__match_args__ = ("name",)
|
|
||||||
name: _Identifier
|
|
||||||
|
|
||||||
class TypeVarTuple(type_param):
|
|
||||||
__match_args__ = ("name",)
|
|
||||||
name: _Identifier
|
|
||||||
|
|
||||||
class TypeAlias(stmt):
|
|
||||||
__match_args__ = ("name", "type_params", "value")
|
|
||||||
name: Name
|
|
||||||
type_params: list[type_param]
|
|
||||||
value: expr
|
|
||||||
@@ -1,84 +0,0 @@
|
|||||||
import sys
|
|
||||||
from _typeshed import SupportsLenAndGetItem, SupportsRichComparisonT
|
|
||||||
from collections.abc import Callable, MutableSequence
|
|
||||||
from typing import TypeVar, overload
|
|
||||||
|
|
||||||
_T = TypeVar("_T")
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
@overload
|
|
||||||
def bisect_left(
|
|
||||||
a: SupportsLenAndGetItem[SupportsRichComparisonT],
|
|
||||||
x: SupportsRichComparisonT,
|
|
||||||
lo: int = 0,
|
|
||||||
hi: int | None = None,
|
|
||||||
*,
|
|
||||||
key: None = None,
|
|
||||||
) -> int: ...
|
|
||||||
@overload
|
|
||||||
def bisect_left(
|
|
||||||
a: SupportsLenAndGetItem[_T],
|
|
||||||
x: SupportsRichComparisonT,
|
|
||||||
lo: int = 0,
|
|
||||||
hi: int | None = None,
|
|
||||||
*,
|
|
||||||
key: Callable[[_T], SupportsRichComparisonT],
|
|
||||||
) -> int: ...
|
|
||||||
@overload
|
|
||||||
def bisect_right(
|
|
||||||
a: SupportsLenAndGetItem[SupportsRichComparisonT],
|
|
||||||
x: SupportsRichComparisonT,
|
|
||||||
lo: int = 0,
|
|
||||||
hi: int | None = None,
|
|
||||||
*,
|
|
||||||
key: None = None,
|
|
||||||
) -> int: ...
|
|
||||||
@overload
|
|
||||||
def bisect_right(
|
|
||||||
a: SupportsLenAndGetItem[_T],
|
|
||||||
x: SupportsRichComparisonT,
|
|
||||||
lo: int = 0,
|
|
||||||
hi: int | None = None,
|
|
||||||
*,
|
|
||||||
key: Callable[[_T], SupportsRichComparisonT],
|
|
||||||
) -> int: ...
|
|
||||||
@overload
|
|
||||||
def insort_left(
|
|
||||||
a: MutableSequence[SupportsRichComparisonT],
|
|
||||||
x: SupportsRichComparisonT,
|
|
||||||
lo: int = 0,
|
|
||||||
hi: int | None = None,
|
|
||||||
*,
|
|
||||||
key: None = None,
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def insort_left(
|
|
||||||
a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT]
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def insort_right(
|
|
||||||
a: MutableSequence[SupportsRichComparisonT],
|
|
||||||
x: SupportsRichComparisonT,
|
|
||||||
lo: int = 0,
|
|
||||||
hi: int | None = None,
|
|
||||||
*,
|
|
||||||
key: None = None,
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def insort_right(
|
|
||||||
a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT]
|
|
||||||
) -> None: ...
|
|
||||||
|
|
||||||
else:
|
|
||||||
def bisect_left(
|
|
||||||
a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None
|
|
||||||
) -> int: ...
|
|
||||||
def bisect_right(
|
|
||||||
a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None
|
|
||||||
) -> int: ...
|
|
||||||
def insort_left(
|
|
||||||
a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None
|
|
||||||
) -> None: ...
|
|
||||||
def insort_right(
|
|
||||||
a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None
|
|
||||||
) -> None: ...
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
def getpreferredencoding(do_setlocale: bool = True) -> str: ...
|
|
||||||
133
crates/red_knot/vendor/typeshed/stdlib/_codecs.pyi
vendored
133
crates/red_knot/vendor/typeshed/stdlib/_codecs.pyi
vendored
@@ -1,133 +0,0 @@
|
|||||||
import codecs
|
|
||||||
import sys
|
|
||||||
from _typeshed import ReadableBuffer
|
|
||||||
from collections.abc import Callable
|
|
||||||
from typing import Literal, overload
|
|
||||||
from typing_extensions import TypeAlias
|
|
||||||
|
|
||||||
# This type is not exposed; it is defined in unicodeobject.c
|
|
||||||
class _EncodingMap:
|
|
||||||
def size(self) -> int: ...
|
|
||||||
|
|
||||||
_CharMap: TypeAlias = dict[int, int] | _EncodingMap
|
|
||||||
_Handler: TypeAlias = Callable[[UnicodeError], tuple[str | bytes, int]]
|
|
||||||
_SearchFunction: TypeAlias = Callable[[str], codecs.CodecInfo | None]
|
|
||||||
|
|
||||||
def register(search_function: _SearchFunction, /) -> None: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
def unregister(search_function: _SearchFunction, /) -> None: ...
|
|
||||||
|
|
||||||
def register_error(errors: str, handler: _Handler, /) -> None: ...
|
|
||||||
def lookup_error(name: str, /) -> _Handler: ...
|
|
||||||
|
|
||||||
# The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300
|
|
||||||
# https://docs.python.org/3/library/codecs.html#binary-transforms
|
|
||||||
_BytesToBytesEncoding: TypeAlias = Literal[
|
|
||||||
"base64",
|
|
||||||
"base_64",
|
|
||||||
"base64_codec",
|
|
||||||
"bz2",
|
|
||||||
"bz2_codec",
|
|
||||||
"hex",
|
|
||||||
"hex_codec",
|
|
||||||
"quopri",
|
|
||||||
"quotedprintable",
|
|
||||||
"quoted_printable",
|
|
||||||
"quopri_codec",
|
|
||||||
"uu",
|
|
||||||
"uu_codec",
|
|
||||||
"zip",
|
|
||||||
"zlib",
|
|
||||||
"zlib_codec",
|
|
||||||
]
|
|
||||||
# https://docs.python.org/3/library/codecs.html#text-transforms
|
|
||||||
_StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"]
|
|
||||||
|
|
||||||
@overload
|
|
||||||
def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ...
|
|
||||||
@overload
|
|
||||||
def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... # type: ignore[overload-overlap]
|
|
||||||
@overload
|
|
||||||
def encode(obj: str, encoding: str = "utf-8", errors: str = "strict") -> bytes: ...
|
|
||||||
@overload
|
|
||||||
def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... # type: ignore[overload-overlap]
|
|
||||||
@overload
|
|
||||||
def decode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ...
|
|
||||||
|
|
||||||
# these are documented as text encodings but in practice they also accept str as input
|
|
||||||
@overload
|
|
||||||
def decode(
|
|
||||||
obj: str,
|
|
||||||
encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"],
|
|
||||||
errors: str = "strict",
|
|
||||||
) -> str: ...
|
|
||||||
|
|
||||||
# hex is officially documented as a bytes to bytes encoding, but it appears to also work with str
|
|
||||||
@overload
|
|
||||||
def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = "strict") -> bytes: ...
|
|
||||||
@overload
|
|
||||||
def decode(obj: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> str: ...
|
|
||||||
def lookup(encoding: str, /) -> codecs.CodecInfo: ...
|
|
||||||
def charmap_build(map: str, /) -> _CharMap: ...
|
|
||||||
def ascii_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
|
|
||||||
def ascii_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def charmap_decode(data: ReadableBuffer, errors: str | None = None, mapping: _CharMap | None = None, /) -> tuple[str, int]: ...
|
|
||||||
def charmap_encode(str: str, errors: str | None = None, mapping: _CharMap | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
|
|
||||||
def escape_encode(data: bytes, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def latin_1_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
|
|
||||||
def latin_1_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 9):
|
|
||||||
def raw_unicode_escape_decode(
|
|
||||||
data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /
|
|
||||||
) -> tuple[str, int]: ...
|
|
||||||
|
|
||||||
else:
|
|
||||||
def raw_unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
|
|
||||||
|
|
||||||
def raw_unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def readbuffer_encode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 9):
|
|
||||||
def unicode_escape_decode(
|
|
||||||
data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /
|
|
||||||
) -> tuple[str, int]: ...
|
|
||||||
|
|
||||||
else:
|
|
||||||
def unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
|
|
||||||
|
|
||||||
def unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_16_be_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_16_be_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_16_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_16_encode(str: str, errors: str | None = None, byteorder: int = 0, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_16_ex_decode(
|
|
||||||
data: ReadableBuffer, errors: str | None = None, byteorder: int = 0, final: bool = False, /
|
|
||||||
) -> tuple[str, int, int]: ...
|
|
||||||
def utf_16_le_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_16_le_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_32_be_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_32_be_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_32_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_32_encode(str: str, errors: str | None = None, byteorder: int = 0, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_32_ex_decode(
|
|
||||||
data: ReadableBuffer, errors: str | None = None, byteorder: int = 0, final: bool = False, /
|
|
||||||
) -> tuple[str, int, int]: ...
|
|
||||||
def utf_32_le_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_32_le_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_7_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_7_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_8_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_8_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
|
|
||||||
if sys.platform == "win32":
|
|
||||||
def mbcs_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def mbcs_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def code_page_decode(
|
|
||||||
codepage: int, data: ReadableBuffer, errors: str | None = None, final: bool = False, /
|
|
||||||
) -> tuple[str, int]: ...
|
|
||||||
def code_page_encode(code_page: int, str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def oem_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def oem_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
@@ -1,94 +0,0 @@
|
|||||||
import sys
|
|
||||||
from abc import abstractmethod
|
|
||||||
from types import MappingProxyType
|
|
||||||
from typing import ( # noqa: Y022,Y038,Y057
|
|
||||||
AbstractSet as Set,
|
|
||||||
AsyncGenerator as AsyncGenerator,
|
|
||||||
AsyncIterable as AsyncIterable,
|
|
||||||
AsyncIterator as AsyncIterator,
|
|
||||||
Awaitable as Awaitable,
|
|
||||||
ByteString as ByteString,
|
|
||||||
Callable as Callable,
|
|
||||||
Collection as Collection,
|
|
||||||
Container as Container,
|
|
||||||
Coroutine as Coroutine,
|
|
||||||
Generator as Generator,
|
|
||||||
Generic,
|
|
||||||
Hashable as Hashable,
|
|
||||||
ItemsView as ItemsView,
|
|
||||||
Iterable as Iterable,
|
|
||||||
Iterator as Iterator,
|
|
||||||
KeysView as KeysView,
|
|
||||||
Mapping as Mapping,
|
|
||||||
MappingView as MappingView,
|
|
||||||
MutableMapping as MutableMapping,
|
|
||||||
MutableSequence as MutableSequence,
|
|
||||||
MutableSet as MutableSet,
|
|
||||||
Protocol,
|
|
||||||
Reversible as Reversible,
|
|
||||||
Sequence as Sequence,
|
|
||||||
Sized as Sized,
|
|
||||||
TypeVar,
|
|
||||||
ValuesView as ValuesView,
|
|
||||||
final,
|
|
||||||
runtime_checkable,
|
|
||||||
)
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"Awaitable",
|
|
||||||
"Coroutine",
|
|
||||||
"AsyncIterable",
|
|
||||||
"AsyncIterator",
|
|
||||||
"AsyncGenerator",
|
|
||||||
"Hashable",
|
|
||||||
"Iterable",
|
|
||||||
"Iterator",
|
|
||||||
"Generator",
|
|
||||||
"Reversible",
|
|
||||||
"Sized",
|
|
||||||
"Container",
|
|
||||||
"Callable",
|
|
||||||
"Collection",
|
|
||||||
"Set",
|
|
||||||
"MutableSet",
|
|
||||||
"Mapping",
|
|
||||||
"MutableMapping",
|
|
||||||
"MappingView",
|
|
||||||
"KeysView",
|
|
||||||
"ItemsView",
|
|
||||||
"ValuesView",
|
|
||||||
"Sequence",
|
|
||||||
"MutableSequence",
|
|
||||||
"ByteString",
|
|
||||||
]
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
__all__ += ["Buffer"]
|
|
||||||
|
|
||||||
_KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers.
|
|
||||||
_VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers.
|
|
||||||
|
|
||||||
@final
|
|
||||||
class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented
|
|
||||||
def __eq__(self, value: object, /) -> bool: ...
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
@property
|
|
||||||
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
|
|
||||||
|
|
||||||
@final
|
|
||||||
class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
@property
|
|
||||||
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
|
|
||||||
|
|
||||||
@final
|
|
||||||
class dict_items(ItemsView[_KT_co, _VT_co]): # undocumented
|
|
||||||
def __eq__(self, value: object, /) -> bool: ...
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
@property
|
|
||||||
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
@runtime_checkable
|
|
||||||
class Buffer(Protocol):
|
|
||||||
@abstractmethod
|
|
||||||
def __buffer__(self, flags: int, /) -> memoryview: ...
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
IMPORT_MAPPING: dict[str, str]
|
|
||||||
NAME_MAPPING: dict[tuple[str, str], tuple[str, str]]
|
|
||||||
PYTHON2_EXCEPTIONS: tuple[str, ...]
|
|
||||||
MULTIPROCESSING_EXCEPTIONS: tuple[str, ...]
|
|
||||||
REVERSE_IMPORT_MAPPING: dict[str, str]
|
|
||||||
REVERSE_NAME_MAPPING: dict[tuple[str, str], tuple[str, str]]
|
|
||||||
PYTHON3_OSERROR_EXCEPTIONS: tuple[str, ...]
|
|
||||||
PYTHON3_IMPORTERROR_EXCEPTIONS: tuple[str, ...]
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
from _typeshed import WriteableBuffer
|
|
||||||
from collections.abc import Callable
|
|
||||||
from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase
|
|
||||||
from typing import Any, Protocol
|
|
||||||
|
|
||||||
BUFFER_SIZE = DEFAULT_BUFFER_SIZE
|
|
||||||
|
|
||||||
class _Reader(Protocol):
|
|
||||||
def read(self, n: int, /) -> bytes: ...
|
|
||||||
def seekable(self) -> bool: ...
|
|
||||||
def seek(self, n: int, /) -> Any: ...
|
|
||||||
|
|
||||||
class BaseStream(BufferedIOBase): ...
|
|
||||||
|
|
||||||
class DecompressReader(RawIOBase):
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
fp: _Reader,
|
|
||||||
decomp_factory: Callable[..., object],
|
|
||||||
trailing_error: type[Exception] | tuple[type[Exception], ...] = (),
|
|
||||||
**decomp_args: Any,
|
|
||||||
) -> None: ...
|
|
||||||
def readinto(self, b: WriteableBuffer) -> int: ...
|
|
||||||
def read(self, size: int = -1) -> bytes: ...
|
|
||||||
def seek(self, offset: int, whence: int = 0) -> int: ...
|
|
||||||
90
crates/red_knot/vendor/typeshed/stdlib/_csv.pyi
vendored
90
crates/red_knot/vendor/typeshed/stdlib/_csv.pyi
vendored
@@ -1,90 +0,0 @@
|
|||||||
import sys
|
|
||||||
from _typeshed import SupportsWrite
|
|
||||||
from collections.abc import Iterable, Iterator
|
|
||||||
from typing import Any, Final, Literal
|
|
||||||
from typing_extensions import TypeAlias
|
|
||||||
|
|
||||||
__version__: Final[str]
|
|
||||||
|
|
||||||
QUOTE_ALL: Literal[1]
|
|
||||||
QUOTE_MINIMAL: Literal[0]
|
|
||||||
QUOTE_NONE: Literal[3]
|
|
||||||
QUOTE_NONNUMERIC: Literal[2]
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
QUOTE_STRINGS: Literal[4]
|
|
||||||
QUOTE_NOTNULL: Literal[5]
|
|
||||||
|
|
||||||
# Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC`
|
|
||||||
# However, using literals in situations like these can cause false-positives (see #7258)
|
|
||||||
_QuotingType: TypeAlias = int
|
|
||||||
|
|
||||||
class Error(Exception): ...
|
|
||||||
|
|
||||||
class Dialect:
|
|
||||||
delimiter: str
|
|
||||||
quotechar: str | None
|
|
||||||
escapechar: str | None
|
|
||||||
doublequote: bool
|
|
||||||
skipinitialspace: bool
|
|
||||||
lineterminator: str
|
|
||||||
quoting: _QuotingType
|
|
||||||
strict: bool
|
|
||||||
def __init__(self) -> None: ...
|
|
||||||
|
|
||||||
_DialectLike: TypeAlias = str | Dialect | type[Dialect]
|
|
||||||
|
|
||||||
class _reader(Iterator[list[str]]):
|
|
||||||
@property
|
|
||||||
def dialect(self) -> Dialect: ...
|
|
||||||
line_num: int
|
|
||||||
def __next__(self) -> list[str]: ...
|
|
||||||
|
|
||||||
class _writer:
|
|
||||||
@property
|
|
||||||
def dialect(self) -> Dialect: ...
|
|
||||||
def writerow(self, row: Iterable[Any]) -> Any: ...
|
|
||||||
def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ...
|
|
||||||
|
|
||||||
def writer(
|
|
||||||
csvfile: SupportsWrite[str],
|
|
||||||
dialect: _DialectLike = "excel",
|
|
||||||
*,
|
|
||||||
delimiter: str = ",",
|
|
||||||
quotechar: str | None = '"',
|
|
||||||
escapechar: str | None = None,
|
|
||||||
doublequote: bool = True,
|
|
||||||
skipinitialspace: bool = False,
|
|
||||||
lineterminator: str = "\r\n",
|
|
||||||
quoting: _QuotingType = 0,
|
|
||||||
strict: bool = False,
|
|
||||||
) -> _writer: ...
|
|
||||||
def reader(
|
|
||||||
csvfile: Iterable[str],
|
|
||||||
dialect: _DialectLike = "excel",
|
|
||||||
*,
|
|
||||||
delimiter: str = ",",
|
|
||||||
quotechar: str | None = '"',
|
|
||||||
escapechar: str | None = None,
|
|
||||||
doublequote: bool = True,
|
|
||||||
skipinitialspace: bool = False,
|
|
||||||
lineterminator: str = "\r\n",
|
|
||||||
quoting: _QuotingType = 0,
|
|
||||||
strict: bool = False,
|
|
||||||
) -> _reader: ...
|
|
||||||
def register_dialect(
|
|
||||||
name: str,
|
|
||||||
dialect: type[Dialect] = ...,
|
|
||||||
*,
|
|
||||||
delimiter: str = ",",
|
|
||||||
quotechar: str | None = '"',
|
|
||||||
escapechar: str | None = None,
|
|
||||||
doublequote: bool = True,
|
|
||||||
skipinitialspace: bool = False,
|
|
||||||
lineterminator: str = "\r\n",
|
|
||||||
quoting: _QuotingType = 0,
|
|
||||||
strict: bool = False,
|
|
||||||
) -> None: ...
|
|
||||||
def unregister_dialect(name: str) -> None: ...
|
|
||||||
def get_dialect(name: str) -> Dialect: ...
|
|
||||||
def list_dialects() -> list[str]: ...
|
|
||||||
def field_size_limit(new_limit: int = ...) -> int: ...
|
|
||||||
211
crates/red_knot/vendor/typeshed/stdlib/_ctypes.pyi
vendored
211
crates/red_knot/vendor/typeshed/stdlib/_ctypes.pyi
vendored
@@ -1,211 +0,0 @@
|
|||||||
import sys
|
|
||||||
from _typeshed import ReadableBuffer, WriteableBuffer
|
|
||||||
from abc import abstractmethod
|
|
||||||
from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
|
|
||||||
from ctypes import CDLL, ArgumentError as ArgumentError
|
|
||||||
from typing import Any, ClassVar, Generic, TypeVar, overload
|
|
||||||
from typing_extensions import Self, TypeAlias
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 9):
|
|
||||||
from types import GenericAlias
|
|
||||||
|
|
||||||
_T = TypeVar("_T")
|
|
||||||
_CT = TypeVar("_CT", bound=_CData)
|
|
||||||
|
|
||||||
FUNCFLAG_CDECL: int
|
|
||||||
FUNCFLAG_PYTHONAPI: int
|
|
||||||
FUNCFLAG_USE_ERRNO: int
|
|
||||||
FUNCFLAG_USE_LASTERROR: int
|
|
||||||
RTLD_GLOBAL: int
|
|
||||||
RTLD_LOCAL: int
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 11):
|
|
||||||
CTYPES_MAX_ARGCOUNT: int
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
SIZEOF_TIME_T: int
|
|
||||||
|
|
||||||
if sys.platform == "win32":
|
|
||||||
# Description, Source, HelpFile, HelpContext, scode
|
|
||||||
_COMError_Details: TypeAlias = tuple[str | None, str | None, str | None, int | None, int | None]
|
|
||||||
|
|
||||||
class COMError(Exception):
|
|
||||||
hresult: int
|
|
||||||
text: str | None
|
|
||||||
details: _COMError_Details
|
|
||||||
|
|
||||||
def __init__(self, hresult: int, text: str | None, details: _COMError_Details) -> None: ...
|
|
||||||
|
|
||||||
def CopyComPointer(src: _PointerLike, dst: _PointerLike | _CArgObject) -> int: ...
|
|
||||||
|
|
||||||
FUNCFLAG_HRESULT: int
|
|
||||||
FUNCFLAG_STDCALL: int
|
|
||||||
|
|
||||||
def FormatError(code: int = ...) -> str: ...
|
|
||||||
def get_last_error() -> int: ...
|
|
||||||
def set_last_error(value: int) -> int: ...
|
|
||||||
def LoadLibrary(name: str, load_flags: int = 0, /) -> int: ...
|
|
||||||
def FreeLibrary(handle: int, /) -> None: ...
|
|
||||||
|
|
||||||
class _CDataMeta(type):
|
|
||||||
# By default mypy complains about the following two methods, because strictly speaking cls
|
|
||||||
# might not be a Type[_CT]. However this can never actually happen, because the only class that
|
|
||||||
# uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here.
|
|
||||||
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc]
|
|
||||||
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc]
|
|
||||||
|
|
||||||
class _CData(metaclass=_CDataMeta):
|
|
||||||
_b_base_: int
|
|
||||||
_b_needsfree_: bool
|
|
||||||
_objects: Mapping[Any, int] | None
|
|
||||||
# At runtime the following classmethods are available only on classes, not
|
|
||||||
# on instances. This can't be reflected properly in the type system:
|
|
||||||
#
|
|
||||||
# Structure.from_buffer(...) # valid at runtime
|
|
||||||
# Structure(...).from_buffer(...) # invalid at runtime
|
|
||||||
#
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ...
|
|
||||||
@classmethod
|
|
||||||
def from_buffer_copy(cls, source: ReadableBuffer, offset: int = ...) -> Self: ...
|
|
||||||
@classmethod
|
|
||||||
def from_address(cls, address: int) -> Self: ...
|
|
||||||
@classmethod
|
|
||||||
def from_param(cls, obj: Any) -> Self | _CArgObject: ...
|
|
||||||
@classmethod
|
|
||||||
def in_dll(cls, library: CDLL, name: str) -> Self: ...
|
|
||||||
def __buffer__(self, flags: int, /) -> memoryview: ...
|
|
||||||
def __release_buffer__(self, buffer: memoryview, /) -> None: ...
|
|
||||||
|
|
||||||
class _SimpleCData(_CData, Generic[_T]):
|
|
||||||
value: _T
|
|
||||||
# The TypeVar can be unsolved here,
|
|
||||||
# but we can't use overloads without creating many, many mypy false-positive errors
|
|
||||||
def __init__(self, value: _T = ...) -> None: ... # pyright: ignore[reportInvalidTypeVarUse]
|
|
||||||
|
|
||||||
class _CanCastTo(_CData): ...
|
|
||||||
class _PointerLike(_CanCastTo): ...
|
|
||||||
|
|
||||||
class _Pointer(_PointerLike, _CData, Generic[_CT]):
|
|
||||||
_type_: type[_CT]
|
|
||||||
contents: _CT
|
|
||||||
@overload
|
|
||||||
def __init__(self) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __init__(self, arg: _CT) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __getitem__(self, key: int, /) -> Any: ...
|
|
||||||
@overload
|
|
||||||
def __getitem__(self, key: slice, /) -> list[Any]: ...
|
|
||||||
def __setitem__(self, key: int, value: Any, /) -> None: ...
|
|
||||||
|
|
||||||
def POINTER(type: type[_CT]) -> type[_Pointer[_CT]]: ...
|
|
||||||
def pointer(arg: _CT, /) -> _Pointer[_CT]: ...
|
|
||||||
|
|
||||||
class _CArgObject: ...
|
|
||||||
|
|
||||||
def byref(obj: _CData, offset: int = ...) -> _CArgObject: ...
|
|
||||||
|
|
||||||
_ECT: TypeAlias = Callable[[_CData | None, CFuncPtr, tuple[_CData, ...]], _CData]
|
|
||||||
_PF: TypeAlias = tuple[int] | tuple[int, str | None] | tuple[int, str | None, Any]
|
|
||||||
|
|
||||||
class CFuncPtr(_PointerLike, _CData):
|
|
||||||
restype: type[_CData] | Callable[[int], Any] | None
|
|
||||||
argtypes: Sequence[type[_CData]]
|
|
||||||
errcheck: _ECT
|
|
||||||
# Abstract attribute that must be defined on subclasses
|
|
||||||
_flags_: ClassVar[int]
|
|
||||||
@overload
|
|
||||||
def __init__(self) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __init__(self, address: int, /) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __init__(self, callable: Callable[..., Any], /) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __init__(self, func_spec: tuple[str | int, CDLL], paramflags: tuple[_PF, ...] | None = ..., /) -> None: ...
|
|
||||||
if sys.platform == "win32":
|
|
||||||
@overload
|
|
||||||
def __init__(
|
|
||||||
self, vtbl_index: int, name: str, paramflags: tuple[_PF, ...] | None = ..., iid: _CData | None = ..., /
|
|
||||||
) -> None: ...
|
|
||||||
|
|
||||||
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
|
|
||||||
|
|
||||||
_GetT = TypeVar("_GetT")
|
|
||||||
_SetT = TypeVar("_SetT")
|
|
||||||
|
|
||||||
class _CField(Generic[_CT, _GetT, _SetT]):
|
|
||||||
offset: int
|
|
||||||
size: int
|
|
||||||
@overload
|
|
||||||
def __get__(self, instance: None, owner: type[Any] | None, /) -> Self: ...
|
|
||||||
@overload
|
|
||||||
def __get__(self, instance: Any, owner: type[Any] | None, /) -> _GetT: ...
|
|
||||||
def __set__(self, instance: Any, value: _SetT, /) -> None: ...
|
|
||||||
|
|
||||||
class _StructUnionMeta(_CDataMeta):
|
|
||||||
_fields_: Sequence[tuple[str, type[_CData]] | tuple[str, type[_CData], int]]
|
|
||||||
_pack_: int
|
|
||||||
_anonymous_: Sequence[str]
|
|
||||||
def __getattr__(self, name: str) -> _CField[Any, Any, Any]: ...
|
|
||||||
|
|
||||||
class _StructUnionBase(_CData, metaclass=_StructUnionMeta):
|
|
||||||
def __init__(self, *args: Any, **kw: Any) -> None: ...
|
|
||||||
def __getattr__(self, name: str) -> Any: ...
|
|
||||||
def __setattr__(self, name: str, value: Any) -> None: ...
|
|
||||||
|
|
||||||
class Union(_StructUnionBase): ...
|
|
||||||
class Structure(_StructUnionBase): ...
|
|
||||||
|
|
||||||
class Array(_CData, Generic[_CT]):
|
|
||||||
@property
|
|
||||||
@abstractmethod
|
|
||||||
def _length_(self) -> int: ...
|
|
||||||
@_length_.setter
|
|
||||||
def _length_(self, value: int) -> None: ...
|
|
||||||
@property
|
|
||||||
@abstractmethod
|
|
||||||
def _type_(self) -> type[_CT]: ...
|
|
||||||
@_type_.setter
|
|
||||||
def _type_(self, value: type[_CT]) -> None: ...
|
|
||||||
# Note: only available if _CT == c_char
|
|
||||||
@property
|
|
||||||
def raw(self) -> bytes: ...
|
|
||||||
@raw.setter
|
|
||||||
def raw(self, value: ReadableBuffer) -> None: ...
|
|
||||||
value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise
|
|
||||||
# TODO These methods cannot be annotated correctly at the moment.
|
|
||||||
# All of these "Any"s stand for the array's element type, but it's not possible to use _CT
|
|
||||||
# here, because of a special feature of ctypes.
|
|
||||||
# By default, when accessing an element of an Array[_CT], the returned object has type _CT.
|
|
||||||
# However, when _CT is a "simple type" like c_int, ctypes automatically "unboxes" the object
|
|
||||||
# and converts it to the corresponding Python primitive. For example, when accessing an element
|
|
||||||
# of an Array[c_int], a Python int object is returned, not a c_int.
|
|
||||||
# This behavior does *not* apply to subclasses of "simple types".
|
|
||||||
# If MyInt is a subclass of c_int, then accessing an element of an Array[MyInt] returns
|
|
||||||
# a MyInt, not an int.
|
|
||||||
# This special behavior is not easy to model in a stub, so for now all places where
|
|
||||||
# the array element type would belong are annotated with Any instead.
|
|
||||||
def __init__(self, *args: Any) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __getitem__(self, key: int, /) -> Any: ...
|
|
||||||
@overload
|
|
||||||
def __getitem__(self, key: slice, /) -> list[Any]: ...
|
|
||||||
@overload
|
|
||||||
def __setitem__(self, key: int, value: Any, /) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __setitem__(self, key: slice, value: Iterable[Any], /) -> None: ...
|
|
||||||
def __iter__(self) -> Iterator[Any]: ...
|
|
||||||
# Can't inherit from Sized because the metaclass conflict between
|
|
||||||
# Sized and _CData prevents using _CDataMeta.
|
|
||||||
def __len__(self) -> int: ...
|
|
||||||
if sys.version_info >= (3, 9):
|
|
||||||
def __class_getitem__(cls, item: Any) -> GenericAlias: ...
|
|
||||||
|
|
||||||
def addressof(obj: _CData) -> int: ...
|
|
||||||
def alignment(obj_or_type: _CData | type[_CData]) -> int: ...
|
|
||||||
def get_errno() -> int: ...
|
|
||||||
def resize(obj: _CData, size: int) -> None: ...
|
|
||||||
def set_errno(value: int) -> int: ...
|
|
||||||
def sizeof(obj_or_type: _CData | type[_CData]) -> int: ...
|
|
||||||
566
crates/red_knot/vendor/typeshed/stdlib/_curses.pyi
vendored
566
crates/red_knot/vendor/typeshed/stdlib/_curses.pyi
vendored
@@ -1,566 +0,0 @@
|
|||||||
import sys
|
|
||||||
from _typeshed import ReadOnlyBuffer, SupportsRead
|
|
||||||
from typing import IO, Any, NamedTuple, final, overload
|
|
||||||
from typing_extensions import TypeAlias
|
|
||||||
|
|
||||||
# NOTE: This module is ordinarily only available on Unix, but the windows-curses
|
|
||||||
# package makes it available on Windows as well with the same contents.
|
|
||||||
|
|
||||||
# Handled by PyCurses_ConvertToChtype in _cursesmodule.c.
|
|
||||||
_ChType: TypeAlias = str | bytes | int
|
|
||||||
|
|
||||||
# ACS codes are only initialized after initscr is called
|
|
||||||
ACS_BBSS: int
|
|
||||||
ACS_BLOCK: int
|
|
||||||
ACS_BOARD: int
|
|
||||||
ACS_BSBS: int
|
|
||||||
ACS_BSSB: int
|
|
||||||
ACS_BSSS: int
|
|
||||||
ACS_BTEE: int
|
|
||||||
ACS_BULLET: int
|
|
||||||
ACS_CKBOARD: int
|
|
||||||
ACS_DARROW: int
|
|
||||||
ACS_DEGREE: int
|
|
||||||
ACS_DIAMOND: int
|
|
||||||
ACS_GEQUAL: int
|
|
||||||
ACS_HLINE: int
|
|
||||||
ACS_LANTERN: int
|
|
||||||
ACS_LARROW: int
|
|
||||||
ACS_LEQUAL: int
|
|
||||||
ACS_LLCORNER: int
|
|
||||||
ACS_LRCORNER: int
|
|
||||||
ACS_LTEE: int
|
|
||||||
ACS_NEQUAL: int
|
|
||||||
ACS_PI: int
|
|
||||||
ACS_PLMINUS: int
|
|
||||||
ACS_PLUS: int
|
|
||||||
ACS_RARROW: int
|
|
||||||
ACS_RTEE: int
|
|
||||||
ACS_S1: int
|
|
||||||
ACS_S3: int
|
|
||||||
ACS_S7: int
|
|
||||||
ACS_S9: int
|
|
||||||
ACS_SBBS: int
|
|
||||||
ACS_SBSB: int
|
|
||||||
ACS_SBSS: int
|
|
||||||
ACS_SSBB: int
|
|
||||||
ACS_SSBS: int
|
|
||||||
ACS_SSSB: int
|
|
||||||
ACS_SSSS: int
|
|
||||||
ACS_STERLING: int
|
|
||||||
ACS_TTEE: int
|
|
||||||
ACS_UARROW: int
|
|
||||||
ACS_ULCORNER: int
|
|
||||||
ACS_URCORNER: int
|
|
||||||
ACS_VLINE: int
|
|
||||||
ALL_MOUSE_EVENTS: int
|
|
||||||
A_ALTCHARSET: int
|
|
||||||
A_ATTRIBUTES: int
|
|
||||||
A_BLINK: int
|
|
||||||
A_BOLD: int
|
|
||||||
A_CHARTEXT: int
|
|
||||||
A_COLOR: int
|
|
||||||
A_DIM: int
|
|
||||||
A_HORIZONTAL: int
|
|
||||||
A_INVIS: int
|
|
||||||
if sys.platform != "darwin":
|
|
||||||
A_ITALIC: int
|
|
||||||
A_LEFT: int
|
|
||||||
A_LOW: int
|
|
||||||
A_NORMAL: int
|
|
||||||
A_PROTECT: int
|
|
||||||
A_REVERSE: int
|
|
||||||
A_RIGHT: int
|
|
||||||
A_STANDOUT: int
|
|
||||||
A_TOP: int
|
|
||||||
A_UNDERLINE: int
|
|
||||||
A_VERTICAL: int
|
|
||||||
BUTTON1_CLICKED: int
|
|
||||||
BUTTON1_DOUBLE_CLICKED: int
|
|
||||||
BUTTON1_PRESSED: int
|
|
||||||
BUTTON1_RELEASED: int
|
|
||||||
BUTTON1_TRIPLE_CLICKED: int
|
|
||||||
BUTTON2_CLICKED: int
|
|
||||||
BUTTON2_DOUBLE_CLICKED: int
|
|
||||||
BUTTON2_PRESSED: int
|
|
||||||
BUTTON2_RELEASED: int
|
|
||||||
BUTTON2_TRIPLE_CLICKED: int
|
|
||||||
BUTTON3_CLICKED: int
|
|
||||||
BUTTON3_DOUBLE_CLICKED: int
|
|
||||||
BUTTON3_PRESSED: int
|
|
||||||
BUTTON3_RELEASED: int
|
|
||||||
BUTTON3_TRIPLE_CLICKED: int
|
|
||||||
BUTTON4_CLICKED: int
|
|
||||||
BUTTON4_DOUBLE_CLICKED: int
|
|
||||||
BUTTON4_PRESSED: int
|
|
||||||
BUTTON4_RELEASED: int
|
|
||||||
BUTTON4_TRIPLE_CLICKED: int
|
|
||||||
# Darwin ncurses doesn't provide BUTTON5_* constants
|
|
||||||
if sys.version_info >= (3, 10) and sys.platform != "darwin":
|
|
||||||
BUTTON5_PRESSED: int
|
|
||||||
BUTTON5_RELEASED: int
|
|
||||||
BUTTON5_CLICKED: int
|
|
||||||
BUTTON5_DOUBLE_CLICKED: int
|
|
||||||
BUTTON5_TRIPLE_CLICKED: int
|
|
||||||
BUTTON_ALT: int
|
|
||||||
BUTTON_CTRL: int
|
|
||||||
BUTTON_SHIFT: int
|
|
||||||
COLOR_BLACK: int
|
|
||||||
COLOR_BLUE: int
|
|
||||||
COLOR_CYAN: int
|
|
||||||
COLOR_GREEN: int
|
|
||||||
COLOR_MAGENTA: int
|
|
||||||
COLOR_RED: int
|
|
||||||
COLOR_WHITE: int
|
|
||||||
COLOR_YELLOW: int
|
|
||||||
ERR: int
|
|
||||||
KEY_A1: int
|
|
||||||
KEY_A3: int
|
|
||||||
KEY_B2: int
|
|
||||||
KEY_BACKSPACE: int
|
|
||||||
KEY_BEG: int
|
|
||||||
KEY_BREAK: int
|
|
||||||
KEY_BTAB: int
|
|
||||||
KEY_C1: int
|
|
||||||
KEY_C3: int
|
|
||||||
KEY_CANCEL: int
|
|
||||||
KEY_CATAB: int
|
|
||||||
KEY_CLEAR: int
|
|
||||||
KEY_CLOSE: int
|
|
||||||
KEY_COMMAND: int
|
|
||||||
KEY_COPY: int
|
|
||||||
KEY_CREATE: int
|
|
||||||
KEY_CTAB: int
|
|
||||||
KEY_DC: int
|
|
||||||
KEY_DL: int
|
|
||||||
KEY_DOWN: int
|
|
||||||
KEY_EIC: int
|
|
||||||
KEY_END: int
|
|
||||||
KEY_ENTER: int
|
|
||||||
KEY_EOL: int
|
|
||||||
KEY_EOS: int
|
|
||||||
KEY_EXIT: int
|
|
||||||
KEY_F0: int
|
|
||||||
KEY_F1: int
|
|
||||||
KEY_F10: int
|
|
||||||
KEY_F11: int
|
|
||||||
KEY_F12: int
|
|
||||||
KEY_F13: int
|
|
||||||
KEY_F14: int
|
|
||||||
KEY_F15: int
|
|
||||||
KEY_F16: int
|
|
||||||
KEY_F17: int
|
|
||||||
KEY_F18: int
|
|
||||||
KEY_F19: int
|
|
||||||
KEY_F2: int
|
|
||||||
KEY_F20: int
|
|
||||||
KEY_F21: int
|
|
||||||
KEY_F22: int
|
|
||||||
KEY_F23: int
|
|
||||||
KEY_F24: int
|
|
||||||
KEY_F25: int
|
|
||||||
KEY_F26: int
|
|
||||||
KEY_F27: int
|
|
||||||
KEY_F28: int
|
|
||||||
KEY_F29: int
|
|
||||||
KEY_F3: int
|
|
||||||
KEY_F30: int
|
|
||||||
KEY_F31: int
|
|
||||||
KEY_F32: int
|
|
||||||
KEY_F33: int
|
|
||||||
KEY_F34: int
|
|
||||||
KEY_F35: int
|
|
||||||
KEY_F36: int
|
|
||||||
KEY_F37: int
|
|
||||||
KEY_F38: int
|
|
||||||
KEY_F39: int
|
|
||||||
KEY_F4: int
|
|
||||||
KEY_F40: int
|
|
||||||
KEY_F41: int
|
|
||||||
KEY_F42: int
|
|
||||||
KEY_F43: int
|
|
||||||
KEY_F44: int
|
|
||||||
KEY_F45: int
|
|
||||||
KEY_F46: int
|
|
||||||
KEY_F47: int
|
|
||||||
KEY_F48: int
|
|
||||||
KEY_F49: int
|
|
||||||
KEY_F5: int
|
|
||||||
KEY_F50: int
|
|
||||||
KEY_F51: int
|
|
||||||
KEY_F52: int
|
|
||||||
KEY_F53: int
|
|
||||||
KEY_F54: int
|
|
||||||
KEY_F55: int
|
|
||||||
KEY_F56: int
|
|
||||||
KEY_F57: int
|
|
||||||
KEY_F58: int
|
|
||||||
KEY_F59: int
|
|
||||||
KEY_F6: int
|
|
||||||
KEY_F60: int
|
|
||||||
KEY_F61: int
|
|
||||||
KEY_F62: int
|
|
||||||
KEY_F63: int
|
|
||||||
KEY_F7: int
|
|
||||||
KEY_F8: int
|
|
||||||
KEY_F9: int
|
|
||||||
KEY_FIND: int
|
|
||||||
KEY_HELP: int
|
|
||||||
KEY_HOME: int
|
|
||||||
KEY_IC: int
|
|
||||||
KEY_IL: int
|
|
||||||
KEY_LEFT: int
|
|
||||||
KEY_LL: int
|
|
||||||
KEY_MARK: int
|
|
||||||
KEY_MAX: int
|
|
||||||
KEY_MESSAGE: int
|
|
||||||
KEY_MIN: int
|
|
||||||
KEY_MOUSE: int
|
|
||||||
KEY_MOVE: int
|
|
||||||
KEY_NEXT: int
|
|
||||||
KEY_NPAGE: int
|
|
||||||
KEY_OPEN: int
|
|
||||||
KEY_OPTIONS: int
|
|
||||||
KEY_PPAGE: int
|
|
||||||
KEY_PREVIOUS: int
|
|
||||||
KEY_PRINT: int
|
|
||||||
KEY_REDO: int
|
|
||||||
KEY_REFERENCE: int
|
|
||||||
KEY_REFRESH: int
|
|
||||||
KEY_REPLACE: int
|
|
||||||
KEY_RESET: int
|
|
||||||
KEY_RESIZE: int
|
|
||||||
KEY_RESTART: int
|
|
||||||
KEY_RESUME: int
|
|
||||||
KEY_RIGHT: int
|
|
||||||
KEY_SAVE: int
|
|
||||||
KEY_SBEG: int
|
|
||||||
KEY_SCANCEL: int
|
|
||||||
KEY_SCOMMAND: int
|
|
||||||
KEY_SCOPY: int
|
|
||||||
KEY_SCREATE: int
|
|
||||||
KEY_SDC: int
|
|
||||||
KEY_SDL: int
|
|
||||||
KEY_SELECT: int
|
|
||||||
KEY_SEND: int
|
|
||||||
KEY_SEOL: int
|
|
||||||
KEY_SEXIT: int
|
|
||||||
KEY_SF: int
|
|
||||||
KEY_SFIND: int
|
|
||||||
KEY_SHELP: int
|
|
||||||
KEY_SHOME: int
|
|
||||||
KEY_SIC: int
|
|
||||||
KEY_SLEFT: int
|
|
||||||
KEY_SMESSAGE: int
|
|
||||||
KEY_SMOVE: int
|
|
||||||
KEY_SNEXT: int
|
|
||||||
KEY_SOPTIONS: int
|
|
||||||
KEY_SPREVIOUS: int
|
|
||||||
KEY_SPRINT: int
|
|
||||||
KEY_SR: int
|
|
||||||
KEY_SREDO: int
|
|
||||||
KEY_SREPLACE: int
|
|
||||||
KEY_SRESET: int
|
|
||||||
KEY_SRIGHT: int
|
|
||||||
KEY_SRSUME: int
|
|
||||||
KEY_SSAVE: int
|
|
||||||
KEY_SSUSPEND: int
|
|
||||||
KEY_STAB: int
|
|
||||||
KEY_SUNDO: int
|
|
||||||
KEY_SUSPEND: int
|
|
||||||
KEY_UNDO: int
|
|
||||||
KEY_UP: int
|
|
||||||
OK: int
|
|
||||||
REPORT_MOUSE_POSITION: int
|
|
||||||
_C_API: Any
|
|
||||||
version: bytes
|
|
||||||
|
|
||||||
def baudrate() -> int: ...
|
|
||||||
def beep() -> None: ...
|
|
||||||
def can_change_color() -> bool: ...
|
|
||||||
def cbreak(flag: bool = True, /) -> None: ...
|
|
||||||
def color_content(color_number: int, /) -> tuple[int, int, int]: ...
|
|
||||||
def color_pair(pair_number: int, /) -> int: ...
|
|
||||||
def curs_set(visibility: int, /) -> int: ...
|
|
||||||
def def_prog_mode() -> None: ...
|
|
||||||
def def_shell_mode() -> None: ...
|
|
||||||
def delay_output(ms: int, /) -> None: ...
|
|
||||||
def doupdate() -> None: ...
|
|
||||||
def echo(flag: bool = True, /) -> None: ...
|
|
||||||
def endwin() -> None: ...
|
|
||||||
def erasechar() -> bytes: ...
|
|
||||||
def filter() -> None: ...
|
|
||||||
def flash() -> None: ...
|
|
||||||
def flushinp() -> None: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 9):
|
|
||||||
def get_escdelay() -> int: ...
|
|
||||||
def get_tabsize() -> int: ...
|
|
||||||
|
|
||||||
def getmouse() -> tuple[int, int, int, int, int]: ...
|
|
||||||
def getsyx() -> tuple[int, int]: ...
|
|
||||||
def getwin(file: SupportsRead[bytes], /) -> _CursesWindow: ...
|
|
||||||
def halfdelay(tenths: int, /) -> None: ...
|
|
||||||
def has_colors() -> bool: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
def has_extended_color_support() -> bool: ...
|
|
||||||
|
|
||||||
def has_ic() -> bool: ...
|
|
||||||
def has_il() -> bool: ...
|
|
||||||
def has_key(key: int, /) -> bool: ...
|
|
||||||
def init_color(color_number: int, r: int, g: int, b: int, /) -> None: ...
|
|
||||||
def init_pair(pair_number: int, fg: int, bg: int, /) -> None: ...
|
|
||||||
def initscr() -> _CursesWindow: ...
|
|
||||||
def intrflush(flag: bool, /) -> None: ...
|
|
||||||
def is_term_resized(nlines: int, ncols: int, /) -> bool: ...
|
|
||||||
def isendwin() -> bool: ...
|
|
||||||
def keyname(key: int, /) -> bytes: ...
|
|
||||||
def killchar() -> bytes: ...
|
|
||||||
def longname() -> bytes: ...
|
|
||||||
def meta(yes: bool, /) -> None: ...
|
|
||||||
def mouseinterval(interval: int, /) -> None: ...
|
|
||||||
def mousemask(newmask: int, /) -> tuple[int, int]: ...
|
|
||||||
def napms(ms: int, /) -> int: ...
|
|
||||||
def newpad(nlines: int, ncols: int, /) -> _CursesWindow: ...
|
|
||||||
def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ..., /) -> _CursesWindow: ...
|
|
||||||
def nl(flag: bool = True, /) -> None: ...
|
|
||||||
def nocbreak() -> None: ...
|
|
||||||
def noecho() -> None: ...
|
|
||||||
def nonl() -> None: ...
|
|
||||||
def noqiflush() -> None: ...
|
|
||||||
def noraw() -> None: ...
|
|
||||||
def pair_content(pair_number: int, /) -> tuple[int, int]: ...
|
|
||||||
def pair_number(attr: int, /) -> int: ...
|
|
||||||
def putp(string: ReadOnlyBuffer, /) -> None: ...
|
|
||||||
def qiflush(flag: bool = True, /) -> None: ...
|
|
||||||
def raw(flag: bool = True, /) -> None: ...
|
|
||||||
def reset_prog_mode() -> None: ...
|
|
||||||
def reset_shell_mode() -> None: ...
|
|
||||||
def resetty() -> None: ...
|
|
||||||
def resize_term(nlines: int, ncols: int, /) -> None: ...
|
|
||||||
def resizeterm(nlines: int, ncols: int, /) -> None: ...
|
|
||||||
def savetty() -> None: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 9):
|
|
||||||
def set_escdelay(ms: int, /) -> None: ...
|
|
||||||
def set_tabsize(size: int, /) -> None: ...
|
|
||||||
|
|
||||||
def setsyx(y: int, x: int, /) -> None: ...
|
|
||||||
def setupterm(term: str | None = None, fd: int = -1) -> None: ...
|
|
||||||
def start_color() -> None: ...
|
|
||||||
def termattrs() -> int: ...
|
|
||||||
def termname() -> bytes: ...
|
|
||||||
def tigetflag(capname: str, /) -> int: ...
|
|
||||||
def tigetnum(capname: str, /) -> int: ...
|
|
||||||
def tigetstr(capname: str, /) -> bytes | None: ...
|
|
||||||
def tparm(
|
|
||||||
str: ReadOnlyBuffer,
|
|
||||||
i1: int = 0,
|
|
||||||
i2: int = 0,
|
|
||||||
i3: int = 0,
|
|
||||||
i4: int = 0,
|
|
||||||
i5: int = 0,
|
|
||||||
i6: int = 0,
|
|
||||||
i7: int = 0,
|
|
||||||
i8: int = 0,
|
|
||||||
i9: int = 0,
|
|
||||||
/,
|
|
||||||
) -> bytes: ...
|
|
||||||
def typeahead(fd: int, /) -> None: ...
|
|
||||||
def unctrl(ch: _ChType, /) -> bytes: ...
|
|
||||||
|
|
||||||
if sys.version_info < (3, 12) or sys.platform != "darwin":
|
|
||||||
# The support for macos was dropped in 3.12
|
|
||||||
def unget_wch(ch: int | str, /) -> None: ...
|
|
||||||
|
|
||||||
def ungetch(ch: _ChType, /) -> None: ...
|
|
||||||
def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: ...
|
|
||||||
def update_lines_cols() -> None: ...
|
|
||||||
def use_default_colors() -> None: ...
|
|
||||||
def use_env(flag: bool, /) -> None: ...
|
|
||||||
|
|
||||||
class error(Exception): ...
|
|
||||||
|
|
||||||
@final
|
|
||||||
class _CursesWindow:
|
|
||||||
encoding: str
|
|
||||||
@overload
|
|
||||||
def addch(self, ch: _ChType, attr: int = ...) -> None: ...
|
|
||||||
@overload
|
|
||||||
def addch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ...
|
|
||||||
@overload
|
|
||||||
def addnstr(self, str: str, n: int, attr: int = ...) -> None: ...
|
|
||||||
@overload
|
|
||||||
def addnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ...
|
|
||||||
@overload
|
|
||||||
def addstr(self, str: str, attr: int = ...) -> None: ...
|
|
||||||
@overload
|
|
||||||
def addstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ...
|
|
||||||
def attroff(self, attr: int, /) -> None: ...
|
|
||||||
def attron(self, attr: int, /) -> None: ...
|
|
||||||
def attrset(self, attr: int, /) -> None: ...
|
|
||||||
def bkgd(self, ch: _ChType, attr: int = ..., /) -> None: ...
|
|
||||||
def bkgdset(self, ch: _ChType, attr: int = ..., /) -> None: ...
|
|
||||||
def border(
|
|
||||||
self,
|
|
||||||
ls: _ChType = ...,
|
|
||||||
rs: _ChType = ...,
|
|
||||||
ts: _ChType = ...,
|
|
||||||
bs: _ChType = ...,
|
|
||||||
tl: _ChType = ...,
|
|
||||||
tr: _ChType = ...,
|
|
||||||
bl: _ChType = ...,
|
|
||||||
br: _ChType = ...,
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def box(self) -> None: ...
|
|
||||||
@overload
|
|
||||||
def box(self, vertch: _ChType = ..., horch: _ChType = ...) -> None: ...
|
|
||||||
@overload
|
|
||||||
def chgat(self, attr: int) -> None: ...
|
|
||||||
@overload
|
|
||||||
def chgat(self, num: int, attr: int) -> None: ...
|
|
||||||
@overload
|
|
||||||
def chgat(self, y: int, x: int, attr: int) -> None: ...
|
|
||||||
@overload
|
|
||||||
def chgat(self, y: int, x: int, num: int, attr: int) -> None: ...
|
|
||||||
def clear(self) -> None: ...
|
|
||||||
def clearok(self, yes: int) -> None: ...
|
|
||||||
def clrtobot(self) -> None: ...
|
|
||||||
def clrtoeol(self) -> None: ...
|
|
||||||
def cursyncup(self) -> None: ...
|
|
||||||
@overload
|
|
||||||
def delch(self) -> None: ...
|
|
||||||
@overload
|
|
||||||
def delch(self, y: int, x: int) -> None: ...
|
|
||||||
def deleteln(self) -> None: ...
|
|
||||||
@overload
|
|
||||||
def derwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
|
|
||||||
@overload
|
|
||||||
def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
|
|
||||||
def echochar(self, ch: _ChType, attr: int = ..., /) -> None: ...
|
|
||||||
def enclose(self, y: int, x: int, /) -> bool: ...
|
|
||||||
def erase(self) -> None: ...
|
|
||||||
def getbegyx(self) -> tuple[int, int]: ...
|
|
||||||
def getbkgd(self) -> tuple[int, int]: ...
|
|
||||||
@overload
|
|
||||||
def getch(self) -> int: ...
|
|
||||||
@overload
|
|
||||||
def getch(self, y: int, x: int) -> int: ...
|
|
||||||
if sys.version_info < (3, 12) or sys.platform != "darwin":
|
|
||||||
# The support for macos was dropped in 3.12
|
|
||||||
@overload
|
|
||||||
def get_wch(self) -> int | str: ...
|
|
||||||
@overload
|
|
||||||
def get_wch(self, y: int, x: int) -> int | str: ...
|
|
||||||
|
|
||||||
@overload
|
|
||||||
def getkey(self) -> str: ...
|
|
||||||
@overload
|
|
||||||
def getkey(self, y: int, x: int) -> str: ...
|
|
||||||
def getmaxyx(self) -> tuple[int, int]: ...
|
|
||||||
def getparyx(self) -> tuple[int, int]: ...
|
|
||||||
@overload
|
|
||||||
def getstr(self) -> bytes: ...
|
|
||||||
@overload
|
|
||||||
def getstr(self, n: int) -> bytes: ...
|
|
||||||
@overload
|
|
||||||
def getstr(self, y: int, x: int) -> bytes: ...
|
|
||||||
@overload
|
|
||||||
def getstr(self, y: int, x: int, n: int) -> bytes: ...
|
|
||||||
def getyx(self) -> tuple[int, int]: ...
|
|
||||||
@overload
|
|
||||||
def hline(self, ch: _ChType, n: int) -> None: ...
|
|
||||||
@overload
|
|
||||||
def hline(self, y: int, x: int, ch: _ChType, n: int) -> None: ...
|
|
||||||
def idcok(self, flag: bool) -> None: ...
|
|
||||||
def idlok(self, yes: bool) -> None: ...
|
|
||||||
def immedok(self, flag: bool) -> None: ...
|
|
||||||
@overload
|
|
||||||
def inch(self) -> int: ...
|
|
||||||
@overload
|
|
||||||
def inch(self, y: int, x: int) -> int: ...
|
|
||||||
@overload
|
|
||||||
def insch(self, ch: _ChType, attr: int = ...) -> None: ...
|
|
||||||
@overload
|
|
||||||
def insch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ...
|
|
||||||
def insdelln(self, nlines: int) -> None: ...
|
|
||||||
def insertln(self) -> None: ...
|
|
||||||
@overload
|
|
||||||
def insnstr(self, str: str, n: int, attr: int = ...) -> None: ...
|
|
||||||
@overload
|
|
||||||
def insnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ...
|
|
||||||
@overload
|
|
||||||
def insstr(self, str: str, attr: int = ...) -> None: ...
|
|
||||||
@overload
|
|
||||||
def insstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ...
|
|
||||||
@overload
|
|
||||||
def instr(self, n: int = ...) -> bytes: ...
|
|
||||||
@overload
|
|
||||||
def instr(self, y: int, x: int, n: int = ...) -> bytes: ...
|
|
||||||
def is_linetouched(self, line: int, /) -> bool: ...
|
|
||||||
def is_wintouched(self) -> bool: ...
|
|
||||||
def keypad(self, yes: bool) -> None: ...
|
|
||||||
def leaveok(self, yes: bool) -> None: ...
|
|
||||||
def move(self, new_y: int, new_x: int) -> None: ...
|
|
||||||
def mvderwin(self, y: int, x: int) -> None: ...
|
|
||||||
def mvwin(self, new_y: int, new_x: int) -> None: ...
|
|
||||||
def nodelay(self, yes: bool) -> None: ...
|
|
||||||
def notimeout(self, yes: bool) -> None: ...
|
|
||||||
@overload
|
|
||||||
def noutrefresh(self) -> None: ...
|
|
||||||
@overload
|
|
||||||
def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ...
|
|
||||||
@overload
|
|
||||||
def overlay(self, destwin: _CursesWindow) -> None: ...
|
|
||||||
@overload
|
|
||||||
def overlay(
|
|
||||||
self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def overwrite(self, destwin: _CursesWindow) -> None: ...
|
|
||||||
@overload
|
|
||||||
def overwrite(
|
|
||||||
self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
|
|
||||||
) -> None: ...
|
|
||||||
def putwin(self, file: IO[Any], /) -> None: ...
|
|
||||||
def redrawln(self, beg: int, num: int, /) -> None: ...
|
|
||||||
def redrawwin(self) -> None: ...
|
|
||||||
@overload
|
|
||||||
def refresh(self) -> None: ...
|
|
||||||
@overload
|
|
||||||
def refresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ...
|
|
||||||
def resize(self, nlines: int, ncols: int) -> None: ...
|
|
||||||
def scroll(self, lines: int = ...) -> None: ...
|
|
||||||
def scrollok(self, flag: bool) -> None: ...
|
|
||||||
def setscrreg(self, top: int, bottom: int, /) -> None: ...
|
|
||||||
def standend(self) -> None: ...
|
|
||||||
def standout(self) -> None: ...
|
|
||||||
@overload
|
|
||||||
def subpad(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
|
|
||||||
@overload
|
|
||||||
def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
|
|
||||||
@overload
|
|
||||||
def subwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
|
|
||||||
@overload
|
|
||||||
def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
|
|
||||||
def syncdown(self) -> None: ...
|
|
||||||
def syncok(self, flag: bool) -> None: ...
|
|
||||||
def syncup(self) -> None: ...
|
|
||||||
def timeout(self, delay: int) -> None: ...
|
|
||||||
def touchline(self, start: int, count: int, changed: bool = ...) -> None: ...
|
|
||||||
def touchwin(self) -> None: ...
|
|
||||||
def untouchwin(self) -> None: ...
|
|
||||||
@overload
|
|
||||||
def vline(self, ch: _ChType, n: int) -> None: ...
|
|
||||||
@overload
|
|
||||||
def vline(self, y: int, x: int, ch: _ChType, n: int) -> None: ...
|
|
||||||
|
|
||||||
class _ncurses_version(NamedTuple):
|
|
||||||
major: int
|
|
||||||
minor: int
|
|
||||||
patch: int
|
|
||||||
|
|
||||||
ncurses_version: _ncurses_version
|
|
||||||
window = _CursesWindow # undocumented
|
|
||||||
281
crates/red_knot/vendor/typeshed/stdlib/_decimal.pyi
vendored
281
crates/red_knot/vendor/typeshed/stdlib/_decimal.pyi
vendored
@@ -1,281 +0,0 @@
|
|||||||
import numbers
|
|
||||||
import sys
|
|
||||||
from collections.abc import Container, Sequence
|
|
||||||
from types import TracebackType
|
|
||||||
from typing import Any, ClassVar, Final, Literal, NamedTuple, overload
|
|
||||||
from typing_extensions import Self, TypeAlias
|
|
||||||
|
|
||||||
_Decimal: TypeAlias = Decimal | int
|
|
||||||
_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int]
|
|
||||||
_ComparableNum: TypeAlias = Decimal | float | numbers.Rational
|
|
||||||
|
|
||||||
__version__: Final[str]
|
|
||||||
__libmpdec_version__: Final[str]
|
|
||||||
|
|
||||||
class DecimalTuple(NamedTuple):
|
|
||||||
sign: int
|
|
||||||
digits: tuple[int, ...]
|
|
||||||
exponent: int | Literal["n", "N", "F"]
|
|
||||||
|
|
||||||
ROUND_DOWN: str
|
|
||||||
ROUND_HALF_UP: str
|
|
||||||
ROUND_HALF_EVEN: str
|
|
||||||
ROUND_CEILING: str
|
|
||||||
ROUND_FLOOR: str
|
|
||||||
ROUND_UP: str
|
|
||||||
ROUND_HALF_DOWN: str
|
|
||||||
ROUND_05UP: str
|
|
||||||
HAVE_CONTEXTVAR: bool
|
|
||||||
HAVE_THREADS: bool
|
|
||||||
MAX_EMAX: int
|
|
||||||
MAX_PREC: int
|
|
||||||
MIN_EMIN: int
|
|
||||||
MIN_ETINY: int
|
|
||||||
|
|
||||||
class DecimalException(ArithmeticError): ...
|
|
||||||
class Clamped(DecimalException): ...
|
|
||||||
class InvalidOperation(DecimalException): ...
|
|
||||||
class ConversionSyntax(InvalidOperation): ...
|
|
||||||
class DivisionByZero(DecimalException, ZeroDivisionError): ...
|
|
||||||
class DivisionImpossible(InvalidOperation): ...
|
|
||||||
class DivisionUndefined(InvalidOperation, ZeroDivisionError): ...
|
|
||||||
class Inexact(DecimalException): ...
|
|
||||||
class InvalidContext(InvalidOperation): ...
|
|
||||||
class Rounded(DecimalException): ...
|
|
||||||
class Subnormal(DecimalException): ...
|
|
||||||
class Overflow(Inexact, Rounded): ...
|
|
||||||
class Underflow(Inexact, Rounded, Subnormal): ...
|
|
||||||
class FloatOperation(DecimalException, TypeError): ...
|
|
||||||
|
|
||||||
def setcontext(context: Context, /) -> None: ...
|
|
||||||
def getcontext() -> Context: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 11):
|
|
||||||
def localcontext(
|
|
||||||
ctx: Context | None = None,
|
|
||||||
*,
|
|
||||||
prec: int | None = ...,
|
|
||||||
rounding: str | None = ...,
|
|
||||||
Emin: int | None = ...,
|
|
||||||
Emax: int | None = ...,
|
|
||||||
capitals: int | None = ...,
|
|
||||||
clamp: int | None = ...,
|
|
||||||
traps: dict[_TrapType, bool] | None = ...,
|
|
||||||
flags: dict[_TrapType, bool] | None = ...,
|
|
||||||
) -> _ContextManager: ...
|
|
||||||
|
|
||||||
else:
|
|
||||||
def localcontext(ctx: Context | None = None) -> _ContextManager: ...
|
|
||||||
|
|
||||||
class Decimal:
|
|
||||||
def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ...
|
|
||||||
@classmethod
|
|
||||||
def from_float(cls, f: float, /) -> Self: ...
|
|
||||||
def __bool__(self) -> bool: ...
|
|
||||||
def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def __hash__(self) -> int: ...
|
|
||||||
def as_tuple(self) -> DecimalTuple: ...
|
|
||||||
def as_integer_ratio(self) -> tuple[int, int]: ...
|
|
||||||
def to_eng_string(self, context: Context | None = None) -> str: ...
|
|
||||||
def __abs__(self) -> Decimal: ...
|
|
||||||
def __add__(self, value: _Decimal, /) -> Decimal: ...
|
|
||||||
def __divmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: ...
|
|
||||||
def __eq__(self, value: object, /) -> bool: ...
|
|
||||||
def __floordiv__(self, value: _Decimal, /) -> Decimal: ...
|
|
||||||
def __ge__(self, value: _ComparableNum, /) -> bool: ...
|
|
||||||
def __gt__(self, value: _ComparableNum, /) -> bool: ...
|
|
||||||
def __le__(self, value: _ComparableNum, /) -> bool: ...
|
|
||||||
def __lt__(self, value: _ComparableNum, /) -> bool: ...
|
|
||||||
def __mod__(self, value: _Decimal, /) -> Decimal: ...
|
|
||||||
def __mul__(self, value: _Decimal, /) -> Decimal: ...
|
|
||||||
def __neg__(self) -> Decimal: ...
|
|
||||||
def __pos__(self) -> Decimal: ...
|
|
||||||
def __pow__(self, value: _Decimal, mod: _Decimal | None = None, /) -> Decimal: ...
|
|
||||||
def __radd__(self, value: _Decimal, /) -> Decimal: ...
|
|
||||||
def __rdivmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: ...
|
|
||||||
def __rfloordiv__(self, value: _Decimal, /) -> Decimal: ...
|
|
||||||
def __rmod__(self, value: _Decimal, /) -> Decimal: ...
|
|
||||||
def __rmul__(self, value: _Decimal, /) -> Decimal: ...
|
|
||||||
def __rsub__(self, value: _Decimal, /) -> Decimal: ...
|
|
||||||
def __rtruediv__(self, value: _Decimal, /) -> Decimal: ...
|
|
||||||
def __sub__(self, value: _Decimal, /) -> Decimal: ...
|
|
||||||
def __truediv__(self, value: _Decimal, /) -> Decimal: ...
|
|
||||||
def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def __float__(self) -> float: ...
|
|
||||||
def __int__(self) -> int: ...
|
|
||||||
def __trunc__(self) -> int: ...
|
|
||||||
@property
|
|
||||||
def real(self) -> Decimal: ...
|
|
||||||
@property
|
|
||||||
def imag(self) -> Decimal: ...
|
|
||||||
def conjugate(self) -> Decimal: ...
|
|
||||||
def __complex__(self) -> complex: ...
|
|
||||||
@overload
|
|
||||||
def __round__(self) -> int: ...
|
|
||||||
@overload
|
|
||||||
def __round__(self, ndigits: int, /) -> Decimal: ...
|
|
||||||
def __floor__(self) -> int: ...
|
|
||||||
def __ceil__(self) -> int: ...
|
|
||||||
def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def __rpow__(self, value: _Decimal, mod: Context | None = None, /) -> Decimal: ...
|
|
||||||
def normalize(self, context: Context | None = None) -> Decimal: ...
|
|
||||||
def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
|
|
||||||
def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: ...
|
|
||||||
def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
|
|
||||||
def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
|
|
||||||
def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
|
|
||||||
def sqrt(self, context: Context | None = None) -> Decimal: ...
|
|
||||||
def max(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def min(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def adjusted(self) -> int: ...
|
|
||||||
def canonical(self) -> Decimal: ...
|
|
||||||
def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def copy_abs(self) -> Decimal: ...
|
|
||||||
def copy_negate(self) -> Decimal: ...
|
|
||||||
def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def exp(self, context: Context | None = None) -> Decimal: ...
|
|
||||||
def is_canonical(self) -> bool: ...
|
|
||||||
def is_finite(self) -> bool: ...
|
|
||||||
def is_infinite(self) -> bool: ...
|
|
||||||
def is_nan(self) -> bool: ...
|
|
||||||
def is_normal(self, context: Context | None = None) -> bool: ...
|
|
||||||
def is_qnan(self) -> bool: ...
|
|
||||||
def is_signed(self) -> bool: ...
|
|
||||||
def is_snan(self) -> bool: ...
|
|
||||||
def is_subnormal(self, context: Context | None = None) -> bool: ...
|
|
||||||
def is_zero(self) -> bool: ...
|
|
||||||
def ln(self, context: Context | None = None) -> Decimal: ...
|
|
||||||
def log10(self, context: Context | None = None) -> Decimal: ...
|
|
||||||
def logb(self, context: Context | None = None) -> Decimal: ...
|
|
||||||
def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def logical_invert(self, context: Context | None = None) -> Decimal: ...
|
|
||||||
def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def next_minus(self, context: Context | None = None) -> Decimal: ...
|
|
||||||
def next_plus(self, context: Context | None = None) -> Decimal: ...
|
|
||||||
def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def number_class(self, context: Context | None = None) -> str: ...
|
|
||||||
def radix(self) -> Decimal: ...
|
|
||||||
def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
|
|
||||||
def __reduce__(self) -> tuple[type[Self], tuple[str]]: ...
|
|
||||||
def __copy__(self) -> Self: ...
|
|
||||||
def __deepcopy__(self, memo: Any, /) -> Self: ...
|
|
||||||
def __format__(self, specifier: str, context: Context | None = ..., /) -> str: ...
|
|
||||||
|
|
||||||
class _ContextManager:
|
|
||||||
new_context: Context
|
|
||||||
saved_context: Context
|
|
||||||
def __init__(self, new_context: Context) -> None: ...
|
|
||||||
def __enter__(self) -> Context: ...
|
|
||||||
def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ...
|
|
||||||
|
|
||||||
_TrapType: TypeAlias = type[DecimalException]
|
|
||||||
|
|
||||||
class Context:
|
|
||||||
# TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime,
|
|
||||||
# even settable attributes like `prec` and `rounding`,
|
|
||||||
# but that's inexpressable in the stub.
|
|
||||||
# Type checkers either ignore it or misinterpret it
|
|
||||||
# if you add a `def __delattr__(self, name: str, /) -> NoReturn` method to the stub
|
|
||||||
prec: int
|
|
||||||
rounding: str
|
|
||||||
Emin: int
|
|
||||||
Emax: int
|
|
||||||
capitals: int
|
|
||||||
clamp: int
|
|
||||||
traps: dict[_TrapType, bool]
|
|
||||||
flags: dict[_TrapType, bool]
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
prec: int | None = ...,
|
|
||||||
rounding: str | None = ...,
|
|
||||||
Emin: int | None = ...,
|
|
||||||
Emax: int | None = ...,
|
|
||||||
capitals: int | None = ...,
|
|
||||||
clamp: int | None = ...,
|
|
||||||
flags: None | dict[_TrapType, bool] | Container[_TrapType] = ...,
|
|
||||||
traps: None | dict[_TrapType, bool] | Container[_TrapType] = ...,
|
|
||||||
_ignored_flags: list[_TrapType] | None = ...,
|
|
||||||
) -> None: ...
|
|
||||||
def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ...
|
|
||||||
def clear_flags(self) -> None: ...
|
|
||||||
def clear_traps(self) -> None: ...
|
|
||||||
def copy(self) -> Context: ...
|
|
||||||
def __copy__(self) -> Context: ...
|
|
||||||
# see https://github.com/python/cpython/issues/94107
|
|
||||||
__hash__: ClassVar[None] # type: ignore[assignment]
|
|
||||||
def Etiny(self) -> int: ...
|
|
||||||
def Etop(self) -> int: ...
|
|
||||||
def create_decimal(self, num: _DecimalNew = "0", /) -> Decimal: ...
|
|
||||||
def create_decimal_from_float(self, f: float, /) -> Decimal: ...
|
|
||||||
def abs(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def add(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def canonical(self, x: Decimal, /) -> Decimal: ...
|
|
||||||
def compare(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def compare_signal(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def compare_total(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def compare_total_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def copy_abs(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def copy_decimal(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def copy_negate(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def copy_sign(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def divide(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def divide_int(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def divmod(self, x: _Decimal, y: _Decimal, /) -> tuple[Decimal, Decimal]: ...
|
|
||||||
def exp(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def fma(self, x: _Decimal, y: _Decimal, z: _Decimal, /) -> Decimal: ...
|
|
||||||
def is_canonical(self, x: _Decimal, /) -> bool: ...
|
|
||||||
def is_finite(self, x: _Decimal, /) -> bool: ...
|
|
||||||
def is_infinite(self, x: _Decimal, /) -> bool: ...
|
|
||||||
def is_nan(self, x: _Decimal, /) -> bool: ...
|
|
||||||
def is_normal(self, x: _Decimal, /) -> bool: ...
|
|
||||||
def is_qnan(self, x: _Decimal, /) -> bool: ...
|
|
||||||
def is_signed(self, x: _Decimal, /) -> bool: ...
|
|
||||||
def is_snan(self, x: _Decimal, /) -> bool: ...
|
|
||||||
def is_subnormal(self, x: _Decimal, /) -> bool: ...
|
|
||||||
def is_zero(self, x: _Decimal, /) -> bool: ...
|
|
||||||
def ln(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def log10(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def logb(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def logical_and(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def logical_invert(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def logical_or(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def logical_xor(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def max(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def max_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def min(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def min_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def minus(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def multiply(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def next_minus(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def next_plus(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def next_toward(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def normalize(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def number_class(self, x: _Decimal, /) -> str: ...
|
|
||||||
def plus(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: ...
|
|
||||||
def quantize(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def radix(self) -> Decimal: ...
|
|
||||||
def remainder(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def remainder_near(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def rotate(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def same_quantum(self, x: _Decimal, y: _Decimal, /) -> bool: ...
|
|
||||||
def scaleb(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def shift(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def sqrt(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def subtract(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
|
|
||||||
def to_eng_string(self, x: _Decimal, /) -> str: ...
|
|
||||||
def to_sci_string(self, x: _Decimal, /) -> str: ...
|
|
||||||
def to_integral_exact(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def to_integral_value(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
def to_integral(self, x: _Decimal, /) -> Decimal: ...
|
|
||||||
|
|
||||||
DefaultContext: Context
|
|
||||||
BasicContext: Context
|
|
||||||
ExtendedContext: Context
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
from collections.abc import Callable
|
|
||||||
from types import TracebackType
|
|
||||||
from typing import Any, NoReturn, overload
|
|
||||||
from typing_extensions import TypeVarTuple, Unpack
|
|
||||||
|
|
||||||
__all__ = ["error", "start_new_thread", "exit", "get_ident", "allocate_lock", "interrupt_main", "LockType", "RLock"]
|
|
||||||
|
|
||||||
_Ts = TypeVarTuple("_Ts")
|
|
||||||
|
|
||||||
TIMEOUT_MAX: int
|
|
||||||
error = RuntimeError
|
|
||||||
|
|
||||||
@overload
|
|
||||||
def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]]) -> None: ...
|
|
||||||
@overload
|
|
||||||
def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any]) -> None: ...
|
|
||||||
def exit() -> NoReturn: ...
|
|
||||||
def get_ident() -> int: ...
|
|
||||||
def allocate_lock() -> LockType: ...
|
|
||||||
def stack_size(size: int | None = None) -> int: ...
|
|
||||||
|
|
||||||
class LockType:
|
|
||||||
locked_status: bool
|
|
||||||
def acquire(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ...
|
|
||||||
def __enter__(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ...
|
|
||||||
def __exit__(self, typ: type[BaseException] | None, val: BaseException | None, tb: TracebackType | None) -> None: ...
|
|
||||||
def release(self) -> bool: ...
|
|
||||||
def locked(self) -> bool: ...
|
|
||||||
|
|
||||||
class RLock(LockType):
|
|
||||||
def release(self) -> None: ... # type: ignore[override]
|
|
||||||
|
|
||||||
def interrupt_main() -> None: ...
|
|
||||||
@@ -1,164 +0,0 @@
|
|||||||
import sys
|
|
||||||
from _thread import _excepthook, _ExceptHookArgs
|
|
||||||
from _typeshed import ProfileFunction, TraceFunction
|
|
||||||
from collections.abc import Callable, Iterable, Mapping
|
|
||||||
from types import TracebackType
|
|
||||||
from typing import Any, TypeVar
|
|
||||||
|
|
||||||
_T = TypeVar("_T")
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"get_ident",
|
|
||||||
"active_count",
|
|
||||||
"Condition",
|
|
||||||
"current_thread",
|
|
||||||
"enumerate",
|
|
||||||
"main_thread",
|
|
||||||
"TIMEOUT_MAX",
|
|
||||||
"Event",
|
|
||||||
"Lock",
|
|
||||||
"RLock",
|
|
||||||
"Semaphore",
|
|
||||||
"BoundedSemaphore",
|
|
||||||
"Thread",
|
|
||||||
"Barrier",
|
|
||||||
"BrokenBarrierError",
|
|
||||||
"Timer",
|
|
||||||
"ThreadError",
|
|
||||||
"setprofile",
|
|
||||||
"settrace",
|
|
||||||
"local",
|
|
||||||
"stack_size",
|
|
||||||
"ExceptHookArgs",
|
|
||||||
"excepthook",
|
|
||||||
]
|
|
||||||
|
|
||||||
def active_count() -> int: ...
|
|
||||||
def current_thread() -> Thread: ...
|
|
||||||
def currentThread() -> Thread: ...
|
|
||||||
def get_ident() -> int: ...
|
|
||||||
def enumerate() -> list[Thread]: ...
|
|
||||||
def main_thread() -> Thread: ...
|
|
||||||
def settrace(func: TraceFunction) -> None: ...
|
|
||||||
def setprofile(func: ProfileFunction | None) -> None: ...
|
|
||||||
def stack_size(size: int | None = None) -> int: ...
|
|
||||||
|
|
||||||
TIMEOUT_MAX: float
|
|
||||||
|
|
||||||
class ThreadError(Exception): ...
|
|
||||||
|
|
||||||
class local:
|
|
||||||
def __getattribute__(self, name: str) -> Any: ...
|
|
||||||
def __setattr__(self, name: str, value: Any) -> None: ...
|
|
||||||
def __delattr__(self, name: str) -> None: ...
|
|
||||||
|
|
||||||
class Thread:
|
|
||||||
name: str
|
|
||||||
daemon: bool
|
|
||||||
@property
|
|
||||||
def ident(self) -> int | None: ...
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
group: None = None,
|
|
||||||
target: Callable[..., object] | None = None,
|
|
||||||
name: str | None = None,
|
|
||||||
args: Iterable[Any] = (),
|
|
||||||
kwargs: Mapping[str, Any] | None = None,
|
|
||||||
*,
|
|
||||||
daemon: bool | None = None,
|
|
||||||
) -> None: ...
|
|
||||||
def start(self) -> None: ...
|
|
||||||
def run(self) -> None: ...
|
|
||||||
def join(self, timeout: float | None = None) -> None: ...
|
|
||||||
def getName(self) -> str: ...
|
|
||||||
def setName(self, name: str) -> None: ...
|
|
||||||
@property
|
|
||||||
def native_id(self) -> int | None: ... # only available on some platforms
|
|
||||||
def is_alive(self) -> bool: ...
|
|
||||||
if sys.version_info < (3, 9):
|
|
||||||
def isAlive(self) -> bool: ...
|
|
||||||
|
|
||||||
def isDaemon(self) -> bool: ...
|
|
||||||
def setDaemon(self, daemonic: bool) -> None: ...
|
|
||||||
|
|
||||||
class _DummyThread(Thread): ...
|
|
||||||
|
|
||||||
class Lock:
|
|
||||||
def __enter__(self) -> bool: ...
|
|
||||||
def __exit__(
|
|
||||||
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
|
|
||||||
) -> bool | None: ...
|
|
||||||
def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
|
|
||||||
def release(self) -> None: ...
|
|
||||||
def locked(self) -> bool: ...
|
|
||||||
|
|
||||||
class _RLock:
|
|
||||||
def __enter__(self) -> bool: ...
|
|
||||||
def __exit__(
|
|
||||||
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
|
|
||||||
) -> bool | None: ...
|
|
||||||
def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ...
|
|
||||||
def release(self) -> None: ...
|
|
||||||
|
|
||||||
RLock = _RLock
|
|
||||||
|
|
||||||
class Condition:
|
|
||||||
def __init__(self, lock: Lock | _RLock | None = None) -> None: ...
|
|
||||||
def __enter__(self) -> bool: ...
|
|
||||||
def __exit__(
|
|
||||||
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
|
|
||||||
) -> bool | None: ...
|
|
||||||
def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
|
|
||||||
def release(self) -> None: ...
|
|
||||||
def wait(self, timeout: float | None = None) -> bool: ...
|
|
||||||
def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ...
|
|
||||||
def notify(self, n: int = 1) -> None: ...
|
|
||||||
def notify_all(self) -> None: ...
|
|
||||||
def notifyAll(self) -> None: ...
|
|
||||||
|
|
||||||
class Semaphore:
|
|
||||||
def __init__(self, value: int = 1) -> None: ...
|
|
||||||
def __exit__(
|
|
||||||
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
|
|
||||||
) -> bool | None: ...
|
|
||||||
def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ...
|
|
||||||
def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ...
|
|
||||||
if sys.version_info >= (3, 9):
|
|
||||||
def release(self, n: int = ...) -> None: ...
|
|
||||||
else:
|
|
||||||
def release(self) -> None: ...
|
|
||||||
|
|
||||||
class BoundedSemaphore(Semaphore): ...
|
|
||||||
|
|
||||||
class Event:
|
|
||||||
def is_set(self) -> bool: ...
|
|
||||||
def set(self) -> None: ...
|
|
||||||
def clear(self) -> None: ...
|
|
||||||
def wait(self, timeout: float | None = None) -> bool: ...
|
|
||||||
|
|
||||||
excepthook = _excepthook
|
|
||||||
ExceptHookArgs = _ExceptHookArgs
|
|
||||||
|
|
||||||
class Timer(Thread):
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
interval: float,
|
|
||||||
function: Callable[..., object],
|
|
||||||
args: Iterable[Any] | None = None,
|
|
||||||
kwargs: Mapping[str, Any] | None = None,
|
|
||||||
) -> None: ...
|
|
||||||
def cancel(self) -> None: ...
|
|
||||||
|
|
||||||
class Barrier:
|
|
||||||
@property
|
|
||||||
def parties(self) -> int: ...
|
|
||||||
@property
|
|
||||||
def n_waiting(self) -> int: ...
|
|
||||||
@property
|
|
||||||
def broken(self) -> bool: ...
|
|
||||||
def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ...
|
|
||||||
def wait(self, timeout: float | None = None) -> int: ...
|
|
||||||
def reset(self) -> None: ...
|
|
||||||
def abort(self) -> None: ...
|
|
||||||
|
|
||||||
class BrokenBarrierError(RuntimeError): ...
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
from typing import Any, Final, TypeVar
|
|
||||||
|
|
||||||
_T = TypeVar("_T")
|
|
||||||
|
|
||||||
__about__: Final[str]
|
|
||||||
|
|
||||||
def heapify(heap: list[Any], /) -> None: ...
|
|
||||||
def heappop(heap: list[_T], /) -> _T: ...
|
|
||||||
def heappush(heap: list[_T], item: _T, /) -> None: ...
|
|
||||||
def heappushpop(heap: list[_T], item: _T, /) -> _T: ...
|
|
||||||
def heapreplace(heap: list[_T], item: _T, /) -> _T: ...
|
|
||||||
28
crates/red_knot/vendor/typeshed/stdlib/_imp.pyi
vendored
28
crates/red_knot/vendor/typeshed/stdlib/_imp.pyi
vendored
@@ -1,28 +0,0 @@
|
|||||||
import sys
|
|
||||||
import types
|
|
||||||
from _typeshed import ReadableBuffer
|
|
||||||
from importlib.machinery import ModuleSpec
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
check_hash_based_pycs: str
|
|
||||||
|
|
||||||
def source_hash(key: int, source: ReadableBuffer) -> bytes: ...
|
|
||||||
def create_builtin(spec: ModuleSpec, /) -> types.ModuleType: ...
|
|
||||||
def create_dynamic(spec: ModuleSpec, file: Any = None, /) -> types.ModuleType: ...
|
|
||||||
def acquire_lock() -> None: ...
|
|
||||||
def exec_builtin(mod: types.ModuleType, /) -> int: ...
|
|
||||||
def exec_dynamic(mod: types.ModuleType, /) -> int: ...
|
|
||||||
def extension_suffixes() -> list[str]: ...
|
|
||||||
def init_frozen(name: str, /) -> types.ModuleType: ...
|
|
||||||
def is_builtin(name: str, /) -> int: ...
|
|
||||||
def is_frozen(name: str, /) -> bool: ...
|
|
||||||
def is_frozen_package(name: str, /) -> bool: ...
|
|
||||||
def lock_held() -> bool: ...
|
|
||||||
def release_lock() -> None: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 11):
|
|
||||||
def find_frozen(name: str, /, *, withdata: bool = False) -> tuple[memoryview | None, bool, str | None] | None: ...
|
|
||||||
def get_frozen_object(name: str, data: ReadableBuffer | None = None, /) -> types.CodeType: ...
|
|
||||||
|
|
||||||
else:
|
|
||||||
def get_frozen_object(name: str, /) -> types.CodeType: ...
|
|
||||||
49
crates/red_knot/vendor/typeshed/stdlib/_json.pyi
vendored
49
crates/red_knot/vendor/typeshed/stdlib/_json.pyi
vendored
@@ -1,49 +0,0 @@
|
|||||||
from collections.abc import Callable
|
|
||||||
from typing import Any, final
|
|
||||||
|
|
||||||
@final
|
|
||||||
class make_encoder:
|
|
||||||
@property
|
|
||||||
def sort_keys(self) -> bool: ...
|
|
||||||
@property
|
|
||||||
def skipkeys(self) -> bool: ...
|
|
||||||
@property
|
|
||||||
def key_separator(self) -> str: ...
|
|
||||||
@property
|
|
||||||
def indent(self) -> int | None: ...
|
|
||||||
@property
|
|
||||||
def markers(self) -> dict[int, Any] | None: ...
|
|
||||||
@property
|
|
||||||
def default(self) -> Callable[[Any], Any]: ...
|
|
||||||
@property
|
|
||||||
def encoder(self) -> Callable[[str], str]: ...
|
|
||||||
@property
|
|
||||||
def item_separator(self) -> str: ...
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
markers: dict[int, Any] | None,
|
|
||||||
default: Callable[[Any], Any],
|
|
||||||
encoder: Callable[[str], str],
|
|
||||||
indent: int | None,
|
|
||||||
key_separator: str,
|
|
||||||
item_separator: str,
|
|
||||||
sort_keys: bool,
|
|
||||||
skipkeys: bool,
|
|
||||||
allow_nan: bool,
|
|
||||||
) -> None: ...
|
|
||||||
def __call__(self, obj: object, _current_indent_level: int) -> Any: ...
|
|
||||||
|
|
||||||
@final
|
|
||||||
class make_scanner:
|
|
||||||
object_hook: Any
|
|
||||||
object_pairs_hook: Any
|
|
||||||
parse_int: Any
|
|
||||||
parse_constant: Any
|
|
||||||
parse_float: Any
|
|
||||||
strict: bool
|
|
||||||
# TODO: 'context' needs the attrs above (ducktype), but not __call__.
|
|
||||||
def __init__(self, context: make_scanner) -> None: ...
|
|
||||||
def __call__(self, string: str, index: int) -> tuple[Any, int]: ...
|
|
||||||
|
|
||||||
def encode_basestring_ascii(s: str) -> str: ...
|
|
||||||
def scanstring(string: str, end: int, strict: bool = ...) -> tuple[str, int]: ...
|
|
||||||
100
crates/red_knot/vendor/typeshed/stdlib/_locale.pyi
vendored
100
crates/red_knot/vendor/typeshed/stdlib/_locale.pyi
vendored
@@ -1,100 +0,0 @@
|
|||||||
import sys
|
|
||||||
from _typeshed import StrPath
|
|
||||||
from collections.abc import Mapping
|
|
||||||
|
|
||||||
LC_CTYPE: int
|
|
||||||
LC_COLLATE: int
|
|
||||||
LC_TIME: int
|
|
||||||
LC_MONETARY: int
|
|
||||||
LC_NUMERIC: int
|
|
||||||
LC_ALL: int
|
|
||||||
CHAR_MAX: int
|
|
||||||
|
|
||||||
def setlocale(category: int, locale: str | None = None, /) -> str: ...
|
|
||||||
def localeconv() -> Mapping[str, int | str | list[int]]: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 11):
|
|
||||||
def getencoding() -> str: ...
|
|
||||||
|
|
||||||
def strcoll(os1: str, os2: str, /) -> int: ...
|
|
||||||
def strxfrm(string: str, /) -> str: ...
|
|
||||||
|
|
||||||
# native gettext functions
|
|
||||||
# https://docs.python.org/3/library/locale.html#access-to-message-catalogs
|
|
||||||
# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626
|
|
||||||
if sys.platform != "win32":
|
|
||||||
LC_MESSAGES: int
|
|
||||||
|
|
||||||
ABDAY_1: int
|
|
||||||
ABDAY_2: int
|
|
||||||
ABDAY_3: int
|
|
||||||
ABDAY_4: int
|
|
||||||
ABDAY_5: int
|
|
||||||
ABDAY_6: int
|
|
||||||
ABDAY_7: int
|
|
||||||
|
|
||||||
ABMON_1: int
|
|
||||||
ABMON_2: int
|
|
||||||
ABMON_3: int
|
|
||||||
ABMON_4: int
|
|
||||||
ABMON_5: int
|
|
||||||
ABMON_6: int
|
|
||||||
ABMON_7: int
|
|
||||||
ABMON_8: int
|
|
||||||
ABMON_9: int
|
|
||||||
ABMON_10: int
|
|
||||||
ABMON_11: int
|
|
||||||
ABMON_12: int
|
|
||||||
|
|
||||||
DAY_1: int
|
|
||||||
DAY_2: int
|
|
||||||
DAY_3: int
|
|
||||||
DAY_4: int
|
|
||||||
DAY_5: int
|
|
||||||
DAY_6: int
|
|
||||||
DAY_7: int
|
|
||||||
|
|
||||||
ERA: int
|
|
||||||
ERA_D_T_FMT: int
|
|
||||||
ERA_D_FMT: int
|
|
||||||
ERA_T_FMT: int
|
|
||||||
|
|
||||||
MON_1: int
|
|
||||||
MON_2: int
|
|
||||||
MON_3: int
|
|
||||||
MON_4: int
|
|
||||||
MON_5: int
|
|
||||||
MON_6: int
|
|
||||||
MON_7: int
|
|
||||||
MON_8: int
|
|
||||||
MON_9: int
|
|
||||||
MON_10: int
|
|
||||||
MON_11: int
|
|
||||||
MON_12: int
|
|
||||||
|
|
||||||
CODESET: int
|
|
||||||
D_T_FMT: int
|
|
||||||
D_FMT: int
|
|
||||||
T_FMT: int
|
|
||||||
T_FMT_AMPM: int
|
|
||||||
AM_STR: int
|
|
||||||
PM_STR: int
|
|
||||||
|
|
||||||
RADIXCHAR: int
|
|
||||||
THOUSEP: int
|
|
||||||
YESEXPR: int
|
|
||||||
NOEXPR: int
|
|
||||||
CRNCYSTR: int
|
|
||||||
ALT_DIGITS: int
|
|
||||||
|
|
||||||
def nl_langinfo(key: int, /) -> str: ...
|
|
||||||
|
|
||||||
# This is dependent on `libintl.h` which is a part of `gettext`
|
|
||||||
# system dependency. These functions might be missing.
|
|
||||||
# But, we always say that they are present.
|
|
||||||
def gettext(msg: str, /) -> str: ...
|
|
||||||
def dgettext(domain: str | None, msg: str, /) -> str: ...
|
|
||||||
def dcgettext(domain: str | None, msg: str, category: int, /) -> str: ...
|
|
||||||
def textdomain(domain: str | None, /) -> str: ...
|
|
||||||
def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: ...
|
|
||||||
def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: ...
|
|
||||||
@@ -1,35 +0,0 @@
|
|||||||
import sys
|
|
||||||
from _typeshed import structseq
|
|
||||||
from collections.abc import Callable
|
|
||||||
from types import CodeType
|
|
||||||
from typing import Any, Final, final
|
|
||||||
|
|
||||||
class Profiler:
|
|
||||||
def __init__(
|
|
||||||
self, timer: Callable[[], float] | None = None, timeunit: float = 0.0, subcalls: bool = True, builtins: bool = True
|
|
||||||
) -> None: ...
|
|
||||||
def getstats(self) -> list[profiler_entry]: ...
|
|
||||||
def enable(self, subcalls: bool = True, builtins: bool = True) -> None: ...
|
|
||||||
def disable(self) -> None: ...
|
|
||||||
def clear(self) -> None: ...
|
|
||||||
|
|
||||||
@final
|
|
||||||
class profiler_entry(structseq[Any], tuple[CodeType | str, int, int, float, float, list[profiler_subentry]]):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__: Final = ("code", "callcount", "reccallcount", "totaltime", "inlinetime", "calls")
|
|
||||||
code: CodeType | str
|
|
||||||
callcount: int
|
|
||||||
reccallcount: int
|
|
||||||
totaltime: float
|
|
||||||
inlinetime: float
|
|
||||||
calls: list[profiler_subentry]
|
|
||||||
|
|
||||||
@final
|
|
||||||
class profiler_subentry(structseq[Any], tuple[CodeType | str, int, int, float, float]):
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
__match_args__: Final = ("code", "callcount", "reccallcount", "totaltime", "inlinetime")
|
|
||||||
code: CodeType | str
|
|
||||||
callcount: int
|
|
||||||
reccallcount: int
|
|
||||||
totaltime: float
|
|
||||||
inlinetime: float
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
import sys
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
class ParserBase:
|
|
||||||
def reset(self) -> None: ...
|
|
||||||
def getpos(self) -> tuple[int, int]: ...
|
|
||||||
def unknown_decl(self, data: str) -> None: ...
|
|
||||||
def parse_comment(self, i: int, report: int = 1) -> int: ... # undocumented
|
|
||||||
def parse_declaration(self, i: int) -> int: ... # undocumented
|
|
||||||
def parse_marked_section(self, i: int, report: int = 1) -> int: ... # undocumented
|
|
||||||
def updatepos(self, i: int, j: int) -> int: ... # undocumented
|
|
||||||
if sys.version_info < (3, 10):
|
|
||||||
# Removed from ParserBase: https://bugs.python.org/issue31844
|
|
||||||
def error(self, message: str) -> Any: ... # undocumented
|
|
||||||
lineno: int # undocumented
|
|
||||||
offset: int # undocumented
|
|
||||||
92
crates/red_knot/vendor/typeshed/stdlib/_msi.pyi
vendored
92
crates/red_knot/vendor/typeshed/stdlib/_msi.pyi
vendored
@@ -1,92 +0,0 @@
|
|||||||
import sys
|
|
||||||
|
|
||||||
if sys.platform == "win32":
|
|
||||||
class MSIError(Exception): ...
|
|
||||||
# Actual typename View, not exposed by the implementation
|
|
||||||
class _View:
|
|
||||||
def Execute(self, params: _Record | None = ...) -> None: ...
|
|
||||||
def GetColumnInfo(self, kind: int) -> _Record: ...
|
|
||||||
def Fetch(self) -> _Record: ...
|
|
||||||
def Modify(self, mode: int, record: _Record) -> None: ...
|
|
||||||
def Close(self) -> None: ...
|
|
||||||
# Don't exist at runtime
|
|
||||||
__new__: None # type: ignore[assignment]
|
|
||||||
__init__: None # type: ignore[assignment]
|
|
||||||
|
|
||||||
# Actual typename SummaryInformation, not exposed by the implementation
|
|
||||||
class _SummaryInformation:
|
|
||||||
def GetProperty(self, field: int) -> int | bytes | None: ...
|
|
||||||
def GetPropertyCount(self) -> int: ...
|
|
||||||
def SetProperty(self, field: int, value: int | str) -> None: ...
|
|
||||||
def Persist(self) -> None: ...
|
|
||||||
# Don't exist at runtime
|
|
||||||
__new__: None # type: ignore[assignment]
|
|
||||||
__init__: None # type: ignore[assignment]
|
|
||||||
|
|
||||||
# Actual typename Database, not exposed by the implementation
|
|
||||||
class _Database:
|
|
||||||
def OpenView(self, sql: str) -> _View: ...
|
|
||||||
def Commit(self) -> None: ...
|
|
||||||
def GetSummaryInformation(self, updateCount: int) -> _SummaryInformation: ...
|
|
||||||
def Close(self) -> None: ...
|
|
||||||
# Don't exist at runtime
|
|
||||||
__new__: None # type: ignore[assignment]
|
|
||||||
__init__: None # type: ignore[assignment]
|
|
||||||
|
|
||||||
# Actual typename Record, not exposed by the implementation
|
|
||||||
class _Record:
|
|
||||||
def GetFieldCount(self) -> int: ...
|
|
||||||
def GetInteger(self, field: int) -> int: ...
|
|
||||||
def GetString(self, field: int) -> str: ...
|
|
||||||
def SetString(self, field: int, str: str) -> None: ...
|
|
||||||
def SetStream(self, field: int, stream: str) -> None: ...
|
|
||||||
def SetInteger(self, field: int, int: int) -> None: ...
|
|
||||||
def ClearData(self) -> None: ...
|
|
||||||
# Don't exist at runtime
|
|
||||||
__new__: None # type: ignore[assignment]
|
|
||||||
__init__: None # type: ignore[assignment]
|
|
||||||
|
|
||||||
def UuidCreate() -> str: ...
|
|
||||||
def FCICreate(cabname: str, files: list[str], /) -> None: ...
|
|
||||||
def OpenDatabase(path: str, persist: int, /) -> _Database: ...
|
|
||||||
def CreateRecord(count: int, /) -> _Record: ...
|
|
||||||
|
|
||||||
MSICOLINFO_NAMES: int
|
|
||||||
MSICOLINFO_TYPES: int
|
|
||||||
MSIDBOPEN_CREATE: int
|
|
||||||
MSIDBOPEN_CREATEDIRECT: int
|
|
||||||
MSIDBOPEN_DIRECT: int
|
|
||||||
MSIDBOPEN_PATCHFILE: int
|
|
||||||
MSIDBOPEN_READONLY: int
|
|
||||||
MSIDBOPEN_TRANSACT: int
|
|
||||||
MSIMODIFY_ASSIGN: int
|
|
||||||
MSIMODIFY_DELETE: int
|
|
||||||
MSIMODIFY_INSERT: int
|
|
||||||
MSIMODIFY_INSERT_TEMPORARY: int
|
|
||||||
MSIMODIFY_MERGE: int
|
|
||||||
MSIMODIFY_REFRESH: int
|
|
||||||
MSIMODIFY_REPLACE: int
|
|
||||||
MSIMODIFY_SEEK: int
|
|
||||||
MSIMODIFY_UPDATE: int
|
|
||||||
MSIMODIFY_VALIDATE: int
|
|
||||||
MSIMODIFY_VALIDATE_DELETE: int
|
|
||||||
MSIMODIFY_VALIDATE_FIELD: int
|
|
||||||
MSIMODIFY_VALIDATE_NEW: int
|
|
||||||
|
|
||||||
PID_APPNAME: int
|
|
||||||
PID_AUTHOR: int
|
|
||||||
PID_CHARCOUNT: int
|
|
||||||
PID_CODEPAGE: int
|
|
||||||
PID_COMMENTS: int
|
|
||||||
PID_CREATE_DTM: int
|
|
||||||
PID_KEYWORDS: int
|
|
||||||
PID_LASTAUTHOR: int
|
|
||||||
PID_LASTPRINTED: int
|
|
||||||
PID_LASTSAVE_DTM: int
|
|
||||||
PID_PAGECOUNT: int
|
|
||||||
PID_REVNUMBER: int
|
|
||||||
PID_SECURITY: int
|
|
||||||
PID_SUBJECT: int
|
|
||||||
PID_TEMPLATE: int
|
|
||||||
PID_TITLE: int
|
|
||||||
PID_WORDCOUNT: int
|
|
||||||
147
crates/red_knot/vendor/typeshed/stdlib/_operator.pyi
vendored
147
crates/red_knot/vendor/typeshed/stdlib/_operator.pyi
vendored
@@ -1,147 +0,0 @@
|
|||||||
import sys
|
|
||||||
from _typeshed import SupportsGetItem
|
|
||||||
from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence
|
|
||||||
from typing import Any, AnyStr, Generic, Protocol, SupportsAbs, SupportsIndex, TypeVar, final, overload
|
|
||||||
from typing_extensions import ParamSpec, TypeAlias, TypeVarTuple, Unpack
|
|
||||||
|
|
||||||
_R = TypeVar("_R")
|
|
||||||
_T = TypeVar("_T")
|
|
||||||
_T_co = TypeVar("_T_co", covariant=True)
|
|
||||||
_T1 = TypeVar("_T1")
|
|
||||||
_T2 = TypeVar("_T2")
|
|
||||||
_K = TypeVar("_K")
|
|
||||||
_V = TypeVar("_V")
|
|
||||||
_P = ParamSpec("_P")
|
|
||||||
_Ts = TypeVarTuple("_Ts")
|
|
||||||
|
|
||||||
# The following protocols return "Any" instead of bool, since the comparison
|
|
||||||
# operators can be overloaded to return an arbitrary object. For example,
|
|
||||||
# the numpy.array comparison dunders return another numpy.array.
|
|
||||||
|
|
||||||
class _SupportsDunderLT(Protocol):
|
|
||||||
def __lt__(self, other: Any, /) -> Any: ...
|
|
||||||
|
|
||||||
class _SupportsDunderGT(Protocol):
|
|
||||||
def __gt__(self, other: Any, /) -> Any: ...
|
|
||||||
|
|
||||||
class _SupportsDunderLE(Protocol):
|
|
||||||
def __le__(self, other: Any, /) -> Any: ...
|
|
||||||
|
|
||||||
class _SupportsDunderGE(Protocol):
|
|
||||||
def __ge__(self, other: Any, /) -> Any: ...
|
|
||||||
|
|
||||||
_SupportsComparison: TypeAlias = _SupportsDunderLE | _SupportsDunderGE | _SupportsDunderGT | _SupportsDunderLT
|
|
||||||
|
|
||||||
class _SupportsInversion(Protocol[_T_co]):
|
|
||||||
def __invert__(self) -> _T_co: ...
|
|
||||||
|
|
||||||
class _SupportsNeg(Protocol[_T_co]):
|
|
||||||
def __neg__(self) -> _T_co: ...
|
|
||||||
|
|
||||||
class _SupportsPos(Protocol[_T_co]):
|
|
||||||
def __pos__(self) -> _T_co: ...
|
|
||||||
|
|
||||||
# All four comparison functions must have the same signature, or we get false-positive errors
|
|
||||||
def lt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ...
|
|
||||||
def le(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ...
|
|
||||||
def eq(a: object, b: object, /) -> Any: ...
|
|
||||||
def ne(a: object, b: object, /) -> Any: ...
|
|
||||||
def ge(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ...
|
|
||||||
def gt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ...
|
|
||||||
def not_(a: object, /) -> bool: ...
|
|
||||||
def truth(a: object, /) -> bool: ...
|
|
||||||
def is_(a: object, b: object, /) -> bool: ...
|
|
||||||
def is_not(a: object, b: object, /) -> bool: ...
|
|
||||||
def abs(a: SupportsAbs[_T], /) -> _T: ...
|
|
||||||
def add(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def and_(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def floordiv(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def index(a: SupportsIndex, /) -> int: ...
|
|
||||||
def inv(a: _SupportsInversion[_T_co], /) -> _T_co: ...
|
|
||||||
def invert(a: _SupportsInversion[_T_co], /) -> _T_co: ...
|
|
||||||
def lshift(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def mod(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def mul(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def matmul(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def neg(a: _SupportsNeg[_T_co], /) -> _T_co: ...
|
|
||||||
def or_(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def pos(a: _SupportsPos[_T_co], /) -> _T_co: ...
|
|
||||||
def pow(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def rshift(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def sub(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def truediv(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def xor(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def concat(a: Sequence[_T], b: Sequence[_T], /) -> Sequence[_T]: ...
|
|
||||||
def contains(a: Container[object], b: object, /) -> bool: ...
|
|
||||||
def countOf(a: Iterable[object], b: object, /) -> int: ...
|
|
||||||
@overload
|
|
||||||
def delitem(a: MutableSequence[Any], b: SupportsIndex, /) -> None: ...
|
|
||||||
@overload
|
|
||||||
def delitem(a: MutableSequence[Any], b: slice, /) -> None: ...
|
|
||||||
@overload
|
|
||||||
def delitem(a: MutableMapping[_K, Any], b: _K, /) -> None: ...
|
|
||||||
@overload
|
|
||||||
def getitem(a: Sequence[_T], b: slice, /) -> Sequence[_T]: ...
|
|
||||||
@overload
|
|
||||||
def getitem(a: SupportsGetItem[_K, _V], b: _K, /) -> _V: ...
|
|
||||||
def indexOf(a: Iterable[_T], b: _T, /) -> int: ...
|
|
||||||
@overload
|
|
||||||
def setitem(a: MutableSequence[_T], b: SupportsIndex, c: _T, /) -> None: ...
|
|
||||||
@overload
|
|
||||||
def setitem(a: MutableSequence[_T], b: slice, c: Sequence[_T], /) -> None: ...
|
|
||||||
@overload
|
|
||||||
def setitem(a: MutableMapping[_K, _V], b: _K, c: _V, /) -> None: ...
|
|
||||||
def length_hint(obj: object, default: int = 0, /) -> int: ...
|
|
||||||
@final
|
|
||||||
class attrgetter(Generic[_T_co]):
|
|
||||||
@overload
|
|
||||||
def __new__(cls, attr: str, /) -> attrgetter[Any]: ...
|
|
||||||
@overload
|
|
||||||
def __new__(cls, attr: str, attr2: str, /) -> attrgetter[tuple[Any, Any]]: ...
|
|
||||||
@overload
|
|
||||||
def __new__(cls, attr: str, attr2: str, attr3: str, /) -> attrgetter[tuple[Any, Any, Any]]: ...
|
|
||||||
@overload
|
|
||||||
def __new__(cls, attr: str, attr2: str, attr3: str, attr4: str, /) -> attrgetter[tuple[Any, Any, Any, Any]]: ...
|
|
||||||
@overload
|
|
||||||
def __new__(cls, attr: str, /, *attrs: str) -> attrgetter[tuple[Any, ...]]: ...
|
|
||||||
def __call__(self, obj: Any, /) -> _T_co: ...
|
|
||||||
|
|
||||||
@final
|
|
||||||
class itemgetter(Generic[_T_co]):
|
|
||||||
@overload
|
|
||||||
def __new__(cls, item: _T, /) -> itemgetter[_T]: ...
|
|
||||||
@overload
|
|
||||||
def __new__(cls, item1: _T1, item2: _T2, /, *items: Unpack[_Ts]) -> itemgetter[tuple[_T1, _T2, Unpack[_Ts]]]: ...
|
|
||||||
# __key: _KT_contra in SupportsGetItem seems to be causing variance issues, ie:
|
|
||||||
# TypeVar "_KT_contra@SupportsGetItem" is contravariant
|
|
||||||
# "tuple[int, int]" is incompatible with protocol "SupportsIndex"
|
|
||||||
# preventing [_T_co, ...] instead of [Any, ...]
|
|
||||||
#
|
|
||||||
# A suspected mypy issue prevents using [..., _T] instead of [..., Any] here.
|
|
||||||
# https://github.com/python/mypy/issues/14032
|
|
||||||
def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: ...
|
|
||||||
|
|
||||||
@final
|
|
||||||
class methodcaller:
|
|
||||||
def __init__(self, name: str, /, *args: Any, **kwargs: Any) -> None: ...
|
|
||||||
def __call__(self, obj: Any) -> Any: ...
|
|
||||||
|
|
||||||
def iadd(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def iand(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def iconcat(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def ifloordiv(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def ilshift(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def imod(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def imul(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def imatmul(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def ior(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def ipow(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def irshift(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def isub(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def itruediv(a: Any, b: Any, /) -> Any: ...
|
|
||||||
def ixor(a: Any, b: Any, /) -> Any: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 11):
|
|
||||||
def call(obj: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...
|
|
||||||
|
|
||||||
def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ...
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
from collections.abc import Iterable, Sequence
|
|
||||||
from typing import TypeVar
|
|
||||||
|
|
||||||
_T = TypeVar("_T")
|
|
||||||
_K = TypeVar("_K")
|
|
||||||
_V = TypeVar("_V")
|
|
||||||
|
|
||||||
__all__ = ["compiler_fixup", "customize_config_vars", "customize_compiler", "get_platform_osx"]
|
|
||||||
|
|
||||||
_UNIVERSAL_CONFIG_VARS: tuple[str, ...] # undocumented
|
|
||||||
_COMPILER_CONFIG_VARS: tuple[str, ...] # undocumented
|
|
||||||
_INITPRE: str # undocumented
|
|
||||||
|
|
||||||
def _find_executable(executable: str, path: str | None = None) -> str | None: ... # undocumented
|
|
||||||
def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: ... # undocumented
|
|
||||||
def _find_build_tool(toolname: str) -> str: ... # undocumented
|
|
||||||
|
|
||||||
_SYSTEM_VERSION: str | None # undocumented
|
|
||||||
|
|
||||||
def _get_system_version() -> str: ... # undocumented
|
|
||||||
def _remove_original_values(_config_vars: dict[str, str]) -> None: ... # undocumented
|
|
||||||
def _save_modified_value(_config_vars: dict[str, str], cv: str, newvalue: str) -> None: ... # undocumented
|
|
||||||
def _supports_universal_builds() -> bool: ... # undocumented
|
|
||||||
def _find_appropriate_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented
|
|
||||||
def _remove_universal_flags(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented
|
|
||||||
def _remove_unsupported_archs(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented
|
|
||||||
def _override_all_archs(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented
|
|
||||||
def _check_for_unavailable_sdk(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented
|
|
||||||
def compiler_fixup(compiler_so: Iterable[str], cc_args: Sequence[str]) -> list[str]: ...
|
|
||||||
def customize_config_vars(_config_vars: dict[str, str]) -> dict[str, str]: ...
|
|
||||||
def customize_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ...
|
|
||||||
def get_platform_osx(
|
|
||||||
_config_vars: dict[str, str], osname: _T, release: _K, machine: _V
|
|
||||||
) -> tuple[str | _T, str | _K, str | _V]: ...
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
import sys
|
|
||||||
from _typeshed import StrOrBytesPath
|
|
||||||
from collections.abc import Callable, Sequence
|
|
||||||
from typing import SupportsIndex
|
|
||||||
|
|
||||||
if sys.platform != "win32":
|
|
||||||
def cloexec_pipe() -> tuple[int, int]: ...
|
|
||||||
def fork_exec(
|
|
||||||
args: Sequence[StrOrBytesPath] | None,
|
|
||||||
executable_list: Sequence[bytes],
|
|
||||||
close_fds: bool,
|
|
||||||
pass_fds: tuple[int, ...],
|
|
||||||
cwd: str,
|
|
||||||
env: Sequence[bytes] | None,
|
|
||||||
p2cread: int,
|
|
||||||
p2cwrite: int,
|
|
||||||
c2pread: int,
|
|
||||||
c2pwrite: int,
|
|
||||||
errread: int,
|
|
||||||
errwrite: int,
|
|
||||||
errpipe_read: int,
|
|
||||||
errpipe_write: int,
|
|
||||||
restore_signals: int,
|
|
||||||
call_setsid: int,
|
|
||||||
pgid_to_set: int,
|
|
||||||
gid: SupportsIndex | None,
|
|
||||||
extra_groups: list[int] | None,
|
|
||||||
uid: SupportsIndex | None,
|
|
||||||
child_umask: int,
|
|
||||||
preexec_fn: Callable[[], None],
|
|
||||||
allow_vfork: bool,
|
|
||||||
/,
|
|
||||||
) -> int: ...
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
import _typeshed
|
|
||||||
from typing import Any, NewType, TypeVar
|
|
||||||
|
|
||||||
_T = TypeVar("_T")
|
|
||||||
|
|
||||||
_CacheToken = NewType("_CacheToken", int)
|
|
||||||
|
|
||||||
def get_cache_token() -> _CacheToken: ...
|
|
||||||
|
|
||||||
class ABCMeta(type):
|
|
||||||
def __new__(
|
|
||||||
mcls: type[_typeshed.Self], name: str, bases: tuple[type[Any], ...], namespace: dict[str, Any], /
|
|
||||||
) -> _typeshed.Self: ...
|
|
||||||
def register(cls, subclass: type[_T]) -> type[_T]: ...
|
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
# This is a slight lie, the implementations aren't exactly identical
|
|
||||||
# However, in all likelihood, the differences are inconsequential
|
|
||||||
from _decimal import *
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"Decimal",
|
|
||||||
"Context",
|
|
||||||
"DecimalTuple",
|
|
||||||
"DefaultContext",
|
|
||||||
"BasicContext",
|
|
||||||
"ExtendedContext",
|
|
||||||
"DecimalException",
|
|
||||||
"Clamped",
|
|
||||||
"InvalidOperation",
|
|
||||||
"DivisionByZero",
|
|
||||||
"Inexact",
|
|
||||||
"Rounded",
|
|
||||||
"Subnormal",
|
|
||||||
"Overflow",
|
|
||||||
"Underflow",
|
|
||||||
"FloatOperation",
|
|
||||||
"DivisionImpossible",
|
|
||||||
"InvalidContext",
|
|
||||||
"ConversionSyntax",
|
|
||||||
"DivisionUndefined",
|
|
||||||
"ROUND_DOWN",
|
|
||||||
"ROUND_HALF_UP",
|
|
||||||
"ROUND_HALF_EVEN",
|
|
||||||
"ROUND_CEILING",
|
|
||||||
"ROUND_FLOOR",
|
|
||||||
"ROUND_UP",
|
|
||||||
"ROUND_HALF_DOWN",
|
|
||||||
"ROUND_05UP",
|
|
||||||
"setcontext",
|
|
||||||
"getcontext",
|
|
||||||
"localcontext",
|
|
||||||
"MAX_PREC",
|
|
||||||
"MAX_EMAX",
|
|
||||||
"MIN_EMIN",
|
|
||||||
"MIN_ETINY",
|
|
||||||
"HAVE_THREADS",
|
|
||||||
"HAVE_CONTEXTVAR",
|
|
||||||
]
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
from typing_extensions import TypeAlias
|
|
||||||
|
|
||||||
# Actually Tuple[(int,) * 625]
|
|
||||||
_State: TypeAlias = tuple[int, ...]
|
|
||||||
|
|
||||||
class Random:
|
|
||||||
def __init__(self, seed: object = ...) -> None: ...
|
|
||||||
def seed(self, n: object = None, /) -> None: ...
|
|
||||||
def getstate(self) -> _State: ...
|
|
||||||
def setstate(self, state: _State, /) -> None: ...
|
|
||||||
def random(self) -> float: ...
|
|
||||||
def getrandbits(self, k: int, /) -> int: ...
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user