Compare commits: dhruv/form...deps/parse (5 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 35cc48a64c | |
| | 0d4f1d86ad | |
| | 834910947e | |
| | e34cfeb475 | |
| | bfaa1f9530 | |
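For reference, the same comparison can be reproduced locally with git; a minimal sketch, assuming both branches from the header above (`dhruv/form` and `deps/parse`) are fetched:

```sh
# Commits that are on deps/parse but not on dhruv/form (the list GitHub shows):
git log --oneline dhruv/form..deps/parse

# The three-dot diff GitHub renders for a compare view (head against the merge base):
git diff dhruv/form...deps/parse
```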
@@ -1,3 +1,37 @@
[alias]
dev = "run --package ruff_dev --bin ruff_dev"
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"

[target.'cfg(all())']
rustflags = [
    # CLIPPY LINT SETTINGS
    # This is a workaround to configure lints for the entire workspace, pending the ability to configure this via TOML.
    # See: `https://github.com/rust-lang/cargo/issues/5034`
    # `https://github.com/EmbarkStudios/rust-ecosystem/issues/22#issuecomment-947011395`
    "-Dunsafe_code",
    "-Wclippy::pedantic",
    # Allowed pedantic lints
"-Wclippy::char_lit_as_u8",
    "-Aclippy::collapsible_else_if",
    "-Aclippy::collapsible_if",
    "-Aclippy::implicit_hasher",
    "-Aclippy::match_same_arms",
    "-Aclippy::missing_errors_doc",
    "-Aclippy::missing_panics_doc",
    "-Aclippy::module_name_repetitions",
    "-Aclippy::must_use_candidate",
    "-Aclippy::similar_names",
    "-Aclippy::too_many_lines",
    # Disallowed restriction lints
    "-Wclippy::print_stdout",
    "-Wclippy::print_stderr",
    "-Wclippy::dbg_macro",
    "-Wclippy::empty_drop",
    "-Wclippy::empty_structs_with_brackets",
    "-Wclippy::exit",
    "-Wclippy::get_unwrap",
    "-Wclippy::rc_buffer",
    "-Wclippy::rc_mutex",
    "-Wclippy::rest_pat_in_fully_bound_structs",
    "-Wunreachable_pub"
]
@@ -1,8 +0,0 @@
[profile.ci]
# Print out output for failing tests as soon as they fail, and also at the end
# of the run (for easy scrollability).
failure-output = "immediate-final"
# Do not cancel the test run on the first failure.
fail-fast = false

status-level = "skip"
@@ -10,7 +10,7 @@ indent_style = space
insert_final_newline = true
indent_size = 2

[*.{rs,py,pyi}]
[*.{rs,py}]
indent_size = 4

[*.snap]
.gitattributes | 11
@@ -1,14 +1,7 @@
* text=auto eol=lf

crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf

crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf

crates/ruff_python_parser/resources/inline linguist-generated=true
crates/ruff/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
crates/ruff/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf

ruff.schema.json linguist-generated=true text=auto eol=lf
*.md.snap linguist-language=Markdown
.github/CODEOWNERS | 12
@@ -5,13 +5,5 @@
# - The '*' pattern is global owners.
# - Order is important. The last matching pattern has the most precedence.

/crates/ruff_notebook/ @dhruvmanila
/crates/ruff_formatter/ @MichaReiser
/crates/ruff_python_formatter/ @MichaReiser
/crates/ruff_python_parser/ @MichaReiser @dhruvmanila

# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood

# Script for fuzzing the parser
/scripts/fuzz-parser/ @AlexWaygood
# Jupyter
/crates/ruff/src/jupyter/ @dhruvmanila
.github/ISSUE_TEMPLATE.md | 2
@@ -3,8 +3,6 @@ Thank you for taking the time to report an issue! We're glad to have you involve

If you're filing a bug report, please consider including the following information:

* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
  e.g. "RUF001", "unused variable", "Jupyter notebook"
* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
.github/dependabot.yml | 11 (new file)
@@ -0,0 +1,11 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
      day: "monday"
      time: "12:00"
      timezone: "America/New_York"
    commit-message:
      prefix: "ci(deps)"
.github/release.yml | 24 (new file)
@@ -0,0 +1,24 @@
# https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes#configuring-automatically-generated-release-notes
changelog:
  exclude:
    labels:
      - internal
      - documentation
  categories:
    - title: Breaking Changes
      labels:
        - breaking
    - title: Rules
      labels:
        - rule
        - autofix
    - title: Settings
      labels:
        - configuration
        - cli
    - title: Bug Fixes
      labels:
        - bug
    - title: Other Changes
      labels:
        - "*"
.github/renovate.json5 | 79
@@ -1,79 +0,0 @@
{
  $schema: "https://docs.renovatebot.com/renovate-schema.json",
  dependencyDashboard: true,
  suppressNotifications: ["prEditedNotification"],
  extends: ["config:recommended"],
  labels: ["internal"],
  schedule: ["before 4am on Monday"],
  semanticCommits: "disabled",
  separateMajorMinor: false,
  prHourlyLimit: 10,
  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
  cargo: {
    // See https://docs.renovatebot.com/configuration-options/#rangestrategy
    rangeStrategy: "update-lockfile",
  },
  pep621: {
    fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
  },
  npm: {
    fileMatch: ["^playground/.*package\\.json$"],
  },
  "pre-commit": {
    enabled: true,
  },
  packageRules: [
    {
      // Group upload/download artifact updates, the versions are dependent
      groupName: "Artifact GitHub Actions dependencies",
      matchManagers: ["github-actions"],
      matchDatasources: ["gitea-tags", "github-tags"],
      matchPackagePatterns: ["actions/.*-artifact"],
      description: "Weekly update of artifact-related GitHub Actions dependencies",
    },
    {
      // This package rule disables updates for GitHub runners:
      // we'd only pin them to a specific version
      // if there was a deliberate reason to do so
      groupName: "GitHub runners",
      matchManagers: ["github-actions"],
      matchDatasources: ["github-runners"],
      description: "Disable PRs updating GitHub runners (e.g. 'runs-on: macos-14')",
      enabled: false,
    },
    {
      groupName: "pre-commit dependencies",
      matchManagers: ["pre-commit"],
      description: "Weekly update of pre-commit dependencies",
    },
    {
      groupName: "NPM Development dependencies",
      matchManagers: ["npm"],
      matchDepTypes: ["devDependencies"],
      description: "Weekly update of NPM development dependencies",
    },
    {
      groupName: "Monaco",
      matchManagers: ["npm"],
      matchPackagePatterns: ["monaco"],
      description: "Weekly update of the Monaco editor",
    },
    {
      groupName: "strum",
      matchManagers: ["cargo"],
      matchPackagePatterns: ["strum"],
      description: "Weekly update of strum dependencies",
    },
    {
      groupName: "ESLint",
      matchManagers: ["npm"],
      matchPackageNames: ["eslint"],
      allowedVersions: "<9",
      description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
    },
  ],
  vulnerabilityAlerts: {
    commitMessageSuffix: "",
    labels: ["internal", "security"],
  },
}
.github/workflows/benchmark.yaml | 128 (new file)
@@ -0,0 +1,128 @@
name: Benchmark

on:
  pull_request:
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
  cancel-in-progress: true

jobs:
  run-benchmark:
    if: github.event_name == 'pull_request'
    name: "Run | ${{ matrix.os }}"
    strategy:
      matrix:
        os: [ ubuntu-latest, windows-latest ]
    runs-on: ${{ matrix.os }}

    steps:
      - name: "PR - Checkout Branch"
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}

      - name: "PR - Install Rust toolchain"
        run: rustup show

      - uses: Swatinem/rust-cache@v2

      - name: "PR - Build benchmarks"
        run: cargo bench -p ruff_benchmark --no-run

      - name: "PR - Run benchmarks"
        run: cargo benchmark --save-baseline=pr

      - name: "Main - Checkout Branch"
        uses: actions/checkout@v3
        with:
          clean: false
          ref: main

      - name: "Main - Install Rust toolchain"
        run: rustup show

      - name: "Main - Build benchmarks"
        run: cargo bench -p ruff_benchmark --no-run

      - name: "Main - Run benchmarks"
        run: cargo benchmark --save-baseline=main

      - name: "Upload benchmark results"
        uses: actions/upload-artifact@v3
        with:
          name: benchmark-results-${{ matrix.os }}
          path: ./target/criterion

      # Cleanup
      - name: Remove Criterion Artifact
        uses: JesseTG/rm@v1.0.3
        with:
          path: ./target/criterion

  benchmark-compare:
    if: github.event_name == 'pull_request'
    runs-on: ubuntu-latest
    name: Compare
    needs:
      - run-benchmark

    steps:
      - name: "Install Rust toolchain"
        run: rustup show

      - name: "Install critcmp"
        uses: taiki-e/install-action@v2
        with:
          tool: critcmp

      - name: "Linux | Download PR benchmark results"
        uses: actions/download-artifact@v3
        with:
          name: benchmark-results-ubuntu-latest
          path: ./target/criterion

      - name: "Linux | Compare benchmark results"
        shell: bash
        run: |
          echo "### Benchmark" >> summary.md
          echo "#### Linux" >> summary.md
          echo "\`\`\`" >> summary.md
          critcmp main pr >> summary.md
          echo "\`\`\`" >> summary.md
          echo "" >> summary.md

      - name: "Linux | Cleanup benchmark results"
        run: rm -rf ./target/criterion

      - name: "Windows | Download PR benchmark results"
        uses: actions/download-artifact@v3
        with:
          name: benchmark-results-windows-latest
          path: ./target/criterion

      - name: "Windows | Compare benchmark results"
        shell: bash
        run: |
          echo "#### Windows" >> summary.md
          echo "\`\`\`" >> summary.md
          critcmp main pr >> summary.md
          echo "\`\`\`" >> summary.md
          echo "" >> summary.md

          echo ${{ github.event.pull_request.number }} > pr-number

          cat summary.md > $GITHUB_STEP_SUMMARY

      - uses: actions/upload-artifact@v3
        name: Upload PR Number
        with:
          name: pr-number
          path: pr-number

      - uses: actions/upload-artifact@v3
        name: Upload Summary
        with:
          name: summary
          path: summary.md
.github/workflows/ci.yaml | 460
@@ -2,7 +2,7 @@ name: CI

on:
  push:
    branches: [main]
    branches: [ main ]
  pull_request:
  workflow_dispatch:
@@ -16,81 +16,14 @@ env:
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10
  PACKAGE_NAME: ruff
  PYTHON_VERSION: "3.11"
  PYTHON_VERSION: "3.11" # to build abi3 wheels

jobs:
  determine_changes:
    name: "Determine changes"
    runs-on: ubuntu-latest
    outputs:
      # Flag that is raised when any code that affects the parser is changed
      parser: ${{ steps.changed.outputs.parser_any_changed }}
      # Flag that is raised when any code that affects the linter is changed
      linter: ${{ steps.changed.outputs.linter_any_changed }}
      # Flag that is raised when any code that affects the formatter is changed
      formatter: ${{ steps.changed.outputs.formatter_any_changed }}
      # Flag that is raised when any code is changed
      # This is a superset of the linter and formatter
      code: ${{ steps.changed.outputs.code_any_changed }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: tj-actions/changed-files@v44
        id: changed
        with:
          files_yaml: |
            parser:
              - Cargo.toml
              - Cargo.lock
              - crates/ruff_python_trivia/**
              - crates/ruff_source_file/**
              - crates/ruff_text_size/**
              - crates/ruff_python_ast/**
              - crates/ruff_python_parser/**
              - scripts/fuzz-parser/**
              - .github/workflows/ci.yaml

            linter:
              - Cargo.toml
              - Cargo.lock
              - crates/**
              - "!crates/ruff_python_formatter/**"
              - "!crates/ruff_formatter/**"
              - "!crates/ruff_dev/**"
              - scripts/*
              - python/**
              - .github/workflows/ci.yaml

            formatter:
              - Cargo.toml
              - Cargo.lock
              - crates/ruff_python_formatter/**
              - crates/ruff_formatter/**
              - crates/ruff_python_trivia/**
              - crates/ruff_python_ast/**
              - crates/ruff_source_file/**
              - crates/ruff_python_index/**
              - crates/ruff_text_size/**
              - crates/ruff_python_parser/**
              - crates/ruff_dev/**
              - scripts/*
              - python/**
              - .github/workflows/ci.yaml

            code:
              - "**/*"
              - "!**/*.md"
              - "!docs/**"
              - "!assets/**"

  cargo-fmt:
    name: "cargo fmt"
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - name: "Install Rust toolchain"
        run: rustup component add rustfmt
      - run: cargo fmt --all --check
@@ -98,90 +31,79 @@ jobs:
  cargo-clippy:
    name: "cargo clippy"
    runs-on: ubuntu-latest
    needs: determine_changes
    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
    timeout-minutes: 20
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - name: "Install Rust toolchain"
        run: |
          rustup component add clippy
          rustup target add wasm32-unknown-unknown
      - uses: Swatinem/rust-cache@v2
      - name: "Clippy"
        run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
        run: cargo clippy --workspace --all-targets --all-features -- -D warnings
      - name: "Clippy (wasm)"
        run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
        run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings

  cargo-test-linux:
    name: "cargo test (linux)"
    runs-on: ubuntu-latest
    needs: determine_changes
    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
    timeout-minutes: 20
  cargo-test:
    strategy:
      matrix:
        os: [ ubuntu-latest, windows-latest ]
    runs-on: ${{ matrix.os }}
    name: "cargo test | ${{ matrix.os }}"
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Install mold"
        uses: rui314/setup-mold@v1
      - name: "Install cargo nextest"
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-nextest
      - name: "Install cargo insta"
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-insta
      - uses: Swatinem/rust-cache@v2
      - name: "Run tests"
      # cargo insta 1.30.0 fails for some reason (https://github.com/mitsuhiko/insta/issues/392)
      - run: cargo install cargo-insta@=1.29.0
      - run: pip install black[d]==23.1.0
      - name: "Run tests (Ubuntu)"
        if: ${{ matrix.os == 'ubuntu-latest' }}
        run: cargo insta test --all --all-features --unreferenced reject
      - name: "Run tests (Windows)"
        if: ${{ matrix.os == 'windows-latest' }}
        shell: bash
        env:
          NEXTEST_PROFILE: "ci"
        run: cargo insta test --all-features --unreferenced reject --test-runner nextest

        # We can't reject unreferenced snapshots on windows because flake8_executable can't run on windows
        run: cargo insta test --all --all-features
      - run: cargo test --package ruff_cli --test black_compatibility_test -- --ignored
        # TODO: Skipped as it's currently broken. The resources were moved from the
        # ruff_cli to the ruff crate, but this test was not updated.
        if: false
      # Check for broken links in the documentation.
      - run: cargo doc --all --no-deps
        env:
          # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
          RUSTDOCFLAGS: "-D warnings"
      - uses: actions/upload-artifact@v4
      - uses: actions/upload-artifact@v3
        if: ${{ matrix.os == 'ubuntu-latest' }}
        with:
          name: ruff
          path: target/debug/ruff

  cargo-test-windows:
    name: "cargo test (windows)"
    runs-on: windows-latest
    needs: determine_changes
    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
    timeout-minutes: 20
  cargo-fuzz:
    runs-on: ubuntu-latest
    name: "cargo fuzz"
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Install cargo nextest"
      - uses: Swatinem/rust-cache@v2
        with:
          workspaces: "fuzz -> target"
      - name: "Install cargo-fuzz"
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-nextest
      - uses: Swatinem/rust-cache@v2
      - name: "Run tests"
        shell: bash
        run: |
          cargo nextest run --all-features --profile ci
          cargo test --all-features --doc
          tool: cargo-fuzz@0.11
      - run: cargo fuzz build -s none

  cargo-test-wasm:
    name: "cargo test (wasm)"
    runs-on: ubuntu-latest
    needs: determine_changes
    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
    timeout-minutes: 10
    name: "cargo test (wasm)"
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - name: "Install Rust toolchain"
        run: rustup target add wasm32-unknown-unknown
      - uses: actions/setup-node@v4
      - uses: actions/setup-node@v3
        with:
          node-version: 18
          cache: "npm"
@@ -193,81 +115,11 @@ jobs:
          cd crates/ruff_wasm
          wasm-pack test --node

  cargo-build-release:
    name: "cargo build (release)"
    runs-on: macos-latest
    needs: determine_changes
    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
    timeout-minutes: 20
    steps:
      - uses: actions/checkout@v4
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Install mold"
        uses: rui314/setup-mold@v1
      - uses: Swatinem/rust-cache@v2
      - name: "Build"
        run: cargo build --release --locked

  cargo-fuzz:
    name: "cargo fuzz"
    runs-on: ubuntu-latest
    needs: determine_changes
    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - name: "Install Rust toolchain"
        run: rustup show
      - uses: Swatinem/rust-cache@v2
        with:
          workspaces: "fuzz -> target"
      - name: "Install cargo-fuzz"
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-fuzz@0.11.2
      - run: cargo fuzz build -s none

  fuzz-parser:
    name: "Fuzz the parser"
    runs-on: ubuntu-latest
    needs:
      - cargo-test-linux
      - determine_changes
    if: ${{ needs.determine_changes.outputs.parser == 'true' }}
    timeout-minutes: 20
    env:
      FORCE_COLOR: 1
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: Install uv
        run: curl -LsSf https://astral.sh/uv/install.sh | sh
      - name: Install Python requirements
        run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
      - uses: actions/download-artifact@v4
        name: Download Ruff binary to test
        id: download-cached-binary
        with:
          name: ruff
          path: ruff-to-test
      - name: Fuzz
        run: |
          # Make executable, since artifact download doesn't preserve this
          chmod +x ${{ steps.download-cached-binary.outputs.download-path }}/ruff

          python scripts/fuzz-parser/fuzz.py 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff

  scripts:
    name: "test scripts"
    runs-on: ubuntu-latest
    needs: determine_changes
    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
    timeout-minutes: 5
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - name: "Install Rust toolchain"
        run: rustup component add rustfmt
      - uses: Swatinem/rust-cache@v2
@@ -283,136 +135,72 @@ jobs:
  ecosystem:
    name: "ecosystem"
    runs-on: ubuntu-latest
    needs:
      - cargo-test-linux
      - determine_changes
    needs: cargo-test
    # Only runs on pull requests, since that is the only way we can find the base version for comparison.
    # Ecosystem check needs linter and/or formatter changes.
    if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
    timeout-minutes: 20
    if: github.event_name == 'pull_request'
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - uses: actions/download-artifact@v4
        name: Download comparison Ruff binary
      - uses: actions/download-artifact@v3
        name: Download Ruff binary
        id: ruff-target
        with:
          name: ruff
          path: target/debug

      - uses: dawidd6/action-download-artifact@v3
        name: Download baseline Ruff binary
      - uses: dawidd6/action-download-artifact@v2
        name: Download base results
        with:
          name: ruff
          branch: ${{ github.event.pull_request.base.ref }}
          workflow: "ci.yaml"
          check_artifacts: true

      - name: Install ruff-ecosystem
        run: |
          pip install ./python/ruff-ecosystem

      - name: Run `ruff check` stable ecosystem check
        if: ${{ needs.determine_changes.outputs.linter == 'true' }}
      - name: Run ecosystem check
        run: |
          # Make executable, since artifact download doesn't preserve this
          chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
          chmod +x ruff ${{ steps.ruff-target.outputs.download-path }}/ruff

          # Set pipefail to avoid hiding errors with tee
          set -eo pipefail
          scripts/check_ecosystem.py ruff ${{ steps.ruff-target.outputs.download-path }}/ruff | tee ecosystem-result
          cat ecosystem-result > $GITHUB_STEP_SUMMARY

          ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable

          cat ecosystem-result-check-stable > $GITHUB_STEP_SUMMARY
          echo "### Linter (stable)" > ecosystem-result
          cat ecosystem-result-check-stable >> ecosystem-result
          echo "" >> ecosystem-result

      - name: Run `ruff check` preview ecosystem check
        if: ${{ needs.determine_changes.outputs.linter == 'true' }}
        run: |
          # Make executable, since artifact download doesn't preserve this
          chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff

          # Set pipefail to avoid hiding errors with tee
          set -eo pipefail

          ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview

          cat ecosystem-result-check-preview > $GITHUB_STEP_SUMMARY
          echo "### Linter (preview)" >> ecosystem-result
          cat ecosystem-result-check-preview >> ecosystem-result
          echo "" >> ecosystem-result

      - name: Run `ruff format` stable ecosystem check
        if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
        run: |
          # Make executable, since artifact download doesn't preserve this
          chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff

          # Set pipefail to avoid hiding errors with tee
          set -eo pipefail

          ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable

          cat ecosystem-result-format-stable > $GITHUB_STEP_SUMMARY
          echo "### Formatter (stable)" >> ecosystem-result
          cat ecosystem-result-format-stable >> ecosystem-result
          echo "" >> ecosystem-result

      - name: Run `ruff format` preview ecosystem check
        if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
        run: |
          # Make executable, since artifact download doesn't preserve this
          chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff

          # Set pipefail to avoid hiding errors with tee
          set -eo pipefail

          ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview

          cat ecosystem-result-format-preview > $GITHUB_STEP_SUMMARY
          echo "### Formatter (preview)" >> ecosystem-result
          cat ecosystem-result-format-preview >> ecosystem-result
          echo "" >> ecosystem-result

      - name: Export pull request number
        run: |
          echo ${{ github.event.number }} > pr-number

      - uses: actions/upload-artifact@v4
      - uses: actions/upload-artifact@v3
        name: Upload PR Number
        with:
          name: pr-number
          path: pr-number

      - uses: actions/upload-artifact@v4
      - uses: actions/upload-artifact@v3
        name: Upload Results
        with:
          name: ecosystem-result
          path: ecosystem-result

  cargo-shear:
    name: "cargo shear"
  cargo-udeps:
    name: "cargo udeps"
    runs-on: ubuntu-latest
    needs: determine_changes
    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
    steps:
      - uses: actions/checkout@v4
      - uses: cargo-bins/cargo-binstall@main
      - run: cargo binstall --no-confirm cargo-shear
      - run: cargo shear
      - uses: actions/checkout@v3
      - name: "Install nightly Rust toolchain"
        # Only pinned to make caching work, update freely
        run: rustup toolchain install nightly-2023-06-08
      - uses: Swatinem/rust-cache@v2
      - name: "Install cargo-udeps"
        uses: taiki-e/install-action@cargo-udeps
      - name: "Run cargo-udeps"
        run: cargo +nightly-2023-06-08 udeps

  python-package:
    name: "python package"
    runs-on: ubuntu-latest
    timeout-minutes: 20
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
@@ -434,10 +222,9 @@ jobs:
  pre-commit:
    name: "pre-commit"
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: "Install Rust toolchain"
@@ -446,7 +233,7 @@ jobs:
      - name: "Install pre-commit"
        run: pip install pre-commit
      - name: "Cache pre-commit"
        uses: actions/cache@v4
        uses: actions/cache@v3
        with:
          path: ~/.cache/pre-commit
          key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
@@ -463,15 +250,14 @@ jobs:
  docs:
    name: "mkdocs"
    runs-on: ubuntu-latest
    timeout-minutes: 10
    env:
      MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
      - name: "Add SSH key"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        uses: webfactory/ssh-agent@v0.9.0
        uses: webfactory/ssh-agent@v0.8.0
        with:
          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
      - name: "Install Rust toolchain"
@@ -494,94 +280,18 @@ jobs:
        run: mkdocs build --strict -f mkdocs.insiders.yml
      - name: "Build docs"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
        run: mkdocs build --strict -f mkdocs.public.yml
        run: mkdocs build --strict -f mkdocs.generated.yml

  check-formatter-instability-and-black-similarity:
    name: "formatter instabilities and black similarity"
  check-formatter-stability:
    name: "Check formatter stability"
    runs-on: ubuntu-latest
    needs: determine_changes
    if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Cache rust"
        uses: Swatinem/rust-cache@v2
      - name: "Formatter progress"
        run: scripts/formatter_ecosystem_checks.sh
      - name: "Github step summary"
        run: cat target/progress_projects_stats.txt > $GITHUB_STEP_SUMMARY
      - name: "Remove checkouts from cache"
        run: rm -r target/progress_projects

  check-ruff-lsp:
    name: "test ruff-lsp"
    runs-on: ubuntu-latest
    timeout-minutes: 5
    needs:
      - cargo-test-linux
      - determine_changes
    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
    steps:
      - uses: extractions/setup-just@v2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - uses: actions/checkout@v4
        name: "Download ruff-lsp source"
        with:
          repository: "astral-sh/ruff-lsp"

      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - uses: actions/download-artifact@v4
        name: Download development ruff binary
        id: ruff-target
        with:
          name: ruff
          path: target/debug

      - name: Install ruff-lsp dependencies
        run: |
          just install

      - name: Run ruff-lsp tests
        run: |
          # Setup development binary
          pip uninstall --yes ruff
          chmod +x ${{ steps.ruff-target.outputs.download-path }}/ruff
          export PATH=${{ steps.ruff-target.outputs.download-path }}:$PATH
          ruff version

          just test

  benchmarks:
    runs-on: ubuntu-latest
    needs: determine_changes
    if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
    timeout-minutes: 20
    steps:
      - name: "Checkout Branch"
        uses: actions/checkout@v4

      - name: "Install Rust toolchain"
        run: rustup show

      - name: "Install codspeed"
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-codspeed

      - uses: Swatinem/rust-cache@v2

      - name: "Build benchmarks"
        run: cargo codspeed build --features codspeed -p ruff_benchmark

      - name: "Run benchmarks"
        uses: CodSpeedHQ/action@v2
        with:
          run: cargo codspeed run
          token: ${{ secrets.CODSPEED_TOKEN }}
      - name: "Clone CPython 3.10"
        run: git clone --branch 3.10 --depth 1 https://github.com/python/cpython.git crates/ruff/resources/test/cpython
      - name: "Check stability"
        run: cargo run --bin ruff_dev -- format-dev --stability-check crates/ruff/resources/test/cpython
.github/workflows/daily_fuzz.yaml | 72
@@ -1,72 +0,0 @@
name: Daily parser fuzz

on:
  workflow_dispatch:
  schedule:
    - cron: "0 0 * * *"
  pull_request:
    paths:
      - ".github/workflows/daily_fuzz.yaml"

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

env:
  CARGO_INCREMENTAL: 0
  CARGO_NET_RETRY: 10
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10
  PACKAGE_NAME: ruff
  FORCE_COLOR: 1

jobs:
  fuzz:
    name: Fuzz
    runs-on: ubuntu-latest
    timeout-minutes: 20
    # Don't run the cron job on forks:
    if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install uv
        run: curl -LsSf https://astral.sh/uv/install.sh | sh
      - name: Install Python requirements
        run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Install mold"
        uses: rui314/setup-mold@v1
      - uses: Swatinem/rust-cache@v2
      - name: Build ruff
        # A debug build means the script runs slower once it gets started,
        # but this is outweighed by the fact that a release build takes *much* longer to compile in CI
        run: cargo build --locked
      - name: Fuzz
        run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff

  create-issue-on-failure:
    name: Create an issue if the daily fuzz surfaced any bugs
    runs-on: ubuntu-latest
    needs: fuzz
    if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.fuzz.result == 'failure' }}
    permissions:
      issues: write
    steps:
      - uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            await github.rest.issues.create({
              owner: "astral-sh",
              repo: "ruff",
              title: `Daily parser fuzz failed on ${new Date().toDateString()}`,
              body: "Runs listed here: https://github.com/astral-sh/ruff/actions/workflows/daily_fuzz.yml",
              labels: ["bug", "parser", "fuzzer"],
            })
.github/workflows/docs.yaml | 22
@@ -2,13 +2,8 @@ name: mkdocs

on:
  workflow_dispatch:
    inputs:
      ref:
        description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
        default: ""
        type: string
  release:
    types: [published]
    types: [ published ]

jobs:
  mkdocs:
@@ -17,13 +12,11 @@ jobs:
      CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
      MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.ref }}
      - uses: actions/setup-python@v5
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
      - name: "Add SSH key"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        uses: webfactory/ssh-agent@v0.9.0
        uses: webfactory/ssh-agent@v0.8.0
        with:
          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
      - name: "Install Rust toolchain"
@@ -44,12 +37,11 @@ jobs:
        run: mkdocs build --strict -f mkdocs.insiders.yml
      - name: "Build docs"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
        run: mkdocs build --strict -f mkdocs.public.yml
        run: mkdocs build --strict -f mkdocs.generated.yml
      - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
        uses: cloudflare/wrangler-action@v3.5.0
        uses: cloudflare/wrangler-action@2.0.0
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
          # `github.head_ref` is only set during pull requests; for manual runs or tags we use `main` to deploy to production
          command: pages deploy site --project-name=astral-docs --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
          command: pages publish site --project-name=ruff-docs --branch ${GITHUB_HEAD_REF} --commit-hash ${GITHUB_SHA}
.github/workflows/flake8-to-ruff.yaml | 247 (new file)
@@ -0,0 +1,247 @@
name: "[flake8-to-ruff] Release"

on: workflow_dispatch

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  PACKAGE_NAME: flake8-to-ruff
  CRATE_NAME: flake8_to_ruff
  PYTHON_VERSION: "3.11"
  CARGO_INCREMENTAL: 0
  CARGO_NET_RETRY: 10
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10

jobs:
  macos-x86_64:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Build wheels - x86_64"
        uses: PyO3/maturin-action@v1
        with:
          target: x86_64
          args: --release --out dist --sdist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - name: "Install built wheel - x86_64"
        run: |
          pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist
  macos-universal:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Build wheels - universal2"
        uses: PyO3/maturin-action@v1
        with:
          args: --release --target universal2-apple-darwin --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - name: "Install built wheel - universal2"
        run: |
          pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  windows:
    runs-on: windows-latest
    strategy:
      matrix:
        target: [ x64, x86 ]
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: ${{ matrix.target }}
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - name: "Install built wheel"
        shell: bash
        run: |
          python -m pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  linux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target: [ x86_64, i686 ]
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          manylinux: auto
          args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - name: "Install built wheel"
        if: matrix.target == 'x86_64'
        run: |
          pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  linux-cross:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target: [ aarch64, armv7, s390x, ppc64le, ppc64 ]
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          manylinux: auto
          args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - uses: uraimo/run-on-arch-action@v2
        if: matrix.target != 'ppc64'
        name: Install built wheel
        with:
          arch: ${{ matrix.target }}
          distro: ubuntu20.04
          githubToken: ${{ github.token }}
          install: |
            apt-get update
            apt-get install -y --no-install-recommends python3 python3-pip
            pip3 install -U pip
          run: |
            pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  musllinux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target:
          - x86_64-unknown-linux-musl
          - i686-unknown-linux-musl
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          manylinux: musllinux_1_2
          args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - name: "Install built wheel"
        if: matrix.target == 'x86_64-unknown-linux-musl'
        uses: addnab/docker-run-action@v3
        with:
          image: alpine:latest
          options: -v ${{ github.workspace }}:/io -w /io
          run: |
            apk add py3-pip
            pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  musllinux-cross:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform:
          - target: aarch64-unknown-linux-musl
            arch: aarch64
          - target: armv7-unknown-linux-musleabihf
            arch: armv7
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.platform.target }}
          manylinux: musllinux_1_2
          args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - uses: uraimo/run-on-arch-action@v2
        name: Install built wheel
        with:
          arch: ${{ matrix.platform.arch }}
          distro: alpine_latest
          githubToken: ${{ github.token }}
          install: |
            apk add py3-pip
          run: |
            pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  release:
    name: Release
    runs-on: ubuntu-latest
    needs:
      - macos-universal
      - macos-x86_64
      - windows
      - linux
      - linux-cross
      - musllinux
      - musllinux-cross
    steps:
      - uses: actions/download-artifact@v3
        with:
          name: wheels
      - uses: actions/setup-python@v4
      - name: "Publish to PyPi"
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.FLAKE8_TO_RUFF_TOKEN }}
        run: |
          pip install --upgrade twine
          twine upload --skip-existing *
.github/workflows/playground.yaml | 11
@@ -3,7 +3,7 @@ name: "[Playground] Release"
on:
  workflow_dispatch:
  release:
    types: [published]
    types: [ published ]

env:
  CARGO_INCREMENTAL: 0
@@ -17,10 +17,10 @@ jobs:
    env:
      CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - name: "Install Rust toolchain"
        run: rustup target add wasm32-unknown-unknown
      - uses: actions/setup-node@v4
      - uses: actions/setup-node@v3
        with:
          node-version: 18
          cache: "npm"
@@ -40,9 +40,8 @@ jobs:
        working-directory: playground
      - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
        uses: cloudflare/wrangler-action@v3.5.0
        uses: cloudflare/wrangler-action@2.0.0
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
          # `github.head_ref` is only set during pull requests; for manual runs or tags we use `main` to deploy to production
          command: pages deploy playground/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
          command: pages publish playground/dist --project-name=ruff --branch ${GITHUB_HEAD_REF} --commit-hash ${GITHUB_SHA}
.github/workflows/pr-comment.yaml | 75
@@ -1,9 +1,9 @@
name: Ecosystem check comment
name: PR Check Comment

on:
  workflow_run:
    workflows: [CI]
    types: [completed]
    workflows: [ CI, Benchmark ]
    types: [ completed ]
  workflow_dispatch:
    inputs:
      workflow_run_id:
@@ -17,14 +17,14 @@ jobs:
  comment:
    runs-on: ubuntu-latest
    steps:
      - uses: dawidd6/action-download-artifact@v3
        name: Download pull request number
      - uses: dawidd6/action-download-artifact@v2
        name: Download PR Number
        with:
          name: pr-number
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore

      - name: Parse pull request number
      - name: Extract PR Number
        id: pr-number
        run: |
          if [[ -f pr-number ]]
@@ -32,8 +32,8 @@ jobs:
            echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
          fi

      - uses: dawidd6/action-download-artifact@v3
        name: "Download ecosystem results"
      - uses: dawidd6/action-download-artifact@v2
        name: "Download Ecosystem Result"
        id: download-ecosystem-result
        if: steps.pr-number.outputs.pr-number
        with:
@@ -41,39 +41,44 @@ jobs:
          workflow: ci.yaml
          pr: ${{ steps.pr-number.outputs.pr-number }}
          path: pr/ecosystem
          workflow_conclusion: completed
          if_no_artifact_found: ignore

      - name: Generate comment content
      - uses: dawidd6/action-download-artifact@v2
        name: "Download Benchmark Result"
        id: download-benchmark-result
        if: steps.pr-number.outputs.pr-number
        with:
          name: summary
          workflow: benchmark.yaml
          pr: ${{ steps.pr-number.outputs.pr-number }}
          path: pr/benchmark
          if_no_artifact_found: ignore

      - name: Generate Comment
        id: generate-comment
        if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
        if: steps.download-ecosystem-result.outputs.found_artifact == 'true' || steps.download-benchmark-result.outputs.found_artifact == 'true'
        run: |
          # Note this identifier is used to find the comment to update on
          # subsequent runs
          echo '<!-- generated-comment ecosystem -->' >> comment.txt

          echo '## `ruff-ecosystem` results' >> comment.txt
          cat pr/ecosystem/ecosystem-result >> comment.txt
          echo "" >> comment.txt

          echo 'comment<<EOF' >> $GITHUB_OUTPUT
          cat comment.txt >> $GITHUB_OUTPUT
          echo '## PR Check Results' >> $GITHUB_OUTPUT

          if [[ -f pr/ecosystem/ecosystem-result ]]
          then
            echo "### Ecosystem" >> $GITHUB_OUTPUT
            cat pr/ecosystem/ecosystem-result >> $GITHUB_OUTPUT
            echo "" >> $GITHUB_OUTPUT
          fi

          if [[ -f pr/benchmark/summary.md ]]
          then
            cat pr/benchmark/summary.md >> $GITHUB_OUTPUT
          fi

          echo 'EOF' >> $GITHUB_OUTPUT

      - name: Find existing comment
        uses: peter-evans/find-comment@v3
        if: steps.generate-comment.outcome == 'success'
        id: find-comment
        with:
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          comment-author: "github-actions[bot]"
          body-includes: "<!-- generated-comment ecosystem -->"

      - name: Create or update comment
        if: steps.find-comment.outcome == 'success'
        uses: peter-evans/create-or-update-comment@v4
        if: steps.generate-comment.outputs.comment
        uses: thollander/actions-comment-pull-request@v2
        with:
          comment-id: ${{ steps.find-comment.outputs.comment-id }}
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          body-path: comment.txt
          edit-mode: replace
          pr_number: ${{ steps.pr-number.outputs.pr-number }}
          message: ${{ steps.generate-comment.outputs.comment }}
          comment_tag: PR Check Results
.github/workflows/release.yaml | 279
@@ -7,15 +7,12 @@ on:
        description: "The version to tag, without the leading 'v'. If omitted, will initiate a dry run (no uploads)."
        type: string
      sha:
        description: "The full sha of the commit to be released. If omitted, the latest commit on the default branch will be used."
        default: ""
        description: "Optionally, the full sha of the commit to be released"
        type: string
  pull_request:
    paths:
      # When we change pyproject.toml, we want to ensure that the maturin builds still work
      - pyproject.toml
      # And when we change this workflow itself...
      - .github/workflows/release.yaml

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
@@ -33,10 +30,8 @@ jobs:
  sdist:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.sha }}
      - uses: actions/setup-python@v5
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: "Prep README.md"
@@ -48,22 +43,21 @@ jobs:
          args: --out dist
      - name: "Test sdist"
        run: |
          rustup default $(cat rust-toolchain)
          pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
          ruff --help
          python -m ruff --help
      - name: "Upload sdist"
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v3
        with:
          name: wheels-sdist
          name: wheels
          path: dist

  macos-x86_64:
    runs-on: macos-12
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.sha }}
      - uses: actions/setup-python@v5
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
@@ -73,66 +67,63 @@ jobs:
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: x86_64
|
||||
args: --release --locked --out dist
|
||||
args: --release --out dist
|
||||
- name: "Test wheel - x86_64"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels-macos-x86_64
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz
|
||||
ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries-macos-x86_64
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
|
||||
|
||||
macos-aarch64:
|
||||
runs-on: macos-14
|
||||
macos-universal:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.sha }}
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
architecture: arm64
|
||||
architecture: x64
|
||||
- name: "Prep README.md"
|
||||
run: python scripts/transform_readme.py --target pypi
|
||||
- name: "Build wheels - aarch64"
|
||||
- name: "Build wheels - universal2"
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: aarch64
|
||||
args: --release --locked --out dist
|
||||
- name: "Test wheel - aarch64"
|
||||
args: --release --target universal2-apple-darwin --out dist
|
||||
- name: "Test wheel - universal2"
|
||||
run: |
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
|
||||
ruff --help
|
||||
python -m ruff --help
|
||||
- name: "Upload wheels"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels-aarch64-apple-darwin
|
||||
name: wheels
|
||||
path: dist
|
||||
- name: "Archive binary"
|
||||
run: |
|
||||
ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz
|
||||
ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz
|
||||
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
|
||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries-aarch64-apple-darwin
|
||||
name: binaries
|
||||
path: |
|
||||
*.tar.gz
|
||||
*.sha256
@@ -149,10 +140,8 @@ jobs:
- target: aarch64-pc-windows-msvc
arch: x64
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.platform.arch }}
@@ -162,7 +151,7 @@ jobs:
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel"
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
shell: bash
@@ -171,20 +160,20 @@ jobs:
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.platform.target }}
name: wheels
path: dist
- name: "Archive binary"
shell: bash
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.platform.target }}
name: binaries
path: |
*.zip
*.sha256
@@ -197,10 +186,8 @@ jobs:
- x86_64-unknown-linux-gnu
- i686-unknown-linux-gnu
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -211,7 +198,7 @@ jobs:
with:
target: ${{ matrix.target }}
manylinux: auto
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel"
if: ${{ startsWith(matrix.target, 'x86_64') }}
run: |
@@ -219,19 +206,19 @@ jobs:
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.target }}
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.target }}
name: binaries
path: |
*.tar.gz
*.sha256
@@ -252,18 +239,12 @@ jobs:
arch: s390x
- target: powerpc64le-unknown-linux-gnu
arch: ppc64le
# see https://github.com/astral-sh/ruff/issues/10073
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: powerpc64-unknown-linux-gnu
arch: ppc64
# see https://github.com/astral-sh/ruff/issues/10073
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16

steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -274,7 +255,7 @@ jobs:
target: ${{ matrix.platform.target }}
manylinux: auto
docker-options: ${{ matrix.platform.maturin_docker_options }}
args: --release --locked --out dist
args: --release --out dist
- uses: uraimo/run-on-arch-action@v2
if: matrix.platform.arch != 'ppc64'
name: Test wheel
@@ -290,19 +271,19 @@ jobs:
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.platform.target }}
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.platform.target }}
name: binaries
path: |
*.tar.gz
*.sha256
@@ -315,10 +296,8 @@ jobs:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
@@ -329,7 +308,7 @@ jobs:
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
args: --release --out dist
- name: "Test wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
@@ -337,24 +316,24 @@ jobs:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add python3
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/ruff check --help
apk add py3-pip
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.target }}
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.target }}
name: binaries
path: |
*.tar.gz
*.sha256
@@ -371,10 +350,8 @@ jobs:
arch: armv7

steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v5
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
@@ -384,7 +361,7 @@ jobs:
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
args: --release --out dist
docker-options: ${{ matrix.platform.maturin_docker_options }}
- uses: uraimo/run-on-arch-action@v2
name: Test wheel
@@ -393,25 +370,24 @@ jobs:
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add python3
apk add py3-pip
run: |
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/ruff check --help
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff check --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: wheels-${{ matrix.platform.target }}
name: wheels
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: binaries-${{ matrix.platform.target }}
name: binaries
path: |
*.tar.gz
*.sha256
@@ -422,22 +398,9 @@ jobs:
# If you don't set an input tag, it's a dry run (no uploads).
if: ${{ inputs.tag }}
steps:
- uses: actions/checkout@v4
with:
ref: main # We checkout the main branch to check for the commit
- name: Check main branch
if: ${{ inputs.sha }}
run: |
# Fetch the main branch since a shallow checkout is used by default
git fetch origin main --unshallow
if ! git branch --contains ${{ inputs.sha }} | grep -E '(^|\s)main$'; then
echo "The specified sha is not on the main branch" >&2
exit 1
fi
- uses: actions/checkout@v3
- name: Check tag consistency
run: |
# Switch to the commit we want to release
git checkout ${{ inputs.sha }}
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${{ inputs.tag }}" != "${version}" ]; then
echo "The input tag does not match the version from pyproject.toml:" >&2
@@ -447,12 +410,24 @@ jobs:
else
echo "Releasing ${version}"
fi
- name: Check SHA consistency
if: ${{ inputs.sha }}
run: |
git_sha=$(git rev-parse HEAD)
if [ "${{ inputs.sha }}" != "${git_sha}" ]; then
echo "The specified sha does not match the git checkout" >&2
echo "${{ inputs.sha }}" >&2
echo "${git_sha}" >&2
exit 1
else
echo "Releasing ${git_sha}"
fi

upload-release:
name: Upload to PyPI
runs-on: ubuntu-latest
needs:
- macos-aarch64
- macos-universal
- macos-x86_64
- windows
- linux
@@ -468,11 +443,10 @@ jobs:
# For pypi trusted publishing
id-token: write
steps:
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
pattern: wheels-*
name: wheels
path: wheels
merge-multiple: true
- name: Publish to PyPi
uses: pypa/gh-action-pypi-publish@release/v1
with:
@@ -490,9 +464,7 @@ jobs:
# For git tag
contents: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/checkout@v3
- name: git tag
run: |
git config user.email "hey@astral.sh"
@@ -512,74 +484,17 @@ jobs:
# For GitHub release publishing
contents: write
steps:
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
pattern: binaries-*
name: binaries
path: binaries
merge-multiple: true
- name: "Publish to GitHub"
uses: softprops/action-gh-release@v2
uses: softprops/action-gh-release@v1
with:
draft: true
files: binaries/*
tag_name: v${{ inputs.tag }}

docker-publish:
# This action doesn't need to wait on any other task, it's easy to re-tag if something failed and we're validating
# the tag here also
name: Push Docker image ghcr.io/astral-sh/ruff
runs-on: ubuntu-latest
environment:
name: release
permissions:
# For the docker push
packages: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}

- uses: docker/setup-buildx-action@v3

- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/astral-sh/ruff

- name: Check tag consistency
# Unlike validate-tag we don't check if the commit is on the main branch, but it seems good enough since we can
# change docker tags
if: ${{ inputs.tag }}
run: |
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${{ inputs.tag }}" != "${version}" ]; then
echo "The input tag does not match the version from pyproject.toml:" >&2
echo "${{ inputs.tag }}" >&2
echo "${version}" >&2
exit 1
else
echo "Releasing ${version}"
fi

- name: "Build and push Docker image"
uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm64
# Reuse the builder
cache-from: type=gha
cache-to: type=gha,mode=max
push: ${{ inputs.tag != '' }}
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ inputs.tag || 'dry-run' }}
labels: ${{ steps.meta.outputs.labels }}

# After the release has been published, we update downstream repositories
# This is separate because if this fails the release is still fine, we just need to do some manual workflow triggers
update-dependents:
@@ -588,7 +503,7 @@ jobs:
needs: publish-release
steps:
- name: "Update pre-commit mirror"
uses: actions/github-script@v7
uses: actions/github-script@v6
with:
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
script: |

9
.gitignore
vendored
@@ -1,5 +1,5 @@
# Benchmarking cpython (CONTRIBUTING.md)
crates/ruff_linter/resources/test/cpython
crates/ruff/resources/test/cpython
# generate_mkdocs.py
mkdocs.generated.yml
# check_ecosystem.py
@@ -92,7 +92,6 @@ coverage.xml
.hypothesis/
.pytest_cache/
cover/
repos/

# Translations
*.mo
@@ -209,9 +208,3 @@ cython_debug/
# VIM
.*.sw?
.sw?

# Custom re-inclusions for the resolver test cases
!crates/ruff_python_resolver/resources/test/airflow/venv/
!crates/ruff_python_resolver/resources/test/airflow/venv/lib
!crates/ruff_python_resolver/resources/test/airflow/venv/lib/python3.11/site-packages/_watchdog_fsevents.cpython-311-darwin.so
!crates/ruff_python_resolver/resources/test/airflow/venv/lib/python3.11/site-packages/orjson/orjson.cpython-311-darwin.so

@@ -13,8 +13,3 @@ MD041: false

# MD013/line-length
MD013: false

# MD024/no-duplicate-heading
MD024:
# Allow when nested under different parents e.g. CHANGELOG.md
siblings_only: true

@@ -2,10 +2,9 @@ fail_fast: true

exclude: |
(?x)^(
crates/red_knot/vendor/.*|
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
crates/ruff/resources/.*|
crates/ruff/src/rules/.*/snapshots/.*|
crates/ruff_cli/resources/.*|
crates/ruff_python_formatter/resources/.*|
crates/ruff_python_formatter/tests/snapshots/.*|
crates/ruff_python_resolver/resources/.*|
@@ -14,35 +13,26 @@ exclude: |

repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.17
rev: v0.12.1
hooks:
- id: validate-pyproject

- repo: https://github.com/executablebooks/mdformat
rev: 0.7.17
rev: 0.7.16
hooks:
- id: mdformat
additional_dependencies:
- mdformat-mkdocs
- mdformat-admon
exclude: |
(?x)^(
docs/formatter/black\.md
| docs/\w+\.md
)$
- mdformat-black
- black==23.1.0 # Must be the latest version of Black

- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.40.0
rev: v0.33.0
hooks:
- id: markdownlint-fix
exclude: |
(?x)^(
docs/formatter/black\.md
| docs/\w+\.md
)$

- repo: https://github.com/crate-ci/typos
rev: v1.21.0
rev: v1.14.12
hooks:
- id: typos

@@ -52,29 +42,25 @@ repos:
name: cargo fmt
entry: cargo fmt --
language: system
types: [rust]
types: [ rust ]
pass_filenames: false # This makes it a lot faster

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.4
hooks:
- id: ruff-format
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
types_or: [python, pyi]
name: ruff
entry: cargo run --bin ruff -- check --no-cache --force-exclude --fix --exit-non-zero-on-fix
language: system
types_or: [ python, pyi ]
require_serial: true
exclude: |
(?x)^(
crates/ruff_linter/resources/.*|
crates/ruff/resources/.*|
crates/ruff_python_formatter/resources/.*
)$

# Prettier
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0
# Black
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: prettier
types: [yaml]
- id: black

ci:
skip: [cargo-fmt, dev-generate-all]
skip: [ cargo-fmt, dev-generate-all ]

@@ -1,148 +1,5 @@
# Breaking Changes

## 0.3.0

### Ruff 2024.2 style

The formatter now formats code according to the Ruff 2024.2 style guide. Read the [changelog](./CHANGELOG.md#030) for a detailed list of stabilized style changes.

### `isort`: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))

Previously, Ruff used one or two blank lines (or the number configured by `isort.lines-after-imports`) after imports in typing stub files (`.pyi` files).
The [typing style guide for stubs](https://typing.readthedocs.io/en/latest/source/stubs.html#style-guide) recommends using at most 1 blank line for grouping.
As of this release, `isort` now always uses one blank line after imports in stub files, the same as the formatter.

### `build` is no longer excluded by default ([#10093](https://github.com/astral-sh/ruff/pull/10093))

Ruff maintains a list of directories and files that are excluded by default. This list now consists of the following patterns:

- `.bzr`
- `.direnv`
- `.eggs`
- `.git`
- `.git-rewrite`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `dist`
- `node_modules`
- `site-packages`
- `venv`

Previously, the `build` directory was included in this list. However, `build` is a fairly common directory
name, and excluding it by default caused confusion. Ruff no longer excludes `build` unless it is excluded by a `.gitignore` file
or listed in `extend-exclude`.
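
If a project depends on the old behavior, the exclusion can be restored explicitly. A minimal sketch using the CLI flag (an `extend-exclude` entry in `pyproject.toml` works equally well):

```shell
# Re-exclude the `build` directory now that it is no longer in the default list.
ruff check --extend-exclude build .
```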

### `--format` is no longer a valid `rule` or `linter` command option

Previously, `ruff rule` and `ruff linter` accepted the `--format <FORMAT>` option as an alias for `--output-format`. Ruff no longer
supports this alias. Please use `ruff rule --output-format <FORMAT>` and `ruff linter --output-format <FORMAT>` instead.
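
For example (an illustrative invocation; `B011` stands in for any rule code):

```shell
# Query rule and linter metadata as JSON; `--format json` now fails on these subcommands.
ruff rule B011 --output-format json
ruff linter --output-format json
```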

## 0.1.9

### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))

Ruff maintains a list of default exclusions, which now consists of the following patterns:

- `.bzr`
- `.direnv`
- `.eggs`
- `.git-rewrite`
- `.git`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `build`
- `dist`
- `node_modules`
- `site-packages`
- `venv`

Previously, the `site-packages` directory was not excluded by default. While `site-packages` tends
to be excluded anyway by virtue of the `.venv` exclusion, this may not be the case when using Ruff
from VS Code outside a virtual environment.

## 0.1.0

### The deprecated `format` setting has been removed

Ruff previously used the `format` setting, `--format` CLI option, and `RUFF_FORMAT` environment variable to
configure the output format of the CLI. This usage was deprecated in `v0.0.291` — the `format` setting is now used
to control Ruff's code formatting. As of this release:

- The `format` setting cannot be used to configure the output format, use `output-format` instead
- The `RUFF_FORMAT` environment variable is ignored, use `RUFF_OUTPUT_FORMAT` instead
- The `--format` option has been removed from `ruff check`, use `--output-format` instead

### Unsafe fixes are not applied by default ([#7769](https://github.com/astral-sh/ruff/pull/7769))

Ruff labels fixes as "safe" and "unsafe". The meaning and intent of your code will be retained when applying safe
fixes, but the meaning could be changed when applying unsafe fixes. Previously, unsafe fixes were always displayed
and applied when fixing was enabled. Now, unsafe fixes are hidden by default and not applied. The `--unsafe-fixes`
flag or `unsafe-fixes` configuration option can be used to enable unsafe fixes.

See the [docs](https://docs.astral.sh/ruff/configuration/#fix-safety) for details.
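
As a quick illustration (hypothetical commands against a project with fixable violations):

```shell
# Apply only safe fixes (the new default).
ruff check --fix .

# Opt in to unsafe fixes as well.
ruff check --fix --unsafe-fixes .
```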

### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))

Previously, Ruff enabled all implemented rules in Pycodestyle (`E`) by default. Ruff now only includes the
Pycodestyle prefixes `E4`, `E7`, and `E9` to exclude rules that conflict with automatic formatters. Consequently,
the stable rule set no longer includes `line-too-long` (`E501`) and `mixed-spaces-and-tabs` (`E101`). Other
excluded Pycodestyle rules include whitespace enforcement in `E1` and `E2`; these rules are currently in preview, and are already omitted by default.

This change only affects those using Ruff under its default rule set. Users that include `E` in their `select` will experience no change in behavior.

## 0.0.288

### Remove support for emoji identifiers ([#7212](https://github.com/astral-sh/ruff/pull/7212))

Previously, Ruff supported non-standard-compliant emoji identifiers, e.g. `📦 = 1`.
We decided to remove this non-standard language extension, and Ruff now reports syntax errors for emoji identifiers in your code, the same as CPython.

### Improved GitLab fingerprints ([#7203](https://github.com/astral-sh/ruff/pull/7203))

GitLab uses fingerprints to identify new, existing, or fixed violations. Previously, Ruff included the violation's position in the fingerprint. Using the location has the downside that changing any code before the violation causes the fingerprint to change, resulting in GitLab reporting one fixed and one new violation even though it is a pre-existing violation.

Ruff now uses a more stable, location-agnostic fingerprint to minimize the risk that existing violations are incorrectly marked as fixed and re-reported as new violations.

Expect GitLab to report each pre-existing violation in your project as both fixed and newly introduced in your Ruff upgrade PR.

## 0.0.283 / 0.284

### The target Python version now defaults to 3.8 instead of 3.10 ([#6397](https://github.com/astral-sh/ruff/pull/6397))

Previously, when a target Python version was not specified, Ruff would use a default of Python 3.10. However, it is safer to default to an _older_ Python version to avoid assuming the availability of new features. We now default to the oldest supported Python version, which is currently Python 3.8.

(We still support Python 3.7 but since [it has reached EOL](https://devguide.python.org/versions/#unsupported-versions) we've decided not to make it the default here.)

Note that this change was announced in 0.0.283 but not active until 0.0.284.
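
To opt out of the new default, the target version can be pinned explicitly; one option is the CLI flag (the `target-version` setting in `pyproject.toml` is equivalent):

```shell
# Explicitly target Python 3.10 to keep the pre-0.0.284 default behavior.
ruff check --target-version py310 .
```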

## 0.0.277

### `.ipynb_checkpoints`, `.pyenv`, `.pytest_cache`, and `.vscode` are now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))
@@ -417,4 +274,4 @@ default.
`pyproject.toml` files are now resolved hierarchically, such that for each Python file, we find
the first `pyproject.toml` file in its path, and use that to determine its lint settings.

See the [documentation](https://docs.astral.sh/ruff/configuration/#python-file-discovery) for more.
See the [documentation](https://beta.ruff.rs/docs/configuration/#python-file-discovery) for more.

1660
CHANGELOG.md
File diff suppressed because it is too large
@@ -72,7 +72,7 @@ representative at an online or offline event.

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
<charlie.r.marsh@gmail.com>.
charlie.r.marsh@gmail.com.
All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the

398
CONTRIBUTING.md
@@ -26,25 +26,30 @@ Welcome! We're happy to have you here. Thank you in advance for your contributio
- [`cargo dev`](#cargo-dev)
- [Subsystems](#subsystems)
- [Compilation Pipeline](#compilation-pipeline)
- [Import Categorization](#import-categorization)
- [Project root](#project-root)
- [Package root](#package-root)
- [Import categorization](#import-categorization-1)

## The Basics

Ruff welcomes contributions in the form of pull requests.
Ruff welcomes contributions in the form of Pull Requests.

For small changes (e.g., bug fixes), feel free to submit a PR.

For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
You can also join us on [Discord](https://discord.com/invite/astral-sh) to discuss your idea with the
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5) to discuss your idea with the
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
that are ready for contributions.

If you're looking for a place to start, we recommend implementing a new lint rule (see:
[_Adding a new lint rule_](#example-adding-a-new-lint-rule)), which will allow you to learn from and
pattern-match against the examples in the existing codebase. Many lint rules are inspired by
existing Python plugins, which can be used as a reference implementation.

As a concrete example: consider taking on one of the rules from the [`flake8-pyi`](https://github.com/astral-sh/ruff/issues/848)
plugin, and looking to the originating [Python source](https://github.com/PyCQA/flake8-pyi) for
guidance.

If you have suggestions on how we might improve the contributing documentation, [let us know](https://github.com/astral-sh/ruff/discussions/5693)!

### Prerequisites
@@ -58,35 +63,18 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
cargo install cargo-insta
```

And you'll need pre-commit to run some validation checks:
and pre-commit to run some validation checks:

```shell
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
```

You can optionally install pre-commit hooks to automatically run the validation checks
when making a commit:

```shell
pre-commit install
```

We recommend [nextest](https://nexte.st/) to run Ruff's test suite (via `cargo nextest run`),
though it's not strictly necessary:

```shell
cargo install cargo-nextest --locked
```

Throughout this guide, any usages of `cargo test` can be replaced with `cargo nextest run`,
if you choose to install `nextest`.

### Development

After cloning the repository, run Ruff locally from the repository root with:
After cloning the repository, run Ruff locally with:

```shell
cargo run -p ruff -- check /path/to/file.py --no-cache
cargo run -p ruff_cli -- check /path/to/file.py --no-cache
```

Prior to opening a pull request, ensure that your code has been auto-formatted,
@@ -98,7 +86,7 @@ RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
```

These checks will run on GitHub Actions when you open your pull request, but running them locally
These checks will run on GitHub Actions when you open your Pull Request, but running them locally
will save you time and expedite the merge process.

Note that many code changes also require updating the snapshot tests, which is done interactively
@@ -108,14 +96,7 @@ after running `cargo test` like so:
cargo insta review
```

If your pull request relates to a specific lint rule, include the category and rule code in the
title, as in the following examples:

- \[`flake8-bugbear`\] Avoid false positive for usage after `continue` (`B031`)
- \[`flake8-simplify`\] Detect implicit `else` cases in `needless-bool` (`SIM103`)
- \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)

Your pull request will be reviewed by a maintainer, which may involve a few rounds of iteration
Your Pull Request will be reviewed by a maintainer, which may involve a few rounds of iteration
prior to merging.

### Project Structure
@@ -123,16 +104,16 @@ prior to merging.
Ruff is structured as a monorepo with a [flat crate structure](https://matklad.github.io/2021/08/22/large-rust-workspaces.html),
such that all crates are contained in a flat `crates` directory.

The vast majority of the code, including all lint rules, lives in the `ruff_linter` crate (located
at `crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
The vast majority of the code, including all lint rules, lives in the `ruff` crate (located at
`crates/ruff`). As a contributor, that's the crate that'll be most relevant to you.

At the time of writing, the repository includes the following crates:
At time of writing, the repository includes the following crates:

- `crates/ruff_linter`: library crate containing all lint rules and the core logic for running them.
- `crates/ruff`: library crate containing all lint rules and the core logic for running them.
If you're working on a rule, this is the crate for you.
- `crates/ruff_benchmark`: binary crate for running micro-benchmarks.
- `crates/ruff_cache`: library crate for caching lint results.
- `crates/ruff`: binary crate containing Ruff's command-line interface.
- `crates/ruff_cli`: binary crate containing Ruff's command-line interface.
- `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g.,
`cargo dev generate-all`), see the [`cargo dev`](#cargo-dev) section below.
- `crates/ruff_diagnostics`: library crate for the rule-independent abstractions in the lint
@@ -141,9 +122,9 @@ At the time of writing, the repository includes the following crates:
intermediate representation. The backend for `ruff_python_formatter`.
- `crates/ruff_index`: library crate inspired by `rustc_index`.
- `crates/ruff_macros`: proc macro crate containing macros used by Ruff.
- `crates/ruff_notebook`: library crate for parsing and manipulating Jupyter notebooks.
- `crates/ruff_python_ast`: library crate containing Python-specific AST types and utilities.
- `crates/ruff_python_codegen`: library crate containing utilities for generating Python source code.
- `crates/ruff_python_ast`: library crate containing Python-specific AST types and utilities. Note
that the AST schema itself is defined in the
[rustpython-ast](https://github.com/astral-sh/RustPython-Parser) crate.
- `crates/ruff_python_formatter`: library crate implementing the Python formatter. Emits an
intermediate representation for each node, which `ruff_formatter` prints based on the configured
line length.
@@ -152,9 +133,10 @@ At the time of writing, the repository includes the following crates:
refer to?"
- `crates/ruff_python_stdlib`: library crate containing Python-specific standard library data, e.g.
the names of all built-in exceptions and which standard library types are immutable.
- `crates/ruff_python_trivia`: library crate containing Python-specific trivia utilities (e.g.,
for analyzing indentation, newlines, etc.).
- `crates/ruff_python_parser`: library crate containing the Python parser.
- `crates/ruff_python_whitespace`: library crate containing Python-specific whitespace analysis
logic (indentation and newlines).
- `crates/ruff_rustpython`: library crate containing `RustPython`-specific utilities.
- `crates/ruff_textwrap`: library crate to indent and dedent Python source code.
- `crates/ruff_wasm`: library crate for exposing Ruff as a WebAssembly module. Powers the
[Ruff Playground](https://play.ruff.rs/).

@@ -165,7 +147,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
1. Determine a name for the new rule as per our [rule naming convention](#rule-naming-convention)
(e.g., `AssertFalse`, as in, "allow `assert False`").

1. Create a file for your rule (e.g., `crates/ruff_linter/src/rules/flake8_bugbear/rules/assert_false.rs`).
1. Create a file for your rule (e.g., `crates/ruff/src/rules/flake8_bugbear/rules/assert_false.rs`).

1. In that file, define a violation struct (e.g., `pub struct AssertFalse`). You can grep for
`#[violation]` to see examples.
@@ -174,22 +156,18 @@ At a high level, the steps involved in adding a new lint rule are as follows:
(e.g., `pub(crate) fn assert_false`) based on whatever inputs are required for the rule (e.g.,
an `ast::StmtAssert` node).

1. Define the logic for invoking the diagnostic in `crates/ruff_linter/src/checkers/ast/analyze` (for
AST-based rules), `crates/ruff_linter/src/checkers/tokens.rs` (for token-based rules),
`crates/ruff_linter/src/checkers/physical_lines.rs` (for text-based rules),
`crates/ruff_linter/src/checkers/filesystem.rs` (for filesystem-based rules), etc. For AST-based rules,
you'll likely want to modify `analyze/statement.rs` (if your rule is based on analyzing
statements, like imports) or `analyze/expression.rs` (if your rule is based on analyzing
expressions, like function calls).
1. Define the logic for triggering the violation in `crates/ruff/src/checkers/ast/mod.rs` (for
AST-based checks), `crates/ruff/src/checkers/tokens.rs` (for token-based checks),
`crates/ruff/src/checkers/lines.rs` (for text-based checks), or
`crates/ruff/src/checkers/filesystem.rs` (for filesystem-based checks).

1. Map the violation struct to a rule code in `crates/ruff_linter/src/codes.rs` (e.g., `B011`). New rules
should be added in `RuleGroup::Preview`.
1. Map the violation struct to a rule code in `crates/ruff/src/codes.rs` (e.g., `B011`).

1. Add proper [testing](#rule-testing-fixtures-and-snapshots) for your rule.

1. Update the generated files (documentation and generated code).

To trigger the violation, you'll likely want to augment the logic in `crates/ruff_linter/src/checkers/ast.rs`
To trigger the violation, you'll likely want to augment the logic in `crates/ruff/src/checkers/ast.rs`
to call your new function at the appropriate time and with the appropriate inputs. The `Checker`
defined therein is a Python AST visitor, which iterates over the AST, building up a semantic model,
and calling out to lint rule analyzer functions as it goes.
@@ -197,14 +175,11 @@ and calling out to lint rule analyzer functions as it goes.
If you need to inspect the AST, you can run `cargo dev print-ast` with a Python file. Grep
for the `Diagnostic::new` invocations to understand how other, similar rules are implemented.
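
For example (the file path is illustrative):

```shell
# Dump the parsed AST for a Python file to see which node your rule should hook into.
cargo dev print-ast /path/to/file.py
```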

Once you're satisfied with your code, add tests for your rule
(see: [rule testing](#rule-testing-fixtures-and-snapshots)), and regenerate the documentation and
associated assets (like our JSON Schema) with `cargo dev generate-all`.
Once you're satisfied with your code, add tests for your rule. See [rule testing](#rule-testing-fixtures-and-snapshots)
for more details.

Finally, submit a pull request, and include the category, rule name, and rule code in the title, as
in:

> \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)
Finally, regenerate the documentation and other generated assets (like our JSON Schema) with `cargo dev generate-all`.

#### Rule naming convention

@@ -220,7 +195,7 @@ As such, rule names should...
For example, `AssertFalse` guards against `assert False` statements.

- _Not_ contain instructions on how to fix the violation, which instead belong in the rule
documentation and the `fix_title`.
documentation and the `autofix_title`.

- _Not_ contain a redundant prefix, like `Disallow` or `Banned`, which are already implied by the
convention.
@@ -237,7 +212,7 @@ Ruff's output for each fixture, which you can then commit alongside your changes

Once you've completed the code for the rule itself, you can define tests with the following steps:

1. Add a Python file to `crates/ruff_linter/resources/test/fixtures/[linter]` that contains the code you
1. Add a Python file to `crates/ruff/resources/test/fixtures/[linter]` that contains the code you
want to test. The file name should match the rule name (e.g., `E402.py`), and it should include
examples of both violations and non-violations.

@@ -246,16 +221,13 @@ Once you've completed the code for the rule itself, you can define tests with th
For example, if you're adding a new rule named `E402`, you would run:

```shell
cargo run -p ruff -- check crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py --no-cache --preview --select E402
cargo run -p ruff_cli -- check crates/ruff/resources/test/fixtures/pycodestyle/E402.py --no-cache
```

**Note:** Only a subset of rules are enabled by default. When testing a new rule, ensure that
you activate it by adding `--select ${rule_code}` to the command.

1. Add the test to the relevant `crates/ruff_linter/src/rules/[linter]/mod.rs` file. If you're contributing
1. Add the test to the relevant `crates/ruff/src/rules/[linter]/mod.rs` file. If you're contributing
a rule to a pre-existing set, you should be able to find a similar example to pattern-match
against. If you're adding a new linter, you'll need to create a new `mod.rs` file (see,
e.g., `crates/ruff_linter/src/rules/flake8_bugbear/mod.rs`)
e.g., `crates/ruff/src/rules/flake8_bugbear/mod.rs`)

1. Run `cargo test`. Your test will fail, but you'll be prompted to follow up
with `cargo insta review`. Run `cargo insta review`, review and accept the generated snapshot,
@@ -267,24 +239,25 @@ Once you've completed the code for the rule itself, you can define tests with th

Ruff's user-facing settings live in a few different places.

First, the command-line options are defined via the `Args` struct in `crates/ruff/src/args.rs`.
First, the command-line options are defined via the `Cli` struct in `crates/ruff/src/cli.rs`.

Second, the `pyproject.toml` options are defined in `crates/ruff_workspace/src/options.rs` (via the
`Options` struct), `crates/ruff_workspace/src/configuration.rs` (via the `Configuration` struct),
and `crates/ruff_workspace/src/settings.rs` (via the `Settings` struct), which then includes
the `LinterSettings` struct as a field.

These represent, respectively: the schema used to parse the `pyproject.toml` file; an internal,
intermediate representation; and the final, internal representation used to power Ruff.
Second, the `pyproject.toml` options are defined in `crates/ruff/src/settings/options.rs` (via the
`Options` struct), `crates/ruff/src/settings/configuration.rs` (via the `Configuration` struct), and
`crates/ruff/src/settings/mod.rs` (via the `Settings` struct). These represent, respectively: the
schema used to parse the `pyproject.toml` file; an internal, intermediate representation; and the
final, internal representation used to power Ruff.

To add a new configuration option, you'll likely want to modify these latter few files (along with
`arg.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
`cli.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
`dummy_variable_rgx`, which defines a regular expression to match against acceptable unused
variables (e.g., `_`).
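
For example, to see every layer that touches an existing option (an illustrative search):

```shell
# Find the definitions and uses of the `dummy_variable_rgx` option across the workspace.
rg dummy_variable_rgx crates/
```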

Note that plugin-specific configuration options are defined in their own modules (e.g.,
`Settings` in `crates/ruff_linter/src/flake8_unused_arguments/settings.rs` coupled with
`Flake8UnusedArgumentsOptions` in `crates/ruff_workspace/src/options.rs`).
`crates/ruff/src/flake8_unused_arguments/settings.rs`).

You may also want to add the new configuration option to the `flake8-to-ruff` tool, which is
responsible for converting `flake8` configuration files to Ruff's TOML format. This logic
lives in `crates/ruff/src/flake8_to_ruff/converter.rs`.

Finally, regenerate the documentation and generated code with `cargo dev generate-all`.

@@ -310,14 +283,14 @@ To preview any changes to the documentation locally:

```shell
# For contributors.
mkdocs serve -f mkdocs.public.yml
mkdocs serve -f mkdocs.generated.yml

# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
mkdocs serve -f mkdocs.insiders.yml
```

The documentation should then be available locally at
[http://127.0.0.1:8000/ruff/](http://127.0.0.1:8000/ruff/).
[http://127.0.0.1:8000/docs/](http://127.0.0.1:8000/docs/).

## Release Process

@@ -330,115 +303,93 @@ even patch releases may contain [non-backwards-compatible changes](https://semve

### Creating a new release

1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
1. Run `./scripts/release/bump.sh`; this command will:
- Generate a temporary virtual environment with `rooster`
- Generate a changelog entry in `CHANGELOG.md`
- Update versions in `pyproject.toml` and `Cargo.toml`
- Update references to versions in the `README.md` and documentation
- Display contributors for the release
1. The changelog should then be editorialized for consistency
- Often, labels will be missing from pull requests; they will need to be manually organized into the proper section
- Changes should be edited to be user-facing descriptions, avoiding internal details
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
1. Run `cargo check`. This should update the lock file with new versions.
1. Create a pull request with the changelog and version updates
1. Update the version with `rg 0.0.269 --files-with-matches | xargs sed -i 's/0.0.269/0.0.270/g'`
1. Update `BREAKING_CHANGES.md`
1. Create a PR with the version and `BREAKING_CHANGES.md` updated
1. Merge the PR
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yaml) with:
- The new version number (without starting `v`)
- The commit hash of the merged release pull request on `main`
1. Run the release workflow with the version number (without starting `v`) as input. Make sure
main has your merged PR as the last commit.
1. The release workflow will do the following:
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
1. Build all the assets. If this fails (even though we tested in step 4), we haven’t tagged or
uploaded anything, you can restart after pushing a fix.
1. Upload to PyPI.
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/charliermarsh/ruff/issues/4468)).
1. Attach artifacts to the draft GitHub release
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
downstream jobs manually if needed.
1. Publish the GitHub release
1. Open the draft release in the GitHub release section
1. Copy the changelog for the release into the GitHub release
- See previous releases for formatting of section headers
1. Append the contributors from the `bump.sh` script
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
1. An update is needed when `git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
1. Once run successfully, you should follow the link in the output to create a PR.
1. Create release notes in GitHub UI and promote from draft.
1. If needed, [update the schemastore](https://github.com/charliermarsh/ruff/blob/main/scripts/update_schemastore.py)
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
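
As a concrete sketch of the schemastore check (the version tags are illustrative):

```shell
# A non-empty diff between release tags means the schemastore needs an update.
git diff v0.4.3 v0.4.4 -- ruff.schema.json
```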

## Ecosystem CI

GitHub Actions will run your changes against a number of real-world projects from GitHub and
report on any linter or formatter differences. You can also run those checks locally via:
report on any diagnostic differences. You can also run those checks locally via:

```shell
pip install -e ./python/ruff-ecosystem
ruff-ecosystem check ruff "./target/debug/ruff"
ruff-ecosystem format ruff "./target/debug/ruff"
python scripts/check_ecosystem.py path/to/your/ruff path/to/older/ruff
```

See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
You can also run the Ecosystem CI check in a Docker container across a larger set of projects by
downloading the [`known-github-tomls.json`](https://github.com/akx/ruff-usage-aggregate/blob/master/data/known-github-tomls.jsonl)
as `github_search.jsonl` and following the instructions in [scripts/Dockerfile.ecosystem](https://github.com/astral-sh/ruff/blob/main/scripts/Dockerfile.ecosystem).
Note that this check will take a while to run.

## Benchmarking and Profiling

We have several ways of benchmarking and profiling Ruff:

- Our main performance benchmark comparing Ruff with other tools on the CPython codebase
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Microbenchmarks which the linter or the formatter on individual files. There run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects

> \[!NOTE\]
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
> applications, like web browsers). You may also want to switch your CPU to a "performance"
> mode, if it exists, especially when benchmarking short-lived processes.

### CPython Benchmark

First, clone [CPython](https://github.com/python/cpython). It's a large and diverse Python codebase,
which makes it a good target for benchmarking.

```shell
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff/resources/test/cpython
```

To benchmark the release build:

```shell
cargo build --release && hyperfine --warmup 10 \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"
"./target/release/ruff ./crates/ruff/resources/test/cpython/ --no-cache -e" \
"./target/release/ruff ./crates/ruff/resources/test/cpython/ -e"

Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
Benchmark 1: ./target/release/ruff ./crates/ruff/resources/test/cpython/ --no-cache
Time (mean ± σ): 293.8 ms ± 3.2 ms [User: 2384.6 ms, System: 90.3 ms]
Range (min … max): 289.9 ms … 301.6 ms 10 runs

Benchmark 2: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/
Benchmark 2: ./target/release/ruff ./crates/ruff/resources/test/cpython/
Time (mean ± σ): 48.0 ms ± 3.1 ms [User: 65.2 ms, System: 124.7 ms]
Range (min … max): 45.0 ms … 66.7 ms 62 runs

Summary
'./target/release/ruff ./crates/ruff_linter/resources/test/cpython/' ran
6.12 ± 0.41 times faster than './target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache'
'./target/release/ruff ./crates/ruff/resources/test/cpython/' ran
6.12 ± 0.41 times faster than './target/release/ruff ./crates/ruff/resources/test/cpython/ --no-cache'
```

To benchmark against the ecosystem's existing tools:

```shell
hyperfine --ignore-failure --warmup 5 \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
"pyflakes crates/ruff_linter/resources/test/cpython" \
"./target/release/ruff ./crates/ruff/resources/test/cpython/ --no-cache" \
"pyflakes crates/ruff/resources/test/cpython" \
"autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
"pycodestyle crates/ruff_linter/resources/test/cpython" \
"flake8 crates/ruff_linter/resources/test/cpython"
"pycodestyle crates/ruff/resources/test/cpython" \
"flake8 crates/ruff/resources/test/cpython"

Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
Benchmark 1: ./target/release/ruff ./crates/ruff/resources/test/cpython/ --no-cache
Time (mean ± σ): 294.3 ms ± 3.3 ms [User: 2467.5 ms, System: 89.6 ms]
Range (min … max): 291.1 ms … 302.8 ms 10 runs

Warning: Ignoring non-zero exit code.

Benchmark 2: pyflakes crates/ruff_linter/resources/test/cpython
Benchmark 2: pyflakes crates/ruff/resources/test/cpython
Time (mean ± σ): 15.786 s ± 0.143 s [User: 15.560 s, System: 0.214 s]
Range (min … max): 15.640 s … 16.157 s 10 runs

@@ -448,31 +399,31 @@ Benchmark 3: autoflake --recursive --expand-star-imports --remove-all-unused-imp
Time (mean ± σ): 6.175 s ± 0.169 s [User: 54.102 s, System: 1.057 s]
Range (min … max): 5.950 s … 6.391 s 10 runs

Benchmark 4: pycodestyle crates/ruff_linter/resources/test/cpython
Benchmark 4: pycodestyle crates/ruff/resources/test/cpython
Time (mean ± σ): 46.921 s ± 0.508 s [User: 46.699 s, System: 0.202 s]
Range (min … max): 46.171 s … 47.863 s 10 runs

Warning: Ignoring non-zero exit code.

Benchmark 5: flake8 crates/ruff_linter/resources/test/cpython
Benchmark 5: flake8 crates/ruff/resources/test/cpython
Time (mean ± σ): 12.260 s ± 0.321 s [User: 102.934 s, System: 1.230 s]
Range (min … max): 11.848 s … 12.933 s 10 runs

Warning: Ignoring non-zero exit code.

Summary
'./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache' ran
'./target/release/ruff ./crates/ruff/resources/test/cpython/ --no-cache' ran
20.98 ± 0.62 times faster than 'autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython'
41.66 ± 1.18 times faster than 'flake8 crates/ruff_linter/resources/test/cpython'
53.64 ± 0.77 times faster than 'pyflakes crates/ruff_linter/resources/test/cpython'
159.43 ± 2.48 times faster than 'pycodestyle crates/ruff_linter/resources/test/cpython'
41.66 ± 1.18 times faster than 'flake8 crates/ruff/resources/test/cpython'
53.64 ± 0.77 times faster than 'pyflakes crates/ruff/resources/test/cpython'
159.43 ± 2.48 times faster than 'pycodestyle crates/ruff/resources/test/cpython'
```

To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:

```shell
cargo build --release && hyperfine --warmup 10 \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
"./target/release/ruff ./crates/ruff/resources/test/cpython/ --no-cache -e --select W505,E501"
```

You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
@@ -505,10 +456,10 @@ rm Lib/test/bad_coding.py \
Lib/test/test_typing.py
```

Then, from `crates/ruff_linter/resources/test/cpython`, run: `time pylint -j 0 -E $(git ls-files '*.py')`. This
Then, from `crates/ruff/resources/test/cpython`, run: `time pylint -j 0 -E $(git ls-files '*.py')`. This
will execute Pylint with maximum parallelism and only report errors.

To benchmark Pyupgrade, run the following from `crates/ruff_linter/resources/test/cpython`:
To benchmark Pyupgrade, run the following from `crates/ruff/resources/test/cpython`:

```shell
hyperfine --ignore-failure --warmup 5 --prepare "git reset --hard HEAD" \
|
||||
@@ -538,10 +489,10 @@ if the benchmark improved/regressed compared to that baseline.
|
||||
|
||||
```shell
|
||||
# Run once on your "baseline" code
|
||||
cargo bench -p ruff_benchmark -- --save-baseline=main
|
||||
cargo benchmark --save-baseline=main
|
||||
|
||||
# Then iterate with
|
||||
cargo bench -p ruff_benchmark -- --baseline=main
|
||||
cargo benchmark --baseline=main
|
||||
```
|
||||
|
||||
#### PR Summary
|
||||
@@ -551,10 +502,10 @@ This is useful to illustrate the improvements of a PR.
|
||||
|
||||
```shell
|
||||
# On main
|
||||
cargo bench -p ruff_benchmark -- --save-baseline=main
|
||||
cargo benchmark --save-baseline=main
|
||||
|
||||
# After applying your changes
|
||||
cargo bench -p ruff_benchmark -- --save-baseline=pr
|
||||
cargo benchmark --save-baseline=pr
|
||||
|
||||
critcmp main pr
|
||||
```
|
||||
@@ -567,10 +518,10 @@ cargo install critcmp
|
||||
|
||||
#### Tips
|
||||
|
||||
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
|
||||
to only run the lexer benchmarks.
|
||||
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
|
||||
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
|
||||
- Use `cargo benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark linter/pydantic`
|
||||
to only run the pydantic tests.
|
||||
- Use `cargo benchmark --quiet` for a more cleaned up output (without statistical relevance)
|
||||
- Use `cargo benchmark --quick` to get faster results (more prone to noise)
|
||||
|
||||
### Profiling Projects

@@ -581,10 +532,10 @@ examples.

#### Linux

Install `perf` and build `ruff_benchmark` with the `profiling` profile and then run it with perf
Install `perf` and build `ruff_benchmark` with the `release-debug` profile and then run it with perf

```shell
cargo bench -p ruff_benchmark --no-run --profile=profiling && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=profiling -- --profile-time=1
cargo bench -p ruff_benchmark --no-run --profile=release-debug && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=release-debug -- --profile-time=1
```

You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to
@@ -592,8 +543,8 @@ gather enough samples for a good flamegraph (change the 999, the sample rate, an
of checks, to your liking)

```shell
cargo build --bin ruff_dev --profile=profiling
perf record -g -F 999 target/profiling/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
cargo build --bin ruff_dev --profile=release-debug
perf record -g -F 999 target/release-debug/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
```

Then convert the recorded profile
@@ -609,7 +560,7 @@ An alternative is to convert the perf data to `flamegraph.svg` using
[flamegraph](https://github.com/flamegraph-rs/flamegraph) (`cargo install flamegraph`):

```shell
flamegraph --perfdata perf.data --no-inline
flamegraph --perfdata perf.data
```

#### Mac
@@ -623,7 +574,7 @@ cargo install cargo-instruments
Then run the profiler with

```shell
cargo instruments -t time --bench linter --profile profiling -p ruff_benchmark -- --profile-time=1
cargo instruments -t time --bench linter --profile release-debug -p ruff_benchmark -- --profile-time=1
```

- `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc`
@@ -638,7 +589,7 @@ Otherwise, follow the instructions from the linux section.
utils with it:

- `cargo dev print-ast <file>`: Print the AST of a python file using the
  [RustPython parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser) that is
  [RustPython parser](https://github.com/astral-sh/RustPython-Parser/tree/main/parser) that is
  mainly used in Ruff. For `if True: pass # comment`, you can see the syntax tree, the byte offsets
  for start and stop of each node and also how the `:` token, the comment and whitespace are not
  represented anymore:
@@ -756,8 +707,8 @@ Module {
- `cargo dev generate-cli-help`, `cargo dev generate-docs` and `cargo dev generate-json-schema`:
  Update just `docs/configuration.md`, `docs/rules` and `ruff.schema.json` respectively.
- `cargo dev generate-options`: Generate a markdown-compatible table of all `pyproject.toml`
  options. Used for <https://docs.astral.sh/ruff/settings/>.
- `cargo dev generate-rules-table`: Generate a markdown-compatible table of all rules. Used for <https://docs.astral.sh/ruff/rules/>.
  options. Used for <https://beta.ruff.rs/docs/settings/>
- `cargo dev generate-rules-table`: Generate a markdown-compatible table of all rules. Used for <https://beta.ruff.rs/docs/rules/>
- `cargo dev round-trip <python file or jupyter notebook>`: Read a Python file or Jupyter Notebook,
  parse it, serialize the parsed representation and write it back. Used to check how good our
  representation is so that fixes don't rewrite irrelevant parts of a file.
@@ -772,9 +723,9 @@ diagnostics, then our current compilation pipeline proceeds as follows:

1. **File discovery**: Given paths like `foo/`, locate all Python files in any specified subdirectories, taking into account our hierarchical settings system and any `exclude` options.

1. **Package resolution**: Determine the "package root" for every file by traversing over its parent directories and looking for `__init__.py` files.

1. **Cache initialization**: For every "package root", initialize an empty cache.

1. **Analysis**: For every file, in parallel:

@@ -782,7 +733,7 @@ diagnostics, then our current compilation pipeline proceeds as follows:

1. **Tokenization**: Run the lexer over the file to generate a token stream.

1. **Indexing**: Extract metadata from the token stream, such as: comment ranges, `# noqa` locations, `# isort: off` locations, "doc lines", etc.

1. **Token-based rule evaluation**: Run any lint rules that are based on the contents of the token stream (e.g., commented-out code).

@@ -792,9 +743,9 @@ diagnostics, then our current compilation pipeline proceeds as follows:

1. **Parsing**: Run the parser over the token stream to produce an AST. (This consumes the token stream, so anything that relies on the token stream needs to happen before parsing.)

1. **AST-based rule evaluation**: Run any lint rules that are based on the AST. This includes the vast majority of lint rules. As part of this step, we also build the semantic model for the current file as we traverse over the AST. Some lint rules are evaluated eagerly, as we iterate over the AST, while others are evaluated in a deferred manner (e.g., unused imports, since we can't determine whether an import is unused until we've finished analyzing the entire file), after we've finished the initial traversal.

1. **Import-based rule evaluation**: Run any lint rules that are based on the module's imports (e.g., import sorting). These could, in theory, be included in the AST-based rule evaluation phase — they're just separated for simplicity.

1. **Physical line-based rule evaluation**: Run any lint rules that are based on physical lines (e.g., line-length).

@@ -803,116 +754,3 @@ diagnostics, then our current compilation pipeline proceeds as follows:
1. **Cache write**: Write the generated diagnostics to the package cache using the file as a key.

1. **Reporting**: Print diagnostics in the specified format (text, JSON, etc.), to the specified output channel (stdout, a file, etc.).
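
The ordering constraint in the parsing step is worth making concrete. Below is a deliberately tiny, hypothetical Rust sketch (all names are stand-ins, not Ruff's real types or functions) of the per-file flow; Rust's ownership makes the constraint explicit, since parsing takes ownership of the token stream and token-based steps must therefore run first:

```rust
// Stand-in types; Ruff's real pipeline is far richer than this.
struct Token;
struct Ast;

fn lex(source: &str) -> Vec<Token> {
    source.split_whitespace().map(|_| Token).collect()
}

// Parsing takes ownership of (i.e., consumes) the token stream.
fn parse(tokens: Vec<Token>) -> Ast {
    let _ = tokens;
    Ast
}

fn check_file(source: &str) -> Vec<String> {
    let tokens = lex(source); // 1. Tokenization (and indexing)
    let mut diagnostics = vec![format!("token rules saw {} tokens", tokens.len())];
    let _ast = parse(tokens); // 2. Parsing: `tokens` is moved and gone
    diagnostics.push("AST-based rules ran".to_string()); // 3. AST + semantic model
    diagnostics.push("physical line rules ran".to_string()); // 4. Physical lines
    diagnostics
}

fn main() {
    for diagnostic in check_file("if True: pass") {
        println!("{diagnostic}");
    }
}
```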

### Import Categorization

To understand Ruff's import categorization system, we first need to define two concepts:

- "Project root": The directory containing the `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file,
  discovered by identifying the "closest" such directory for each Python file. (If you're running
  via `ruff --config /path/to/pyproject.toml`, then the current working directory is used as the
  "project root".)
- "Package root": The top-most directory defining the Python package that includes a given Python
  file. To find the package root for a given Python file, traverse up its parent directories until
  you reach a parent directory that doesn't contain an `__init__.py` file (and isn't in a subtree
  marked as a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
  just before that, i.e., the first directory in the package.

For example, given:

```text
my_project
├── pyproject.toml
└── src
    └── foo
        ├── __init__.py
        └── bar
            ├── __init__.py
            └── baz.py
```

Then when analyzing `baz.py`, the project root would be the top-level directory (`./my_project`),
and the package root would be `./my_project/src/foo`.
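
To make the traversal concrete, here is a minimal Rust sketch of the walk just described. It is an illustrative helper, not Ruff's actual implementation, and it ignores the namespace-package exception:

```rust
use std::path::{Path, PathBuf};

/// Walk up from a Python file, remembering the last ancestor that contained
/// an `__init__.py`; that ancestor is the package root. (The real logic also
/// has to honor the `namespace-packages` setting, which is omitted here.)
fn package_root(file: &Path) -> Option<PathBuf> {
    let mut dir = file.parent();
    let mut root = None;
    while let Some(d) = dir {
        if !d.join("__init__.py").is_file() {
            break;
        }
        root = Some(d.to_path_buf());
        dir = d.parent();
    }
    root
}

fn main() {
    // With the example layout above on disk, this prints
    // `Some("my_project/src/foo")`.
    println!("{:?}", package_root(Path::new("my_project/src/foo/bar/baz.py")));
}
```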

#### Project root

The project root does not have a significant impact beyond that all relative paths within the loaded
configuration file are resolved relative to the project root.

For example, to indicate that `bar` above is a namespace package (it isn't, but let's run with it),
the `pyproject.toml` would list `namespace-packages = ["./src/bar"]`, which would resolve
to `my_project/src/bar`.

The same logic applies when providing a configuration file via `--config`. In that case, the
_current working directory_ is used as the project root, and so all paths in that configuration file
are resolved relative to the current working directory. (As a general rule, we want to avoid relying
on the current working directory as much as possible, to ensure that Ruff exhibits the same behavior
regardless of where and how you invoke it — but that's hard to avoid in this case.)

Additionally, if a `pyproject.toml` file _extends_ another configuration file, Ruff will still use
the directory containing that `pyproject.toml` file as the project root. For example, if
`./my_project/pyproject.toml` contains:

```toml
[tool.ruff]
extend = "/path/to/pyproject.toml"
```

Then Ruff will use `./my_project` as the project root, even though the configuration file extends
`/path/to/pyproject.toml`. As such, if the configuration file at `/path/to/pyproject.toml` contains
any relative paths, they will be resolved relative to `./my_project`.

If a project uses nested configuration files, then Ruff would detect multiple project roots, one for
each configuration file.

#### Package root

The package root is used to determine a file's "module path". Consider, again, `baz.py`. In that
case, `./my_project/src/foo` was identified as the package root, so the module path for `baz.py`
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
(inclusive of the root itself). The module path can be thought of as "the path you would use to
import the module" (e.g., `import foo.bar.baz`).

The package root and module path are used to, e.g., convert relative to absolute imports, and for
import categorization, as described below.
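
As a rough sketch (again, illustrative rather than Ruff's real code, with a hypothetical `module_path` helper), computing the module path is just a relative-path-to-dotted-name conversion:

```rust
use std::path::Path;

/// Take the file's path relative to the package root's parent (so the root
/// itself is included), drop the `.py` suffix, and join the pieces with dots.
fn module_path(package_root: &Path, file: &Path) -> Option<String> {
    let relative = file.strip_prefix(package_root.parent()?).ok()?;
    let segments: Vec<_> = relative
        .with_extension("") // `foo/bar/baz.py` -> `foo/bar/baz`
        .components()
        .map(|c| c.as_os_str().to_string_lossy().into_owned())
        .collect();
    Some(segments.join("."))
}

fn main() {
    let module = module_path(
        Path::new("my_project/src/foo"),
        Path::new("my_project/src/foo/bar/baz.py"),
    );
    assert_eq!(module.as_deref(), Some("foo.bar.baz"));
}
```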

#### Import categorization

When sorting and formatting import blocks, Ruff categorizes every import into one of five
categories:

1. **"Future"**: the import is a `__future__` import. That's easy: just look at the name of the
   imported module!
1. **"Standard library"**: the import comes from the Python standard library (e.g., `import os`).
   This is easy too: we include a list of all known standard library modules in Ruff itself, so it's
   a simple lookup.
1. **"Local folder"**: the import is a relative import (e.g., `from .foo import bar`). This is easy
   too: just check if the import includes a `level` (i.e., a dot-prefix).
1. **"First party"**: the import is part of the current project. (More on this below.)
1. **"Third party"**: everything else.

The real challenge lies in determining whether an import is first-party — everything else is either
trivial, or (as in the case of third-party) merely defined as "not first-party".

There are three ways in which an import can be categorized as "first-party":

1. **Explicit settings**: the import is marked as such via the `known-first-party` setting. (This
   should generally be seen as an escape hatch.)
1. **Same-package**: the imported module is in the same package as the current file. This gets back
   to the importance of the "package root" and the file's "module path". Imagine that we're
   analyzing `baz.py` above. If `baz.py` contains any imports that appear to come from the `foo`
   package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
   automatically. This check is as simple as comparing the first segment of the current file's
   module path to the first segment of the import.
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
   sets the directories to scan when identifying first-party imports. The algorithm is
   straightforward: given an import, like `import foo`, iterate over the directories enumerated in
   the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
   file `foo.py`.

By default, `src` is set to the project root. In the above example, we'd want to set
`src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
explicitly.
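
Putting the same-package and source-root heuristics together, a simplified sketch might look as follows. The `is_first_party` helper is hypothetical; the real implementation also handles `known-first-party`, namespace packages, and more:

```rust
use std::path::Path;

/// An import like `foo.bar` is treated as first-party if its first segment
/// matches the first segment of the current file's module path, or if some
/// `src` root contains a `foo/` directory or a `foo.py` file.
fn is_first_party(import: &str, module_path: &str, src_roots: &[&Path]) -> bool {
    let head = import.split('.').next().unwrap_or(import);
    // Same-package heuristic: compare first segments of the dotted paths.
    if module_path.split('.').next() == Some(head) {
        return true;
    }
    // Source-root heuristic: scan each `src` directory.
    src_roots
        .iter()
        .any(|root| root.join(head).is_dir() || root.join(format!("{head}.py")).is_file())
}

fn main() {
    // While analyzing `baz.py` (module path `foo.bar.baz`), `import foo.bar`
    // is first-party via the same-package heuristic alone.
    assert!(is_first_party("foo.bar", "foo.bar.baz", &[Path::new("./src")]));
}
```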

Cargo.lock (2570 lines changed, generated): file diff suppressed because it is too large.

Cargo.toml (201 lines changed)
@@ -4,168 +4,65 @@ resolver = "2"

[workspace.package]
edition = "2021"
rust-version = "1.71"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
rust-version = "1.70"
homepage = "https://beta.ruff.rs/docs"
documentation = "https://beta.ruff.rs/docs"
repository = "https://github.com/astral-sh/ruff"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
license = "MIT"

[workspace.dependencies]
aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
argfile = { version = "0.2.0" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "3.0.0" }
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
colored = { version = "2.1.0" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "5.5.3" }
dirs = { version = "5.0.0" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
anyhow = { version = "1.0.69" }
bitflags = { version = "2.3.1" }
chrono = { version = "0.4.23", default-features = false, features = ["clock"] }
clap = { version = "4.1.8", features = ["derive"] }
colored = { version = "2.0.0" }
filetime = { version = "0.2.20" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
hashbrown = "0.14.3"
hexf-parse = { version = "0.2.1" }
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.2.6" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1", features = ["filters", "glob"] }
insta-cmd = { version = "0.6.0" }
is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.12.1" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "0.1.2" }
lexical-parse-float = { version = "0.8.0", features = ["format"] }
libc = { version = "0.2.153" }
libcst = { version = "1.1.0", default-features = false }
globset = { version = "0.4.10" }
ignore = { version = "0.4.20" }
insta = { version = "1.30.0" }
is-macro = { version = "0.2.2" }
itertools = { version = "0.10.5" }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { version = "0.95.0", features = ["proposed"] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
parking_lot = "0.12.1"
pep440_rs = { version = "0.6.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.4.0" }
memchr = "2.5.0"
nohash-hasher = { version = "0.2.0" }
num-bigint = { version = "0.4.3" }
num-traits = { version = "0.2.15" }
once_cell = { version = "1.17.1" }
path-absolutize = { version = "3.0.14" }
proc-macro2 = { version = "1.0.51" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
result-like = { version = "0.5.0" }
regex = { version = "1.7.1" }
rustc-hash = { version = "1.1.0" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.4" }
serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = ["macros"] }
schemars = { version = "0.8.12" }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = { version = "1.0.93" }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
static_assertions = "1.1.0"
strum = { version = "0.26.0", features = ["strum_macros"] }
strum_macros = { version = "0.26.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "1.0.58" }
tikv-jemallocator = { version = "0.5.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.3.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
similar = { version = "2.2.1", features = ["inline"] }
smallvec = { version = "1.10.0" }
strum = { version = "0.24.1", features = ["strum_macros"] }
strum_macros = { version = "0.24.3" }
syn = { version = "2.0.15" }
test-case = { version = "3.0.0" }
thiserror = { version = "1.0.43" }
toml = { version = "0.7.2" }
wsl = { version = "0.1.0" }

[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
# v1.0.1
libcst = { git = "https://github.com/Instagram/LibCST.git", rev = "3cacca1a1029f05707e50703b49fe3dd860aa839", default-features = false }

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
# Allowed pedantic lints
char_lit_as_u8 = "allow"
collapsible_else_if = "allow"
collapsible_if = "allow"
implicit_hasher = "allow"
map_unwrap_or = "allow"
match_same_arms = "allow"
missing_errors_doc = "allow"
missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
too_many_lines = "allow"
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
print_stdout = "warn"
print_stderr = "warn"
dbg_macro = "warn"
empty_drop = "warn"
empty_structs_with_brackets = "warn"
exit = "warn"
get_unwrap = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
# Please tag the RustPython version every time you update its revision here and in fuzz/Cargo.toml
# Tagging the version ensures that older ruff versions continue to build from source even when we rebase our RustPython fork.
# Note: As of tag v0.0.8 we are cherry-picking commits instead of rebasing so the tag is not necessary
ruff_text_size = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "126652b684910c29a7bcc32293d4ca0f81454e34" }
rustpython-ast = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "126652b684910c29a7bcc32293d4ca0f81454e34", default-features = false, features = ["num-bigint"] }
rustpython-format = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "126652b684910c29a7bcc32293d4ca0f81454e34", default-features = false, features = ["num-bigint"] }
rustpython-literal = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "126652b684910c29a7bcc32293d4ca0f81454e34", default-features = false }
rustpython-parser = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "126652b684910c29a7bcc32293d4ca0f81454e34", default-features = false, features = ["full-lexer", "num-bigint"] }

[profile.release]
# Note that we set these explicitly, and these values
# were chosen based on a trade-off between compile times
# and runtime performance[1].
#
# [1]: https://github.com/astral-sh/ruff/pull/9031
lto = "thin"
codegen-units = 16

# Some crates don't change as much but benefit more from
# more expensive optimization passes, so we selectively
# decrease codegen-units in some cases.
[profile.release.package.ruff_python_parser]
codegen-units = 1
[profile.release.package.ruff_python_ast]
lto = "fat"
codegen-units = 1

[profile.dev.package.insta]
@@ -176,11 +73,11 @@ opt-level = 3

# Reduce complexity of a parser function that would trigger a locals limit in a wasm tool.
# https://github.com/bytecodealliance/wasm-tools/blob/b5c3d98e40590512a3b12470ef358d5c7b983b15/crates/wasmparser/src/limits.rs#L29
[profile.dev.package.ruff_python_parser]
[profile.dev.package.rustpython-parser]
opt-level = 1

# Use the `--profile profiling` flag to show symbols in release mode.
# e.g. `cargo build --profile profiling`
[profile.profiling]
# Use the `--profile release-debug` flag to show symbols in release mode.
# e.g. `cargo build --profile release-debug`
[profile.release-debug]
inherits = "release"
debug = 1

Dockerfile (38 lines changed)
@@ -1,38 +0,0 @@
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME

RUN apt update && apt install -y build-essential curl python3-venv

# Setup zig as cross compiling linker
RUN python3 -m venv $HOME/.venv
RUN .venv/bin/pip install cargo-zigbuild
ENV PATH="$HOME/.venv/bin:$PATH"

# Install rust
ARG TARGETPLATFORM
RUN case "$TARGETPLATFORM" in \
    "linux/arm64") echo "aarch64-unknown-linux-musl" > rust_target.txt ;; \
    "linux/amd64") echo "x86_64-unknown-linux-musl" > rust_target.txt ;; \
    *) exit 1 ;; \
    esac
# Update rustup whenever we bump the rust version
COPY rust-toolchain.toml rust-toolchain.toml
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
ENV PATH="$HOME/.cargo/bin:$PATH"
# Installs the correct toolchain version from rust-toolchain.toml and then the musl target
RUN rustup target add $(cat rust_target.txt)

# Build
COPY crates crates
COPY Cargo.toml Cargo.toml
COPY Cargo.lock Cargo.lock
RUN cargo zigbuild --bin ruff --target $(cat rust_target.txt) --release
RUN cp target/$(cat rust_target.txt)/release/ruff /ruff
# TODO: Optimize binary size, with a version that also works when cross compiling
# RUN strip --strip-all /ruff

FROM scratch
COPY --from=build /ruff /ruff
WORKDIR /io
ENTRYPOINT ["/ruff"]

LICENSE (72 lines changed)
@@ -1194,27 +1194,7 @@ are:

- flake8-self, licensed as follows:
  """
  MIT License

  Copyright (c) 2023 Korijn van Golen

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
  in the Software without restriction, including without limitation the rights
  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  copies of the Software, and to permit persons to whom the Software is
  furnished to do so, subject to the following conditions:

  The above copyright notice and this permission notice shall be included in all
  copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  SOFTWARE.
  Freely Distributable
  """

- flake8-django, licensed under the GPL license.
@@ -1244,56 +1224,6 @@ are:
  SOFTWARE.
  """

- flake8-logging, licensed as follows:
  """
  MIT License

  Copyright (c) 2023 Adam Johnson

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
  in the Software without restriction, including without limitation the rights
  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  copies of the Software, and to permit persons to whom the Software is
  furnished to do so, subject to the following conditions:

  The above copyright notice and this permission notice shall be included in all
  copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  SOFTWARE.
  """

- flake8-trio, licensed as follows:
  """
  MIT License

  Copyright (c) 2022 Zac Hatfield-Dodds

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
  in the Software without restriction, including without limitation the rights
  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  copies of the Software, and to permit persons to whom the Software is
  furnished to do so, subject to the following conditions:

  The above copyright notice and this permission notice shall be included in all
  copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  SOFTWARE.
  """

- Pyright, licensed as follows:
  """
  MIT License

README.md (202 lines changed)
@@ -2,16 +2,15 @@

# Ruff

[](https://github.com/astral-sh/ruff)
[](https://github.com/astral-sh/ruff)
[](https://pypi.python.org/pypi/ruff)
[](https://github.com/astral-sh/ruff/blob/main/LICENSE)
[](https://pypi.python.org/pypi/ruff)
[](https://pypi.python.org/pypi/ruff)
[](https://github.com/astral-sh/ruff/actions)
[](https://discord.com/invite/astral-sh)

[**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
[**Discord**](https://discord.gg/c9MhzV8aU5) | [**Docs**](https://beta.ruff.rs/docs/) | [**Playground**](https://play.ruff.rs/)

An extremely fast Python linter and code formatter, written in Rust.
An extremely fast Python linter, written in Rust.

<p align="center">
  <picture align="center">
@@ -25,38 +24,38 @@ An extremely fast Python linter and code formatter, written in Rust.
  <i>Linting the CPython codebase from scratch.</i>
</p>

- ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
- ⚡️ 10-100x faster than existing linters
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.12 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- 🤝 Python 3.11 compatibility
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
  of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
- 🔧 Autofix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [500 built-in rules](https://beta.ruff.rs/docs/rules/)
- ⚖️ [Near-parity](https://beta.ruff.rs/docs/faq/#how-does-ruff-compare-to-flake8) with the
  built-in Flake8 rule set
- 🔌 Native re-implementations of dozens of Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://beta.ruff.rs/docs/editor-integrations/) for
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://beta.ruff.rs/docs/configuration/#pyprojecttoml-discovery)

Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.

Ruff can be used to replace [Flake8](https://pypi.org/project/flake8/) (plus dozens of plugins),
[Black](https://github.com/psf/black), [isort](https://pypi.org/project/isort/),
[pydocstyle](https://pypi.org/project/pydocstyle/), [pyupgrade](https://pypi.org/project/pyupgrade/),
[autoflake](https://pypi.org/project/autoflake/), and more, all while executing tens or hundreds of
times faster than any individual tool.
[isort](https://pypi.org/project/isort/), [pydocstyle](https://pypi.org/project/pydocstyle/),
[yesqa](https://github.com/asottile/yesqa), [eradicate](https://pypi.org/project/eradicate/),
[pyupgrade](https://pypi.org/project/pyupgrade/), and [autoflake](https://pypi.org/project/autoflake/),
all while executing tens or hundreds of times faster than any individual tool.

Ruff is extremely actively developed and used in major open-source projects like:

- [Apache Airflow](https://github.com/apache/airflow)
- [Apache Superset](https://github.com/apache/superset)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Hugging Face](https://github.com/huggingface/transformers)
- [Pandas](https://github.com/pandas-dev/pandas)
- [SciPy](https://github.com/scipy/scipy)

...and [many more](#whos-using-ruff).
...and many more.

Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
@@ -99,7 +98,7 @@ developer of [Zulip](https://github.com/zulip/zulip):

## Table of Contents

For more, see the [documentation](https://docs.astral.sh/ruff/).
For more, see the [documentation](https://beta.ruff.rs/docs/).

1. [Getting Started](#getting-started)
1. [Configuration](#configuration)
@@ -112,7 +111,7 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).

## Getting Started

For more, see the [documentation](https://docs.astral.sh/ruff/).
For more, see the [documentation](https://beta.ruff.rs/docs/).

### Installation

@@ -123,42 +122,27 @@ pip install ruff
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
and with [a variety of other package managers](https://docs.astral.sh/ruff/installation/).
and with [a variety of other package managers](https://beta.ruff.rs/docs/installation/).

### Usage

To run Ruff as a linter, try any of the following:
To run Ruff, try any of the following:

```shell
ruff check                          # Lint all files in the current directory (and any subdirectories).
ruff check path/to/code/            # Lint all files in `/path/to/code` (and any subdirectories).
ruff check path/to/code/*.py        # Lint all `.py` files in `/path/to/code`.
ruff check path/to/code/to/file.py  # Lint `file.py`.
ruff check @arguments.txt           # Lint using an input file, treating its contents as newline-delimited command-line arguments.
ruff check .                        # Lint all files in the current directory (and any subdirectories)
ruff check path/to/code/            # Lint all files in `/path/to/code` (and any subdirectories)
ruff check path/to/code/*.py        # Lint all `.py` files in `/path/to/code`
ruff check path/to/code/to/file.py  # Lint `file.py`
```

Or, to run Ruff as a formatter:

```shell
ruff format                          # Format all files in the current directory (and any subdirectories).
ruff format path/to/code/            # Format all files in `/path/to/code` (and any subdirectories).
ruff format path/to/code/*.py        # Format all `.py` files in `/path/to/code`.
ruff format path/to/code/to/file.py  # Format `file.py`.
ruff format @arguments.txt           # Format using an input file, treating its contents as newline-delimited command-line arguments.
```

Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff-pre-commit`](https://github.com/astral-sh/ruff-pre-commit):
Ruff can also be used as a [pre-commit](https://pre-commit.com) hook:

```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.4.4
  rev: v0.0.278
  hooks:
    # Run the linter.
    - id: ruff
      args: [ --fix ]
    # Run the formatter.
    - id: ruff-format
```

Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
@@ -174,19 +158,28 @@ jobs:
  ruff:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - uses: chartboost/ruff-action@v1
```

### Configuration

Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
[_Configuration_](https://beta.ruff.rs/docs/configuration/), or [_Settings_](https://beta.ruff.rs/docs/settings/)
for a complete list of all configuration options).

If left unspecified, Ruff's default configuration is equivalent to the following `ruff.toml` file:
If left unspecified, the default configuration is equivalent to:

```toml
[tool.ruff]
# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default.
select = ["E", "F"]
ignore = []

# Allow autofix for all enabled rules (when `--fix`) is provided.
fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"]
unfixable = []

# Exclude a variety of commonly ignored directories.
exclude = [
    ".bzr",
@@ -195,90 +188,58 @@
    ".git",
    ".git-rewrite",
    ".hg",
    ".ipynb_checkpoints",
    ".mypy_cache",
    ".nox",
    ".pants.d",
    ".pyenv",
    ".pytest_cache",
    ".pytype",
    ".ruff_cache",
    ".svn",
    ".tox",
    ".venv",
    ".vscode",
    "__pypackages__",
    "_build",
    "buck-out",
    "build",
    "dist",
    "node_modules",
    "site-packages",
    "venv",
]

# Same as Black.
line-length = 88
indent-width = 4

# Assume Python 3.8
target-version = "py38"

[lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
select = ["E4", "E7", "E9", "F"]
ignore = []

# Allow fix for all enabled rules (when `--fix`) is provided.
fixable = ["ALL"]
unfixable = []

# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

[format]
# Like Black, use double quotes for strings.
quote-style = "double"
# Assume Python 3.10.
target-version = "py310"

# Like Black, indent with spaces, rather than tabs.
indent-style = "space"

# Like Black, respect magic trailing commas.
skip-magic-trailing-comma = false

# Like Black, automatically detect the appropriate line ending.
line-ending = "auto"
[tool.ruff.mccabe]
# Unlike Flake8, default to a complexity level of 10.
max-complexity = 10
```

Note that, in a `pyproject.toml`, each section header should be prefixed with `tool.ruff`. For
example, `[lint]` should be replaced with `[tool.ruff.lint]`.

Some configuration options can be provided via dedicated command-line arguments, such as those
related to rule enablement and disablement, file discovery, and logging level:
Some configuration options can be provided via the command-line, such as those related to
rule enablement and disablement, file discovery, logging level, and more:

```shell
ruff check --select F401 --select F403 --quiet
ruff check path/to/code/ --select F401 --select F403 --quiet
```

The remaining configuration options can be provided through a catch-all `--config` argument:

```shell
ruff check --config "lint.per-file-ignores = {'some_file.py' = ['F841']}"
```

See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` for more on the
linting command.

## Rules

<!-- Begin section: Rules -->

**Ruff supports over 800 lint rules**, many of which are inspired by popular tools like Flake8,
**Ruff supports over 500 lint rules**, many of which are inspired by popular tools like Flake8,
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
Rust as a first-party feature.

By default, Ruff enables Flake8's `F` rules, along with a subset of the `E` rules, omitting any
stylistic rules that overlap with the use of a formatter, like `ruff format` or
By default, Ruff enables Flake8's `E` and `F` rules. Ruff supports all rules from the `F` category,
and a [subset](https://beta.ruff.rs/docs/rules/#error-e) of the `E` category, omitting those
stylistic rules made obsolete by the use of an autoformatter, like
[Black](https://github.com/psf/black).

If you're just getting started with Ruff, **the default rule set is a great place to start**: it
@@ -313,7 +274,6 @@ quality tools, including:
- [flake8-gettext](https://pypi.org/project/flake8-gettext/)
- [flake8-implicit-str-concat](https://pypi.org/project/flake8-implicit-str-concat/)
- [flake8-import-conventions](https://github.com/joaopalmeiro/flake8-import-conventions)
- [flake8-logging](https://pypi.org/project/flake8-logging/)
- [flake8-logging-format](https://pypi.org/project/flake8-logging-format/)
- [flake8-no-pep420](https://pypi.org/project/flake8-no-pep420)
- [flake8-pie](https://pypi.org/project/flake8-pie/)
@@ -329,7 +289,6 @@ quality tools, including:
- [flake8-super](https://pypi.org/project/flake8-super/)
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
- [flake8-todos](https://pypi.org/project/flake8-todos/)
- [flake8-trio](https://pypi.org/project/flake8-trio/)
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/astral-sh/ruff/issues/2102))
@@ -344,21 +303,21 @@ quality tools, including:
- [tryceratops](https://pypi.org/project/tryceratops/)
- [yesqa](https://pypi.org/project/yesqa/)

For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).
For a complete enumeration of the supported rules, see [_Rules_](https://beta.ruff.rs/docs/rules/).

## Contributing

Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
[**contributing guidelines**](https://beta.ruff.rs/docs/contributing/).

You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).

## Support

Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).

You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).

## Acknowledgements

@@ -371,7 +330,7 @@ In some cases, Ruff includes a "direct" Rust port of the corresponding tool.
We're grateful to the maintainers of these tools for their work, and for all
the value they've provided to the Python community.

Ruff's formatter is built on a fork of Rome's [`rome_formatter`](https://github.com/rome/tools/tree/main/crates/rome_formatter),
Ruff's autoformatter is built on a fork of Rome's [`rome_formatter`](https://github.com/rome/tools/tree/main/crates/rome_formatter),
and again draws on both API and implementation details from [Rome](https://github.com/rome/tools),
[Prettier](https://github.com/prettier/prettier), and [Black](https://github.com/psf/black).

@@ -388,16 +347,14 @@ Ruff is released under the MIT license.

Ruff is used by a number of major open-source projects and companies, including:

- [Albumentations](https://github.com/albumentations-team/albumentations)
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
- [Apache Airflow](https://github.com/apache/airflow)
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Babel](https://github.com/python-babel/babel)
- [Bokeh](https://github.com/bokeh/bokeh)
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
- [DVC](https://github.com/iterative/dvc)
- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
@@ -406,18 +363,15 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Gradio](https://github.com/gradio-app/gradio)
- [Great Expectations](https://github.com/great-expectations/great_expectations)
- [HTTPX](https://github.com/encode/httpx)
- [Hatch](https://github.com/pypa/hatch)
- [Home Assistant](https://github.com/home-assistant/core)
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
  [Datasets](https://github.com/huggingface/datasets),
  [Diffusers](https://github.com/huggingface/diffusers))
- [Hatch](https://github.com/pypa/hatch)
- [Home Assistant](https://github.com/home-assistant/core)
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [Kraken Tech](https://kraken.tech/)
- [LangChain](https://github.com/hwchase17/langchain)
- [Litestar](https://litestar.dev/)
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
- Matrix ([Synapse](https://github.com/matrix-org/synapse))
- [MegaLinter](https://github.com/oxsecurity/megalinter)
@@ -425,43 +379,33 @@ Ruff is used by a number of major open-source projects and companies, including:
- Microsoft ([Semantic Kernel](https://github.com/microsoft/semantic-kernel),
  [ONNX Runtime](https://github.com/microsoft/onnxruntime),
  [LightGBM](https://github.com/microsoft/LightGBM))
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python))
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python-sdk))
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
- [Mypy](https://github.com/python/mypy)
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
- [Neon](https://github.com/neondatabase/neon)
- [Nokia](https://nokia.com/)
- [NoneBot](https://github.com/nonebot/nonebot2)
- [NumPyro](https://github.com/pyro-ppl/numpyro)
- [ONNX](https://github.com/onnx/onnx)
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
- [PDM](https://github.com/pdm-project/pdm)
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
- [Pandas](https://github.com/pandas-dev/pandas)
- [Pillow](https://github.com/python-pillow/Pillow)
- [Poetry](https://github.com/python-poetry/poetry)
- [Polars](https://github.com/pola-rs/polars)
- [PostHog](https://github.com/PostHog/posthog)
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
- [PyMC](https://github.com/pymc-devs/pymc/)
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
- [pytest](https://github.com/pytest-dev/pytest)
- [PyTorch](https://github.com/pytorch/pytorch)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
- [PyVista](https://github.com/pyvista/pyvista)
- [Reflex](https://github.com/reflex-dev/reflex)
- [River](https://github.com/online-ml/river)
- [Rippling](https://rippling.com)
- [Pynecone](https://github.com/pynecone-io/pynecone)
- [Robyn](https://github.com/sansyrox/robyn)
- [Saleor](https://github.com/saleor/saleor)
- Scale AI ([Launch SDK](https://github.com/scaleapi/launch-python-client))
- [SciPy](https://github.com/scipy/scipy)
- Snowflake ([SnowCLI](https://github.com/Snowflake-Labs/snowcli))
- [Saleor](https://github.com/saleor/saleor)
- [SciPy](https://github.com/scipy/scipy)
- [Sphinx](https://github.com/sphinx-doc/sphinx)
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
- [Starlette](https://github.com/encode/starlette)
- [Litestar](https://litestar.dev/)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Vega-Altair](https://github.com/altair-viz/altair)
- WordPress ([Openverse](https://github.com/WordPress/openverse))
@@ -477,16 +421,16 @@ Ruff is used by a number of major open-source projects and companies, including:

### Show Your Support

If you're using Ruff, consider adding the Ruff badge to your project's `README.md`:
If you're using Ruff, consider adding the Ruff badge to project's `README.md`:

```md
[](https://github.com/astral-sh/ruff)
[](https://github.com/astral-sh/ruff)
```

...or `README.rst`:

```rst
.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json
.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
    :target: https://github.com/astral-sh/ruff
    :alt: Ruff
```
@@ -494,15 +438,15 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
...or, as HTML:

```html
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```

## License

This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
MIT

<div align="center">
  <a target="_blank" href="https://astral.sh" style="background:none">
    <img src="https://raw.githubusercontent.com/astral-sh/ruff/main/assets/svg/Astral.svg" alt="Made by Astral">
    <img src="https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/svg/Astral.svg">
  </a>
</div>

12 _typos.toml

@@ -1,20 +1,10 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["crates/red_knot/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = ["resources", "snapshots"]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
hel = "hel"
whos = "whos"
spawnve = "spawnve"
ned = "ned"
pn = "pn" # `import panel as pn` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.
jod = "jod" # e.g., `jod-thread`

[default]
extend-ignore-re = [
    # Line ignore with trailing "spellchecker:disable-line"
    "(?Rm)^.*#\\s*spellchecker:disable-line$"
]
@@ -1,8 +0,0 @@
{
"label": "code style",
"message": "Ruff",
"logoSvg": "<svg width=\"510\" height=\"622\" viewBox=\"0 0 510 622\" fill=\"none\" xmlns=\"http://www.w3.org/2000/svg\"><path fill-rule=\"evenodd\" clip-rule=\"evenodd\" d=\"M206.701 0C200.964 0 196.314 4.64131 196.314 10.3667V41.4667C196.314 47.192 191.663 51.8333 185.927 51.8333H156.843C151.107 51.8333 146.456 56.4746 146.456 62.2V145.133C146.456 150.859 141.806 155.5 136.069 155.5H106.986C101.249 155.5 96.5988 160.141 96.5988 165.867V222.883C96.5988 228.609 91.9484 233.25 86.2118 233.25H57.1283C51.3917 233.25 46.7413 237.891 46.7413 243.617V300.633C46.7413 306.359 42.0909 311 36.3544 311H10.387C4.6504 311 0 315.641 0 321.367V352.467C0 358.192 4.6504 362.833 10.387 362.833H145.418C151.154 362.833 155.804 367.475 155.804 373.2V430.217C155.804 435.942 151.154 440.583 145.418 440.583H116.334C110.597 440.583 105.947 445.225 105.947 450.95V507.967C105.947 513.692 101.297 518.333 95.5601 518.333H66.4766C60.74 518.333 56.0896 522.975 56.0896 528.7V611.633C56.0896 617.359 60.74 622 66.4766 622H149.572C155.309 622 159.959 617.359 159.959 611.633V570.167H201.507C207.244 570.167 211.894 565.525 211.894 559.8V528.7C211.894 522.975 216.544 518.333 222.281 518.333H251.365C257.101 518.333 261.752 513.692 261.752 507.967V476.867C261.752 471.141 266.402 466.5 272.138 466.5H301.222C306.959 466.5 311.609 461.859 311.609 456.133V425.033C311.609 419.308 316.259 414.667 321.996 414.667H351.079C356.816 414.667 361.466 410.025 361.466 404.3V373.2C361.466 367.475 366.117 362.833 371.853 362.833H400.937C406.673 362.833 411.324 358.192 411.324 352.467V321.367C411.324 315.641 415.974 311 421.711 311H450.794C456.531 311 461.181 306.359 461.181 300.633V217.7C461.181 211.975 456.531 207.333 450.794 207.333H420.672C414.936 207.333 410.285 202.692 410.285 196.967V165.867C410.285 160.141 414.936 155.5 420.672 155.5H449.756C455.492 155.5 460.143 150.859 460.143 145.133V114.033C460.143 108.308 464.793 103.667 470.53 103.667H499.613C505.35 103.667 510 99.0253 510 93.3V10.3667C510 4.64132 505.35 0 499.613 0H206.701ZM168.269 440.583C162.532 440.583 157.882 445.225 157.882 450.95V507.967C157.882 513.692 153.231 518.333 147.495 518.333H118.411C112.675 518.333 108.024 522.975 108.024 528.7V559.8C108.024 565.525 112.675 570.167 118.411 570.167H159.959V528.7C159.959 522.975 164.61 518.333 170.346 518.333H199.43C205.166 518.333 209.817 513.692 209.817 507.967V476.867C209.817 471.141 214.467 466.5 220.204 466.5H249.287C255.024 466.5 259.674 461.859 259.674 456.133V425.033C259.674 419.308 264.325 414.667 270.061 414.667H299.145C304.881 414.667 309.532 410.025 309.532 404.3V373.2C309.532 367.475 314.182 362.833 319.919 362.833H349.002C354.739 362.833 359.389 358.192 359.389 352.467V321.367C359.389 315.641 364.039 311 369.776 311H398.859C404.596 311 409.246 306.359 409.246 300.633V269.533C409.246 263.808 404.596 259.167 398.859 259.167H318.88C313.143 259.167 308.493 254.525 308.493 248.8V217.7C308.493 211.975 313.143 207.333 318.88 207.333H347.963C353.7 207.333 358.35 202.692 358.35 196.967V165.867C358.35 160.141 363.001 155.5 368.737 155.5H397.821C403.557 155.5 408.208 150.859 408.208 145.133V114.033C408.208 108.308 412.858 103.667 418.595 103.667H447.678C453.415 103.667 458.065 99.0253 458.065 93.3V62.2C458.065 56.4746 453.415 51.8333 447.678 51.8333H208.778C203.041 51.8333 198.391 56.4746 198.391 62.2V145.133C198.391 150.859 193.741 155.5 188.004 155.5H158.921C153.184 155.5 148.534 160.141 148.534 165.867V222.883C148.534 228.609 143.883 233.25 138.147 233.25H109.063C103.327 233.25 98.6762 237.891 98.6762 243.617V300.633C98.6762 
306.359 103.327 311 109.063 311H197.352C203.089 311 207.739 315.641 207.739 321.367V430.217C207.739 435.942 203.089 440.583 197.352 440.583H168.269Z\" fill=\"#D7FF64\"/></svg>",
"logoWidth": 10,
"labelColor": "grey",
"color": "#261230"
}
14 clippy.toml

@@ -1,13 +1,7 @@
doc-valid-idents = [
    "..",
    "CodeQL",
    "FastAPI",
    "IPython",
    "LangChain",
    "LibCST",
    "McCabe",
    "NumPy",
    "SCREAMING_SNAKE_CASE",
    "SQLAlchemy",
    "StackOverflow",
    "CodeQL",
    "IPython",
    "NumPy",
    "..",
]
29 crates/flake8_to_ruff/Cargo.toml Normal file

@@ -0,0 +1,29 @@
[package]
name = "flake8-to-ruff"
version = "0.0.278"
description = """
Convert Flake8 configuration files to Ruff configuration files.
"""
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff = { path = "../ruff", default-features = false }

anyhow = { workspace = true }
clap = { workspace = true }
colored = { workspace = true }
configparser = { version = "3.0.2" }
once_cell = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
toml = { workspace = true }
99 crates/flake8_to_ruff/README.md Normal file

@@ -0,0 +1,99 @@
# flake8-to-ruff

Convert existing Flake8 configuration files (`setup.cfg`, `tox.ini`, or `.flake8`) for use with
[Ruff](https://github.com/astral-sh/ruff).

Generates a Ruff-compatible `pyproject.toml` section.

## Installation and Usage

### Installation

Available as [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) on PyPI:

```shell
pip install flake8-to-ruff
```

### Usage

To run `flake8-to-ruff`:

```shell
flake8-to-ruff path/to/setup.cfg
flake8-to-ruff path/to/tox.ini
flake8-to-ruff path/to/.flake8
```

`flake8-to-ruff` will print the relevant `pyproject.toml` sections to standard output, like so:

```toml
[tool.ruff]
exclude = [
    '.svn',
    'CVS',
    '.bzr',
    '.hg',
    '.git',
    '__pycache__',
    '.tox',
    '.idea',
    '.mypy_cache',
    '.venv',
    'node_modules',
    '_state_machine.py',
    'test_fstring.py',
    'bad_coding2.py',
    'badsyntax_*.py',
]
select = [
    'A',
    'E',
    'F',
    'Q',
]
ignore = []

[tool.ruff.flake8-quotes]
inline-quotes = 'single'

[tool.ruff.pep8-naming]
ignore-names = [
    'foo',
    'bar',
]
```

### Plugins

`flake8-to-ruff` will attempt to infer any activated plugins based on the settings provided in your
configuration file.

For example, if your `.flake8` file includes a `docstring-convention` property, `flake8-to-ruff`
will enable the appropriate [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
checks.

Alternatively, you can manually specify plugins on the command-line:

```shell
flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
```

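For illustration, here is a minimal Rust sketch of what that inference step can look like: scan the option names found in the configuration file and map the ones that are defined by a plugin to that plugin. The trimmed-down mapping and function below are hypothetical stand-ins for illustration, not the crate's actual API:

```rust
// Hypothetical sketch of the inference step described above: map known
// Flake8 option names to the plugin that defines them. The option and
// plugin names mirror this README; the function itself is invented.
fn infer_plugins(options: &[&str]) -> Vec<&'static str> {
    let mut plugins = Vec::new();
    for option in options {
        match *option {
            // `docstring-convention` is defined by flake8-docstrings.
            "docstring-convention" => plugins.push("flake8-docstrings"),
            // `inline-quotes` is defined by flake8-quotes.
            "inline-quotes" => plugins.push("flake8-quotes"),
            // Core options (e.g. `max-line-length`) don't imply a plugin.
            _ => {}
        }
    }
    plugins.dedup();
    plugins
}

fn main() {
    let detected = infer_plugins(&["max-line-length", "docstring-convention"]);
    assert_eq!(detected, vec!["flake8-docstrings"]);
}
```
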
## Limitations

1. Ruff only supports a subset of the Flake8 configuration options. `flake8-to-ruff` will warn on and
   ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
   configuration options that don't exist in Flake8.)
1. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
   codes from unsupported plugins. (See the
   [documentation](https://beta.ruff.rs/docs/faq/#how-does-ruff-compare-to-flake8) for the complete
   list of supported plugins.)

## License

MIT

## Contributing

Contributions are welcome and hugely appreciated. To get started, check out the
[contributing guidelines](https://github.com/astral-sh/ruff/blob/main/CONTRIBUTING.md).
65 crates/flake8_to_ruff/examples/cryptography/pyproject.toml Normal file

@@ -0,0 +1,65 @@
[build-system]
requires = [
    # The minimum setuptools version is specific to the PEP 517 backend,
    # and may be stricter than the version required in `setup.cfg`
    "setuptools>=40.6.0,!=60.9.0",
    "wheel",
    # Must be kept in sync with the `install_requirements` in `setup.cfg`
    "cffi>=1.12; platform_python_implementation != 'PyPy'",
    "setuptools-rust>=0.11.4",
]
build-backend = "setuptools.build_meta"

[tool.black]
line-length = 79
target-version = ["py36"]

[tool.pytest.ini_options]
addopts = "-r s --capture=no --strict-markers --benchmark-disable"
markers = [
    "skip_fips: this test is not executed in FIPS mode",
    "supported: parametrized test requiring only_if and skip_message",
]

[tool.mypy]
show_error_codes = true
check_untyped_defs = true
no_implicit_reexport = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_unused_configs = true
strict_equality = true

[[tool.mypy.overrides]]
module = [
    "pretend"
]
ignore_missing_imports = true

[tool.coverage.run]
branch = true
relative_files = true
source = [
    "cryptography",
    "tests/",
]

[tool.coverage.paths]
source = [
    "src/cryptography",
    "*.tox/*/lib*/python*/site-packages/cryptography",
    "*.tox\\*\\Lib\\site-packages\\cryptography",
    "*.tox/pypy/site-packages/cryptography",
]
tests = [
    "tests/",
    "*tests\\",
]

[tool.coverage.report]
exclude_lines = [
    "@abc.abstractmethod",
    "@abc.abstractproperty",
    "@typing.overload",
    "if typing.TYPE_CHECKING",
]
91 crates/flake8_to_ruff/examples/cryptography/setup.cfg Normal file

@@ -0,0 +1,91 @@
[metadata]
name = cryptography
version = attr: cryptography.__version__
description = cryptography is a package which provides cryptographic recipes and primitives to Python developers.
long_description = file: README.rst
long_description_content_type = text/x-rst
license = BSD-3-Clause OR Apache-2.0
url = https://github.com/pyca/cryptography
author = The Python Cryptographic Authority and individual contributors
author_email = cryptography-dev@python.org
project_urls =
    Documentation=https://cryptography.io/
    Source=https://github.com/pyca/cryptography/
    Issues=https://github.com/pyca/cryptography/issues
    Changelog=https://cryptography.io/en/latest/changelog/
classifiers =
    Development Status :: 5 - Production/Stable
    Intended Audience :: Developers
    License :: OSI Approved :: Apache Software License
    License :: OSI Approved :: BSD License
    Natural Language :: English
    Operating System :: MacOS :: MacOS X
    Operating System :: POSIX
    Operating System :: POSIX :: BSD
    Operating System :: POSIX :: Linux
    Operating System :: Microsoft :: Windows
    Programming Language :: Python
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3 :: Only
    Programming Language :: Python :: 3.6
    Programming Language :: Python :: 3.7
    Programming Language :: Python :: 3.8
    Programming Language :: Python :: 3.9
    Programming Language :: Python :: 3.10
    Programming Language :: Python :: 3.11
    Programming Language :: Python :: Implementation :: CPython
    Programming Language :: Python :: Implementation :: PyPy
    Topic :: Security :: Cryptography

[options]
python_requires = >=3.6
include_package_data = True
zip_safe = False
package_dir =
    =src
packages = find:
# `install_requires` must be kept in sync with `pyproject.toml`
install_requires =
    cffi >=1.12

[options.packages.find]
where = src
exclude =
    _cffi_src
    _cffi_src.*

[options.extras_require]
test =
    pytest>=6.2.0
    pytest-benchmark
    pytest-cov
    pytest-subtests
    pytest-xdist
    pretend
    iso8601
    pytz
    hypothesis>=1.11.4,!=3.79.2
docs =
    sphinx >= 1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0
    sphinx_rtd_theme
docstest =
    pyenchant >= 1.6.11
    twine >= 1.12.0
    sphinxcontrib-spelling >= 4.0.1
sdist =
    setuptools_rust >= 0.11.4
pep8test =
    black
    flake8
    flake8-import-order
    pep8-naming
# This extra is for OpenSSH private keys that use bcrypt KDF
# Versions: v3.1.3 - ignore_few_rounds, v3.1.5 - abi3
ssh =
    bcrypt >= 3.1.5

[flake8]
ignore = E203,E211,W503,W504,N818
exclude = .tox,*.egg,.git,_build,.hypothesis
select = E,W,F,N,I
application-import-names = cryptography,cryptography_vectors,tests
19 crates/flake8_to_ruff/examples/jupyterhub.ini Normal file

@@ -0,0 +1,19 @@
[flake8]
# Ignore style and complexity
# E: style errors
# W: style warnings
# C: complexity
# D: docstring warnings (unused pydocstyle extension)
# F841: local variable assigned but never used
ignore = E, C, W, D, F841
builtins = c, get_config
exclude =
    .cache,
    .github,
    docs,
    jupyterhub/alembic*,
    onbuild,
    scripts,
    share,
    tools,
    setup.py
43 crates/flake8_to_ruff/examples/manim.ini Normal file

@@ -0,0 +1,43 @@
[flake8]
# Exclude the grpc generated code
exclude = ./manim/grpc/gen/*
max-complexity = 15
max-line-length = 88
statistics = True
# Prevents some flake8-rst-docstrings errors
rst-roles = attr,class,func,meth,mod,obj,ref,doc,exc
rst-directives = manim, SEEALSO, seealso
docstring-convention=numpy

select = A,A00,B,B9,C4,C90,D,E,F,F,PT,RST,SIM,W

# General Compatibility
extend-ignore = E203, W503, D202, D212, D213, D404

    # Misc
    F401, F403, F405, F841, E501, E731, E402, F811, F821,

    # Plug-in: flake8-builtins
    A001, A002, A003,

    # Plug-in: flake8-bugbear
    B006, B007, B008, B009, B010, B903, B950,

    # Plug-in: flake8-simplify
    SIM105, SIM106, SIM119,

    # Plug-in: flake8-comprehensions
    C901

    # Plug-in: flake8-pytest-style
    PT001, PT004, PT006, PT011, PT018, PT022, PT023,

    # Plug-in: flake8-docstrings
    D100, D101, D102, D103, D104, D105, D106, D107,
    D200, D202, D204, D205, D209,
    D301,
    D400, D401, D402, D403, D405, D406, D407, D409, D411, D412, D414,

    # Plug-in: flake8-rst-docstrings
    RST201, RST203, RST210, RST212, RST213, RST215,
    RST301, RST303,
36 crates/flake8_to_ruff/examples/poetry.ini Normal file

@@ -0,0 +1,36 @@
[flake8]
min_python_version = 3.7.0
max-line-length = 88
ban-relative-imports = true
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
format-greedy = 1
inline-quotes = double
enable-extensions = TC, TC1
type-checking-strict = true
eradicate-whitelist-extend = ^-.*;
extend-ignore =
    # E203: Whitespace before ':' (pycqa/pycodestyle#373)
    E203,
    # SIM106: Handle error-cases first
    SIM106,
    # ANN101: Missing type annotation for self in method
    ANN101,
    # ANN102: Missing type annotation for cls in classmethod
    ANN102,
    # PIE781: assign-and-return
    PIE781,
    # PIE798 no-unnecessary-class: Consider using a module for namespacing instead
    PIE798,
per-file-ignores =
    # TC002: Move third-party import '...' into a type-checking block
    __init__.py:TC002,
    # ANN201: Missing return type annotation for public function
    tests/test_*:ANN201
    tests/**/test_*:ANN201
extend-exclude =
    # Frozen and not subject to change in this repo:
    get-poetry.py,
    install-poetry.py,
    # External to the project's coding standards:
    tests/fixtures/*,
    tests/**/fixtures/*,
19 crates/flake8_to_ruff/examples/python-discord.ini Normal file

@@ -0,0 +1,19 @@
[flake8]
max-line-length=120
docstring-convention=all
import-order-style=pycharm
application_import_names=bot,tests
exclude=.cache,.venv,.git,constants.py
extend-ignore=
    B311,W503,E226,S311,T000,E731
    # Missing Docstrings
    D100,D104,D105,D107,
    # Docstring Whitespace
    D203,D212,D214,D215,
    # Docstring Quotes
    D301,D302,
    # Docstring Content
    D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
    # Type Annotations
    ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
per-file-ignores=tests/*:D,ANN
6 crates/flake8_to_ruff/examples/requests.ini Normal file

@@ -0,0 +1,6 @@
[flake8]
ignore = E203, E501, W503
per-file-ignores =
    requests/__init__.py:E402, F401
    requests/compat.py:E402, F401
    tests/compat.py:F401
34 crates/flake8_to_ruff/pyproject.toml Normal file

@@ -0,0 +1,34 @@
[project]
name = "flake8-to-ruff"
keywords = ["automation", "flake8", "pycodestyle", "pyflakes", "pylint", "clippy"]
classifiers = [
    "Development Status :: 3 - Alpha",
    "Environment :: Console",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3 :: Only",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: Software Development :: Quality Assurance",
]
author = "Charlie Marsh"
author_email = "charlie.r.marsh@gmail.com"
description = "Convert existing Flake8 configuration to Ruff."
requires-python = ">=3.7"

[project.urls]
repository = "https://github.com/astral-sh/ruff#subdirectory=crates/flake8_to_ruff"

[build-system]
requires = ["maturin>=1.0,<2.0"]
build-backend = "maturin"

[tool.maturin]
bindings = "bin"
strip = true
68 crates/flake8_to_ruff/src/main.rs Normal file

@@ -0,0 +1,68 @@
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.

use std::path::PathBuf;

use anyhow::Result;
use clap::Parser;
use configparser::ini::Ini;

use ruff::flake8_to_ruff::{self, ExternalConfig};
use ruff::logging::{set_up_logging, LogLevel};

#[derive(Parser)]
#[command(
    about = "Convert existing Flake8 configuration to Ruff.",
    long_about = None
)]
struct Args {
    /// Path to the Flake8 configuration file (e.g., `setup.cfg`, `tox.ini`, or
    /// `.flake8`).
    #[arg(required = true)]
    file: PathBuf,
    /// Optional path to a `pyproject.toml` file, used to ensure compatibility
    /// with Black.
    #[arg(long)]
    pyproject: Option<PathBuf>,
    /// List of plugins to enable.
    #[arg(long, value_delimiter = ',')]
    plugin: Option<Vec<flake8_to_ruff::Plugin>>,
}

fn main() -> Result<()> {
    set_up_logging(&LogLevel::Default)?;

    let args = Args::parse();

    // Read the INI file.
    let mut ini = Ini::new_cs();
    ini.set_multiline(true);
    let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;

    // Read the pyproject.toml file.
    let pyproject = args.pyproject.map(flake8_to_ruff::parse).transpose()?;
    let external_config = pyproject
        .as_ref()
        .and_then(|pyproject| pyproject.tool.as_ref())
        .map(|tool| ExternalConfig {
            black: tool.black.as_ref(),
            isort: tool.isort.as_ref(),
            ..Default::default()
        })
        .unwrap_or_default();
    let external_config = ExternalConfig {
        project: pyproject
            .as_ref()
            .and_then(|pyproject| pyproject.project.as_ref()),
        ..external_config
    };

    // Create Ruff's pyproject.toml section.
    let pyproject = flake8_to_ruff::convert(&config, &external_config, args.plugin)?;

    #[allow(clippy::print_stdout)]
    {
        println!("{}", toml::to_string_pretty(&pyproject)?);
    }

    Ok(())
}
@@ -1,42 +0,0 @@
[package]
name = "red_knot"
version = "0.1.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
ruff_python_parser = { path = "../ruff_python_parser" }
ruff_python_ast = { path = "../ruff_python_ast" }
ruff_text_size = { path = "../ruff_text_size" }
ruff_index = { path = "../ruff_index" }
ruff_notebook = { path = "../ruff_notebook" }

anyhow = { workspace = true }
bitflags = { workspace = true }
crossbeam = { workspace = true }
ctrlc = { version = "3.4.4" }
dashmap = { workspace = true }
hashbrown = { workspace = true }
indexmap = { workspace = true }
notify = { workspace = true }
parking_lot = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
smol_str = { version = "0.2.1" }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
tracing-tree = { workspace = true }

[dev-dependencies]
textwrap = { version = "0.16.1" }
tempfile = { workspace = true }

[lints]
workspace = true
@@ -1,18 +0,0 @@
# Red Knot

The Red Knot crate contains code working towards multifile analysis, type inference and, ultimately, type-checking. It's very much a work in progress for now.

## Vendored types for the stdlib

Red Knot vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot/vendor/typeshed`. The file `crates/red_knot/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.

Updating the vendored stubs is currently done manually. On a Unix machine, follow these steps (assuming you have a typeshed clone in a `typeshed` directory and a Ruff clone in a `ruff` directory):

```shell
rm -rf ruff/crates/red_knot/vendor/typeshed
mkdir ruff/crates/red_knot/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot/vendor/typeshed/stdlib
git -C typeshed rev-parse HEAD > ruff/crates/red_knot/vendor/typeshed/source_commit.txt
```
@@ -1,415 +0,0 @@
use std::any::type_name;
use std::fmt::{Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;

use rustc_hash::FxHashMap;

use ruff_index::{Idx, IndexVec};
use ruff_python_ast::visitor::preorder;
use ruff_python_ast::visitor::preorder::{PreorderVisitor, TraversalSignal};
use ruff_python_ast::{
    AnyNodeRef, AstNode, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule,
    NodeKind, Parameter, Stmt, StmtAnnAssign, StmtAssign, StmtAugAssign, StmtClassDef,
    StmtFunctionDef, StmtGlobal, StmtImport, StmtImportFrom, StmtNonlocal, StmtTypeAlias,
    TypeParam, TypeParamParamSpec, TypeParamTypeVar, TypeParamTypeVarTuple, WithItem,
};
use ruff_text_size::{Ranged, TextRange};

/// A type-agnostic ID that uniquely identifies an AST node in a file.
#[ruff_index::newtype_index]
pub struct AstId;

/// A typed ID that uniquely identifies an AST node in a file.
///
/// This is different from [`AstId`] in that it is a combination of ID and the type of the node the ID identifies.
/// Typing the ID prevents mixing IDs of different node types and allows restricting the API to only accept
/// nodes for which an ID has been created (not all AST nodes get an ID).
pub struct TypedAstId<N: HasAstId> {
    erased: AstId,
    _marker: PhantomData<fn() -> N>,
}

impl<N: HasAstId> TypedAstId<N> {
    /// Upcasts this ID from a more specific node type to a more general node type.
    pub fn upcast<M: HasAstId>(self) -> TypedAstId<M>
    where
        N: Into<M>,
    {
        TypedAstId {
            erased: self.erased,
            _marker: PhantomData,
        }
    }
}

impl<N: HasAstId> Copy for TypedAstId<N> {}
impl<N: HasAstId> Clone for TypedAstId<N> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<N: HasAstId> PartialEq for TypedAstId<N> {
    fn eq(&self, other: &Self) -> bool {
        self.erased == other.erased
    }
}

impl<N: HasAstId> Eq for TypedAstId<N> {}
impl<N: HasAstId> Hash for TypedAstId<N> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.erased.hash(state);
    }
}

impl<N: HasAstId> Debug for TypedAstId<N> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_tuple("TypedAstId")
            .field(&self.erased)
            .field(&type_name::<N>())
            .finish()
    }
}

pub struct AstIds {
    ids: IndexVec<AstId, NodeKey>,
    reverse: FxHashMap<NodeKey, AstId>,
}

impl AstIds {
    // TODO rust-analyzer doesn't allocate an ID for every node. It only allocates IDs for
    // nodes with a corresponding HIR element, that is, nodes that are definitions.
    pub fn from_module(module: &ModModule) -> Self {
        let mut visitor = AstIdsVisitor::default();

        // TODO: visit_module?
        // Make sure we visit the root
        visitor.create_id(module);
        visitor.visit_body(&module.body);

        while let Some(deferred) = visitor.deferred.pop() {
            match deferred {
                DeferredNode::FunctionDefinition(def) => {
                    def.visit_preorder(&mut visitor);
                }
                DeferredNode::ClassDefinition(def) => def.visit_preorder(&mut visitor),
            }
        }

        AstIds {
            ids: visitor.ids,
            reverse: visitor.reverse,
        }
    }

    /// Returns the key of the root node.
    pub fn root(&self) -> NodeKey {
        self.ids[AstId::new(0)]
    }

    /// Returns the [`TypedAstId`] for a node.
    pub fn ast_id<N: HasAstId>(&self, node: &N) -> TypedAstId<N> {
        let key = node.syntax_node_key();
        TypedAstId {
            erased: self.reverse.get(&key).copied().unwrap(),
            _marker: PhantomData,
        }
    }

    /// Returns the [`TypedAstId`] for the node identified with the given [`TypedNodeKey`].
    pub fn ast_id_for_key<N: HasAstId>(&self, node: &TypedNodeKey<N>) -> TypedAstId<N> {
        let ast_id = self.ast_id_for_node_key(node.inner);

        TypedAstId {
            erased: ast_id,
            _marker: PhantomData,
        }
    }

    /// Returns the untyped [`AstId`] for the node identified by the given `node` key.
    pub fn ast_id_for_node_key(&self, node: NodeKey) -> AstId {
        self.reverse
            .get(&node)
            .copied()
            .expect("Can't find node in AstIds map.")
    }

    /// Returns the [`TypedNodeKey`] for the node identified by the given [`TypedAstId`].
    pub fn key<N: HasAstId>(&self, id: TypedAstId<N>) -> TypedNodeKey<N> {
        let syntax_key = self.ids[id.erased];

        TypedNodeKey::new(syntax_key).unwrap()
    }

    pub fn node_key<H: HasAstId>(&self, id: TypedAstId<H>) -> NodeKey {
        self.ids[id.erased]
    }
}

impl std::fmt::Debug for AstIds {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut map = f.debug_map();
        for (key, value) in self.ids.iter_enumerated() {
            map.entry(&key, &value);
        }

        map.finish()
    }
}

impl PartialEq for AstIds {
    fn eq(&self, other: &Self) -> bool {
        self.ids == other.ids
    }
}

impl Eq for AstIds {}

#[derive(Default)]
struct AstIdsVisitor<'a> {
    ids: IndexVec<AstId, NodeKey>,
    reverse: FxHashMap<NodeKey, AstId>,
    deferred: Vec<DeferredNode<'a>>,
}

impl<'a> AstIdsVisitor<'a> {
    fn create_id<A: HasAstId>(&mut self, node: &A) {
        let node_key = node.syntax_node_key();

        let id = self.ids.push(node_key);
        self.reverse.insert(node_key, id);
    }
}

impl<'a> PreorderVisitor<'a> for AstIdsVisitor<'a> {
    fn visit_stmt(&mut self, stmt: &'a Stmt) {
        match stmt {
            Stmt::FunctionDef(def) => {
                self.create_id(def);
                self.deferred.push(DeferredNode::FunctionDefinition(def));
                return;
            }
            // TODO defer visiting the assignment body, type alias parameters etc?
            Stmt::ClassDef(def) => {
                self.create_id(def);
                self.deferred.push(DeferredNode::ClassDefinition(def));
                return;
            }
            Stmt::Expr(_) => {
                // Skip
                return;
            }
            Stmt::Return(_) => {}
            Stmt::Delete(_) => {}
            Stmt::Assign(assignment) => self.create_id(assignment),
            Stmt::AugAssign(assignment) => {
                self.create_id(assignment);
            }
            Stmt::AnnAssign(assignment) => self.create_id(assignment),
            Stmt::TypeAlias(assignment) => self.create_id(assignment),
            Stmt::For(_) => {}
            Stmt::While(_) => {}
            Stmt::If(_) => {}
            Stmt::With(_) => {}
            Stmt::Match(_) => {}
            Stmt::Raise(_) => {}
            Stmt::Try(_) => {}
            Stmt::Assert(_) => {}
            Stmt::Import(import) => self.create_id(import),
            Stmt::ImportFrom(import_from) => self.create_id(import_from),
            Stmt::Global(global) => self.create_id(global),
            Stmt::Nonlocal(non_local) => self.create_id(non_local),
            Stmt::Pass(_) => {}
            Stmt::Break(_) => {}
            Stmt::Continue(_) => {}
            Stmt::IpyEscapeCommand(_) => {}
        }

        preorder::walk_stmt(self, stmt);
    }

    fn visit_expr(&mut self, _expr: &'a Expr) {}

    fn visit_parameter(&mut self, parameter: &'a Parameter) {
        self.create_id(parameter);
        preorder::walk_parameter(self, parameter);
    }

    fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler) {
        match except_handler {
            ExceptHandler::ExceptHandler(except_handler) => {
                self.create_id(except_handler);
            }
        }

        preorder::walk_except_handler(self, except_handler);
    }

    fn visit_with_item(&mut self, with_item: &'a WithItem) {
        self.create_id(with_item);
        preorder::walk_with_item(self, with_item);
    }

    fn visit_match_case(&mut self, match_case: &'a MatchCase) {
        self.create_id(match_case);
        preorder::walk_match_case(self, match_case);
    }

    fn visit_type_param(&mut self, type_param: &'a TypeParam) {
        self.create_id(type_param);
    }
}

enum DeferredNode<'a> {
    FunctionDefinition(&'a StmtFunctionDef),
    ClassDefinition(&'a StmtClassDef),
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct TypedNodeKey<N: AstNode> {
    /// The type-erased node key.
    inner: NodeKey,
    _marker: PhantomData<fn() -> N>,
}

impl<N: AstNode> TypedNodeKey<N> {
    pub fn from_node(node: &N) -> Self {
        let inner = NodeKey {
            kind: node.as_any_node_ref().kind(),
            range: node.range(),
        };
        Self {
            inner,
            _marker: PhantomData,
        }
    }

    pub fn new(node_key: NodeKey) -> Option<Self> {
        N::can_cast(node_key.kind).then_some(TypedNodeKey {
            inner: node_key,
            _marker: PhantomData,
        })
    }

    pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<N::Ref<'a>> {
        let node_ref = self.inner.resolve(root)?;

        Some(N::cast_ref(node_ref).unwrap())
    }

    pub fn resolve_unwrap<'a>(&self, root: AnyNodeRef<'a>) -> N::Ref<'a> {
        self.resolve(root).expect("node should resolve")
    }

    pub fn erased(&self) -> &NodeKey {
        &self.inner
    }
}

struct FindNodeKeyVisitor<'a> {
    key: NodeKey,
    result: Option<AnyNodeRef<'a>>,
}

impl<'a> PreorderVisitor<'a> for FindNodeKeyVisitor<'a> {
    fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal {
        if self.result.is_some() {
            return TraversalSignal::Skip;
        }

        if node.range() == self.key.range && node.kind() == self.key.kind {
            self.result = Some(node);
            TraversalSignal::Skip
        } else if node.range().contains_range(self.key.range) {
            TraversalSignal::Traverse
        } else {
            TraversalSignal::Skip
        }
    }

    fn visit_body(&mut self, body: &'a [Stmt]) {
        // TODO it would be more efficient to use binary search instead of linear
        for stmt in body {
            if stmt.range().start() > self.key.range.end() {
                break;
            }

            self.visit_stmt(stmt);
        }
    }
}

// TODO an alternative to this is to have a `NodeId` on each node (in increasing order depending on the position).
// This would allow reducing the size of this to a u32.
// It would be nice if we could use an `Arc::weak_ref` here, but that only works if we use
// `Arc` internally.
// TODO: Implement the logic to resolve a node, given a db (and the correct file).
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct NodeKey {
    kind: NodeKind,
    range: TextRange,
}

impl NodeKey {
    pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<AnyNodeRef<'a>> {
        // We need to do a binary search here. Only traverse into a node if the range is within the node
        let mut visitor = FindNodeKeyVisitor {
            key: *self,
            result: None,
        };

        if visitor.enter_node(root) == TraversalSignal::Traverse {
            root.visit_preorder(&mut visitor);
        }

        visitor.result
    }
}

/// Marker trait implemented by AST nodes for which we extract the `AstId`.
pub trait HasAstId: AstNode {
    fn node_key(&self) -> TypedNodeKey<Self>
    where
        Self: Sized,
    {
        TypedNodeKey {
            inner: self.syntax_node_key(),
            _marker: PhantomData,
        }
    }

    fn syntax_node_key(&self) -> NodeKey {
        NodeKey {
            kind: self.as_any_node_ref().kind(),
            range: self.range(),
        }
    }
}

impl HasAstId for StmtFunctionDef {}
impl HasAstId for StmtClassDef {}
impl HasAstId for StmtAnnAssign {}
impl HasAstId for StmtAugAssign {}
impl HasAstId for StmtAssign {}
impl HasAstId for StmtTypeAlias {}

impl HasAstId for ModModule {}

impl HasAstId for StmtImport {}

impl HasAstId for StmtImportFrom {}

impl HasAstId for Parameter {}

impl HasAstId for TypeParam {}
impl HasAstId for Stmt {}
impl HasAstId for TypeParamTypeVar {}
impl HasAstId for TypeParamTypeVarTuple {}
impl HasAstId for TypeParamParamSpec {}
impl HasAstId for StmtGlobal {}
impl HasAstId for StmtNonlocal {}

impl HasAstId for ExceptHandlerExceptHandler {}
impl HasAstId for WithItem {}
impl HasAstId for MatchCase {}
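
To make the `NodeKey` mechanism above concrete, here is a self-contained toy sketch of the same idea: identify a node by its kind plus source range, then re-find it later by descending only into nodes whose range encloses the key's range. All names here (`ToyNode`, `ToyNodeKey`) are invented for illustration; the real implementation is the `ast_ids.rs` file shown above:

```rust
use std::ops::Range;

// A toy AST node: kind, source range, and children.
#[derive(Debug, Clone, PartialEq, Eq)]
struct ToyNode {
    kind: &'static str,
    range: Range<usize>,
    children: Vec<ToyNode>,
}

// Analogue of `NodeKey`: a node is identified by kind + range.
#[derive(Debug, Clone, PartialEq, Eq)]
struct ToyNodeKey {
    kind: &'static str,
    range: Range<usize>,
}

impl ToyNodeKey {
    fn from_node(node: &ToyNode) -> Self {
        Self { kind: node.kind, range: node.range.clone() }
    }

    // Mirrors `NodeKey::resolve`: stop at an exact kind + range match, and
    // only traverse into children whose range encloses the key's range.
    fn resolve<'a>(&self, root: &'a ToyNode) -> Option<&'a ToyNode> {
        if root.kind == self.kind && root.range == self.range {
            return Some(root);
        }
        root.children
            .iter()
            .filter(|child| {
                child.range.start <= self.range.start && self.range.end <= child.range.end
            })
            .find_map(|child| self.resolve(child))
    }
}

fn main() {
    let root = ToyNode {
        kind: "Module",
        range: 0..20,
        children: vec![ToyNode { kind: "FunctionDef", range: 0..10, children: vec![] }],
    };
    // Store a key now, resolve it later (e.g. against a re-parsed tree).
    let key = ToyNodeKey::from_node(&root.children[0]);
    assert_eq!(key.resolve(&root).map(|n| n.kind), Some("FunctionDef"));
}
```
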
@@ -1,165 +0,0 @@
use std::fmt::Formatter;
use std::hash::Hash;
use std::sync::atomic::{AtomicUsize, Ordering};

use crate::db::QueryResult;
use dashmap::mapref::entry::Entry;

use crate::FxDashMap;

/// Simple key-value cache that locks on a per-key level.
pub struct KeyValueCache<K, V> {
    map: FxDashMap<K, V>,
    statistics: CacheStatistics,
}

impl<K, V> KeyValueCache<K, V>
where
    K: Eq + Hash + Clone,
    V: Clone,
{
    pub fn try_get(&self, key: &K) -> Option<V> {
        if let Some(existing) = self.map.get(key) {
            self.statistics.hit();
            Some(existing.clone())
        } else {
            self.statistics.miss();
            None
        }
    }

    pub fn get<F>(&self, key: &K, compute: F) -> QueryResult<V>
    where
        F: FnOnce(&K) -> QueryResult<V>,
    {
        Ok(match self.map.entry(key.clone()) {
            Entry::Occupied(cached) => {
                self.statistics.hit();

                cached.get().clone()
            }
            Entry::Vacant(vacant) => {
                self.statistics.miss();

                let value = compute(key)?;
                vacant.insert(value.clone());
                value
            }
        })
    }

    pub fn set(&mut self, key: K, value: V) {
        self.map.insert(key, value);
    }

    pub fn remove(&mut self, key: &K) -> Option<V> {
        self.map.remove(key).map(|(_, value)| value)
    }

    pub fn clear(&mut self) {
        self.map.clear();
        self.map.shrink_to_fit();
    }

    pub fn statistics(&self) -> Option<Statistics> {
        self.statistics.to_statistics()
    }
}

impl<K, V> Default for KeyValueCache<K, V>
where
    K: Eq + Hash,
    V: Clone,
{
    fn default() -> Self {
        Self {
            map: FxDashMap::default(),
            statistics: CacheStatistics::default(),
        }
    }
}

impl<K, V> std::fmt::Debug for KeyValueCache<K, V>
where
    K: std::fmt::Debug + Eq + Hash,
    V: std::fmt::Debug,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_map();

        for entry in &self.map {
            debug.entry(&entry.value(), &entry.key());
        }

        debug.finish()
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Statistics {
    pub hits: usize,
    pub misses: usize,
}

impl Statistics {
    #[allow(clippy::cast_precision_loss)]
    pub fn hit_rate(&self) -> Option<f64> {
        if self.hits + self.misses == 0 {
            return None;
        }

        Some((self.hits as f64) / (self.hits + self.misses) as f64)
    }
}

#[cfg(debug_assertions)]
pub type CacheStatistics = DebugStatistics;

#[cfg(not(debug_assertions))]
pub type CacheStatistics = ReleaseStatistics;

pub trait StatisticsRecorder {
    fn hit(&self);
    fn miss(&self);
    fn to_statistics(&self) -> Option<Statistics>;
}

#[derive(Debug, Default)]
pub struct DebugStatistics {
    hits: AtomicUsize,
    misses: AtomicUsize,
}

impl StatisticsRecorder for DebugStatistics {
    // TODO figure out appropriate Ordering
    fn hit(&self) {
        self.hits.fetch_add(1, Ordering::SeqCst);
    }

    fn miss(&self) {
        self.misses.fetch_add(1, Ordering::SeqCst);
    }

    fn to_statistics(&self) -> Option<Statistics> {
        let hits = self.hits.load(Ordering::SeqCst);
        let misses = self.misses.load(Ordering::SeqCst);

        Some(Statistics { hits, misses })
    }
}

#[derive(Debug, Default)]
pub struct ReleaseStatistics;

impl StatisticsRecorder for ReleaseStatistics {
    #[inline]
    fn hit(&self) {}

    #[inline]
    fn miss(&self) {}

    #[inline]
    fn to_statistics(&self) -> Option<Statistics> {
        None
    }
}
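
The `get` method above is the heart of this cache: a map entry either yields a clone of the cached value (a hit) or runs the fallible `compute` closure, stores the result, and returns it (a miss). A minimal single-threaded sketch of the same get-or-compute pattern, with invented names and `std`'s `HashMap` standing in for the concurrent `FxDashMap`:

```rust
use std::collections::hash_map::Entry;
use std::collections::HashMap;

// Single-threaded stand-in for `KeyValueCache` (illustrative only).
struct MiniCache {
    map: HashMap<String, usize>,
    hits: usize,
    misses: usize,
}

impl MiniCache {
    fn get<F>(&mut self, key: &str, compute: F) -> usize
    where
        F: FnOnce(&str) -> usize,
    {
        match self.map.entry(key.to_string()) {
            // Cached: record a hit and return a copy of the stored value.
            Entry::Occupied(cached) => {
                self.hits += 1;
                *cached.get()
            }
            // Not cached: record a miss, compute, store, and return.
            Entry::Vacant(vacant) => {
                self.misses += 1;
                *vacant.insert(compute(key))
            }
        }
    }

    // Same formula as `Statistics::hit_rate`: hits / (hits + misses).
    fn hit_rate(&self) -> Option<f64> {
        let total = self.hits + self.misses;
        (total != 0).then(|| self.hits as f64 / total as f64)
    }
}

fn main() {
    let mut cache = MiniCache { map: HashMap::new(), hits: 0, misses: 0 };
    assert_eq!(cache.get("a", str::len), 1); // miss: computed and stored
    assert_eq!(cache.get("a", str::len), 1); // hit: served from the map
    assert_eq!(cache.hit_rate(), Some(0.5));
}
```
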
@@ -1,42 +0,0 @@
use std::sync::atomic::AtomicBool;
use std::sync::Arc;

#[derive(Debug, Clone, Default)]
pub struct CancellationTokenSource {
    signal: Arc<AtomicBool>,
}

impl CancellationTokenSource {
    pub fn new() -> Self {
        Self {
            signal: Arc::new(AtomicBool::new(false)),
        }
    }

    #[tracing::instrument(level = "trace", skip_all)]
    pub fn cancel(&self) {
        self.signal.store(true, std::sync::atomic::Ordering::SeqCst);
    }

    pub fn is_cancelled(&self) -> bool {
        self.signal.load(std::sync::atomic::Ordering::SeqCst)
    }

    pub fn token(&self) -> CancellationToken {
        CancellationToken {
            signal: self.signal.clone(),
        }
    }
}

#[derive(Clone, Debug)]
pub struct CancellationToken {
    signal: Arc<AtomicBool>,
}

impl CancellationToken {
    /// Returns `true` if cancellation has been requested.
    pub fn is_cancelled(&self) -> bool {
        self.signal.load(std::sync::atomic::Ordering::SeqCst)
    }
}
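
The source/token split above is a thin wrapper around an `Arc<AtomicBool>`: the owner flips the flag through the source, and workers poll cheap clones of it. A runnable sketch of that usage pattern, with the wrapper types inlined as the raw `Arc<AtomicBool>` they contain (the worker loop is illustrative, not code from this crate):

```rust
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Duration;

fn main() {
    // The "source" side: what `CancellationTokenSource` owns.
    let signal = Arc::new(AtomicBool::new(false));
    // A "token": the cheap clone handed to a worker.
    let token = Arc::clone(&signal);

    let worker = thread::spawn(move || {
        let mut iterations = 0_u64;
        // A cooperative worker checks the token at safe points and exits
        // early instead of being killed mid-query.
        while !token.load(Ordering::SeqCst) {
            iterations += 1;
            thread::sleep(Duration::from_millis(1));
        }
        iterations
    });

    thread::sleep(Duration::from_millis(10));
    signal.store(true, Ordering::SeqCst); // i.e. `CancellationTokenSource::cancel`
    let iterations = worker.join().expect("worker should not panic");
    println!("worker ran {iterations} iterations before cancellation");
}
```
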
@@ -1,248 +0,0 @@
use std::sync::Arc;

pub use jars::{HasJar, HasJars};
pub use query::{QueryError, QueryResult};
pub use runtime::DbRuntime;
pub use storage::JarsStorage;

use crate::files::FileId;
use crate::lint::{LintSemanticStorage, LintSyntaxStorage};
use crate::module::ModuleResolver;
use crate::parse::ParsedStorage;
use crate::source::SourceStorage;
use crate::symbols::SymbolTablesStorage;
use crate::types::TypeStore;

mod jars;
mod query;
mod runtime;
mod storage;

pub trait Database {
    /// Returns a reference to the runtime of the current worker.
    fn runtime(&self) -> &DbRuntime;

    /// Returns a mutable reference to the runtime. Only one worker can hold a mutable reference to the runtime.
    fn runtime_mut(&mut self) -> &mut DbRuntime;

    /// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
    fn cancelled(&self) -> QueryResult<()> {
        self.runtime().cancelled()
    }

    /// Returns `true` if the queries have been cancelled.
    fn is_cancelled(&self) -> bool {
        self.runtime().is_cancelled()
    }
}

/// Database that supports running queries from multiple threads.
pub trait ParallelDatabase: Database + Send {
    /// Creates a snapshot of the database state that can be used to query the database in another thread.
    ///
    /// The snapshot is a read-only view of the database, but query results are shared between threads.
    /// All queries are automatically cancelled when any mutation is applied to the database (by calling
    /// [`HasJars::jars_mut`]); snapshots themselves are read-only and cannot mutate.
    ///
    /// ## Creating a snapshot
    ///
    /// Creating a snapshot of the database's jars is cheap, but creating a snapshot of
    /// other state stored on the database might require deep-cloning data. That's why you should
    /// avoid creating snapshots in a hot function (e.g. don't create a snapshot for each file; instead,
    /// create a snapshot when scheduling the check of an entire program).
    ///
    /// ## Salsa compatibility
    /// Salsa prohibits creating a snapshot while running a local query (it's fine if other workers run a query) [[source](https://github.com/salsa-rs/salsa/issues/80)].
    /// We should avoid creating snapshots while running a query because we might want to adopt Salsa in the future (if we can figure out persistent caching).
    /// Unfortunately, the infrastructure doesn't provide an automated way of knowing when a query is run; that's
    /// why we have to "enforce" this constraint manually.
    #[must_use]
    fn snapshot(&self) -> Snapshot<Self>;
}

pub trait DbWithJar<Jar>: Database + HasJar<Jar> {}

/// Read-only snapshot of a database.
///
/// ## Deadlocks
/// A snapshot should always be dropped as soon as it is no longer necessary to run queries.
/// Storing the snapshot without running a query, or without periodically checking whether cancellation
/// was requested, can lead to deadlocks, because mutating the [`Database`] requires cancelling all
/// pending queries and waiting for all [`Snapshot`]s to be dropped.
#[derive(Debug)]
pub struct Snapshot<DB: ?Sized>
where
    DB: ParallelDatabase,
{
    db: DB,
}

impl<DB> Snapshot<DB>
where
    DB: ParallelDatabase,
{
    pub fn new(db: DB) -> Self {
        Snapshot { db }
    }
}

impl<DB> std::ops::Deref for Snapshot<DB>
where
    DB: ParallelDatabase,
{
    type Target = DB;

    fn deref(&self) -> &DB {
        &self.db
    }
}

pub trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

// Red Knot-specific database code.

pub trait SourceDb: DbWithJar<SourceJar> {
    // queries
    fn file_id(&self, path: &std::path::Path) -> FileId;

    fn file_path(&self, file_id: FileId) -> Arc<std::path::Path>;
}

pub trait SemanticDb: SourceDb + DbWithJar<SemanticJar> + Upcast<dyn SourceDb> {}

pub trait LintDb: SemanticDb + DbWithJar<LintJar> + Upcast<dyn SemanticDb> {}

pub trait Db: LintDb + Upcast<dyn LintDb> {}

#[derive(Debug, Default)]
pub struct SourceJar {
    pub sources: SourceStorage,
    pub parsed: ParsedStorage,
}

#[derive(Debug, Default)]
pub struct SemanticJar {
    pub module_resolver: ModuleResolver,
    pub symbol_tables: SymbolTablesStorage,
    pub type_store: TypeStore,
}

#[derive(Debug, Default)]
pub struct LintJar {
    pub lint_syntax: LintSyntaxStorage,
    pub lint_semantic: LintSemanticStorage,
}

#[cfg(test)]
pub(crate) mod tests {
    use std::path::Path;
    use std::sync::Arc;

    use crate::db::{
        Database, DbRuntime, DbWithJar, HasJar, HasJars, JarsStorage, LintDb, LintJar, QueryResult,
        SourceDb, SourceJar, Upcast,
    };
    use crate::files::{FileId, Files};

    use super::{SemanticDb, SemanticJar};

    // This can be a partial database used in a single crate for testing.
    // It would hold less data than the full database.
    #[derive(Debug, Default)]
    pub(crate) struct TestDb {
        files: Files,
        jars: JarsStorage<Self>,
    }

    impl HasJar<SourceJar> for TestDb {
        fn jar(&self) -> QueryResult<&SourceJar> {
            Ok(&self.jars()?.0)
        }

        fn jar_mut(&mut self) -> &mut SourceJar {
            &mut self.jars_mut().0
        }
    }

    impl HasJar<SemanticJar> for TestDb {
        fn jar(&self) -> QueryResult<&SemanticJar> {
            Ok(&self.jars()?.1)
        }

        fn jar_mut(&mut self) -> &mut SemanticJar {
            &mut self.jars_mut().1
        }
    }

    impl HasJar<LintJar> for TestDb {
        fn jar(&self) -> QueryResult<&LintJar> {
            Ok(&self.jars()?.2)
        }

        fn jar_mut(&mut self) -> &mut LintJar {
            &mut self.jars_mut().2
        }
    }

    impl SourceDb for TestDb {
        fn file_id(&self, path: &Path) -> FileId {
            self.files.intern(path)
        }

        fn file_path(&self, file_id: FileId) -> Arc<Path> {
            self.files.path(file_id)
        }
    }

    impl DbWithJar<SourceJar> for TestDb {}

    impl Upcast<dyn SourceDb> for TestDb {
        fn upcast(&self) -> &(dyn SourceDb + 'static) {
            self
        }
    }

    impl SemanticDb for TestDb {}

    impl DbWithJar<SemanticJar> for TestDb {}

    impl Upcast<dyn SemanticDb> for TestDb {
        fn upcast(&self) -> &(dyn SemanticDb + 'static) {
            self
        }
    }

    impl LintDb for TestDb {}

    impl Upcast<dyn LintDb> for TestDb {
        fn upcast(&self) -> &(dyn LintDb + 'static) {
            self
        }
    }

    impl DbWithJar<LintJar> for TestDb {}

    impl HasJars for TestDb {
        type Jars = (SourceJar, SemanticJar, LintJar);

        fn jars(&self) -> QueryResult<&Self::Jars> {
            self.jars.jars()
        }

        fn jars_mut(&mut self) -> &mut Self::Jars {
            self.jars.jars_mut()
        }
    }

    impl Database for TestDb {
        fn runtime(&self) -> &DbRuntime {
            self.jars.runtime()
        }

        fn runtime_mut(&mut self) -> &mut DbRuntime {
            self.jars.runtime_mut()
        }
    }
}
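
A conceptual sketch of the snapshot discipline the docs above describe: snapshot once per scheduled batch of work, share the snapshot across workers, and only mutate once every snapshot has been dropped. The `Db` type below is an invented stand-in for the real traits (`ParallelDatabase`, `Snapshot`, `HasJars`) defined in this module:

```rust
use std::sync::Arc;
use std::thread;

// Invented stand-in: shared, read-only query state behind an `Arc`.
#[derive(Debug)]
struct Db {
    jars: Arc<Vec<String>>,
}

impl Db {
    // Like `ParallelDatabase::snapshot`: cheap, just bumps a ref count.
    fn snapshot(&self) -> Db {
        Db { jars: Arc::clone(&self.jars) }
    }

    // Like `HasJars::jars_mut`: only legal once all snapshots are dropped.
    fn jars_mut(&mut self) -> &mut Vec<String> {
        Arc::get_mut(&mut self.jars).expect("all snapshots should be dropped")
    }
}

fn main() {
    let mut db = Db { jars: Arc::new(vec!["file.py".to_string()]) };

    // One snapshot for the whole check run, not one per file.
    let snapshot = db.snapshot();
    let handle = thread::spawn(move || snapshot.jars.len());
    assert_eq!(handle.join().unwrap(), 1);

    // The worker's snapshot is gone, so mutation is allowed again.
    db.jars_mut().push("other.py".to_string());
}
```
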
@@ -1,37 +0,0 @@
use crate::db::query::QueryResult;

/// Gives access to a specific jar in the database.
///
/// Nope, the terminology isn't borrowed from Java but from Salsa <https://salsa-rs.github.io/salsa/>,
/// which is an analogy to storing the salsa in different jars.
///
/// The basic idea is that each crate can define its own jar, and the jars can be combined into a single
/// database in the top-level crate. Each crate also defines its own `Database` trait. The combination of
/// the `Database` trait and the jar allows writing queries in isolation without having to know how they get composed at the upper levels.
///
/// Salsa further defines a `HasIngredient` trait which slices the jar to a specific storage (e.g. a specific cache).
/// We don't need this just yet because we write our queries by hand. We may want a similar trait if we decide
/// to use a macro to generate the queries.
pub trait HasJar<T> {
    /// Gives a read-only reference to the jar.
    fn jar(&self) -> QueryResult<&T>;

    /// Gives a mutable reference to the jar.
    fn jar_mut(&mut self) -> &mut T;
}

/// Gives access to the jars in a database.
pub trait HasJars {
    /// A type storing the jars.
    ///
    /// Most commonly, this is a tuple where each jar is a tuple element.
    type Jars: Default;

    /// Gives access to the underlying jars, but tests if the queries have been cancelled.
    ///
    /// Returns `Err(QueryError::Cancelled)` if the queries have been cancelled.
    fn jars(&self) -> QueryResult<&Self::Jars>;

    /// Gives mutable access to the underlying jars.
    fn jars_mut(&mut self) -> &mut Self::Jars;
}
@@ -1,20 +0,0 @@
use std::fmt::{Display, Formatter};

/// Reason why a db query operation failed.
#[derive(Debug, Clone, Copy)]
pub enum QueryError {
    /// The query was cancelled because the DB was mutated or the query was cancelled by the host (e.g. on a file change or when pressing CTRL+C).
    Cancelled,
}

impl Display for QueryError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            QueryError::Cancelled => f.write_str("query was cancelled"),
        }
    }
}

impl std::error::Error for QueryError {}

pub type QueryResult<T> = Result<T, QueryError>;
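
In practice, `QueryResult` threads cancellation through nested queries via `?`: each query checks for cancellation at a safe point, and a `Cancelled` error from any depth unwinds the whole computation. A self-contained sketch of that flow (the `QueryError`/`QueryResult` definitions are copied from above; `lower` and `upper` are invented stand-ins for real queries):

```rust
#[derive(Debug, Clone, Copy)]
enum QueryError {
    Cancelled,
}

type QueryResult<T> = Result<T, QueryError>;

// Stand-in for `Database::cancelled`, driven here by a plain flag.
fn cancelled(is_cancelled: bool) -> QueryResult<()> {
    if is_cancelled {
        Err(QueryError::Cancelled)
    } else {
        Ok(())
    }
}

fn lower(is_cancelled: bool) -> QueryResult<u32> {
    cancelled(is_cancelled)?; // bail out at the next safe point
    Ok(1)
}

fn upper(is_cancelled: bool) -> QueryResult<u32> {
    let value = lower(is_cancelled)?; // `?` propagates `Cancelled` upward
    Ok(value + 1)
}

fn main() {
    assert!(matches!(upper(false), Ok(2)));
    assert!(matches!(upper(true), Err(QueryError::Cancelled)));
}
```
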
@@ -1,41 +0,0 @@
use crate::cancellation::CancellationTokenSource;
use crate::db::{QueryError, QueryResult};

/// Holds the jar-agnostic state of the database.
#[derive(Debug, Default)]
pub struct DbRuntime {
    /// The cancellation token source used to signal other workers that the queries should be aborted and
    /// exit at the next possible point.
    cancellation_token: CancellationTokenSource,
}

impl DbRuntime {
    pub(super) fn snapshot(&self) -> Self {
        Self {
            cancellation_token: self.cancellation_token.clone(),
        }
    }

    /// Cancels the pending queries of other workers. The current worker cannot have any pending
    /// queries because we're holding a mutable reference to the runtime.
    pub(super) fn cancel_other_workers(&mut self) {
        self.cancellation_token.cancel();
        // Set a new cancellation token so that we're in a non-cancelled state again when running the next
        // query.
        self.cancellation_token = CancellationTokenSource::default();
    }

    /// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
    pub(super) fn cancelled(&self) -> QueryResult<()> {
        if self.cancellation_token.is_cancelled() {
            Err(QueryError::Cancelled)
        } else {
            Ok(())
        }
    }

    /// Returns `true` if the queries have been cancelled.
    pub(super) fn is_cancelled(&self) -> bool {
        self.cancellation_token.is_cancelled()
    }
}
@@ -1,117 +0,0 @@
use std::fmt::Formatter;
use std::sync::Arc;

use crossbeam::sync::WaitGroup;

use crate::db::query::QueryResult;
use crate::db::runtime::DbRuntime;
use crate::db::{HasJars, ParallelDatabase};

/// Stores the jars of a database and the state for each worker.
///
/// Today, all state is shared across all workers, but it may be desirable to store data per worker in the future.
pub struct JarsStorage<T>
where
    T: HasJars + Sized,
{
    // It's important that `jars_wait_group` is declared after `jars` to ensure that `jars` is dropped first.
    // See https://doc.rust-lang.org/reference/destructors.html
    /// Stores the jars of the database.
    jars: Arc<T::Jars>,

    /// Used to count the references to `jars`. Allows implementing `jars_mut` without having to clone `jars`.
    jars_wait_group: WaitGroup,

    /// The jar-agnostic state.
    runtime: DbRuntime,
}

impl<Db> JarsStorage<Db>
where
    Db: HasJars,
{
    pub(super) fn new() -> Self {
        Self {
            jars: Arc::new(Db::Jars::default()),
            jars_wait_group: WaitGroup::default(),
            runtime: DbRuntime::default(),
        }
    }

    /// Creates a snapshot of the jars.
    ///
    /// Creating the snapshot is cheap because it doesn't clone the jars; it only increments a ref counter.
    #[must_use]
    pub fn snapshot(&self) -> JarsStorage<Db>
    where
        Db: ParallelDatabase,
    {
        Self {
            jars: self.jars.clone(),
            jars_wait_group: self.jars_wait_group.clone(),
            runtime: self.runtime.snapshot(),
        }
    }

    pub(crate) fn jars(&self) -> QueryResult<&Db::Jars> {
        self.runtime.cancelled()?;
        Ok(&self.jars)
    }

    /// Returns a mutable reference to the jars without cloning their content.
    ///
    /// The method cancels any pending queries of other workers and waits for them to complete so that
    /// this instance is the only instance holding a reference to the jars.
    pub(crate) fn jars_mut(&mut self) -> &mut Db::Jars {
        // We have a mutable ref here, so no more workers can be spawned between calling this function and taking the mut ref below.
        self.cancel_other_workers();

        // Now all other references to `self.jars` should have been released. We can now safely return a mutable reference
        // to the Arc's content.
        Arc::get_mut(&mut self.jars).expect("All references to jars should have been released")
    }

    pub(crate) fn runtime(&self) -> &DbRuntime {
        &self.runtime
    }

    pub(crate) fn runtime_mut(&mut self) -> &mut DbRuntime {
        // Note: This method may need to use a similar trick to `jars_mut` if `DbRuntime` ever stores data that is shared between workers.
        &mut self.runtime
    }

    #[tracing::instrument(level = "trace", skip(self))]
    fn cancel_other_workers(&mut self) {
        self.runtime.cancel_other_workers();

        // Wait for all other workers to complete.
        let existing_wait = std::mem::take(&mut self.jars_wait_group);
        existing_wait.wait();
    }
}

impl<Db> Default for JarsStorage<Db>
where
    Db: HasJars,
{
    fn default() -> Self {
        Self::new()
    }
}

impl<T> std::fmt::Debug for JarsStorage<T>
where
    T: HasJars,
    <T as HasJars>::Jars: std::fmt::Debug,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("JarsStorage")
            .field("jars", &self.jars)
            .field("jars_wait_group", &self.jars_wait_group)
            .field("runtime", &self.runtime)
            .finish()
    }
}
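The `WaitGroup` trick above is the crux: every snapshot clones the wait group, and `mem::take` followed by `wait()` blocks until all clones are dropped. A self-contained sketch of just that mechanism, independent of the jar types:

use crossbeam::sync::WaitGroup;

fn main() {
    let mut wait_group = WaitGroup::new();

    let worker_handle = {
        let worker = wait_group.clone(); // analogous to `snapshot()`
        std::thread::spawn(move || {
            // ... run queries against the snapshot ...
            drop(worker); // snapshot dropped, reference released
        })
    };

    // Analogous to `cancel_other_workers`: take the current group and block
    // until every clone has been dropped.
    std::mem::take(&mut wait_group).wait();
    worker_handle.join().unwrap();
}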
@@ -1,180 +0,0 @@
use std::fmt::{Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::path::Path;
use std::sync::Arc;

use hashbrown::hash_map::RawEntryMut;
use parking_lot::RwLock;
use rustc_hash::FxHasher;

use ruff_index::{newtype_index, IndexVec};

type Map<K, V> = hashbrown::HashMap<K, V, ()>;

#[newtype_index]
pub struct FileId;

// TODO we'll need a higher-level virtual file system abstraction that allows testing whether a file exists
// or retrieving its content (ideally lazily and in a way that the memory can be retained later).
// I suspect that we'll end up with a FileSystem trait and our own Path abstraction.
#[derive(Default)]
pub struct Files {
    inner: Arc<RwLock<FilesInner>>,
}

impl Files {
    #[tracing::instrument(level = "debug", skip(self))]
    pub fn intern(&self, path: &Path) -> FileId {
        self.inner.write().intern(path)
    }

    pub fn try_get(&self, path: &Path) -> Option<FileId> {
        self.inner.read().try_get(path)
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub fn path(&self, id: FileId) -> Arc<Path> {
        self.inner.read().path(id)
    }

    /// Snapshots files for a new database snapshot.
    ///
    /// This method should not be used outside a database snapshot.
    #[must_use]
    pub fn snapshot(&self) -> Files {
        Files {
            inner: self.inner.clone(),
        }
    }
}

impl Debug for Files {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let files = self.inner.read();
        let mut debug = f.debug_map();
        for item in files.iter() {
            debug.entry(&item.0, &item.1);
        }

        debug.finish()
    }
}

impl PartialEq for Files {
    fn eq(&self, other: &Self) -> bool {
        self.inner.read().eq(&other.inner.read())
    }
}

impl Eq for Files {}

#[derive(Default)]
struct FilesInner {
    by_path: Map<FileId, ()>,
    // TODO should we use a map here to reclaim the space for removed files?
    // TODO I think we should use our own path abstraction here to avoid having to normalize paths
    // and deal with non-UTF-8 paths everywhere.
    by_id: IndexVec<FileId, Arc<Path>>,
}

impl FilesInner {
    /// Inserts the path and returns a new id for it, or returns the existing id if the path was already interned.
    // TODO should this accept Path or PathBuf?
    pub(crate) fn intern(&mut self, path: &Path) -> FileId {
        let hash = FilesInner::hash_path(path);

        let entry = self
            .by_path
            .raw_entry_mut()
            .from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path);

        match entry {
            RawEntryMut::Occupied(entry) => *entry.key(),
            RawEntryMut::Vacant(entry) => {
                let id = self.by_id.push(Arc::from(path));
                entry.insert_with_hasher(hash, id, (), |file| {
                    FilesInner::hash_path(&self.by_id[*file])
                });
                id
            }
        }
    }

    fn hash_path(path: &Path) -> u64 {
        let mut hasher = FxHasher::default();
        path.hash(&mut hasher);
        hasher.finish()
    }

    pub(crate) fn try_get(&self, path: &Path) -> Option<FileId> {
        let hash = FilesInner::hash_path(path);

        Some(
            *self
                .by_path
                .raw_entry()
                .from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path)?
                .0,
        )
    }

    /// Returns the path for the file with the given id.
    pub(crate) fn path(&self, id: FileId) -> Arc<Path> {
        self.by_id[id].clone()
    }

    pub(crate) fn iter(&self) -> impl Iterator<Item = (FileId, Arc<Path>)> + '_ {
        self.by_path.keys().map(|id| (*id, self.by_id[*id].clone()))
    }
}

impl PartialEq for FilesInner {
    fn eq(&self, other: &Self) -> bool {
        self.by_id == other.by_id
    }
}

impl Eq for FilesInner {}

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::PathBuf;

    #[test]
    fn insert_path_twice_same_id() {
        let files = Files::default();
        let path = PathBuf::from("foo/bar");
        let id1 = files.intern(&path);
        let id2 = files.intern(&path);
        assert_eq!(id1, id2);
    }

    #[test]
    fn insert_different_paths_different_ids() {
        let files = Files::default();
        let path1 = PathBuf::from("foo/bar");
        let path2 = PathBuf::from("foo/bar/baz");
        let id1 = files.intern(&path1);
        let id2 = files.intern(&path2);
        assert_ne!(id1, id2);
    }

    #[test]
    fn four_files() {
        let files = Files::default();
        let foo_path = PathBuf::from("foo");
        let foo_id = files.intern(&foo_path);
        let bar_path = PathBuf::from("bar");
        files.intern(&bar_path);
        let baz_path = PathBuf::from("baz");
        files.intern(&baz_path);
        let qux_path = PathBuf::from("qux");
        files.intern(&qux_path);

        let foo_id_2 = files.try_get(&foo_path).expect("foo_path to be found");
        assert_eq!(foo_id_2, foo_id);
    }
}
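A test-style sketch of the snapshot pattern: because `FilesInner` sits behind an `Arc`, IDs interned through one handle resolve through any snapshot, including on another thread:

let files = Files::default();
let id = files.intern(Path::new("src/main.py"));

let snapshot = files.snapshot();
let handle = std::thread::spawn(move || snapshot.path(id));
assert_eq!(&*handle.join().unwrap(), Path::new("src/main.py"));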
@@ -1,67 +0,0 @@
//! Key observations
//!
//! The HIR (High-Level Intermediate Representation) avoids allocations to a large extent by:
//! * using an arena per node type
//! * using IDs and ID ranges to reference items.
//!
//! Using a separate arena per node type has the advantage that the IDs are relatively stable, because
//! they only change when a node of the same kind has been added or removed. (What's unclear is whether
//! that matters, or whether it still triggers a re-compute because the AST ID in the node has changed.)
//!
//! The HIR does not store all details. It mainly stores the *public* interface. There's a reference
//! back to the AST node to get more details.

use crate::ast_ids::{HasAstId, TypedAstId};
use crate::files::FileId;
use std::fmt::Formatter;
use std::hash::{Hash, Hasher};

pub struct HirAstId<N: HasAstId> {
    file_id: FileId,
    node_id: TypedAstId<N>,
}

// The manual trait impls below avoid the derive macros' implicit `N: Trait` bounds:
// `HirAstId<N>` is `Copy`/`Eq`/`Hash` regardless of whether `N` is.
impl<N: HasAstId> Copy for HirAstId<N> {}

impl<N: HasAstId> Clone for HirAstId<N> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<N: HasAstId> PartialEq for HirAstId<N> {
    fn eq(&self, other: &Self) -> bool {
        self.file_id == other.file_id && self.node_id == other.node_id
    }
}

impl<N: HasAstId> Eq for HirAstId<N> {}

impl<N: HasAstId> std::fmt::Debug for HirAstId<N> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("HirAstId")
            .field("file_id", &self.file_id)
            .field("node_id", &self.node_id)
            .finish()
    }
}

impl<N: HasAstId> Hash for HirAstId<N> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.file_id.hash(state);
        self.node_id.hash(state);
    }
}

impl<N: HasAstId> HirAstId<N> {
    pub fn upcast<M: HasAstId>(self) -> HirAstId<M>
    where
        N: Into<M>,
    {
        HirAstId {
            file_id: self.file_id,
            node_id: self.node_id.upcast(),
        }
    }
}
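One way to see the stability argument from the module comment above: each node kind owns its own arena, so pushing into one arena never shifts the IDs handed out by another. A minimal illustrative sketch with plain vectors standing in for the `IndexVec` arenas (the names are hypothetical):

fn main() {
    // Each node kind gets its own arena; an ID is an index into that arena.
    let mut functions: Vec<&str> = vec!["f", "g"];
    let classes: Vec<&str> = vec!["C"];
    let class_c_id = 0;

    // Adding a function only affects FunctionIds...
    functions.push("h");

    // ...ClassIds remain valid and unchanged.
    assert_eq!(classes[class_c_id], "C");
}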
@@ -1,556 +0,0 @@
use std::ops::{Index, Range};

use ruff_index::{newtype_index, IndexVec};
use ruff_python_ast::visitor::preorder;
use ruff_python_ast::visitor::preorder::PreorderVisitor;
use ruff_python_ast::{
    Decorator, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule, Stmt,
    StmtAnnAssign, StmtAssign, StmtClassDef, StmtFunctionDef, StmtGlobal, StmtImport,
    StmtImportFrom, StmtNonlocal, StmtTypeAlias, TypeParam, TypeParamParamSpec, TypeParamTypeVar,
    TypeParamTypeVarTuple, WithItem,
};

use crate::ast_ids::{AstIds, HasAstId};
use crate::files::FileId;
use crate::hir::HirAstId;
use crate::Name;

#[newtype_index]
pub struct FunctionId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Function {
    ast_id: HirAstId<StmtFunctionDef>,
    name: Name,
    parameters: Range<ParameterId>,
    type_parameters: Range<TypeParameterId>, // TODO: return expression, decorators
}

#[newtype_index]
pub struct ParameterId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Parameter {
    kind: ParameterKind,
    name: Name,
    default: Option<()>, // TODO use expression HIR
    ast_id: HirAstId<ruff_python_ast::Parameter>,
}

// TODO or should `Parameter` be an enum?
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ParameterKind {
    PositionalOnly,
    Arguments,
    Vararg,
    KeywordOnly,
    Kwarg,
}

#[newtype_index]
pub struct ClassId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Class {
    name: Name,
    ast_id: HirAstId<StmtClassDef>,
    // TODO type parameters, inheritance, decorators, members
}

#[newtype_index]
pub struct AssignmentId;

// An assignment can have more than one target name,
// but that means we can't implement `name()` on `ModuleItem`.

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Assignment {
    // TODO: Handle multiple names / targets
    name: Name,
    ast_id: HirAstId<StmtAssign>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct AnnotatedAssignment {
    name: Name,
    ast_id: HirAstId<StmtAnnAssign>,
}

#[newtype_index]
pub struct AnnotatedAssignmentId;

#[newtype_index]
pub struct TypeAliasId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypeAlias {
    name: Name,
    ast_id: HirAstId<StmtTypeAlias>,
    parameters: Range<TypeParameterId>,
}

#[newtype_index]
pub struct TypeParameterId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TypeParameter {
    TypeVar(TypeParameterTypeVar),
    ParamSpec(TypeParameterParamSpec),
    TypeVarTuple(TypeParameterTypeVarTuple),
}

impl TypeParameter {
    pub fn ast_id(&self) -> HirAstId<TypeParam> {
        match self {
            TypeParameter::TypeVar(type_var) => type_var.ast_id.upcast(),
            TypeParameter::ParamSpec(param_spec) => param_spec.ast_id.upcast(),
            TypeParameter::TypeVarTuple(type_var_tuple) => type_var_tuple.ast_id.upcast(),
        }
    }
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypeParameterTypeVar {
    name: Name,
    ast_id: HirAstId<TypeParamTypeVar>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypeParameterParamSpec {
    name: Name,
    ast_id: HirAstId<TypeParamParamSpec>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypeParameterTypeVarTuple {
    name: Name,
    ast_id: HirAstId<TypeParamTypeVarTuple>,
}

#[newtype_index]
pub struct GlobalId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Global {
    // TODO track names
    ast_id: HirAstId<StmtGlobal>,
}

#[newtype_index]
pub struct NonLocalId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct NonLocal {
    // TODO track names
    ast_id: HirAstId<StmtNonlocal>,
}

pub enum DefinitionId {
    Function(FunctionId),
    Parameter(ParameterId),
    Class(ClassId),
    Assignment(AssignmentId),
    AnnotatedAssignment(AnnotatedAssignmentId),
    Global(GlobalId),
    NonLocal(NonLocalId),
    TypeParameter(TypeParameterId),
    TypeAlias(TypeAliasId),
}

pub enum DefinitionItem {
    Function(Function),
    Parameter(Parameter),
    Class(Class),
    Assignment(Assignment),
    AnnotatedAssignment(AnnotatedAssignment),
    Global(Global),
    NonLocal(NonLocal),
    TypeParameter(TypeParameter),
    TypeAlias(TypeAlias),
}

// The closest analog is rust-analyzer's `ItemTree`. It only represents "items", which make up the public
// interface of a module (it excludes any other statements or expressions). rust-analyzer uses it as the
// main input to the name resolution algorithm:
// > It is the input to the name resolution algorithm, as well as to the queries defined in `adt.rs`,
// > `data.rs`, and most things in `attr.rs`.
//
// > One important purpose of this layer is to provide an "invalidation barrier" for incremental
// > computations: when typing inside an item body, the `ItemTree` of the modified file is typically
// > unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`).
//
// I haven't fully figured this out, but I think that this composes the "public" interface of a module.
// Maybe that's too optimistic.
#[derive(Debug, Clone, Default, Eq, PartialEq)]
pub struct Definitions {
    functions: IndexVec<FunctionId, Function>,
    parameters: IndexVec<ParameterId, Parameter>,
    classes: IndexVec<ClassId, Class>,
    assignments: IndexVec<AssignmentId, Assignment>,
    annotated_assignments: IndexVec<AnnotatedAssignmentId, AnnotatedAssignment>,
    type_aliases: IndexVec<TypeAliasId, TypeAlias>,
    type_parameters: IndexVec<TypeParameterId, TypeParameter>,
    globals: IndexVec<GlobalId, Global>,
    non_locals: IndexVec<NonLocalId, NonLocal>,
}

impl Definitions {
    pub fn from_module(module: &ModModule, ast_ids: &AstIds, file_id: FileId) -> Self {
        let mut visitor = DefinitionsVisitor {
            definitions: Definitions::default(),
            ast_ids,
            file_id,
        };

        visitor.visit_body(&module.body);

        visitor.definitions
    }
}

impl Index<FunctionId> for Definitions {
    type Output = Function;

    fn index(&self, index: FunctionId) -> &Self::Output {
        &self.functions[index]
    }
}

impl Index<ParameterId> for Definitions {
    type Output = Parameter;

    fn index(&self, index: ParameterId) -> &Self::Output {
        &self.parameters[index]
    }
}

impl Index<ClassId> for Definitions {
    type Output = Class;

    fn index(&self, index: ClassId) -> &Self::Output {
        &self.classes[index]
    }
}

impl Index<AssignmentId> for Definitions {
    type Output = Assignment;

    fn index(&self, index: AssignmentId) -> &Self::Output {
        &self.assignments[index]
    }
}

impl Index<AnnotatedAssignmentId> for Definitions {
    type Output = AnnotatedAssignment;

    fn index(&self, index: AnnotatedAssignmentId) -> &Self::Output {
        &self.annotated_assignments[index]
    }
}

impl Index<TypeAliasId> for Definitions {
    type Output = TypeAlias;

    fn index(&self, index: TypeAliasId) -> &Self::Output {
        &self.type_aliases[index]
    }
}

impl Index<GlobalId> for Definitions {
    type Output = Global;

    fn index(&self, index: GlobalId) -> &Self::Output {
        &self.globals[index]
    }
}

impl Index<NonLocalId> for Definitions {
    type Output = NonLocal;

    fn index(&self, index: NonLocalId) -> &Self::Output {
        &self.non_locals[index]
    }
}

impl Index<TypeParameterId> for Definitions {
    type Output = TypeParameter;

    fn index(&self, index: TypeParameterId) -> &Self::Output {
        &self.type_parameters[index]
    }
}

struct DefinitionsVisitor<'a> {
    definitions: Definitions,
    ast_ids: &'a AstIds,
    file_id: FileId,
}

impl DefinitionsVisitor<'_> {
    fn ast_id<N: HasAstId>(&self, node: &N) -> HirAstId<N> {
        HirAstId {
            file_id: self.file_id,
            node_id: self.ast_ids.ast_id(node),
        }
    }

    fn lower_function_def(&mut self, function: &StmtFunctionDef) -> FunctionId {
        let name = Name::new(&function.name);

        let first_type_parameter_id = self.definitions.type_parameters.next_index();

        if let Some(type_params) = &function.type_params {
            for parameter in &type_params.type_params {
                self.lower_type_parameter(parameter);
            }
        }

        // `next_index` points one past the last lowered type parameter, so the
        // range is empty when the function has no type parameters.
        let last_type_parameter_id = self.definitions.type_parameters.next_index();

        let parameters = self.lower_parameters(&function.parameters);

        self.definitions.functions.push(Function {
            name,
            ast_id: self.ast_id(function),
            parameters,
            type_parameters: first_type_parameter_id..last_type_parameter_id,
        })
    }

    fn lower_parameters(&mut self, parameters: &ruff_python_ast::Parameters) -> Range<ParameterId> {
        let first_parameter_id = self.definitions.parameters.next_index();

        for parameter in &parameters.posonlyargs {
            self.definitions.parameters.push(Parameter {
                kind: ParameterKind::PositionalOnly,
                name: Name::new(&parameter.parameter.name),
                default: None,
                ast_id: self.ast_id(&parameter.parameter),
            });
        }

        for parameter in &parameters.args {
            self.definitions.parameters.push(Parameter {
                kind: ParameterKind::Arguments,
                name: Name::new(&parameter.parameter.name),
                default: None,
                ast_id: self.ast_id(&parameter.parameter),
            });
        }

        if let Some(vararg) = &parameters.vararg {
            self.definitions.parameters.push(Parameter {
                kind: ParameterKind::Vararg,
                name: Name::new(&vararg.name),
                default: None,
                ast_id: self.ast_id(vararg),
            });
        }

        for parameter in &parameters.kwonlyargs {
            self.definitions.parameters.push(Parameter {
                kind: ParameterKind::KeywordOnly,
                name: Name::new(&parameter.parameter.name),
                default: None,
                ast_id: self.ast_id(&parameter.parameter),
            });
        }

        if let Some(kwarg) = &parameters.kwarg {
            self.definitions.parameters.push(Parameter {
                kind: ParameterKind::Kwarg,
                name: Name::new(&kwarg.name),
                default: None,
                ast_id: self.ast_id(kwarg),
            });
        }

        // One past the last pushed parameter, so the range covers exactly the
        // parameters lowered above.
        let last_parameter_id = self.definitions.parameters.next_index();

        first_parameter_id..last_parameter_id
    }

    fn lower_class_def(&mut self, class: &StmtClassDef) -> ClassId {
        let name = Name::new(&class.name);

        self.definitions.classes.push(Class {
            name,
            ast_id: self.ast_id(class),
        })
    }

    fn lower_assignment(&mut self, assignment: &StmtAssign) {
        // FIXME handle multiple names
        if let Some(Expr::Name(name)) = assignment.targets.first() {
            self.definitions.assignments.push(Assignment {
                name: Name::new(&name.id),
                ast_id: self.ast_id(assignment),
            });
        }
    }

    fn lower_annotated_assignment(&mut self, annotated_assignment: &StmtAnnAssign) {
        if let Expr::Name(name) = &*annotated_assignment.target {
            self.definitions
                .annotated_assignments
                .push(AnnotatedAssignment {
                    name: Name::new(&name.id),
                    ast_id: self.ast_id(annotated_assignment),
                });
        }
    }

    fn lower_type_alias(&mut self, type_alias: &StmtTypeAlias) {
        if let Expr::Name(name) = &*type_alias.name {
            let name = Name::new(&name.id);

            let first_type_parameter_id = self.definitions.type_parameters.next_index();

            if let Some(type_params) = &type_alias.type_params {
                for type_parameter in &type_params.type_params {
                    self.lower_type_parameter(type_parameter);
                }
            }

            let last_type_parameter_id = self.definitions.type_parameters.next_index();

            self.definitions.type_aliases.push(TypeAlias {
                name,
                ast_id: self.ast_id(type_alias),
                parameters: first_type_parameter_id..last_type_parameter_id,
            });
        }
    }

    fn lower_type_parameter(&mut self, type_parameter: &TypeParam) -> TypeParameterId {
        match type_parameter {
            TypeParam::TypeVar(type_var) => {
                self.definitions
                    .type_parameters
                    .push(TypeParameter::TypeVar(TypeParameterTypeVar {
                        name: Name::new(&type_var.name),
                        ast_id: self.ast_id(type_var),
                    }))
            }
            TypeParam::ParamSpec(param_spec) => {
                self.definitions
                    .type_parameters
                    .push(TypeParameter::ParamSpec(TypeParameterParamSpec {
                        name: Name::new(&param_spec.name),
                        ast_id: self.ast_id(param_spec),
                    }))
            }
            TypeParam::TypeVarTuple(type_var_tuple) => {
                self.definitions
                    .type_parameters
                    .push(TypeParameter::TypeVarTuple(TypeParameterTypeVarTuple {
                        name: Name::new(&type_var_tuple.name),
                        ast_id: self.ast_id(type_var_tuple),
                    }))
            }
        }
    }

    fn lower_import(&mut self, _import: &StmtImport) {
        // TODO
    }

    fn lower_import_from(&mut self, _import_from: &StmtImportFrom) {
        // TODO
    }

    fn lower_global(&mut self, global: &StmtGlobal) -> GlobalId {
        self.definitions.globals.push(Global {
            ast_id: self.ast_id(global),
        })
    }

    fn lower_non_local(&mut self, non_local: &StmtNonlocal) -> NonLocalId {
        self.definitions.non_locals.push(NonLocal {
            ast_id: self.ast_id(non_local),
        })
    }

    fn lower_except_handler(&mut self, _except_handler: &ExceptHandlerExceptHandler) {
        // TODO
    }

    fn lower_with_item(&mut self, _with_item: &WithItem) {
        // TODO
    }

    fn lower_match_case(&mut self, _match_case: &MatchCase) {
        // TODO
    }
}

impl PreorderVisitor<'_> for DefinitionsVisitor<'_> {
    fn visit_stmt(&mut self, stmt: &Stmt) {
        match stmt {
            // Definition statements
            Stmt::FunctionDef(definition) => {
                self.lower_function_def(definition);
                self.visit_body(&definition.body);
            }
            Stmt::ClassDef(definition) => {
                self.lower_class_def(definition);
                self.visit_body(&definition.body);
            }
            Stmt::Assign(assignment) => {
                self.lower_assignment(assignment);
            }
            Stmt::AnnAssign(annotated_assignment) => {
                self.lower_annotated_assignment(annotated_assignment);
            }
            Stmt::TypeAlias(type_alias) => {
                self.lower_type_alias(type_alias);
            }

            Stmt::Import(import) => self.lower_import(import),
            Stmt::ImportFrom(import_from) => self.lower_import_from(import_from),
            Stmt::Global(global) => {
                self.lower_global(global);
            }
            Stmt::Nonlocal(non_local) => {
                self.lower_non_local(non_local);
            }

            // Visit the compound statement bodies because they can contain other definitions.
            Stmt::For(_)
            | Stmt::While(_)
            | Stmt::If(_)
            | Stmt::With(_)
            | Stmt::Match(_)
            | Stmt::Try(_) => {
                preorder::walk_stmt(self, stmt);
            }

            // Skip over simple statements because they can't contain any other definitions.
            Stmt::Return(_)
            | Stmt::Delete(_)
            | Stmt::AugAssign(_)
            | Stmt::Raise(_)
            | Stmt::Assert(_)
            | Stmt::Expr(_)
            | Stmt::Pass(_)
            | Stmt::Break(_)
            | Stmt::Continue(_)
            | Stmt::IpyEscapeCommand(_) => {
                // No op
            }
        }
    }

    fn visit_expr(&mut self, _: &'_ Expr) {}

    fn visit_decorator(&mut self, _decorator: &'_ Decorator) {}

    fn visit_except_handler(&mut self, except_handler: &'_ ExceptHandler) {
        match except_handler {
            ExceptHandler::ExceptHandler(except_handler) => {
                self.lower_except_handler(except_handler);
            }
        }
    }

    fn visit_with_item(&mut self, with_item: &'_ WithItem) {
        self.lower_with_item(with_item);
    }

    fn visit_match_case(&mut self, match_case: &'_ MatchCase) {
        self.lower_match_case(match_case);
        self.visit_body(&match_case.body);
    }
}
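A hypothetical end-to-end sketch of building `Definitions`. The `AstIds::from_module` constructor is an assumption (the `ast_ids` module is not part of this diff); the parsing call mirrors `Parsed::from_text` later in this change set:

use ruff_python_ast as ast;
use ruff_python_parser::Mode;

fn definitions_for(source: &str, file_id: FileId) -> Definitions {
    let Ok(ast::Mod::Module(module)) = ruff_python_parser::parse(source, Mode::Module) else {
        panic!("expected a valid module");
    };
    // Assumed constructor; the real `AstIds` API is not shown in this diff.
    let ast_ids = AstIds::from_module(&module);
    Definitions::from_module(&module, &ast_ids, file_id)
}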
@@ -1,109 +0,0 @@
use std::fmt::Formatter;
use std::hash::BuildHasherDefault;
use std::ops::Deref;
use std::path::{Path, PathBuf};

use rustc_hash::{FxHashSet, FxHasher};

use crate::files::FileId;

pub mod ast_ids;
pub mod cache;
pub mod cancellation;
pub mod db;
pub mod files;
pub mod hir;
pub mod lint;
pub mod module;
mod parse;
pub mod program;
pub mod source;
mod symbols;
mod types;
pub mod watch;

pub(crate) type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
#[allow(unused)]
pub(crate) type FxDashSet<V> = dashmap::DashSet<V, BuildHasherDefault<FxHasher>>;
pub(crate) type FxIndexSet<V> = indexmap::set::IndexSet<V, BuildHasherDefault<FxHasher>>;

#[derive(Debug, Clone)]
pub struct Workspace {
    /// TODO this should be a resolved path. We should probably use a newtype wrapper that guarantees
    /// that the path is UTF-8 and normalized.
    root: PathBuf,
    /// The files that are open in the workspace.
    ///
    /// * Editor: The files that are actively being edited in the editor (the user has a tab open with the file).
    /// * CLI: The resolved files passed as arguments to the CLI.
    open_files: FxHashSet<FileId>,
}

impl Workspace {
    pub fn new(root: PathBuf) -> Self {
        Self {
            root,
            open_files: FxHashSet::default(),
        }
    }

    pub fn root(&self) -> &Path {
        self.root.as_path()
    }

    // TODO having the content in the workspace feels wrong.
    pub fn open_file(&mut self, file_id: FileId) {
        self.open_files.insert(file_id);
    }

    pub fn close_file(&mut self, file_id: FileId) {
        self.open_files.remove(&file_id);
    }

    // TODO introduce an `OpenFile` type instead of using an anonymous tuple.
    pub fn open_files(&self) -> impl Iterator<Item = FileId> + '_ {
        self.open_files.iter().copied()
    }

    pub fn is_file_open(&self, file_id: FileId) -> bool {
        self.open_files.contains(&file_id)
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Name(smol_str::SmolStr);

impl Name {
    #[inline]
    pub fn new(name: &str) -> Self {
        Self(smol_str::SmolStr::new(name))
    }

    pub fn as_str(&self) -> &str {
        self.0.as_str()
    }
}

impl Deref for Name {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

impl<T> From<T> for Name
where
    T: Into<smol_str::SmolStr>,
{
    fn from(value: T) -> Self {
        Self(value.into())
    }
}

impl std::fmt::Display for Name {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
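`Name` derefs to `str`, so the interned `SmolStr` can be used anywhere a string slice is expected; a short sketch:

let name = Name::new("my_function");
assert_eq!(name.as_str(), "my_function");
assert!(name.starts_with("my_")); // `str` methods are available via `Deref`
assert_eq!(format!("def {name}(): ..."), "def my_function(): ..."); // via `Display`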
@@ -1,321 +0,0 @@
use std::cell::RefCell;
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
use std::time::Duration;

use ruff_python_ast::visitor::Visitor;
use ruff_python_ast::{ModModule, StringLiteral};

use crate::cache::KeyValueCache;
use crate::db::{LintDb, LintJar, QueryResult};
use crate::files::FileId;
use crate::module::ModuleName;
use crate::parse::{parse, Parsed};
use crate::source::{source_text, Source};
use crate::symbols::{
    resolve_global_symbol, symbol_table, Definition, GlobalSymbolId, SymbolId, SymbolTable,
};
use crate::types::{infer_definition_type, infer_symbol_type, Type};

#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn lint_syntax(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
    let lint_jar: &LintJar = db.jar()?;
    let storage = &lint_jar.lint_syntax;

    #[allow(clippy::print_stdout)]
    if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
        for i in 0..10 {
            db.cancelled()?;
            println!("RED_KNOT_SLOW_LINT is set, sleeping for {i}/10 seconds");
            std::thread::sleep(Duration::from_secs(1));
        }
    }

    storage.get(&file_id, |file_id| {
        let mut diagnostics = Vec::new();

        let source = source_text(db.upcast(), *file_id)?;
        lint_lines(source.text(), &mut diagnostics);

        let parsed = parse(db.upcast(), *file_id)?;

        if parsed.errors().is_empty() {
            let ast = parsed.ast();

            let mut visitor = SyntaxLintVisitor {
                diagnostics,
                source: source.text(),
            };
            visitor.visit_body(&ast.body);
            diagnostics = visitor.diagnostics;
        } else {
            diagnostics.extend(parsed.errors().iter().map(std::string::ToString::to_string));
        }

        Ok(Diagnostics::from(diagnostics))
    })
}

fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
    for (line_number, line) in source.lines().enumerate() {
        // Fast path: a line with fewer than 88 bytes can't have more than 88 characters.
        if line.len() < 88 {
            continue;
        }

        let char_count = line.chars().count();
        if char_count > 88 {
            diagnostics.push(format!(
                "Line {} is too long ({} characters)",
                line_number + 1,
                char_count
            ));
        }
    }
}
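`lint_lines` is a pure function, so it's easy to exercise directly; a test-style sketch:

#[test]
fn flags_long_lines() {
    let mut diagnostics = Vec::new();
    let source = format!("short = 1\n{}\n", "x".repeat(100));
    lint_lines(&source, &mut diagnostics);
    assert_eq!(
        diagnostics,
        vec!["Line 2 is too long (100 characters)".to_string()]
    );
}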
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn lint_semantic(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
    let lint_jar: &LintJar = db.jar()?;
    let storage = &lint_jar.lint_semantic;

    storage.get(&file_id, |file_id| {
        let source = source_text(db.upcast(), *file_id)?;
        let parsed = parse(db.upcast(), *file_id)?;
        let symbols = symbol_table(db.upcast(), *file_id)?;

        let context = SemanticLintContext {
            file_id: *file_id,
            source,
            parsed,
            symbols,
            db,
            diagnostics: RefCell::new(Vec::new()),
        };

        lint_unresolved_imports(&context)?;
        lint_bad_overrides(&context)?;

        Ok(Diagnostics::from(context.diagnostics.take()))
    })
}

fn lint_unresolved_imports(context: &SemanticLintContext) -> QueryResult<()> {
    // TODO: Consider iterating over the dependencies (imports) only instead of all definitions.
    for (symbol, definition) in context.symbols().all_definitions() {
        match definition {
            Definition::Import(import) => {
                let ty = context.infer_symbol_type(symbol)?;

                if ty.is_unknown() {
                    context.push_diagnostic(format!("Unresolved module {}", import.module));
                }
            }
            Definition::ImportFrom(import) => {
                let ty = context.infer_symbol_type(symbol)?;

                if ty.is_unknown() {
                    let module_name = import.module().map(Deref::deref).unwrap_or_default();
                    let message = if import.level() > 0 {
                        format!(
                            "Unresolved relative import '{}' from {}{}",
                            import.name(),
                            ".".repeat(import.level() as usize),
                            module_name
                        )
                    } else {
                        format!(
                            "Unresolved import '{}' from '{}'",
                            import.name(),
                            module_name
                        )
                    };

                    context.push_diagnostic(message);
                }
            }
            _ => {}
        }
    }

    Ok(())
}

fn lint_bad_overrides(context: &SemanticLintContext) -> QueryResult<()> {
    // TODO we should have a special marker on the real typing module (from typeshed) so that if you
    // have your own "typing" module in your project, we don't consider it THE typing module (and
    // the same for other stdlib modules that our lint rules care about).
    let Some(typing_override) =
        resolve_global_symbol(context.db.upcast(), ModuleName::new("typing"), "override")?
    else {
        // TODO once we bundle typeshed, this should be unreachable!()
        return Ok(());
    };

    // TODO we should maybe index definitions by type instead of iterating over all of them, or else
    // iterate over all just once, match, and branch to all lint rules that care about a type of definition.
    for (symbol, definition) in context.symbols().all_definitions() {
        if !matches!(definition, Definition::FunctionDef(_)) {
            continue;
        }
        let ty = infer_definition_type(
            context.db.upcast(),
            GlobalSymbolId {
                file_id: context.file_id,
                symbol_id: symbol,
            },
            definition.clone(),
        )?;
        let Type::Function(func) = ty else {
            unreachable!("type of a FunctionDef should always be a Function");
        };
        let Some(class) = func.get_containing_class(context.db.upcast())? else {
            // Not a method of a class.
            continue;
        };
        if func.has_decorator(context.db.upcast(), typing_override)? {
            let method_name = func.name(context.db.upcast())?;
            if class
                .get_super_class_member(context.db.upcast(), &method_name)?
                .is_none()
            {
                // TODO should have a qualname() method to support nested classes
                context.push_diagnostic(format!(
                    "Method {}.{} is decorated with `typing.override` but does not override any base class method",
                    class.name(context.db.upcast())?,
                    method_name,
                ));
            }
        }
    }
    Ok(())
}

pub struct SemanticLintContext<'a> {
    file_id: FileId,
    source: Source,
    parsed: Parsed,
    symbols: Arc<SymbolTable>,
    db: &'a dyn LintDb,
    diagnostics: RefCell<Vec<String>>,
}

impl<'a> SemanticLintContext<'a> {
    pub fn source_text(&self) -> &str {
        self.source.text()
    }

    pub fn file_id(&self) -> FileId {
        self.file_id
    }

    pub fn ast(&self) -> &ModModule {
        self.parsed.ast()
    }

    pub fn symbols(&self) -> &SymbolTable {
        &self.symbols
    }

    pub fn infer_symbol_type(&self, symbol_id: SymbolId) -> QueryResult<Type> {
        infer_symbol_type(
            self.db.upcast(),
            GlobalSymbolId {
                file_id: self.file_id,
                symbol_id,
            },
        )
    }

    pub fn push_diagnostic(&self, diagnostic: String) {
        self.diagnostics.borrow_mut().push(diagnostic);
    }

    pub fn extend_diagnostics(&mut self, diagnostics: impl IntoIterator<Item = String>) {
        self.diagnostics.get_mut().extend(diagnostics);
    }
}

#[derive(Debug)]
struct SyntaxLintVisitor<'a> {
    diagnostics: Vec<String>,
    source: &'a str,
}

impl Visitor<'_> for SyntaxLintVisitor<'_> {
    fn visit_string_literal(&mut self, string_literal: &'_ StringLiteral) {
        // A very naive implementation of a "use double quotes" rule.
        let text = &self.source[string_literal.range];

        if text.starts_with('\'') {
            self.diagnostics
                .push("Use double quotes for strings".to_string());
        }
    }
}

#[derive(Debug, Clone)]
pub enum Diagnostics {
    Empty,
    List(Arc<Vec<String>>),
}

impl Diagnostics {
    pub fn as_slice(&self) -> &[String] {
        match self {
            Diagnostics::Empty => &[],
            Diagnostics::List(list) => list.as_slice(),
        }
    }
}

impl Deref for Diagnostics {
    type Target = [String];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl From<Vec<String>> for Diagnostics {
    fn from(value: Vec<String>) -> Self {
        if value.is_empty() {
            Diagnostics::Empty
        } else {
            Diagnostics::List(Arc::new(value))
        }
    }
}

#[derive(Default, Debug)]
pub struct LintSyntaxStorage(KeyValueCache<FileId, Diagnostics>);

impl Deref for LintSyntaxStorage {
    type Target = KeyValueCache<FileId, Diagnostics>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for LintSyntaxStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

#[derive(Default, Debug)]
pub struct LintSemanticStorage(KeyValueCache<FileId, Diagnostics>);

impl Deref for LintSemanticStorage {
    type Target = KeyValueCache<FileId, Diagnostics>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for LintSemanticStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
@@ -1,359 +0,0 @@
#![allow(clippy::dbg_macro)]

use std::path::Path;
use std::sync::Mutex;

use crossbeam::channel as crossbeam_channel;
use tracing::subscriber::Interest;
use tracing::{Level, Metadata};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
use tracing_subscriber::{Layer, Registry};
use tracing_tree::time::Uptime;

use red_knot::db::{HasJar, ParallelDatabase, QueryError, SourceDb, SourceJar};
use red_knot::module::{set_module_search_paths, ModuleSearchPath, ModuleSearchPathKind};
use red_knot::program::check::ExecutionMode;
use red_knot::program::{FileWatcherChange, Program};
use red_knot::watch::FileWatcher;
use red_knot::Workspace;

#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
fn main() -> anyhow::Result<()> {
    setup_tracing();

    let arguments: Vec<_> = std::env::args().collect();

    if arguments.len() < 2 {
        eprintln!("Usage: red_knot <path>");
        return Err(anyhow::anyhow!("Invalid arguments"));
    }

    let entry_point = Path::new(&arguments[1]);

    if !entry_point.exists() {
        eprintln!("The entry point does not exist.");
        return Err(anyhow::anyhow!("Invalid arguments"));
    }

    if !entry_point.is_file() {
        eprintln!("The entry point is not a file.");
        return Err(anyhow::anyhow!("Invalid arguments"));
    }

    let workspace_folder = entry_point.parent().unwrap();
    let workspace = Workspace::new(workspace_folder.to_path_buf());

    let workspace_search_path = ModuleSearchPath::new(
        workspace.root().to_path_buf(),
        ModuleSearchPathKind::FirstParty,
    );
    let mut program = Program::new(workspace);
    set_module_search_paths(&mut program, vec![workspace_search_path]);

    let entry_id = program.file_id(entry_point);
    program.workspace_mut().open_file(entry_id);

    let (main_loop, main_loop_cancellation_token) = MainLoop::new();

    // Listen to Ctrl+C and abort the watch mode.
    let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
    ctrlc::set_handler(move || {
        let mut lock = main_loop_cancellation_token.lock().unwrap();

        if let Some(token) = lock.take() {
            token.stop();
        }
    })?;

    let file_changes_notifier = main_loop.file_changes_notifier();

    // Watch for file changes and re-trigger the analysis.
    let mut file_watcher = FileWatcher::new(move |changes| {
        file_changes_notifier.notify(changes);
    })?;

    file_watcher.watch_folder(workspace_folder)?;

    main_loop.run(&mut program);

    let source_jar: &SourceJar = program.jar().unwrap();

    dbg!(source_jar.parsed.statistics());
    dbg!(source_jar.sources.statistics());

    Ok(())
}

struct MainLoop {
    orchestrator_sender: crossbeam_channel::Sender<OrchestratorMessage>,
    main_loop_receiver: crossbeam_channel::Receiver<MainLoopMessage>,
}

impl MainLoop {
    fn new() -> (Self, MainLoopCancellationToken) {
        let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
        let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);

        let mut orchestrator = Orchestrator {
            receiver: orchestrator_receiver,
            sender: main_loop_sender.clone(),
            revision: 0,
        };

        std::thread::spawn(move || {
            orchestrator.run();
        });

        (
            Self {
                orchestrator_sender,
                main_loop_receiver,
            },
            MainLoopCancellationToken {
                sender: main_loop_sender,
            },
        )
    }

    fn file_changes_notifier(&self) -> FileChangesNotifier {
        FileChangesNotifier {
            sender: self.orchestrator_sender.clone(),
        }
    }

    fn run(self, program: &mut Program) {
        self.orchestrator_sender
            .send(OrchestratorMessage::Run)
            .unwrap();

        for message in &self.main_loop_receiver {
            tracing::trace!("Main Loop: Tick");

            match message {
                MainLoopMessage::CheckProgram { revision } => {
                    let program = program.snapshot();
                    let sender = self.orchestrator_sender.clone();

                    // Spawn a new task that checks the program. This needs to be done in a separate thread
                    // to prevent blocking the main loop here.
                    rayon::spawn(move || match program.check(ExecutionMode::ThreadPool) {
                        Ok(result) => {
                            sender
                                .send(OrchestratorMessage::CheckProgramCompleted {
                                    diagnostics: result,
                                    revision,
                                })
                                .unwrap();
                        }
                        Err(QueryError::Cancelled) => {}
                    });
                }
                MainLoopMessage::ApplyChanges(changes) => {
                    // Automatically cancels any pending queries and waits for them to complete.
                    program.apply_changes(changes);
                }
                MainLoopMessage::CheckCompleted(diagnostics) => {
                    dbg!(diagnostics);
                }
                MainLoopMessage::Exit => {
                    return;
                }
            }
        }
    }
}

impl Drop for MainLoop {
    fn drop(&mut self) {
        self.orchestrator_sender
            .send(OrchestratorMessage::Shutdown)
            .unwrap();
    }
}

#[derive(Debug, Clone)]
struct FileChangesNotifier {
    sender: crossbeam_channel::Sender<OrchestratorMessage>,
}

impl FileChangesNotifier {
    fn notify(&self, changes: Vec<FileWatcherChange>) {
        self.sender
            .send(OrchestratorMessage::FileChanges(changes))
            .unwrap();
    }
}

#[derive(Debug)]
struct MainLoopCancellationToken {
    sender: crossbeam_channel::Sender<MainLoopMessage>,
}

impl MainLoopCancellationToken {
    fn stop(self) {
        self.sender.send(MainLoopMessage::Exit).unwrap();
    }
}

struct Orchestrator {
    /// Sends messages to the main loop.
    sender: crossbeam_channel::Sender<MainLoopMessage>,
    /// Receives messages from the main loop.
    receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
    revision: usize,
}

impl Orchestrator {
    fn run(&mut self) {
        while let Ok(message) = self.receiver.recv() {
            match message {
                OrchestratorMessage::Run => {
                    self.sender
                        .send(MainLoopMessage::CheckProgram {
                            revision: self.revision,
                        })
                        .unwrap();
                }

                OrchestratorMessage::CheckProgramCompleted {
                    diagnostics,
                    revision,
                } => {
                    // Only take the diagnostics if they are for the latest revision.
                    if self.revision == revision {
                        self.sender
                            .send(MainLoopMessage::CheckCompleted(diagnostics))
                            .unwrap();
                    } else {
                        tracing::debug!(
                            "Discarding diagnostics for outdated revision {revision} (current: {}).",
                            self.revision
                        );
                    }
                }

                OrchestratorMessage::FileChanges(changes) => {
                    // Request cancellation, but wait until all analysis tasks have completed to
                    // avoid stale messages in the next main loop.

                    self.revision += 1;
                    self.debounce_changes(changes);
                }
                OrchestratorMessage::Shutdown => {
                    return self.shutdown();
                }
            }
        }
    }

    fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
        loop {
            // Consume possibly incoming file change messages before running a new analysis, but don't wait for more than 10ms.
            crossbeam_channel::select! {
                recv(self.receiver) -> message => {
                    match message {
                        Ok(OrchestratorMessage::Shutdown) => {
                            return self.shutdown();
                        }
                        Ok(OrchestratorMessage::FileChanges(file_changes)) => {
                            changes.extend(file_changes);
                        }

                        Ok(OrchestratorMessage::CheckProgramCompleted { .. }) => {
                            // Disregard any outdated completion message.
                        }
                        Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),

                        Err(_) => {
                            // There are no more senders; no point in waiting for more messages.
                            return;
                        }
                    }
                },
                default(std::time::Duration::from_millis(10)) => {
                    // No more file changes after 10ms: send the changes and schedule a new analysis.
                    self.sender.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
                    self.sender.send(MainLoopMessage::CheckProgram { revision: self.revision }).unwrap();
                    return;
                }
            }
        }
    }

    #[allow(clippy::unused_self)]
    fn shutdown(&self) {
        tracing::trace!("Shutting down orchestrator.");
    }
}

/// Message sent from the orchestrator to the main loop.
#[derive(Debug)]
enum MainLoopMessage {
    CheckProgram { revision: usize },
    CheckCompleted(Vec<String>),
    ApplyChanges(Vec<FileWatcherChange>),
    Exit,
}

#[derive(Debug)]
enum OrchestratorMessage {
    Run,
    Shutdown,

    CheckProgramCompleted {
        diagnostics: Vec<String>,
        revision: usize,
    },

    FileChanges(Vec<FileWatcherChange>),
}

fn setup_tracing() {
    let subscriber = Registry::default().with(
        tracing_tree::HierarchicalLayer::default()
            .with_indent_lines(true)
            .with_indent_amount(2)
            .with_bracketed_fields(true)
            .with_thread_ids(true)
            .with_targets(true)
            .with_writer(|| Box::new(std::io::stderr()))
            .with_timer(Uptime::default())
            .with_filter(LoggingFilter {
                trace_level: Level::TRACE,
            }),
    );

    tracing::subscriber::set_global_default(subscriber).unwrap();
}

struct LoggingFilter {
    trace_level: Level,
}

impl LoggingFilter {
    fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
        let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
            self.trace_level
        } else {
            Level::INFO
        };

        meta.level() <= &filter
    }
}

impl<S> Filter<S> for LoggingFilter {
    fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
        self.is_enabled(meta)
    }

    fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
        if self.is_enabled(meta) {
            Interest::always()
        } else {
            Interest::never()
        }
    }

    fn max_level_hint(&self) -> Option<LevelFilter> {
        Some(LevelFilter::from_level(self.trace_level))
    }
}
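The debounce loop above hinges on `crossbeam_channel::select!` with a `default(timeout)` arm. A self-contained sketch of just that pattern, assuming the crossbeam-channel crate:

use std::time::Duration;

use crossbeam_channel as channel;

fn main() {
    let (sender, receiver) = channel::unbounded();
    sender.send("change.py").unwrap();

    let mut batch = Vec::new();
    loop {
        channel::select! {
            recv(receiver) -> message => match message {
                Ok(change) => batch.push(change),
                Err(_) => break, // all senders dropped
            },
            // No new message within 10ms: the batch is complete.
            default(Duration::from_millis(10)) => break,
        }
    }
    assert_eq!(batch, vec!["change.py"]);
}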
File diff suppressed because it is too large
@@ -1,93 +0,0 @@
use std::ops::{Deref, DerefMut};
use std::sync::Arc;

use ruff_python_ast as ast;
use ruff_python_parser::{Mode, ParseError};
use ruff_text_size::{Ranged, TextRange};

use crate::cache::KeyValueCache;
use crate::db::{QueryResult, SourceDb};
use crate::files::FileId;
use crate::source::source_text;

#[derive(Debug, Clone, PartialEq)]
pub struct Parsed {
    inner: Arc<ParsedInner>,
}

#[derive(Debug, PartialEq)]
struct ParsedInner {
    ast: ast::ModModule,
    errors: Vec<ParseError>,
}

impl Parsed {
    fn new(ast: ast::ModModule, errors: Vec<ParseError>) -> Self {
        Self {
            inner: Arc::new(ParsedInner { ast, errors }),
        }
    }

    pub(crate) fn from_text(text: &str) -> Self {
        let result = ruff_python_parser::parse(text, Mode::Module);

        let (module, errors) = match result {
            Ok(ast::Mod::Module(module)) => (module, vec![]),
            // `Mode::Module` shouldn't produce a bare expression, but wrap it
            // in a module if it does.
            Ok(ast::Mod::Expression(expression)) => (
                ast::ModModule {
                    range: expression.range(),
                    body: vec![ast::Stmt::Expr(ast::StmtExpr {
                        range: expression.range(),
                        value: expression.body,
                    })],
                },
                vec![],
            ),
            // A hard parse error: return an empty module and carry the error along.
            Err(error) => (
                ast::ModModule {
                    range: TextRange::default(),
                    body: Vec::new(),
                },
                vec![error],
            ),
        };

        Parsed::new(module, errors)
    }

    pub fn ast(&self) -> &ast::ModModule {
        &self.inner.ast
    }

    pub fn errors(&self) -> &[ParseError] {
        &self.inner.errors
    }
}

#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn parse(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Parsed> {
    let jar = db.jar()?;

    jar.parsed.get(&file_id, |file_id| {
        let source = source_text(db, *file_id)?;

        Ok(Parsed::from_text(source.text()))
    })
}

#[derive(Debug, Default)]
pub struct ParsedStorage(KeyValueCache<FileId, Parsed>);

impl Deref for ParsedStorage {
    type Target = KeyValueCache<FileId, Parsed>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for ParsedStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
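`Parsed::from_text` never fails; syntax errors are carried alongside a (possibly empty) AST. A test-style sketch of that contract:

#[test]
fn syntax_errors_are_collected() {
    let parsed = Parsed::from_text("def broken(:\n");
    assert!(!parsed.errors().is_empty());
    assert!(parsed.ast().body.is_empty()); // hard errors yield an empty module
}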
@@ -1,413 +0,0 @@
use rayon::{current_num_threads, yield_local};
use rustc_hash::FxHashSet;

use crate::db::{Database, QueryError, QueryResult};
use crate::files::FileId;
use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
use crate::module::{file_to_module, resolve_module};
use crate::program::Program;
use crate::symbols::{symbol_table, Dependency};

impl Program {
    /// Checks all open files in the workspace and its dependencies.
    #[tracing::instrument(level = "debug", skip_all)]
    pub fn check(&self, mode: ExecutionMode) -> QueryResult<Vec<String>> {
        self.cancelled()?;

        let mut context = CheckContext::new(self);

        match mode {
            ExecutionMode::SingleThreaded => SingleThreadedExecutor.run(&mut context)?,
            ExecutionMode::ThreadPool => ThreadPoolExecutor.run(&mut context)?,
        };

        Ok(context.finish())
    }

    #[tracing::instrument(level = "debug", skip(self, context))]
    fn check_file(&self, file: FileId, context: &CheckFileContext) -> QueryResult<Diagnostics> {
        self.cancelled()?;

        let symbol_table = symbol_table(self, file)?;
        let dependencies = symbol_table.dependencies();

        if !dependencies.is_empty() {
            let module = file_to_module(self, file)?;

            // TODO scheduling all dependencies here is wasteful if we don't infer any types on them,
            // but I think that's unlikely, so it is okay?
            // Anyway, we need to figure out a way to retrieve the dependencies of a module
            // from the persistent cache. So maybe it should be a separate query after all.
            for dependency in dependencies {
                let dependency_name = match dependency {
                    Dependency::Module(name) => Some(name.clone()),
                    Dependency::Relative { .. } => match &module {
                        Some(module) => module.resolve_dependency(self, dependency)?,
                        None => None,
                    },
                };

                if let Some(dependency_name) = dependency_name {
                    // TODO We may want to have a different check function for non-first-party
                    // files because we only need to index them, not check them.
                    // Supporting non-first-party code also requires supporting typing stubs.
                    if let Some(dependency) = resolve_module(self, dependency_name)? {
                        if dependency.path(self)?.root().kind().is_first_party() {
                            context.schedule_dependency(dependency.path(self)?.file());
                        }
                    }
                }
            }
        }

        let mut diagnostics = Vec::new();

        if self.workspace().is_file_open(file) {
            diagnostics.extend_from_slice(&lint_syntax(self, file)?);
            diagnostics.extend_from_slice(&lint_semantic(self, file)?);
        }

        Ok(Diagnostics::from(diagnostics))
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ExecutionMode {
    SingleThreaded,
    ThreadPool,
}

/// Context that stores state information about the entire check operation.
struct CheckContext<'a> {
    /// IDs of the files that have been queued for checking.
    ///
    /// Used to avoid queuing the same file twice.
    scheduled_files: FxHashSet<FileId>,

    /// Reference to the program that is checked.
    program: &'a Program,

    /// The aggregated diagnostics.
    diagnostics: Vec<String>,
}

impl<'a> CheckContext<'a> {
    fn new(program: &'a Program) -> Self {
        Self {
            scheduled_files: FxHashSet::default(),
            program,
            diagnostics: Vec::new(),
        }
    }

    /// Returns the tasks to check all open files in the workspace.
    fn check_open_files(&mut self) -> Vec<CheckOpenFileTask> {
        self.scheduled_files
            .extend(self.program.workspace().open_files());

        self.program
            .workspace()
            .open_files()
            .map(|file_id| CheckOpenFileTask { file_id })
            .collect()
    }

    /// Returns the task to check a dependency.
    fn check_dependency(&mut self, file_id: FileId) -> Option<CheckDependencyTask> {
        if self.scheduled_files.insert(file_id) {
            Some(CheckDependencyTask { file_id })
        } else {
            None
        }
    }

    /// Pushes the result for a single file check operation.
    fn push_diagnostics(&mut self, diagnostics: &Diagnostics) {
        self.diagnostics.extend_from_slice(diagnostics);
    }

    /// Returns a reference to the program that is being checked.
    fn program(&self) -> &'a Program {
        self.program
    }

    /// Creates a task context that is used to check a single file.
    fn task_context<'b, S>(&self, dependency_scheduler: &'b S) -> CheckTaskContext<'a, 'b, S>
    where
        S: ScheduleDependency,
    {
        CheckTaskContext {
            program: self.program,
            dependency_scheduler,
        }
    }

    fn finish(self) -> Vec<String> {
        self.diagnostics
    }
}

/// Trait that abstracts away how a dependency of a file gets scheduled for checking.
trait ScheduleDependency {
    /// Schedules the file with the given ID for checking.
    fn schedule(&self, file_id: FileId);
}

impl<T> ScheduleDependency for T
where
    T: Fn(FileId),
{
    fn schedule(&self, file_id: FileId) {
        let f = self;
        f(file_id);
    }
}
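The blanket impl above is what lets plain closures act as schedulers: the single-threaded executor passes a no-op closure and the thread-pool executor passes a channel-sending closure. A standalone sketch of the same pattern, with `FileId` stubbed out for illustration:

#[derive(Copy, Clone, Debug)]
struct FileId(u32); // stand-in for the crate's FileId

trait ScheduleDependency {
    fn schedule(&self, file_id: FileId);
}

// Blanket impl: every `Fn(FileId)` closure is a ScheduleDependency.
impl<T: Fn(FileId)> ScheduleDependency for T {
    fn schedule(&self, file_id: FileId) {
        self(file_id);
    }
}

fn main() {
    let noop = |_: FileId| {}; // essentially what SingleThreadedExecutor uses
    noop.schedule(FileId(1));

    let log = |file_id: FileId| println!("scheduling {file_id:?}");
    log.schedule(FileId(42));
}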
/// Context that is used to run a single file check task.
///
/// The task is generic over `S` because it is passed across thread boundaries and
/// we don't want to add the requirement that [`ScheduleDependency`] must be [`Send`].
struct CheckTaskContext<'a, 'scheduler, S>
where
    S: ScheduleDependency,
{
    dependency_scheduler: &'scheduler S,
    program: &'a Program,
}

impl<'a, 'scheduler, S> CheckTaskContext<'a, 'scheduler, S>
where
    S: ScheduleDependency,
{
    fn as_file_context(&self) -> CheckFileContext<'scheduler> {
        CheckFileContext {
            dependency_scheduler: self.dependency_scheduler,
        }
    }
}

/// Context passed when checking a single file.
///
/// This is a trimmed down version of [`CheckTaskContext`] with the type parameter `S` erased
/// to avoid monomorphization of [`Program::check_file`].
struct CheckFileContext<'a> {
    dependency_scheduler: &'a dyn ScheduleDependency,
}

impl<'a> CheckFileContext<'a> {
    fn schedule_dependency(&self, file_id: FileId) {
        self.dependency_scheduler.schedule(file_id);
    }
}

#[derive(Debug)]
enum CheckFileTask {
    OpenFile(CheckOpenFileTask),
    Dependency(CheckDependencyTask),
}

impl CheckFileTask {
    /// Runs the task and returns the results for checking this file.
    fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
    where
        S: ScheduleDependency,
    {
        match self {
            Self::OpenFile(task) => task.run(context),
            Self::Dependency(task) => task.run(context),
        }
    }

    fn file_id(&self) -> FileId {
        match self {
            CheckFileTask::OpenFile(task) => task.file_id,
            CheckFileTask::Dependency(task) => task.file_id,
        }
    }
}

/// Task to check an open file.
#[derive(Debug)]
struct CheckOpenFileTask {
    file_id: FileId,
}

impl CheckOpenFileTask {
    fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
    where
        S: ScheduleDependency,
    {
        context
            .program
            .check_file(self.file_id, &context.as_file_context())
    }
}

/// Task to check a dependency file.
#[derive(Debug)]
struct CheckDependencyTask {
    file_id: FileId,
}

impl CheckDependencyTask {
    fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
    where
        S: ScheduleDependency,
    {
        context
            .program
            .check_file(self.file_id, &context.as_file_context())
    }
}

/// Executor that schedules the checking of individual program files.
trait CheckExecutor {
    fn run(self, context: &mut CheckContext) -> QueryResult<()>;
}

/// Executor that runs all check operations on the current thread.
///
/// The executor does not schedule dependencies for checking.
/// The main motivation for scheduling dependencies
/// in a multithreaded environment is to parse and index the dependencies concurrently.
/// However, that doesn't make sense in a single threaded environment, because the dependencies then compete
/// with checking the open files. Checking dependencies in a single threaded environment is more likely
/// to hurt performance because we end up analyzing files in their entirety, even if we only need to type check parts of them.
#[derive(Debug, Default)]
struct SingleThreadedExecutor;

impl CheckExecutor for SingleThreadedExecutor {
    fn run(self, context: &mut CheckContext) -> QueryResult<()> {
        let mut queue = context.check_open_files();

        let noop_schedule_dependency = |_| {};

        while let Some(file) = queue.pop() {
            context.program().cancelled()?;

            let task_context = context.task_context(&noop_schedule_dependency);
            context.push_diagnostics(&file.run(&task_context)?);
        }

        Ok(())
    }
}

/// Executor that runs the check operations on a thread pool.
///
/// The executor runs each check operation as its own task using a thread pool.
///
/// Unlike [`SingleThreadedExecutor`], this executor schedules dependencies for checking. It
/// even schedules dependencies for checking when the thread pool size is 1 for a better debugging experience.
#[derive(Debug, Default)]
struct ThreadPoolExecutor;

impl CheckExecutor for ThreadPoolExecutor {
    fn run(self, context: &mut CheckContext) -> QueryResult<()> {
        let num_threads = current_num_threads();
        let single_threaded = num_threads == 1;
        let span = tracing::trace_span!("ThreadPoolExecutor::run", num_threads);
        let _guard = span.enter();

        let mut queue: Vec<_> = context
            .check_open_files()
            .into_iter()
            .map(CheckFileTask::OpenFile)
            .collect();

        let (sender, receiver) = if single_threaded {
            // Use an unbounded queue for single threaded execution to prevent deadlocks
            // when a single file schedules multiple dependencies.
            crossbeam::channel::unbounded()
        } else {
            // Use a bounded queue to apply backpressure when the orchestration thread isn't able to keep
            // up processing messages from the worker threads.
            crossbeam::channel::bounded(num_threads)
        };

        let schedule_sender = sender.clone();
        let schedule_dependency = move |file_id| {
            schedule_sender
                .send(ThreadPoolMessage::ScheduleDependency(file_id))
                .unwrap();
        };

        let result = rayon::in_place_scope(|scope| {
            let mut pending = 0usize;

            loop {
                context.program().cancelled()?;

                // 1. Try to get a queued message, to ensure that we always have remaining space in the channel and never block the worker threads.
                // 2. Try to process a queued file.
                // 3. If there's no queued file, wait for the next incoming message.
                // 4. Exit if there are no more messages and no senders.
                let message = if let Ok(message) = receiver.try_recv() {
                    message
                } else if let Some(task) = queue.pop() {
                    pending += 1;

                    let task_context = context.task_context(&schedule_dependency);
                    let sender = sender.clone();
                    let task_span = tracing::trace_span!(
                        parent: &span,
                        "CheckFileTask::run",
                        file_id = task.file_id().as_u32(),
                    );

                    scope.spawn(move |_| {
                        task_span.in_scope(|| match task.run(&task_context) {
                            Ok(result) => {
                                sender.send(ThreadPoolMessage::Completed(result)).unwrap();
                            }
                            Err(err) => sender.send(ThreadPoolMessage::Errored(err)).unwrap(),
                        });
                    });

                    // If this is a single threaded rayon thread pool, yield the current thread,
                    // or we never start processing the work items.
                    if single_threaded {
                        yield_local();
                    }

                    continue;
                } else if let Ok(message) = receiver.recv() {
                    message
                } else {
                    break;
                };

                match message {
                    ThreadPoolMessage::ScheduleDependency(dependency) => {
                        if let Some(task) = context.check_dependency(dependency) {
                            queue.push(CheckFileTask::Dependency(task));
                        }
                    }
                    ThreadPoolMessage::Completed(diagnostics) => {
                        context.push_diagnostics(&diagnostics);
                        pending -= 1;

                        if pending == 0 && queue.is_empty() {
                            break;
                        }
                    }
                    ThreadPoolMessage::Errored(err) => {
                        return Err(err);
                    }
                }
            }

            Ok(())
        });

        result
    }
}

#[derive(Debug)]
enum ThreadPoolMessage {
    ScheduleDependency(FileId),
    Completed(Diagnostics),
    Errored(QueryError),
}
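The bounded/unbounded split above is the interesting part: worker results and dependency requests flow over one channel, and the bounded variant makes workers block rather than flood the orchestration thread. A minimal std-only sketch of that backpressure idea (the real code uses crossbeam channels inside a rayon scope):

use std::sync::mpsc::sync_channel;
use std::thread;

fn main() {
    let num_workers = 4;
    // Bounded to the worker count: `send` blocks once the orchestrator falls behind.
    let (sender, receiver) = sync_channel::<usize>(num_workers);

    let handles: Vec<_> = (0..num_workers)
        .map(|task_id| {
            let sender = sender.clone();
            thread::spawn(move || sender.send(task_id).unwrap())
        })
        .collect();
    drop(sender); // drop the original so `recv` errors once all workers finish

    // Orchestration loop: drain completions until every sender is gone.
    while let Ok(task_id) = receiver.recv() {
        println!("task {task_id} completed");
    }

    for handle in handles {
        handle.join().unwrap();
    }
}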
@@ -1,275 +0,0 @@
use std::collections::hash_map::Entry;
use std::path::{Path, PathBuf};
use std::sync::Arc;

use rustc_hash::FxHashMap;

use crate::db::{
    Database, Db, DbRuntime, DbWithJar, HasJar, HasJars, JarsStorage, LintDb, LintJar,
    ParallelDatabase, QueryResult, SemanticDb, SemanticJar, Snapshot, SourceDb, SourceJar, Upcast,
};
use crate::files::{FileId, Files};
use crate::Workspace;

pub mod check;

#[derive(Debug)]
pub struct Program {
    jars: JarsStorage<Program>,
    files: Files,
    workspace: Workspace,
}

impl Program {
    pub fn new(workspace: Workspace) -> Self {
        Self {
            jars: JarsStorage::default(),
            files: Files::default(),
            workspace,
        }
    }

    pub fn apply_changes<I>(&mut self, changes: I)
    where
        I: IntoIterator<Item = FileWatcherChange>,
    {
        let mut aggregated_changes = AggregatedChanges::default();

        aggregated_changes.extend(changes.into_iter().map(|change| FileChange {
            id: self.files.intern(&change.path),
            kind: change.kind,
        }));

        let (source, semantic, lint) = self.jars_mut();
        for change in aggregated_changes.iter() {
            semantic.module_resolver.remove_module(change.id);
            semantic.symbol_tables.remove(&change.id);
            source.sources.remove(&change.id);
            source.parsed.remove(&change.id);
            // TODO: remove all dependent modules as well
            semantic.type_store.remove_module(change.id);
            lint.lint_syntax.remove(&change.id);
            lint.lint_semantic.remove(&change.id);
        }
    }

    pub fn files(&self) -> &Files {
        &self.files
    }

    pub fn workspace(&self) -> &Workspace {
        &self.workspace
    }

    pub fn workspace_mut(&mut self) -> &mut Workspace {
        &mut self.workspace
    }
}

impl SourceDb for Program {
    fn file_id(&self, path: &Path) -> FileId {
        self.files.intern(path)
    }

    fn file_path(&self, file_id: FileId) -> Arc<Path> {
        self.files.path(file_id)
    }
}

impl DbWithJar<SourceJar> for Program {}

impl SemanticDb for Program {}

impl DbWithJar<SemanticJar> for Program {}

impl LintDb for Program {}

impl DbWithJar<LintJar> for Program {}

impl Upcast<dyn SemanticDb> for Program {
    fn upcast(&self) -> &(dyn SemanticDb + 'static) {
        self
    }
}

impl Upcast<dyn SourceDb> for Program {
    fn upcast(&self) -> &(dyn SourceDb + 'static) {
        self
    }
}

impl Upcast<dyn LintDb> for Program {
    fn upcast(&self) -> &(dyn LintDb + 'static) {
        self
    }
}

impl Db for Program {}

impl Database for Program {
    fn runtime(&self) -> &DbRuntime {
        self.jars.runtime()
    }

    fn runtime_mut(&mut self) -> &mut DbRuntime {
        self.jars.runtime_mut()
    }
}

impl ParallelDatabase for Program {
    fn snapshot(&self) -> Snapshot<Self> {
        Snapshot::new(Self {
            jars: self.jars.snapshot(),
            files: self.files.snapshot(),
            workspace: self.workspace.clone(),
        })
    }
}

impl HasJars for Program {
    type Jars = (SourceJar, SemanticJar, LintJar);

    fn jars(&self) -> QueryResult<&Self::Jars> {
        self.jars.jars()
    }

    fn jars_mut(&mut self) -> &mut Self::Jars {
        self.jars.jars_mut()
    }
}

impl HasJar<SourceJar> for Program {
    fn jar(&self) -> QueryResult<&SourceJar> {
        Ok(&self.jars()?.0)
    }

    fn jar_mut(&mut self) -> &mut SourceJar {
        &mut self.jars_mut().0
    }
}

impl HasJar<SemanticJar> for Program {
    fn jar(&self) -> QueryResult<&SemanticJar> {
        Ok(&self.jars()?.1)
    }

    fn jar_mut(&mut self) -> &mut SemanticJar {
        &mut self.jars_mut().1
    }
}

impl HasJar<LintJar> for Program {
    fn jar(&self) -> QueryResult<&LintJar> {
        Ok(&self.jars()?.2)
    }

    fn jar_mut(&mut self) -> &mut LintJar {
        &mut self.jars_mut().2
    }
}

#[derive(Clone, Debug)]
pub struct FileWatcherChange {
    path: PathBuf,
    kind: FileChangeKind,
}

impl FileWatcherChange {
    pub fn new(path: PathBuf, kind: FileChangeKind) -> Self {
        Self { path, kind }
    }
}

#[derive(Copy, Clone, Debug)]
struct FileChange {
    id: FileId,
    kind: FileChangeKind,
}

impl FileChange {
    fn file_id(self) -> FileId {
        self.id
    }

    fn kind(self) -> FileChangeKind {
        self.kind
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileChangeKind {
    Created,
    Modified,
    Deleted,
}

#[derive(Default, Debug)]
struct AggregatedChanges {
    changes: FxHashMap<FileId, FileChangeKind>,
}

impl AggregatedChanges {
    fn add(&mut self, change: FileChange) {
        match self.changes.entry(change.file_id()) {
            Entry::Occupied(mut entry) => {
                let merged = entry.get_mut();

                match (merged, change.kind()) {
                    (FileChangeKind::Created, FileChangeKind::Deleted) => {
                        // Deletion after creation means that ruff never saw the file.
                        entry.remove();
                    }
                    (FileChangeKind::Created, FileChangeKind::Modified) => {
                        // No-op: for ruff, modifying a file that it doesn't yet know exists is still a creation.
                    }

                    (FileChangeKind::Modified, FileChangeKind::Created) => {
                        // Uhh, that should probably not happen. Continue considering it a modification.
                    }

                    (FileChangeKind::Modified, FileChangeKind::Deleted) => {
                        *entry.get_mut() = FileChangeKind::Deleted;
                    }

                    (FileChangeKind::Deleted, FileChangeKind::Created) => {
                        *entry.get_mut() = FileChangeKind::Modified;
                    }

                    (FileChangeKind::Deleted, FileChangeKind::Modified) => {
                        // That's weird, but let's consider it a modification.
                        *entry.get_mut() = FileChangeKind::Modified;
                    }

                    (FileChangeKind::Created, FileChangeKind::Created)
                    | (FileChangeKind::Modified, FileChangeKind::Modified)
                    | (FileChangeKind::Deleted, FileChangeKind::Deleted) => {
                        // No-op transitions. Some of them should be impossible but we handle them anyway.
                    }
                }
            }
            Entry::Vacant(entry) => {
                entry.insert(change.kind());
            }
        }
    }

    fn extend<I>(&mut self, changes: I)
    where
        I: IntoIterator<Item = FileChange>,
    {
        let iter = changes.into_iter();
        let (lower, _) = iter.size_hint();
        self.changes.reserve(lower);

        for change in iter {
            self.add(change);
        }
    }

    fn iter(&self) -> impl Iterator<Item = FileChange> + '_ {
        self.changes.iter().map(|(id, kind)| FileChange {
            id: *id,
            kind: *kind,
        })
    }
}
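The `add` method above is effectively a transition table over pairs of change kinds. A compact restatement of those rules as a pure function, which may be easier to check against the match arms (`None` encodes the created-then-deleted case where the entry is dropped):

#[derive(Copy, Clone, Debug, PartialEq)]
enum FileChangeKind {
    Created,
    Modified,
    Deleted,
}

fn merge(previous: FileChangeKind, next: FileChangeKind) -> Option<FileChangeKind> {
    use FileChangeKind::*;
    match (previous, next) {
        // Created then deleted: the file was never observed, drop the entry.
        (Created, Deleted) => None,
        // Anything else after a creation is still a creation.
        (Created, _) => Some(Created),
        // Deleted then recreated (or "modified") reads as a modification.
        (Deleted, Created | Modified) => Some(Modified),
        // A deletion wins over a prior modification.
        (_, Deleted) => Some(Deleted),
        // Remaining pairs are no-ops; keep the earlier kind.
        (previous, _) => Some(previous),
    }
}

fn main() {
    use FileChangeKind::*;
    assert_eq!(merge(Created, Deleted), None);
    assert_eq!(merge(Created, Modified), Some(Created));
    assert_eq!(merge(Modified, Deleted), Some(Deleted));
    assert_eq!(merge(Deleted, Created), Some(Modified));
}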
@@ -1,95 +0,0 @@
use std::ops::{Deref, DerefMut};
use std::sync::Arc;

use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;

use crate::cache::KeyValueCache;
use crate::db::{QueryResult, SourceDb};
use crate::files::FileId;

#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn source_text(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Source> {
    let jar = db.jar()?;
    let sources = &jar.sources;

    sources.get(&file_id, |file_id| {
        let path = db.file_path(*file_id);

        let source_text = std::fs::read_to_string(&path).unwrap_or_else(|err| {
            tracing::error!("Failed to read file '{path:?}': {err}. Falling back to empty text");
            String::new()
        });

        let python_ty = PySourceType::from(&path);

        let kind = match python_ty {
            PySourceType::Python => {
                SourceKind::Python(Arc::from(source_text))
            }
            PySourceType::Stub => SourceKind::Stub(Arc::from(source_text)),
            PySourceType::Ipynb => {
                let notebook = Notebook::from_source_code(&source_text).unwrap_or_else(|err| {
                    // TODO should this be changed to never fail?
                    // Or should we instead add a diagnostic somewhere? But what would we return in this case?
                    tracing::error!(
                        "Failed to parse notebook '{path:?}': {err}. Falling back to an empty notebook"
                    );
                    Notebook::from_source_code("").unwrap()
                });

                SourceKind::IpyNotebook(Arc::new(notebook))
            }
        };

        Ok(Source { kind })
    })
}

#[derive(Debug, Clone, PartialEq)]
pub enum SourceKind {
    Python(Arc<str>),
    Stub(Arc<str>),
    IpyNotebook(Arc<Notebook>),
}

#[derive(Debug, Clone, PartialEq)]
pub struct Source {
    kind: SourceKind,
}

impl Source {
    pub fn python<T: Into<Arc<str>>>(source: T) -> Self {
        Self {
            kind: SourceKind::Python(source.into()),
        }
    }

    pub fn kind(&self) -> &SourceKind {
        &self.kind
    }

    pub fn text(&self) -> &str {
        match &self.kind {
            SourceKind::Python(text) => text,
            SourceKind::Stub(text) => text,
            SourceKind::IpyNotebook(notebook) => notebook.source_code(),
        }
    }
}

#[derive(Debug, Default)]
pub struct SourceStorage(pub(crate) KeyValueCache<FileId, Source>);

impl Deref for SourceStorage {
    type Target = KeyValueCache<FileId, Source>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for SourceStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
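The `kind` match above dispatches on `PySourceType`, which ruff derives from the file extension. A rough stand-in for that dispatch, with a local enum instead of the real `ruff_python_ast::PySourceType` (the actual mapping may cover more cases):

use std::path::Path;

#[derive(Debug, PartialEq)]
enum PySourceType {
    Python,
    Stub,
    Ipynb,
}

// Illustrative extension-based classification; `.py` and unknown
// extensions both fall back to plain Python source here.
fn source_type(path: &Path) -> PySourceType {
    match path.extension().and_then(|ext| ext.to_str()) {
        Some("pyi") => PySourceType::Stub,
        Some("ipynb") => PySourceType::Ipynb,
        _ => PySourceType::Python,
    }
}

fn main() {
    assert_eq!(source_type(Path::new("module.py")), PySourceType::Python);
    assert_eq!(source_type(Path::new("module.pyi")), PySourceType::Stub);
    assert_eq!(source_type(Path::new("notebook.ipynb")), PySourceType::Ipynb);
}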
File diff suppressed because it is too large
@@ -1,745 +0,0 @@
#![allow(dead_code)]
use crate::ast_ids::NodeKey;
use crate::db::{QueryResult, SemanticDb, SemanticJar};
use crate::files::FileId;
use crate::symbols::{symbol_table, GlobalSymbolId, ScopeId, ScopeKind, SymbolId};
use crate::{FxDashMap, FxIndexSet, Name};
use ruff_index::{newtype_index, IndexVec};
use rustc_hash::FxHashMap;

pub(crate) mod infer;

pub(crate) use infer::{infer_definition_type, infer_symbol_type};

/// unique ID for a type
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum Type {
    /// the dynamic or gradual type: a statically-unknown set of values
    Any,
    /// the empty set of values
    Never,
    /// unknown type (no annotation)
    /// equivalent to Any, or to object in strict mode
    Unknown,
    /// name is not bound to any value
    Unbound,
    /// a specific function object
    Function(FunctionTypeId),
    /// a specific class object
    Class(ClassTypeId),
    /// the set of Python objects with the given class in their __class__'s method resolution order
    Instance(ClassTypeId),
    Union(UnionTypeId),
    Intersection(IntersectionTypeId),
    // TODO protocols, callable types, overloads, generics, type vars
}

impl Type {
    fn display<'a>(&'a self, store: &'a TypeStore) -> DisplayType<'a> {
        DisplayType { ty: self, store }
    }

    pub const fn is_unbound(&self) -> bool {
        matches!(self, Type::Unbound)
    }

    pub const fn is_unknown(&self) -> bool {
        matches!(self, Type::Unknown)
    }
}

impl From<FunctionTypeId> for Type {
    fn from(id: FunctionTypeId) -> Self {
        Type::Function(id)
    }
}

impl From<UnionTypeId> for Type {
    fn from(id: UnionTypeId) -> Self {
        Type::Union(id)
    }
}

impl From<IntersectionTypeId> for Type {
    fn from(id: IntersectionTypeId) -> Self {
        Type::Intersection(id)
    }
}

// TODO: currently calling `get_function` et al and holding on to the `FunctionTypeRef` will lock a
// shard of this dashmap, for as long as you hold the reference. This may be a problem. We could
// switch to having all the arenas hold Arc, or we could see if we can split up ModuleTypeStore,
// and/or give it inner mutability and finer-grained internal locking.
#[derive(Debug, Default)]
pub struct TypeStore {
    modules: FxDashMap<FileId, ModuleTypeStore>,
}

impl TypeStore {
    pub fn remove_module(&mut self, file_id: FileId) {
        self.modules.remove(&file_id);
    }

    pub fn cache_symbol_type(&self, symbol: GlobalSymbolId, ty: Type) {
        self.add_or_get_module(symbol.file_id)
            .symbol_types
            .insert(symbol.symbol_id, ty);
    }

    pub fn cache_node_type(&self, file_id: FileId, node_key: NodeKey, ty: Type) {
        self.add_or_get_module(file_id)
            .node_types
            .insert(node_key, ty);
    }

    pub fn get_cached_symbol_type(&self, symbol: GlobalSymbolId) -> Option<Type> {
        self.try_get_module(symbol.file_id)?
            .symbol_types
            .get(&symbol.symbol_id)
            .copied()
    }

    pub fn get_cached_node_type(&self, file_id: FileId, node_key: &NodeKey) -> Option<Type> {
        self.try_get_module(file_id)?
            .node_types
            .get(node_key)
            .copied()
    }

    fn add_or_get_module(&self, file_id: FileId) -> ModuleStoreRefMut {
        self.modules
            .entry(file_id)
            .or_insert_with(|| ModuleTypeStore::new(file_id))
    }

    fn get_module(&self, file_id: FileId) -> ModuleStoreRef {
        self.try_get_module(file_id).expect("module should exist")
    }

    fn try_get_module(&self, file_id: FileId) -> Option<ModuleStoreRef> {
        self.modules.get(&file_id)
    }

    fn add_function(
        &self,
        file_id: FileId,
        name: &str,
        symbol_id: SymbolId,
        scope_id: ScopeId,
        decorators: Vec<Type>,
    ) -> FunctionTypeId {
        self.add_or_get_module(file_id)
            .add_function(name, symbol_id, scope_id, decorators)
    }

    fn add_class(
        &self,
        file_id: FileId,
        name: &str,
        scope_id: ScopeId,
        bases: Vec<Type>,
    ) -> ClassTypeId {
        self.add_or_get_module(file_id)
            .add_class(name, scope_id, bases)
    }

    fn add_union(&mut self, file_id: FileId, elems: &[Type]) -> UnionTypeId {
        self.add_or_get_module(file_id).add_union(elems)
    }

    fn add_intersection(
        &mut self,
        file_id: FileId,
        positive: &[Type],
        negative: &[Type],
    ) -> IntersectionTypeId {
        self.add_or_get_module(file_id)
            .add_intersection(positive, negative)
    }

    fn get_function(&self, id: FunctionTypeId) -> FunctionTypeRef {
        FunctionTypeRef {
            module_store: self.get_module(id.file_id),
            function_id: id.func_id,
        }
    }

    fn get_class(&self, id: ClassTypeId) -> ClassTypeRef {
        ClassTypeRef {
            module_store: self.get_module(id.file_id),
            class_id: id.class_id,
        }
    }

    fn get_union(&self, id: UnionTypeId) -> UnionTypeRef {
        UnionTypeRef {
            module_store: self.get_module(id.file_id),
            union_id: id.union_id,
        }
    }

    fn get_intersection(&self, id: IntersectionTypeId) -> IntersectionTypeRef {
        IntersectionTypeRef {
            module_store: self.get_module(id.file_id),
            intersection_id: id.intersection_id,
        }
    }
}
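The locking caveat in the comment above (holding a `FunctionTypeRef` keeps a dashmap shard locked) is easy to reproduce in isolation. A small sketch of the hazard, assuming dashmap's usual shard-level read/write locking:

use dashmap::DashMap;

fn main() {
    let map: DashMap<u32, String> = DashMap::new();
    map.insert(1, "one".to_string());

    let guard = map.get(&1).unwrap(); // read-locks the shard containing key 1
    println!("{}", *guard);

    // Writing to the same shard while `guard` is alive would deadlock:
    // map.insert(1, "uno".to_string());

    drop(guard); // release the shard lock first
    map.insert(1, "uno".to_string());
}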
type ModuleStoreRef<'a> = dashmap::mapref::one::Ref<
    'a,
    FileId,
    ModuleTypeStore,
    std::hash::BuildHasherDefault<rustc_hash::FxHasher>,
>;

type ModuleStoreRefMut<'a> = dashmap::mapref::one::RefMut<
    'a,
    FileId,
    ModuleTypeStore,
    std::hash::BuildHasherDefault<rustc_hash::FxHasher>,
>;

#[derive(Debug)]
pub(crate) struct FunctionTypeRef<'a> {
    module_store: ModuleStoreRef<'a>,
    function_id: ModuleFunctionTypeId,
}

impl<'a> std::ops::Deref for FunctionTypeRef<'a> {
    type Target = FunctionType;

    fn deref(&self) -> &Self::Target {
        self.module_store.get_function(self.function_id)
    }
}

#[derive(Debug)]
pub(crate) struct ClassTypeRef<'a> {
    module_store: ModuleStoreRef<'a>,
    class_id: ModuleClassTypeId,
}

impl<'a> std::ops::Deref for ClassTypeRef<'a> {
    type Target = ClassType;

    fn deref(&self) -> &Self::Target {
        self.module_store.get_class(self.class_id)
    }
}

#[derive(Debug)]
pub(crate) struct UnionTypeRef<'a> {
    module_store: ModuleStoreRef<'a>,
    union_id: ModuleUnionTypeId,
}

impl<'a> std::ops::Deref for UnionTypeRef<'a> {
    type Target = UnionType;

    fn deref(&self) -> &Self::Target {
        self.module_store.get_union(self.union_id)
    }
}

#[derive(Debug)]
pub(crate) struct IntersectionTypeRef<'a> {
    module_store: ModuleStoreRef<'a>,
    intersection_id: ModuleIntersectionTypeId,
}

impl<'a> std::ops::Deref for IntersectionTypeRef<'a> {
    type Target = IntersectionType;

    fn deref(&self) -> &Self::Target {
        self.module_store.get_intersection(self.intersection_id)
    }
}

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct FunctionTypeId {
    file_id: FileId,
    func_id: ModuleFunctionTypeId,
}

impl FunctionTypeId {
    fn function(self, db: &dyn SemanticDb) -> QueryResult<FunctionTypeRef> {
        let jar: &SemanticJar = db.jar()?;
        Ok(jar.type_store.get_function(self))
    }

    pub(crate) fn name(self, db: &dyn SemanticDb) -> QueryResult<Name> {
        Ok(self.function(db)?.name().into())
    }

    pub(crate) fn global_symbol(self, db: &dyn SemanticDb) -> QueryResult<GlobalSymbolId> {
        Ok(GlobalSymbolId {
            file_id: self.file(),
            symbol_id: self.symbol(db)?,
        })
    }

    pub(crate) fn file(self) -> FileId {
        self.file_id
    }

    pub(crate) fn symbol(self, db: &dyn SemanticDb) -> QueryResult<SymbolId> {
        let FunctionType { symbol_id, .. } = *self.function(db)?;
        Ok(symbol_id)
    }

    pub(crate) fn get_containing_class(
        self,
        db: &dyn SemanticDb,
    ) -> QueryResult<Option<ClassTypeId>> {
        let table = symbol_table(db, self.file_id)?;
        let FunctionType { symbol_id, .. } = *self.function(db)?;
        let scope_id = symbol_id.symbol(&table).scope_id();
        let scope = scope_id.scope(&table);
        if !matches!(scope.kind(), ScopeKind::Class) {
            return Ok(None);
        };
        let Some(def) = scope.definition() else {
            return Ok(None);
        };
        let Some(symbol_id) = scope.defining_symbol() else {
            return Ok(None);
        };
        let Type::Class(class) = infer_definition_type(
            db,
            GlobalSymbolId {
                file_id: self.file_id,
                symbol_id,
            },
            def,
        )?
        else {
            return Ok(None);
        };
        Ok(Some(class))
    }

    pub(crate) fn has_decorator(
        self,
        db: &dyn SemanticDb,
        decorator_symbol: GlobalSymbolId,
    ) -> QueryResult<bool> {
        for deco_ty in self.function(db)?.decorators() {
            let Type::Function(deco_func) = deco_ty else {
                continue;
            };
            if deco_func.global_symbol(db)? == decorator_symbol {
                return Ok(true);
            }
        }
        Ok(false)
    }
}

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct ClassTypeId {
    file_id: FileId,
    class_id: ModuleClassTypeId,
}

impl ClassTypeId {
    fn class(self, db: &dyn SemanticDb) -> QueryResult<ClassTypeRef> {
        let jar: &SemanticJar = db.jar()?;
        Ok(jar.type_store.get_class(self))
    }

    pub(crate) fn name(self, db: &dyn SemanticDb) -> QueryResult<Name> {
        Ok(self.class(db)?.name().into())
    }

    pub(crate) fn get_super_class_member(
        self,
        db: &dyn SemanticDb,
        name: &Name,
    ) -> QueryResult<Option<Type>> {
        // TODO we should linearize the MRO instead of doing this recursively
        let class = self.class(db)?;
        for base in class.bases() {
            if let Type::Class(base) = base {
                if let Some(own_member) = base.get_own_class_member(db, name)? {
                    return Ok(Some(own_member));
                }
                if let Some(base_member) = base.get_super_class_member(db, name)? {
                    return Ok(Some(base_member));
                }
            }
        }
        Ok(None)
    }

    fn get_own_class_member(self, db: &dyn SemanticDb, name: &Name) -> QueryResult<Option<Type>> {
        // TODO: this should distinguish instance-only members (e.g. `x: int`) and not return them
        let ClassType { scope_id, .. } = *self.class(db)?;
        let table = symbol_table(db, self.file_id)?;
        if let Some(symbol_id) = table.symbol_id_by_name(scope_id, name) {
            Ok(Some(infer_symbol_type(
                db,
                GlobalSymbolId {
                    file_id: self.file_id,
                    symbol_id,
                },
            )?))
        } else {
            Ok(None)
        }
    }

    // TODO: get_own_instance_member, get_class_member, get_instance_member
}

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct UnionTypeId {
    file_id: FileId,
    union_id: ModuleUnionTypeId,
}

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct IntersectionTypeId {
    file_id: FileId,
    intersection_id: ModuleIntersectionTypeId,
}

#[newtype_index]
struct ModuleFunctionTypeId;

#[newtype_index]
struct ModuleClassTypeId;

#[newtype_index]
struct ModuleUnionTypeId;

#[newtype_index]
struct ModuleIntersectionTypeId;

#[derive(Debug)]
struct ModuleTypeStore {
    file_id: FileId,
    /// arena of all function types defined in this module
    functions: IndexVec<ModuleFunctionTypeId, FunctionType>,
    /// arena of all class types defined in this module
    classes: IndexVec<ModuleClassTypeId, ClassType>,
    /// arena of all union types created in this module
    unions: IndexVec<ModuleUnionTypeId, UnionType>,
    /// arena of all intersection types created in this module
    intersections: IndexVec<ModuleIntersectionTypeId, IntersectionType>,
    /// cached types of symbols in this module
    symbol_types: FxHashMap<SymbolId, Type>,
    /// cached types of AST nodes in this module
    node_types: FxHashMap<NodeKey, Type>,
}

impl ModuleTypeStore {
    fn new(file_id: FileId) -> Self {
        Self {
            file_id,
            functions: IndexVec::default(),
            classes: IndexVec::default(),
            unions: IndexVec::default(),
            intersections: IndexVec::default(),
            symbol_types: FxHashMap::default(),
            node_types: FxHashMap::default(),
        }
    }

    fn add_function(
        &mut self,
        name: &str,
        symbol_id: SymbolId,
        scope_id: ScopeId,
        decorators: Vec<Type>,
    ) -> FunctionTypeId {
        let func_id = self.functions.push(FunctionType {
            name: Name::new(name),
            symbol_id,
            scope_id,
            decorators,
        });
        FunctionTypeId {
            file_id: self.file_id,
            func_id,
        }
    }

    fn add_class(&mut self, name: &str, scope_id: ScopeId, bases: Vec<Type>) -> ClassTypeId {
        let class_id = self.classes.push(ClassType {
            name: Name::new(name),
            scope_id,
            // TODO: if no bases are given, that should imply [object]
            bases,
        });
        ClassTypeId {
            file_id: self.file_id,
            class_id,
        }
    }

    fn add_union(&mut self, elems: &[Type]) -> UnionTypeId {
        let union_id = self.unions.push(UnionType {
            elements: elems.iter().copied().collect(),
        });
        UnionTypeId {
            file_id: self.file_id,
            union_id,
        }
    }

    fn add_intersection(&mut self, positive: &[Type], negative: &[Type]) -> IntersectionTypeId {
        let intersection_id = self.intersections.push(IntersectionType {
            positive: positive.iter().copied().collect(),
            negative: negative.iter().copied().collect(),
        });
        IntersectionTypeId {
            file_id: self.file_id,
            intersection_id,
        }
    }

    fn get_function(&self, func_id: ModuleFunctionTypeId) -> &FunctionType {
        &self.functions[func_id]
    }

    fn get_class(&self, class_id: ModuleClassTypeId) -> &ClassType {
        &self.classes[class_id]
    }

    fn get_union(&self, union_id: ModuleUnionTypeId) -> &UnionType {
        &self.unions[union_id]
    }

    fn get_intersection(&self, intersection_id: ModuleIntersectionTypeId) -> &IntersectionType {
        &self.intersections[intersection_id]
    }
}

#[derive(Copy, Clone, Debug)]
struct DisplayType<'a> {
    ty: &'a Type,
    store: &'a TypeStore,
}

impl std::fmt::Display for DisplayType<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.ty {
            Type::Any => f.write_str("Any"),
            Type::Never => f.write_str("Never"),
            Type::Unknown => f.write_str("Unknown"),
            Type::Unbound => f.write_str("Unbound"),
            // TODO functions and classes should display using a fully qualified name
            Type::Class(class_id) => {
                f.write_str("Literal[")?;
                f.write_str(self.store.get_class(*class_id).name())?;
                f.write_str("]")
            }
            Type::Instance(class_id) => f.write_str(self.store.get_class(*class_id).name()),
            Type::Function(func_id) => f.write_str(self.store.get_function(*func_id).name()),
            Type::Union(union_id) => self
                .store
                .get_module(union_id.file_id)
                .get_union(union_id.union_id)
                .display(f, self.store),
            Type::Intersection(int_id) => self
                .store
                .get_module(int_id.file_id)
                .get_intersection(int_id.intersection_id)
                .display(f, self.store),
        }
    }
}

#[derive(Debug)]
pub(crate) struct ClassType {
    /// Name of the class at definition
    name: Name,
    /// `ScopeId` of the class body
    scope_id: ScopeId,
    /// Types of all class bases
    bases: Vec<Type>,
}

impl ClassType {
    fn name(&self) -> &str {
        self.name.as_str()
    }

    fn bases(&self) -> &[Type] {
        self.bases.as_slice()
    }
}

#[derive(Debug)]
pub(crate) struct FunctionType {
    /// name of the function at definition
    name: Name,
    /// symbol which this function is a definition of
    symbol_id: SymbolId,
    /// scope of this function's body
    scope_id: ScopeId,
    /// types of all decorators on this function
    decorators: Vec<Type>,
}

impl FunctionType {
    fn name(&self) -> &str {
        self.name.as_str()
    }

    fn scope_id(&self) -> ScopeId {
        self.scope_id
    }

    pub(crate) fn decorators(&self) -> &[Type] {
        self.decorators.as_slice()
    }
}

#[derive(Debug)]
pub(crate) struct UnionType {
    // the union type includes values in any of these types
    elements: FxIndexSet<Type>,
}

impl UnionType {
    fn display(&self, f: &mut std::fmt::Formatter<'_>, store: &TypeStore) -> std::fmt::Result {
        f.write_str("(")?;
        let mut first = true;
        for ty in &self.elements {
            if !first {
                f.write_str(" | ")?;
            };
            first = false;
            write!(f, "{}", ty.display(store))?;
        }
        f.write_str(")")
    }
}

// Negation types aren't expressible in annotations, and are most likely to arise from type
// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
// directly in intersections rather than as a separate type. This sacrifices some efficiency in the
// case where a Not appears outside an intersection (unclear when that could even happen, but we'd
// have to represent it as a single-element intersection if it did) in exchange for better
// efficiency in the within-intersection case.
#[derive(Debug)]
pub(crate) struct IntersectionType {
    // the intersection type includes only values in all of these types
    positive: FxIndexSet<Type>,
    // the intersection type does not include any value in any of these types
    negative: FxIndexSet<Type>,
}

impl IntersectionType {
    fn display(&self, f: &mut std::fmt::Formatter<'_>, store: &TypeStore) -> std::fmt::Result {
        f.write_str("(")?;
        let mut first = true;
        for (neg, ty) in self
            .positive
            .iter()
            .map(|ty| (false, ty))
            .chain(self.negative.iter().map(|ty| (true, ty)))
        {
            if !first {
                f.write_str(" & ")?;
            };
            first = false;
            if neg {
                f.write_str("~")?;
            };
            write!(f, "{}", ty.display(store))?;
        }
        f.write_str(")")
    }
}

#[cfg(test)]
mod tests {
    use std::path::Path;

    use crate::files::Files;
    use crate::symbols::{SymbolFlags, SymbolTable};
    use crate::types::{Type, TypeStore};
    use crate::FxIndexSet;

    #[test]
    fn add_class() {
        let store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let id = store.add_class(file_id, "C", SymbolTable::root_scope_id(), Vec::new());
        assert_eq!(store.get_class(id).name(), "C");
        let inst = Type::Instance(id);
        assert_eq!(format!("{}", inst.display(&store)), "C");
    }

    #[test]
    fn add_function() {
        let store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let mut table = SymbolTable::new();
        let func_symbol = table.add_or_update_symbol(
            SymbolTable::root_scope_id(),
            "func",
            SymbolFlags::IS_DEFINED,
        );

        let id = store.add_function(
            file_id,
            "func",
            func_symbol,
            SymbolTable::root_scope_id(),
            vec![Type::Unknown],
        );
        assert_eq!(store.get_function(id).name(), "func");
        assert_eq!(store.get_function(id).decorators(), vec![Type::Unknown]);
        let func = Type::Function(id);
        assert_eq!(format!("{}", func.display(&store)), "func");
    }

    #[test]
    fn add_union() {
        let mut store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
        let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
        let elems = vec![Type::Instance(c1), Type::Instance(c2)];
        let id = store.add_union(file_id, &elems);
        assert_eq!(
            store.get_union(id).elements,
            elems.into_iter().collect::<FxIndexSet<_>>()
        );
        let union = Type::Union(id);
        assert_eq!(format!("{}", union.display(&store)), "(C1 | C2)");
    }

    #[test]
    fn add_intersection() {
        let mut store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
        let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
        let c3 = store.add_class(file_id, "C3", SymbolTable::root_scope_id(), Vec::new());
        let pos = vec![Type::Instance(c1), Type::Instance(c2)];
        let neg = vec![Type::Instance(c3)];
        let id = store.add_intersection(file_id, &pos, &neg);
        assert_eq!(
            store.get_intersection(id).positive,
            pos.into_iter().collect::<FxIndexSet<_>>()
        );
        assert_eq!(
            store.get_intersection(id).negative,
            neg.into_iter().collect::<FxIndexSet<_>>()
        );
        let intersection = Type::Intersection(id);
        assert_eq!(
            format!("{}", intersection.display(&store)),
            "(C1 & C2 & ~C3)"
        );
    }
}
@@ -1,292 +0,0 @@
#![allow(dead_code)]

use ruff_python_ast as ast;
use ruff_python_ast::AstNode;

use crate::db::{QueryResult, SemanticDb, SemanticJar};

use crate::module::ModuleName;
use crate::parse::parse;
use crate::symbols::{
    resolve_global_symbol, symbol_table, Definition, GlobalSymbolId, ImportFromDefinition,
};
use crate::types::Type;
use crate::FileId;

// FIXME: Figure out proper deadlock-free synchronisation now that this takes `&db` instead of `&mut db`.
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_symbol_type(db: &dyn SemanticDb, symbol: GlobalSymbolId) -> QueryResult<Type> {
    let symbols = symbol_table(db, symbol.file_id)?;
    let defs = symbols.definitions(symbol.symbol_id);
    let jar: &SemanticJar = db.jar()?;

    if let Some(ty) = jar.type_store.get_cached_symbol_type(symbol) {
        return Ok(ty);
    }

    // TODO handle multiple defs, conditional defs...
    assert_eq!(defs.len(), 1);

    let ty = infer_definition_type(db, symbol, defs[0].clone())?;

    jar.type_store.cache_symbol_type(symbol, ty);

    // TODO record dependencies
    Ok(ty)
}

#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_definition_type(
    db: &dyn SemanticDb,
    symbol: GlobalSymbolId,
    definition: Definition,
) -> QueryResult<Type> {
    let jar: &SemanticJar = db.jar()?;
    let type_store = &jar.type_store;
    let file_id = symbol.file_id;

    match definition {
        Definition::ImportFrom(ImportFromDefinition {
            module,
            name,
            level,
        }) => {
            // TODO relative imports
            assert!(matches!(level, 0));
            let module_name = ModuleName::new(module.as_ref().expect("TODO relative imports"));
            if let Some(remote_symbol) = resolve_global_symbol(db, module_name, &name)? {
                infer_symbol_type(db, remote_symbol)
            } else {
                Ok(Type::Unknown)
            }
        }
        Definition::ClassDef(node_key) => {
            if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
                Ok(ty)
            } else {
                let parsed = parse(db.upcast(), file_id)?;
                let ast = parsed.ast();
                let table = symbol_table(db, file_id)?;
                let node = node_key.resolve_unwrap(ast.as_any_node_ref());

                let mut bases = Vec::with_capacity(node.bases().len());

                for base in node.bases() {
                    bases.push(infer_expr_type(db, file_id, base)?);
                }
                let scope_id = table.scope_id_for_node(node_key.erased());
                let ty = Type::Class(type_store.add_class(file_id, &node.name.id, scope_id, bases));
                type_store.cache_node_type(file_id, *node_key.erased(), ty);
                Ok(ty)
            }
        }
        Definition::FunctionDef(node_key) => {
            if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
                Ok(ty)
            } else {
                let parsed = parse(db.upcast(), file_id)?;
                let ast = parsed.ast();
                let table = symbol_table(db, file_id)?;
                let node = node_key
                    .resolve(ast.as_any_node_ref())
                    .expect("node key should resolve");

                let decorator_tys = node
                    .decorator_list
                    .iter()
                    .map(|decorator| infer_expr_type(db, file_id, &decorator.expression))
                    .collect::<QueryResult<_>>()?;
                let scope_id = table.scope_id_for_node(node_key.erased());
                let ty = type_store
                    .add_function(
                        file_id,
                        &node.name.id,
                        symbol.symbol_id,
                        scope_id,
                        decorator_tys,
                    )
                    .into();
                type_store.cache_node_type(file_id, *node_key.erased(), ty);
                Ok(ty)
            }
        }
        Definition::Assignment(node_key) => {
            let parsed = parse(db.upcast(), file_id)?;
            let ast = parsed.ast();
            let node = node_key.resolve_unwrap(ast.as_any_node_ref());
            // TODO handle unpacking assignment correctly
            infer_expr_type(db, file_id, &node.value)
        }
        _ => todo!("other kinds of definitions"),
    }
}

fn infer_expr_type(db: &dyn SemanticDb, file_id: FileId, expr: &ast::Expr) -> QueryResult<Type> {
    // TODO cache the resolution of the type on the node
    let symbols = symbol_table(db, file_id)?;
    match expr {
        ast::Expr::Name(name) => {
            // TODO look up in the correct scope, don't assume global
            if let Some(symbol_id) = symbols.root_symbol_id_by_name(&name.id) {
                infer_symbol_type(db, GlobalSymbolId { file_id, symbol_id })
            } else {
                Ok(Type::Unknown)
            }
        }
        _ => todo!("full expression type resolution"),
    }
}

#[cfg(test)]
mod tests {
    use crate::db::tests::TestDb;
    use crate::db::{HasJar, SemanticJar};
    use crate::module::{
        resolve_module, set_module_search_paths, ModuleName, ModuleSearchPath, ModuleSearchPathKind,
    };
    use crate::symbols::{symbol_table, GlobalSymbolId};
    use crate::types::{infer_symbol_type, Type};
    use crate::Name;

    // TODO with a virtual filesystem we shouldn't have to write files to disk for these tests

    struct TestCase {
        temp_dir: tempfile::TempDir,
        db: TestDb,

        src: ModuleSearchPath,
    }

    fn create_test() -> std::io::Result<TestCase> {
        let temp_dir = tempfile::tempdir()?;

        let src = temp_dir.path().join("src");
        std::fs::create_dir(&src)?;
        let src = ModuleSearchPath::new(src.canonicalize()?, ModuleSearchPathKind::FirstParty);

        let roots = vec![src.clone()];

        let mut db = TestDb::default();
        set_module_search_paths(&mut db, roots);

        Ok(TestCase { temp_dir, db, src })
    }

    #[test]
    fn follow_import_to_class() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let a_path = case.src.path().join("a.py");
        let b_path = case.src.path().join("b.py");
        std::fs::write(a_path, "from b import C as D; E = D")?;
        std::fs::write(b_path, "class C: pass")?;
        let a_file = resolve_module(db, ModuleName::new("a"))?
            .expect("module should be found")
            .path(db)?
            .file();
        let a_syms = symbol_table(db, a_file)?;
        let e_sym = a_syms
            .root_symbol_id_by_name("E")
            .expect("E symbol should be found");

        let ty = infer_symbol_type(
            db,
            GlobalSymbolId {
                file_id: a_file,
                symbol_id: e_sym,
            },
        )?;

        let jar = HasJar::<SemanticJar>::jar(db)?;
        assert!(matches!(ty, Type::Class(_)));
        assert_eq!(format!("{}", ty.display(&jar.type_store)), "Literal[C]");

        Ok(())
    }

    #[test]
    fn resolve_base_class_by_name() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let path = case.src.path().join("mod.py");
        std::fs::write(path, "class Base: pass\nclass Sub(Base): pass")?;
        let file = resolve_module(db, ModuleName::new("mod"))?
            .expect("module should be found")
            .path(db)?
            .file();
        let syms = symbol_table(db, file)?;
        let sym = syms
            .root_symbol_id_by_name("Sub")
            .expect("Sub symbol should be found");

        let ty = infer_symbol_type(
            db,
            GlobalSymbolId {
                file_id: file,
                symbol_id: sym,
            },
        )?;

        let Type::Class(class_id) = ty else {
            panic!("Sub is not a Class")
        };
        let jar = HasJar::<SemanticJar>::jar(db)?;
        let base_names: Vec<_> = jar
            .type_store
            .get_class(class_id)
            .bases()
            .iter()
            .map(|base_ty| format!("{}", base_ty.display(&jar.type_store)))
            .collect();

        assert_eq!(base_names, vec!["Literal[Base]"]);

        Ok(())
    }

    #[test]
    fn resolve_method() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let path = case.src.path().join("mod.py");
        std::fs::write(path, "class C:\n  def f(self): pass")?;
        let file = resolve_module(db, ModuleName::new("mod"))?
            .expect("module should be found")
            .path(db)?
            .file();
        let syms = symbol_table(db, file)?;
        let sym = syms
            .root_symbol_id_by_name("C")
            .expect("C symbol should be found");

        let ty = infer_symbol_type(
            db,
            GlobalSymbolId {
                file_id: file,
                symbol_id: sym,
            },
        )?;

        let Type::Class(class_id) = ty else {
            panic!("C is not a Class");
        };

        let member_ty = class_id
            .get_own_class_member(db, &Name::new("f"))
            .expect("C.f to resolve");

        let Some(Type::Function(func_id)) = member_ty else {
            panic!("C.f is not a Function");
        };

        let jar = HasJar::<SemanticJar>::jar(db)?;
        let function = jar.type_store.get_function(func_id);
        assert_eq!(function.name(), "f");

        Ok(())
    }
}

@@ -1,77 +0,0 @@
use std::path::Path;

use anyhow::Context;
use notify::event::{CreateKind, RemoveKind};
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};

use crate::program::{FileChangeKind, FileWatcherChange};

pub struct FileWatcher {
    watcher: RecommendedWatcher,
}

pub trait EventHandler: Send + 'static {
    fn handle(&self, changes: Vec<FileWatcherChange>);
}

impl<F> EventHandler for F
where
    F: Fn(Vec<FileWatcherChange>) + Send + 'static,
{
    fn handle(&self, changes: Vec<FileWatcherChange>) {
        let f = self;
        f(changes);
    }
}

impl FileWatcher {
    pub fn new<E>(handler: E) -> anyhow::Result<Self>
    where
        E: EventHandler,
    {
        Self::from_handler(Box::new(handler))
    }

    fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
        let watcher = recommended_watcher(move |changes: notify::Result<Event>| {
            match changes {
                Ok(event) => {
                    // TODO verify that this handles all events correctly
                    let change_kind = match event.kind {
                        EventKind::Create(CreateKind::File) => FileChangeKind::Created,
                        EventKind::Modify(_) => FileChangeKind::Modified,
                        EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
                        _ => {
                            return;
                        }
                    };

                    let mut changes = Vec::new();

                    for path in event.paths {
                        if path.is_file() {
                            changes.push(FileWatcherChange::new(path, change_kind));
                        }
                    }

                    if !changes.is_empty() {
                        handler.handle(changes);
                    }
                }
                // TODO proper error handling
                Err(err) => {
                    panic!("Error: {err}");
                }
            }
        })
        .context("Failed to create file watcher.")?;

        Ok(Self { watcher })
    }

    pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
        self.watcher.watch(path, RecursiveMode::Recursive)?;

        Ok(())
    }
}
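For orientation, a hedged sketch of how this (now removed) watcher was driven: `FileWatcher::new` accepts any `Fn(Vec<FileWatcherChange>) + Send + 'static` closure via the blanket `EventHandler` impl, and `watch_folder` registers a directory recursively. The path, the `Debug` bound on `FileWatcherChange`, and the sleep-based keep-alive below are illustrative assumptions, not part of the removed code:

```rust
use std::path::Path;
use std::time::Duration;

fn watch_example() -> anyhow::Result<()> {
    // The closure satisfies `EventHandler` through the blanket impl above.
    let mut watcher = FileWatcher::new(|changes: Vec<FileWatcherChange>| {
        // In the real program these changes are forwarded on to the program
        // loop; printing is an illustrative stand-in (and assumes
        // `FileWatcherChange: Debug`).
        println!("saw {} change(s): {changes:?}", changes.len());
    })?;

    // Recursively watch a project directory (hypothetical path).
    watcher.watch_folder(Path::new("/tmp/my_project"))?;

    // `notify` delivers events on a background thread, so keep the caller
    // alive long enough to observe some events.
    std::thread::sleep(Duration::from_secs(30));
    Ok(())
}
```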

237 crates/red_knot/vendor/typeshed/LICENSE vendored
@@ -1,237 +0,0 @@
The "typeshed" project is licensed under the terms of the Apache license, as
|
||||
reproduced below.
|
||||
|
||||
= = = = =
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright {yyyy} {name of copyright owner}
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
= = = = =
|
||||
|
||||
Parts of typeshed are licensed under different licenses (like the MIT
|
||||
license), reproduced below.
|
||||
|
||||
= = = = =
|
||||
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2015 Jukka Lehtosalo and contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the "Software"),
|
||||
to deal in the Software without restriction, including without limitation
|
||||
the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
and/or sell copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
|
||||
= = = = =
|
||||

124 crates/red_knot/vendor/typeshed/README.md vendored
@@ -1,124 +0,0 @@
# typeshed

[](https://github.com/python/typeshed/actions/workflows/tests.yml)
[](https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[](https://github.com/python/typeshed/blob/main/CONTRIBUTING.md)

## About

Typeshed contains external type annotations for the Python standard library
and Python builtins, as well as third party packages as contributed by
people external to those projects.

This data can e.g. be used for static analysis, type checking, type inference,
and autocompletion.

For information on how to use typeshed, read below. Information for
contributors can be found in [CONTRIBUTING.md](CONTRIBUTING.md). **Please read
it before submitting pull requests; do not report issues with annotations to
the project the stubs are for, but instead report them here to typeshed.**

Further documentation on stub files, typeshed, and Python's typing system in
general can also be found at https://typing.readthedocs.io/en/latest/.

Typeshed supports Python versions 3.8 and up.

## Using

If you're just using a type checker ([mypy](https://github.com/python/mypy/),
[pyright](https://github.com/microsoft/pyright),
[pytype](https://github.com/google/pytype/), PyCharm, ...), as opposed to
developing it, you don't need to interact with the typeshed repo at
all: a copy of the standard library part of typeshed is bundled with type
checkers. And type stubs for third party packages and modules you are using can
be installed from PyPI. For example, if you are using `html5lib` and `requests`,
you can install the type stubs using

```bash
$ pip install types-html5lib types-requests
```

These PyPI packages follow [PEP 561](http://www.python.org/dev/peps/pep-0561/)
and are automatically released (up to once a day) by
[typeshed internal machinery](https://github.com/typeshed-internal/stub_uploader).

Type checkers should be able to use these stub packages when installed. For more
details, see the documentation for your type checker.

### Package versioning for third-party stubs

Version numbers of third-party stub packages consist of at least four parts.
All parts of the stub version, except for the last part, correspond to the
version of the runtime package being stubbed. For example, if the `types-foo`
package has version `1.2.0.20240309`, this guarantees that the `types-foo` package
contains stubs targeted against `foo==1.2.*` and tested against the latest
version of `foo` matching that specifier. In this example, the final element
of the version number (20240309) indicates that the stub package was pushed on
March 9, 2024.

At typeshed, we try to keep breaking changes to a minimum. However, due to the
nature of stubs, any version bump can introduce changes that might make your
code fail to type check.

There are several strategies available for specifying the version of a stubs
package you're using, each with its own tradeoffs:

1. Use the same bounds that you use for the package being stubbed. For example,
   if you use `requests>=2.30.0,<2.32`, you can use
   `types-requests>=2.30.0,<2.32`. This ensures that the stubs are compatible
   with the package you are using, but it carries a small risk of breaking
   type checking due to changes in the stubs.

   Another risk of this strategy is that stubs often lag behind
   the package being stubbed. You might want to force the package being stubbed
   to a certain minimum version because it fixes a critical bug, but if
   correspondingly updated stubs have not been released, your type
   checking results may not be fully accurate.
2. Pin the stubs to a known good version and update the pin from time to time
   (either manually, or using a tool such as dependabot or renovate).

   For example, if you use `types-requests==2.31.0.1`, you can have confidence
   that upgrading dependencies will not break type checking. However, you will
   miss out on improvements in the stubs that could potentially improve type
   checking until you update the pin. This strategy also has the risk that the
   stubs you are using might become incompatible with the package being stubbed.
3. Don't pin the stubs. This is the option that demands the least work from
   you when it comes to updating version pins, and has the advantage that you
   will automatically benefit from improved stubs whenever a new version of the
   stubs package is released. However, it carries the risk that the stubs
   become incompatible with the package being stubbed.

   For example, if a new major version of the package is released, there's a
   chance the stubs might be updated to reflect the new version of the runtime
   package before you update the package being stubbed.

You can also switch between the different strategies as needed. For example,
you could default to strategy (1), but fall back to strategy (2) when
a problem arises that can't easily be fixed.

### The `_typeshed` package

typeshed includes a package `_typeshed` as part of the standard library.
This package and its submodules contain utility types, but are not
available at runtime. For more information about how to use this package,
[see the `stdlib/_typeshed` directory](https://github.com/python/typeshed/tree/main/stdlib/_typeshed).

## Discussion

If you've run into behavior in the type checker that suggests the type
stubs for a given library are incorrect or incomplete,
we want to hear from you!

Our main forum for discussion is the project's [GitHub issue
tracker](https://github.com/python/typeshed/issues). This is the right
place to start a discussion of any of the above or most any other
topic concerning the project.

If you have general questions about typing with Python, or you need
a review of your type annotations or stubs outside of typeshed, head over to
[our discussion forum](https://github.com/python/typing/discussions).
For less formal discussion, try the typing chat room on
[gitter.im](https://gitter.im/python/typing). Some typeshed maintainers
are almost always present; feel free to find us there and we're happy
to chat. Substantive technical discussion will be directed to the
issue tracker.

@@ -1 +0,0 @@
2d33fe212221a05661c0db5215a91cf3d7b7f072

309 crates/red_knot/vendor/typeshed/stdlib/VERSIONS vendored
@@ -1,309 +0,0 @@
# The structure of this file is as follows:
# - Blank lines and comments starting with `#` are ignored.
# - Lines contain the name of a module, followed by a colon,
#   a space, and a version range (for example: `symbol: 3.0-3.9`).
#
# Version ranges may be of the form "X.Y-A.B" or "X.Y-". The
# first form means that a module was introduced in version X.Y and last
# available in version A.B. The second form means that the module was
# introduced in version X.Y and is still available in the latest
# version of Python.
#
# If a submodule is not listed separately, it has the same lifetime as
# its parent module.
#
# Python versions before 3.0 are ignored, so any module that was already
# present in 3.0 will have "3.0" as its minimum version. Version ranges
# for unsupported versions of Python 3 are generally accurate but we do
# not guarantee their correctness.

__future__: 3.0-
__main__: 3.0-
_ast: 3.0-
_bisect: 3.0-
_bootlocale: 3.4-3.9
_codecs: 3.0-
_collections_abc: 3.3-
_compat_pickle: 3.1-
_compression: 3.5-
_csv: 3.0-
_ctypes: 3.0-
_curses: 3.0-
_decimal: 3.3-
_dummy_thread: 3.0-3.8
_dummy_threading: 3.0-3.8
_heapq: 3.0-
_imp: 3.0-
_json: 3.0-
_locale: 3.0-
_lsprof: 3.0-
_markupbase: 3.0-
_msi: 3.0-
_operator: 3.4-
_osx_support: 3.0-
_posixsubprocess: 3.2-
_py_abc: 3.7-
_pydecimal: 3.5-
_random: 3.0-
_sitebuiltins: 3.4-
_socket: 3.0-  # present in 3.0 at runtime, but not in typeshed
_stat: 3.4-
_thread: 3.0-
_threading_local: 3.0-
_tkinter: 3.0-
_tracemalloc: 3.4-
_typeshed: 3.0-  # not present at runtime, only for type checking
_warnings: 3.0-
_weakref: 3.0-
_weakrefset: 3.0-
_winapi: 3.3-
abc: 3.0-
aifc: 3.0-3.12
antigravity: 3.0-
argparse: 3.0-
array: 3.0-
ast: 3.0-
asynchat: 3.0-3.11
asyncio: 3.4-
asyncio.mixins: 3.10-
asyncio.exceptions: 3.8-
asyncio.format_helpers: 3.7-
asyncio.runners: 3.7-
asyncio.staggered: 3.8-
asyncio.taskgroups: 3.11-
asyncio.threads: 3.9-
asyncio.timeouts: 3.11-
asyncio.trsock: 3.8-
asyncore: 3.0-3.11
atexit: 3.0-
audioop: 3.0-3.12
base64: 3.0-
bdb: 3.0-
binascii: 3.0-
binhex: 3.0-3.10
bisect: 3.0-
builtins: 3.0-
bz2: 3.0-
cProfile: 3.0-
calendar: 3.0-
cgi: 3.0-3.12
cgitb: 3.0-3.12
chunk: 3.0-3.12
cmath: 3.0-
cmd: 3.0-
code: 3.0-
codecs: 3.0-
codeop: 3.0-
collections: 3.0-
collections.abc: 3.3-
colorsys: 3.0-
compileall: 3.0-
concurrent: 3.2-
configparser: 3.0-
contextlib: 3.0-
contextvars: 3.7-
copy: 3.0-
copyreg: 3.0-
crypt: 3.0-3.12
csv: 3.0-
ctypes: 3.0-
curses: 3.0-
dataclasses: 3.7-
datetime: 3.0-
dbm: 3.0-
decimal: 3.0-
difflib: 3.0-
dis: 3.0-
distutils: 3.0-3.11
distutils.command.bdist_msi: 3.0-3.10
distutils.command.bdist_wininst: 3.0-3.9
doctest: 3.0-
dummy_threading: 3.0-3.8
email: 3.0-
encodings: 3.0-
ensurepip: 3.0-
enum: 3.4-
errno: 3.0-
faulthandler: 3.3-
fcntl: 3.0-
filecmp: 3.0-
fileinput: 3.0-
fnmatch: 3.0-
formatter: 3.0-3.9
fractions: 3.0-
ftplib: 3.0-
functools: 3.0-
gc: 3.0-
genericpath: 3.0-
getopt: 3.0-
getpass: 3.0-
gettext: 3.0-
glob: 3.0-
graphlib: 3.9-
grp: 3.0-
gzip: 3.0-
hashlib: 3.0-
heapq: 3.0-
hmac: 3.0-
html: 3.0-
http: 3.0-
imaplib: 3.0-
imghdr: 3.0-3.12
imp: 3.0-3.11
importlib: 3.0-
importlib._abc: 3.10-
importlib.metadata: 3.8-
importlib.metadata._meta: 3.10-
importlib.readers: 3.10-
importlib.resources: 3.7-
importlib.resources.abc: 3.11-
importlib.resources.readers: 3.11-
importlib.resources.simple: 3.11-
importlib.simple: 3.11-
inspect: 3.0-
io: 3.0-
ipaddress: 3.3-
itertools: 3.0-
json: 3.0-
keyword: 3.0-
lib2to3: 3.0-
linecache: 3.0-
locale: 3.0-
logging: 3.0-
lzma: 3.3-
mailbox: 3.0-
mailcap: 3.0-3.12
marshal: 3.0-
math: 3.0-
mimetypes: 3.0-
mmap: 3.0-
modulefinder: 3.0-
msilib: 3.0-3.12
msvcrt: 3.0-
multiprocessing: 3.0-
multiprocessing.resource_tracker: 3.8-
multiprocessing.shared_memory: 3.8-
netrc: 3.0-
nis: 3.0-3.12
nntplib: 3.0-3.12
nt: 3.0-
ntpath: 3.0-
nturl2path: 3.0-
numbers: 3.0-
opcode: 3.0-
operator: 3.0-
optparse: 3.0-
os: 3.0-
ossaudiodev: 3.0-3.12
parser: 3.0-3.9
pathlib: 3.4-
pdb: 3.0-
pickle: 3.0-
pickletools: 3.0-
pipes: 3.0-3.12
pkgutil: 3.0-
platform: 3.0-
plistlib: 3.0-
poplib: 3.0-
posix: 3.0-
posixpath: 3.0-
pprint: 3.0-
profile: 3.0-
pstats: 3.0-
pty: 3.0-
pwd: 3.0-
py_compile: 3.0-
pyclbr: 3.0-
pydoc: 3.0-
pydoc_data: 3.0-
pyexpat: 3.0-
queue: 3.0-
quopri: 3.0-
random: 3.0-
re: 3.0-
readline: 3.0-
reprlib: 3.0-
resource: 3.0-
rlcompleter: 3.0-
runpy: 3.0-
sched: 3.0-
secrets: 3.6-
select: 3.0-
selectors: 3.4-
shelve: 3.0-
shlex: 3.0-
shutil: 3.0-
signal: 3.0-
site: 3.0-
smtpd: 3.0-3.11
smtplib: 3.0-
sndhdr: 3.0-3.12
socket: 3.0-
socketserver: 3.0-
spwd: 3.0-3.12
sqlite3: 3.0-
sre_compile: 3.0-
sre_constants: 3.0-
sre_parse: 3.0-
ssl: 3.0-
stat: 3.0-
statistics: 3.4-
string: 3.0-
stringprep: 3.0-
struct: 3.0-
subprocess: 3.0-
sunau: 3.0-3.12
symbol: 3.0-3.9
symtable: 3.0-
sys: 3.0-
sys._monitoring: 3.12-  # Doesn't actually exist. See comments in the stub.
sysconfig: 3.0-
syslog: 3.0-
tabnanny: 3.0-
tarfile: 3.0-
telnetlib: 3.0-3.12
tempfile: 3.0-
termios: 3.0-
textwrap: 3.0-
this: 3.0-
threading: 3.0-
time: 3.0-
timeit: 3.0-
tkinter: 3.0-
token: 3.0-
tokenize: 3.0-
tomllib: 3.11-
trace: 3.0-
traceback: 3.0-
tracemalloc: 3.4-
tty: 3.0-
turtle: 3.0-
types: 3.0-
typing: 3.5-
typing_extensions: 3.0-
unicodedata: 3.0-
unittest: 3.0-
unittest._log: 3.9-
unittest.async_case: 3.8-
urllib: 3.0-
uu: 3.0-3.12
uuid: 3.0-
venv: 3.3-
warnings: 3.0-
wave: 3.0-
weakref: 3.0-
webbrowser: 3.0-
winreg: 3.0-
winsound: 3.0-
wsgiref: 3.0-
wsgiref.types: 3.11-
xdrlib: 3.0-3.12
xml: 3.0-
xmlrpc: 3.0-
xxlimited: 3.2-
zipapp: 3.5-
zipfile: 3.0-
zipfile._path: 3.12-
zipimport: 3.0-
zlib: 3.0-
zoneinfo: 3.9-
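The comment block at the top of this file fully specifies the grammar of each entry (`module: X.Y-` or `module: X.Y-A.B`, with `#` comments and blank lines ignored). As a hedged illustration only, not code from this repository, a consumer of the vendored file could parse entries roughly along these lines; `VersionRange` and `parse_versions_line` are hypothetical names:

```rust
/// One entry from the vendored typeshed `VERSIONS` file, e.g.
/// `asyncio.taskgroups: 3.11-` or `_bootlocale: 3.4-3.9`.
#[derive(Debug, PartialEq)]
struct VersionRange {
    module: String,
    introduced: (u8, u8),
    removed: Option<(u8, u8)>, // `None` for open-ended ranges like `3.11-`
}

fn parse_version(s: &str) -> Option<(u8, u8)> {
    let (major, minor) = s.split_once('.')?;
    Some((major.parse().ok()?, minor.parse().ok()?))
}

fn parse_versions_line(line: &str) -> Option<VersionRange> {
    // Strip trailing `#` comments; blank and comment-only lines yield `None`.
    let line = line.split('#').next().unwrap_or("").trim();
    if line.is_empty() {
        return None;
    }
    let (module, range) = line.split_once(':')?;
    let (start, end) = range.trim().split_once('-')?;
    Some(VersionRange {
        module: module.trim().to_string(),
        introduced: parse_version(start)?,
        // A malformed upper bound also maps to `None` here; a real parser
        // would report an error instead.
        removed: if end.is_empty() { None } else { parse_version(end) },
    })
}

fn main() {
    let entry = parse_versions_line("_bootlocale: 3.4-3.9  # example").unwrap();
    assert_eq!(entry.introduced, (3, 4));
    assert_eq!(entry.removed, Some((3, 9)));
    assert!(parse_versions_line("# comment only").is_none());
}
```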

@@ -1,36 +0,0 @@
from typing_extensions import TypeAlias

_VersionInfo: TypeAlias = tuple[int, int, int, str, int]

class _Feature:
    def __init__(self, optionalRelease: _VersionInfo, mandatoryRelease: _VersionInfo | None, compiler_flag: int) -> None: ...
    def getOptionalRelease(self) -> _VersionInfo: ...
    def getMandatoryRelease(self) -> _VersionInfo | None: ...
    compiler_flag: int

absolute_import: _Feature
division: _Feature
generators: _Feature
nested_scopes: _Feature
print_function: _Feature
unicode_literals: _Feature
with_statement: _Feature
barry_as_FLUFL: _Feature
generator_stop: _Feature
annotations: _Feature

all_feature_names: list[str]  # undocumented

__all__ = [
    "all_feature_names",
    "absolute_import",
    "division",
    "generators",
    "nested_scopes",
    "print_function",
    "unicode_literals",
    "with_statement",
    "barry_as_FLUFL",
    "generator_stop",
    "annotations",
]

@@ -1,3 +0,0 @@
from typing import Any

def __getattr__(name: str) -> Any: ...

591 crates/red_knot/vendor/typeshed/stdlib/_ast.pyi vendored
@@ -1,591 +0,0 @@
import sys
import typing_extensions
from typing import Any, ClassVar, Literal

PyCF_ONLY_AST: Literal[1024]
PyCF_TYPE_COMMENTS: Literal[4096]
PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192]

# Alias used for fields that must always be valid identifiers
# A string `x` counts as a valid identifier if both the following are True
# (1) `x.isidentifier()` evaluates to `True`
# (2) `keyword.iskeyword(x)` evaluates to `False`
_Identifier: typing_extensions.TypeAlias = str

class AST:
    if sys.version_info >= (3, 10):
        __match_args__ = ()
    _attributes: ClassVar[tuple[str, ...]]
    _fields: ClassVar[tuple[str, ...]]
    def __init__(self, *args: Any, **kwargs: Any) -> None: ...
    # TODO: Not all nodes have all of the following attributes
    lineno: int
    col_offset: int
    end_lineno: int | None
    end_col_offset: int | None
    type_comment: str | None

class mod(AST): ...
class type_ignore(AST): ...

class TypeIgnore(type_ignore):
    if sys.version_info >= (3, 10):
        __match_args__ = ("lineno", "tag")
    tag: str

class FunctionType(mod):
    if sys.version_info >= (3, 10):
        __match_args__ = ("argtypes", "returns")
    argtypes: list[expr]
    returns: expr

class Module(mod):
    if sys.version_info >= (3, 10):
        __match_args__ = ("body", "type_ignores")
    body: list[stmt]
    type_ignores: list[TypeIgnore]

class Interactive(mod):
    if sys.version_info >= (3, 10):
        __match_args__ = ("body",)
    body: list[stmt]

class Expression(mod):
    if sys.version_info >= (3, 10):
        __match_args__ = ("body",)
    body: expr

class stmt(AST): ...

class FunctionDef(stmt):
    if sys.version_info >= (3, 12):
        __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
    elif sys.version_info >= (3, 10):
        __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment")
    name: _Identifier
    args: arguments
    body: list[stmt]
    decorator_list: list[expr]
    returns: expr | None
    if sys.version_info >= (3, 12):
        type_params: list[type_param]

class AsyncFunctionDef(stmt):
    if sys.version_info >= (3, 12):
        __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
    elif sys.version_info >= (3, 10):
        __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment")
    name: _Identifier
    args: arguments
    body: list[stmt]
    decorator_list: list[expr]
    returns: expr | None
    if sys.version_info >= (3, 12):
        type_params: list[type_param]

class ClassDef(stmt):
    if sys.version_info >= (3, 12):
        __match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params")
    elif sys.version_info >= (3, 10):
        __match_args__ = ("name", "bases", "keywords", "body", "decorator_list")
    name: _Identifier
    bases: list[expr]
    keywords: list[keyword]
    body: list[stmt]
    decorator_list: list[expr]
    if sys.version_info >= (3, 12):
        type_params: list[type_param]

class Return(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr | None

class Delete(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("targets",)
    targets: list[expr]

class Assign(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("targets", "value", "type_comment")
    targets: list[expr]
    value: expr

class AugAssign(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "op", "value")
    target: Name | Attribute | Subscript
    op: operator
    value: expr

class AnnAssign(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "annotation", "value", "simple")
    target: Name | Attribute | Subscript
    annotation: expr
    value: expr | None
    simple: int

class For(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "iter", "body", "orelse", "type_comment")
    target: expr
    iter: expr
    body: list[stmt]
    orelse: list[stmt]

class AsyncFor(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "iter", "body", "orelse", "type_comment")
    target: expr
    iter: expr
    body: list[stmt]
    orelse: list[stmt]

class While(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "body", "orelse")
    test: expr
    body: list[stmt]
    orelse: list[stmt]

class If(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "body", "orelse")
    test: expr
    body: list[stmt]
    orelse: list[stmt]

class With(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("items", "body", "type_comment")
    items: list[withitem]
    body: list[stmt]

class AsyncWith(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("items", "body", "type_comment")
    items: list[withitem]
    body: list[stmt]

class Raise(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("exc", "cause")
    exc: expr | None
    cause: expr | None

class Try(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("body", "handlers", "orelse", "finalbody")
    body: list[stmt]
    handlers: list[ExceptHandler]
    orelse: list[stmt]
    finalbody: list[stmt]

if sys.version_info >= (3, 11):
    class TryStar(stmt):
        __match_args__ = ("body", "handlers", "orelse", "finalbody")
        body: list[stmt]
        handlers: list[ExceptHandler]
        orelse: list[stmt]
        finalbody: list[stmt]

class Assert(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "msg")
    test: expr
    msg: expr | None

class Import(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("names",)
    names: list[alias]

class ImportFrom(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("module", "names", "level")
    module: str | None
    names: list[alias]
    level: int

class Global(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("names",)
    names: list[_Identifier]

class Nonlocal(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("names",)
    names: list[_Identifier]

class Expr(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr

class Pass(stmt): ...
class Break(stmt): ...
class Continue(stmt): ...
class expr(AST): ...

class BoolOp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("op", "values")
    op: boolop
    values: list[expr]

class BinOp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("left", "op", "right")
    left: expr
    op: operator
    right: expr

class UnaryOp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("op", "operand")
    op: unaryop
    operand: expr

class Lambda(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("args", "body")
    args: arguments
    body: expr

class IfExp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "body", "orelse")
    test: expr
    body: expr
    orelse: expr

class Dict(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("keys", "values")
    keys: list[expr | None]
    values: list[expr]

class Set(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elts",)
    elts: list[expr]

class ListComp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elt", "generators")
    elt: expr
    generators: list[comprehension]

class SetComp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elt", "generators")
    elt: expr
    generators: list[comprehension]

class DictComp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("key", "value", "generators")
    key: expr
    value: expr
    generators: list[comprehension]

class GeneratorExp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elt", "generators")
    elt: expr
    generators: list[comprehension]

class Await(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr

class Yield(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr | None

class YieldFrom(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr

class Compare(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("left", "ops", "comparators")
    left: expr
    ops: list[cmpop]
    comparators: list[expr]

class Call(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("func", "args", "keywords")
    func: expr
    args: list[expr]
    keywords: list[keyword]

class FormattedValue(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "conversion", "format_spec")
    value: expr
    conversion: int
    format_spec: expr | None

class JoinedStr(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("values",)
    values: list[expr]

class Constant(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "kind")
    value: Any  # None, str, bytes, bool, int, float, complex, Ellipsis
    kind: str | None
    # Aliases for value, for backwards compatibility
    s: Any
    n: int | float | complex

class NamedExpr(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "value")
    target: Name
    value: expr

class Attribute(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "attr", "ctx")
    value: expr
    attr: _Identifier
    ctx: expr_context

if sys.version_info >= (3, 9):
    _Slice: typing_extensions.TypeAlias = expr
else:
    class slice(AST): ...
    _Slice: typing_extensions.TypeAlias = slice

class Slice(_Slice):
    if sys.version_info >= (3, 10):
        __match_args__ = ("lower", "upper", "step")
    lower: expr | None
    upper: expr | None
    step: expr | None

if sys.version_info < (3, 9):
    class ExtSlice(slice):
        dims: list[slice]

    class Index(slice):
        value: expr

class Subscript(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "slice", "ctx")
    value: expr
    slice: _Slice
    ctx: expr_context

class Starred(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "ctx")
    value: expr
    ctx: expr_context

class Name(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("id", "ctx")
    id: _Identifier
    ctx: expr_context

class List(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elts", "ctx")
    elts: list[expr]
    ctx: expr_context

class Tuple(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elts", "ctx")
    elts: list[expr]
    ctx: expr_context
    if sys.version_info >= (3, 9):
        dims: list[expr]

class expr_context(AST): ...

if sys.version_info < (3, 9):
    class AugLoad(expr_context): ...
    class AugStore(expr_context): ...
    class Param(expr_context): ...

    class Suite(mod):
        body: list[stmt]

class Del(expr_context): ...
class Load(expr_context): ...
class Store(expr_context): ...
class boolop(AST): ...
class And(boolop): ...
class Or(boolop): ...
class operator(AST): ...
class Add(operator): ...
class BitAnd(operator): ...
class BitOr(operator): ...
class BitXor(operator): ...
class Div(operator): ...
class FloorDiv(operator): ...
class LShift(operator): ...
class Mod(operator): ...
class Mult(operator): ...
class MatMult(operator): ...
class Pow(operator): ...
class RShift(operator): ...
class Sub(operator): ...
class unaryop(AST): ...
class Invert(unaryop): ...
class Not(unaryop): ...
class UAdd(unaryop): ...
class USub(unaryop): ...
class cmpop(AST): ...
class Eq(cmpop): ...
class Gt(cmpop): ...
class GtE(cmpop): ...
class In(cmpop): ...
class Is(cmpop): ...
class IsNot(cmpop): ...
class Lt(cmpop): ...
class LtE(cmpop): ...
class NotEq(cmpop): ...
class NotIn(cmpop): ...

class comprehension(AST):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "iter", "ifs", "is_async")
    target: expr
    iter: expr
    ifs: list[expr]
    is_async: int

class excepthandler(AST): ...

class ExceptHandler(excepthandler):
    if sys.version_info >= (3, 10):
        __match_args__ = ("type", "name", "body")
    type: expr | None
    name: _Identifier | None
    body: list[stmt]

class arguments(AST):
    if sys.version_info >= (3, 10):
        __match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults")
    posonlyargs: list[arg]
    args: list[arg]
    vararg: arg | None
    kwonlyargs: list[arg]
    kw_defaults: list[expr | None]
    kwarg: arg | None
    defaults: list[expr]

class arg(AST):
    if sys.version_info >= (3, 10):
        __match_args__ = ("arg", "annotation", "type_comment")
    arg: _Identifier
    annotation: expr | None

class keyword(AST):
    if sys.version_info >= (3, 10):
        __match_args__ = ("arg", "value")
    arg: _Identifier | None
    value: expr

class alias(AST):
    if sys.version_info >= (3, 10):
        __match_args__ = ("name", "asname")
    name: str
    asname: _Identifier | None

class withitem(AST):
    if sys.version_info >= (3, 10):
        __match_args__ = ("context_expr", "optional_vars")
    context_expr: expr
    optional_vars: expr | None

if sys.version_info >= (3, 10):
    class Match(stmt):
        __match_args__ = ("subject", "cases")
        subject: expr
        cases: list[match_case]

    class pattern(AST): ...
    # Without the alias, Pyright complains variables named pattern are recursively defined
    _Pattern: typing_extensions.TypeAlias = pattern

    class match_case(AST):
        __match_args__ = ("pattern", "guard", "body")
        pattern: _Pattern
        guard: expr | None
        body: list[stmt]

    class MatchValue(pattern):
        __match_args__ = ("value",)
        value: expr

    class MatchSingleton(pattern):
        __match_args__ = ("value",)
        value: Literal[True, False] | None

    class MatchSequence(pattern):
        __match_args__ = ("patterns",)
        patterns: list[pattern]

    class MatchStar(pattern):
        __match_args__ = ("name",)
        name: _Identifier | None

    class MatchMapping(pattern):
        __match_args__ = ("keys", "patterns", "rest")
        keys: list[expr]
        patterns: list[pattern]
        rest: _Identifier | None

    class MatchClass(pattern):
        __match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns")
        cls: expr
        patterns: list[pattern]
        kwd_attrs: list[_Identifier]
        kwd_patterns: list[pattern]

    class MatchAs(pattern):
        __match_args__ = ("pattern", "name")
        pattern: _Pattern | None
        name: _Identifier | None

    class MatchOr(pattern):
        __match_args__ = ("patterns",)
        patterns: list[pattern]

if sys.version_info >= (3, 12):
    class type_param(AST):
        end_lineno: int
        end_col_offset: int

    class TypeVar(type_param):
        __match_args__ = ("name", "bound")
        name: _Identifier
        bound: expr | None

    class ParamSpec(type_param):
        __match_args__ = ("name",)
        name: _Identifier

    class TypeVarTuple(type_param):
        __match_args__ = ("name",)
        name: _Identifier

    class TypeAlias(stmt):
        __match_args__ = ("name", "type_params", "value")
        name: Name
        type_params: list[type_param]
        value: expr

@@ -1,84 +0,0 @@
import sys
from _typeshed import SupportsLenAndGetItem, SupportsRichComparisonT
from collections.abc import Callable, MutableSequence
from typing import TypeVar, overload

_T = TypeVar("_T")

if sys.version_info >= (3, 10):
    @overload
    def bisect_left(
        a: SupportsLenAndGetItem[SupportsRichComparisonT],
        x: SupportsRichComparisonT,
        lo: int = 0,
        hi: int | None = None,
        *,
        key: None = None,
    ) -> int: ...
    @overload
    def bisect_left(
        a: SupportsLenAndGetItem[_T],
        x: SupportsRichComparisonT,
        lo: int = 0,
        hi: int | None = None,
        *,
        key: Callable[[_T], SupportsRichComparisonT],
    ) -> int: ...
    @overload
    def bisect_right(
        a: SupportsLenAndGetItem[SupportsRichComparisonT],
        x: SupportsRichComparisonT,
        lo: int = 0,
        hi: int | None = None,
        *,
        key: None = None,
    ) -> int: ...
    @overload
    def bisect_right(
        a: SupportsLenAndGetItem[_T],
        x: SupportsRichComparisonT,
        lo: int = 0,
        hi: int | None = None,
        *,
        key: Callable[[_T], SupportsRichComparisonT],
    ) -> int: ...
    @overload
    def insort_left(
        a: MutableSequence[SupportsRichComparisonT],
        x: SupportsRichComparisonT,
        lo: int = 0,
        hi: int | None = None,
        *,
        key: None = None,
    ) -> None: ...
    @overload
    def insort_left(
        a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT]
    ) -> None: ...
    @overload
    def insort_right(
        a: MutableSequence[SupportsRichComparisonT],
        x: SupportsRichComparisonT,
        lo: int = 0,
        hi: int | None = None,
        *,
        key: None = None,
    ) -> None: ...
    @overload
    def insort_right(
        a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT]
    ) -> None: ...

else:
    def bisect_left(
        a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None
    ) -> int: ...
    def bisect_right(
        a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None
    ) -> int: ...
    def insort_left(
        a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None
    ) -> None: ...
    def insort_right(
        a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None
    ) -> None: ...

@@ -1 +0,0 @@
def getpreferredencoding(do_setlocale: bool = True) -> str: ...

133 crates/red_knot/vendor/typeshed/stdlib/_codecs.pyi vendored
@@ -1,133 +0,0 @@
import codecs
|
||||
import sys
|
||||
from _typeshed import ReadableBuffer
|
||||
from collections.abc import Callable
|
||||
from typing import Literal, overload
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
# This type is not exposed; it is defined in unicodeobject.c
|
||||
class _EncodingMap:
|
||||
def size(self) -> int: ...
|
||||
|
||||
_CharMap: TypeAlias = dict[int, int] | _EncodingMap
|
||||
_Handler: TypeAlias = Callable[[UnicodeError], tuple[str | bytes, int]]
|
||||
_SearchFunction: TypeAlias = Callable[[str], codecs.CodecInfo | None]
|
||||
|
||||
def register(search_function: _SearchFunction, /) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
def unregister(search_function: _SearchFunction, /) -> None: ...
|
||||
|
||||
def register_error(errors: str, handler: _Handler, /) -> None: ...
|
||||
def lookup_error(name: str, /) -> _Handler: ...
|
||||
|
||||
# The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300
|
||||
# https://docs.python.org/3/library/codecs.html#binary-transforms
|
||||
_BytesToBytesEncoding: TypeAlias = Literal[
|
||||
"base64",
|
||||
"base_64",
|
||||
"base64_codec",
|
||||
"bz2",
|
||||
"bz2_codec",
|
||||
"hex",
|
||||
"hex_codec",
|
||||
"quopri",
|
||||
"quotedprintable",
|
||||
"quoted_printable",
|
||||
"quopri_codec",
|
||||
"uu",
|
||||
"uu_codec",
|
||||
"zip",
|
||||
"zlib",
|
||||
"zlib_codec",
|
||||
]
|
||||
# https://docs.python.org/3/library/codecs.html#text-transforms
|
||||
_StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"]
|
||||
|
||||
@overload
|
||||
def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ...
|
||||
@overload
|
||||
def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... # type: ignore[overload-overlap]
|
||||
@overload
|
||||
def encode(obj: str, encoding: str = "utf-8", errors: str = "strict") -> bytes: ...
|
||||
@overload
|
||||
def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... # type: ignore[overload-overlap]
|
||||
@overload
|
||||
def decode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ...
|
||||
|
||||
# these are documented as text encodings but in practice they also accept str as input
@overload
def decode(
    obj: str,
    encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"],
    errors: str = "strict",
) -> str: ...

# hex is officially documented as a bytes to bytes encoding, but it appears to also work with str
@overload
def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = "strict") -> bytes: ...
@overload
def decode(obj: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> str: ...
def lookup(encoding: str, /) -> codecs.CodecInfo: ...
def charmap_build(map: str, /) -> _CharMap: ...
def ascii_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
def ascii_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def charmap_decode(data: ReadableBuffer, errors: str | None = None, mapping: _CharMap | None = None, /) -> tuple[str, int]: ...
def charmap_encode(str: str, errors: str | None = None, mapping: _CharMap | None = None, /) -> tuple[bytes, int]: ...
def escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
def escape_encode(data: bytes, errors: str | None = None, /) -> tuple[bytes, int]: ...
def latin_1_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
def latin_1_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...

if sys.version_info >= (3, 9):
    def raw_unicode_escape_decode(
        data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /
    ) -> tuple[str, int]: ...

else:
    def raw_unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...

def raw_unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def readbuffer_encode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[bytes, int]: ...

if sys.version_info >= (3, 9):
    def unicode_escape_decode(
        data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /
    ) -> tuple[str, int]: ...

else:
    def unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...

def unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_16_be_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_16_be_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_16_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_16_encode(str: str, errors: str | None = None, byteorder: int = 0, /) -> tuple[bytes, int]: ...
def utf_16_ex_decode(
    data: ReadableBuffer, errors: str | None = None, byteorder: int = 0, final: bool = False, /
) -> tuple[str, int, int]: ...
def utf_16_le_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_16_le_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_32_be_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_32_be_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_32_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_32_encode(str: str, errors: str | None = None, byteorder: int = 0, /) -> tuple[bytes, int]: ...
def utf_32_ex_decode(
    data: ReadableBuffer, errors: str | None = None, byteorder: int = 0, final: bool = False, /
) -> tuple[str, int, int]: ...
def utf_32_le_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_32_le_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_7_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_7_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_8_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_8_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...

if sys.platform == "win32":
    def mbcs_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
    def mbcs_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
    def code_page_decode(
        codepage: int, data: ReadableBuffer, errors: str | None = None, final: bool = False, /
    ) -> tuple[str, int]: ...
    def code_page_encode(code_page: int, str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
    def oem_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
    def oem_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
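A quick sketch of the runtime behavior the two comments above describe (plain CPython usage, not part of these stubs):

import codecs

# "unicode_escape" is documented as a text encoding, but it also accepts str input
assert codecs.decode("caf\\xe9", "unicode_escape") == "café"
# "hex" is documented as bytes-to-bytes, yet it accepts str too and still returns bytes
assert codecs.decode("68656c6c6f", "hex") == b"hello"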
@@ -1,94 +0,0 @@
import sys
from abc import abstractmethod
from types import MappingProxyType
from typing import (  # noqa: Y022,Y038,Y057
    AbstractSet as Set,
    AsyncGenerator as AsyncGenerator,
    AsyncIterable as AsyncIterable,
    AsyncIterator as AsyncIterator,
    Awaitable as Awaitable,
    ByteString as ByteString,
    Callable as Callable,
    Collection as Collection,
    Container as Container,
    Coroutine as Coroutine,
    Generator as Generator,
    Generic,
    Hashable as Hashable,
    ItemsView as ItemsView,
    Iterable as Iterable,
    Iterator as Iterator,
    KeysView as KeysView,
    Mapping as Mapping,
    MappingView as MappingView,
    MutableMapping as MutableMapping,
    MutableSequence as MutableSequence,
    MutableSet as MutableSet,
    Protocol,
    Reversible as Reversible,
    Sequence as Sequence,
    Sized as Sized,
    TypeVar,
    ValuesView as ValuesView,
    final,
    runtime_checkable,
)

__all__ = [
    "Awaitable",
    "Coroutine",
    "AsyncIterable",
    "AsyncIterator",
    "AsyncGenerator",
    "Hashable",
    "Iterable",
    "Iterator",
    "Generator",
    "Reversible",
    "Sized",
    "Container",
    "Callable",
    "Collection",
    "Set",
    "MutableSet",
    "Mapping",
    "MutableMapping",
    "MappingView",
    "KeysView",
    "ItemsView",
    "ValuesView",
    "Sequence",
    "MutableSequence",
    "ByteString",
]
if sys.version_info >= (3, 12):
    __all__ += ["Buffer"]

_KT_co = TypeVar("_KT_co", covariant=True)  # Key type covariant containers.
_VT_co = TypeVar("_VT_co", covariant=True)  # Value type covariant containers.

@final
class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]):  # undocumented
    def __eq__(self, value: object, /) -> bool: ...
    if sys.version_info >= (3, 10):
        @property
        def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

@final
class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]):  # undocumented
    if sys.version_info >= (3, 10):
        @property
        def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

@final
class dict_items(ItemsView[_KT_co, _VT_co]):  # undocumented
    def __eq__(self, value: object, /) -> bool: ...
    if sys.version_info >= (3, 10):
        @property
        def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

if sys.version_info >= (3, 12):
    @runtime_checkable
    class Buffer(Protocol):
        @abstractmethod
        def __buffer__(self, flags: int, /) -> memoryview: ...
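For context on the `# undocumented` view classes and their 3.10+ `mapping` property, a minimal illustration of standard CPython behavior:

d = {"a": 1}
keys = d.keys()  # at runtime this is the undocumented dict_keys type above
# Since Python 3.10, dict views carry a read-only proxy back to their dict
assert keys.mapping["a"] == 1
assert type(keys.mapping).__name__ == "mappingproxy"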
@@ -1,8 +0,0 @@
IMPORT_MAPPING: dict[str, str]
NAME_MAPPING: dict[tuple[str, str], tuple[str, str]]
PYTHON2_EXCEPTIONS: tuple[str, ...]
MULTIPROCESSING_EXCEPTIONS: tuple[str, ...]
REVERSE_IMPORT_MAPPING: dict[str, str]
REVERSE_NAME_MAPPING: dict[tuple[str, str], tuple[str, str]]
PYTHON3_OSERROR_EXCEPTIONS: tuple[str, ...]
PYTHON3_IMPORTERROR_EXCEPTIONS: tuple[str, ...]
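These tables drive unpickling of Python 2 pickles under Python 3; a small sanity check of what they contain (values as found in CPython's `_compat_pickle`, shown purely for illustration):

from _compat_pickle import IMPORT_MAPPING, NAME_MAPPING

assert IMPORT_MAPPING["__builtin__"] == "builtins"
assert NAME_MAPPING[("__builtin__", "xrange")] == ("builtins", "range")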
@@ -1,25 +0,0 @@
from _typeshed import WriteableBuffer
from collections.abc import Callable
from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase
from typing import Any, Protocol

BUFFER_SIZE = DEFAULT_BUFFER_SIZE

class _Reader(Protocol):
    def read(self, n: int, /) -> bytes: ...
    def seekable(self) -> bool: ...
    def seek(self, n: int, /) -> Any: ...

class BaseStream(BufferedIOBase): ...

class DecompressReader(RawIOBase):
    def __init__(
        self,
        fp: _Reader,
        decomp_factory: Callable[..., object],
        trailing_error: type[Exception] | tuple[type[Exception], ...] = (),
        **decomp_args: Any,
    ) -> None: ...
    def readinto(self, b: WriteableBuffer) -> int: ...
    def read(self, size: int = -1) -> bytes: ...
    def seek(self, offset: int, whence: int = 0) -> int: ...
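Anything exposing `read`, `seekable`, and `seek` structurally satisfies the `_Reader` protocol above; a minimal conforming wrapper (a sketch, not part of the stubs):

import io
from typing import Any

class BytesReader:
    """Structurally satisfies _Reader: read(n), seekable(), seek(n)."""
    def __init__(self, data: bytes) -> None:
        self._buf = io.BytesIO(data)
    def read(self, n: int) -> bytes:
        return self._buf.read(n)
    def seekable(self) -> bool:
        return True
    def seek(self, n: int) -> Any:
        return self._buf.seek(n)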
90
crates/red_knot/vendor/typeshed/stdlib/_csv.pyi
vendored
@@ -1,90 +0,0 @@
import sys
from _typeshed import SupportsWrite
from collections.abc import Iterable, Iterator
from typing import Any, Final, Literal
from typing_extensions import TypeAlias

__version__: Final[str]

QUOTE_ALL: Literal[1]
QUOTE_MINIMAL: Literal[0]
QUOTE_NONE: Literal[3]
QUOTE_NONNUMERIC: Literal[2]
if sys.version_info >= (3, 12):
    QUOTE_STRINGS: Literal[4]
    QUOTE_NOTNULL: Literal[5]

# Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC`
# However, using literals in situations like these can cause false-positives (see #7258)
_QuotingType: TypeAlias = int

class Error(Exception): ...

class Dialect:
    delimiter: str
    quotechar: str | None
    escapechar: str | None
    doublequote: bool
    skipinitialspace: bool
    lineterminator: str
    quoting: _QuotingType
    strict: bool
    def __init__(self) -> None: ...

_DialectLike: TypeAlias = str | Dialect | type[Dialect]

class _reader(Iterator[list[str]]):
    @property
    def dialect(self) -> Dialect: ...
    line_num: int
    def __next__(self) -> list[str]: ...

class _writer:
    @property
    def dialect(self) -> Dialect: ...
    def writerow(self, row: Iterable[Any]) -> Any: ...
    def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ...

def writer(
    csvfile: SupportsWrite[str],
    dialect: _DialectLike = "excel",
    *,
    delimiter: str = ",",
    quotechar: str | None = '"',
    escapechar: str | None = None,
    doublequote: bool = True,
    skipinitialspace: bool = False,
    lineterminator: str = "\r\n",
    quoting: _QuotingType = 0,
    strict: bool = False,
) -> _writer: ...
def reader(
    csvfile: Iterable[str],
    dialect: _DialectLike = "excel",
    *,
    delimiter: str = ",",
    quotechar: str | None = '"',
    escapechar: str | None = None,
    doublequote: bool = True,
    skipinitialspace: bool = False,
    lineterminator: str = "\r\n",
    quoting: _QuotingType = 0,
    strict: bool = False,
) -> _reader: ...
def register_dialect(
    name: str,
    dialect: type[Dialect] = ...,
    *,
    delimiter: str = ",",
    quotechar: str | None = '"',
    escapechar: str | None = None,
    doublequote: bool = True,
    skipinitialspace: bool = False,
    lineterminator: str = "\r\n",
    quoting: _QuotingType = 0,
    strict: bool = False,
) -> None: ...
def unregister_dialect(name: str) -> None: ...
def get_dialect(name: str) -> Dialect: ...
def list_dialects() -> list[str]: ...
def field_size_limit(new_limit: int = ...) -> int: ...
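The keyword defaults in these signatures mirror the runtime module: `quoting=0` is `QUOTE_MINIMAL` and the default `lineterminator` is `"\r\n"`. For example:

import csv
import io

buf = io.StringIO()
w = csv.writer(buf)  # delimiter=",", lineterminator="\r\n", quoting=QUOTE_MINIMAL
w.writerow(["a", "b,c"])
assert buf.getvalue() == 'a,"b,c"\r\n'  # only the field containing "," gets quoted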
211
crates/red_knot/vendor/typeshed/stdlib/_ctypes.pyi
vendored
@@ -1,211 +0,0 @@
import sys
from _typeshed import ReadableBuffer, WriteableBuffer
from abc import abstractmethod
from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
from ctypes import CDLL, ArgumentError as ArgumentError
from typing import Any, ClassVar, Generic, TypeVar, overload
from typing_extensions import Self, TypeAlias

if sys.version_info >= (3, 9):
    from types import GenericAlias

_T = TypeVar("_T")
_CT = TypeVar("_CT", bound=_CData)

FUNCFLAG_CDECL: int
FUNCFLAG_PYTHONAPI: int
FUNCFLAG_USE_ERRNO: int
FUNCFLAG_USE_LASTERROR: int
RTLD_GLOBAL: int
RTLD_LOCAL: int

if sys.version_info >= (3, 11):
    CTYPES_MAX_ARGCOUNT: int

if sys.version_info >= (3, 12):
    SIZEOF_TIME_T: int

if sys.platform == "win32":
    # Description, Source, HelpFile, HelpContext, scode
    _COMError_Details: TypeAlias = tuple[str | None, str | None, str | None, int | None, int | None]

    class COMError(Exception):
        hresult: int
        text: str | None
        details: _COMError_Details

        def __init__(self, hresult: int, text: str | None, details: _COMError_Details) -> None: ...

    def CopyComPointer(src: _PointerLike, dst: _PointerLike | _CArgObject) -> int: ...

    FUNCFLAG_HRESULT: int
    FUNCFLAG_STDCALL: int

    def FormatError(code: int = ...) -> str: ...
    def get_last_error() -> int: ...
    def set_last_error(value: int) -> int: ...
    def LoadLibrary(name: str, load_flags: int = 0, /) -> int: ...
    def FreeLibrary(handle: int, /) -> None: ...

class _CDataMeta(type):
    # By default mypy complains about the following two methods, because strictly speaking cls
    # might not be a Type[_CT]. However this can never actually happen, because the only class that
    # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here.
    def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]
    def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]

class _CData(metaclass=_CDataMeta):
    _b_base_: int
    _b_needsfree_: bool
    _objects: Mapping[Any, int] | None
    # At runtime the following classmethods are available only on classes, not
    # on instances. This can't be reflected properly in the type system:
    #
    # Structure.from_buffer(...) # valid at runtime
    # Structure(...).from_buffer(...) # invalid at runtime
    #

    @classmethod
    def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ...
    @classmethod
    def from_buffer_copy(cls, source: ReadableBuffer, offset: int = ...) -> Self: ...
    @classmethod
    def from_address(cls, address: int) -> Self: ...
    @classmethod
    def from_param(cls, obj: Any) -> Self | _CArgObject: ...
    @classmethod
    def in_dll(cls, library: CDLL, name: str) -> Self: ...
    def __buffer__(self, flags: int, /) -> memoryview: ...
    def __release_buffer__(self, buffer: memoryview, /) -> None: ...

class _SimpleCData(_CData, Generic[_T]):
    value: _T
    # The TypeVar can be unsolved here,
    # but we can't use overloads without creating many, many mypy false-positive errors
    def __init__(self, value: _T = ...) -> None: ...  # pyright: ignore[reportInvalidTypeVarUse]

class _CanCastTo(_CData): ...
class _PointerLike(_CanCastTo): ...

class _Pointer(_PointerLike, _CData, Generic[_CT]):
    _type_: type[_CT]
    contents: _CT
    @overload
    def __init__(self) -> None: ...
    @overload
    def __init__(self, arg: _CT) -> None: ...
    @overload
    def __getitem__(self, key: int, /) -> Any: ...
    @overload
    def __getitem__(self, key: slice, /) -> list[Any]: ...
    def __setitem__(self, key: int, value: Any, /) -> None: ...

def POINTER(type: type[_CT]) -> type[_Pointer[_CT]]: ...
def pointer(arg: _CT, /) -> _Pointer[_CT]: ...

class _CArgObject: ...

def byref(obj: _CData, offset: int = ...) -> _CArgObject: ...

_ECT: TypeAlias = Callable[[_CData | None, CFuncPtr, tuple[_CData, ...]], _CData]
_PF: TypeAlias = tuple[int] | tuple[int, str | None] | tuple[int, str | None, Any]

class CFuncPtr(_PointerLike, _CData):
    restype: type[_CData] | Callable[[int], Any] | None
    argtypes: Sequence[type[_CData]]
    errcheck: _ECT
    # Abstract attribute that must be defined on subclasses
    _flags_: ClassVar[int]
    @overload
    def __init__(self) -> None: ...
    @overload
    def __init__(self, address: int, /) -> None: ...
    @overload
    def __init__(self, callable: Callable[..., Any], /) -> None: ...
    @overload
    def __init__(self, func_spec: tuple[str | int, CDLL], paramflags: tuple[_PF, ...] | None = ..., /) -> None: ...
    if sys.platform == "win32":
        @overload
        def __init__(
            self, vtbl_index: int, name: str, paramflags: tuple[_PF, ...] | None = ..., iid: _CData | None = ..., /
        ) -> None: ...

    def __call__(self, *args: Any, **kwargs: Any) -> Any: ...

_GetT = TypeVar("_GetT")
_SetT = TypeVar("_SetT")

class _CField(Generic[_CT, _GetT, _SetT]):
    offset: int
    size: int
    @overload
    def __get__(self, instance: None, owner: type[Any] | None, /) -> Self: ...
    @overload
    def __get__(self, instance: Any, owner: type[Any] | None, /) -> _GetT: ...
    def __set__(self, instance: Any, value: _SetT, /) -> None: ...

class _StructUnionMeta(_CDataMeta):
    _fields_: Sequence[tuple[str, type[_CData]] | tuple[str, type[_CData], int]]
    _pack_: int
    _anonymous_: Sequence[str]
    def __getattr__(self, name: str) -> _CField[Any, Any, Any]: ...

class _StructUnionBase(_CData, metaclass=_StructUnionMeta):
    def __init__(self, *args: Any, **kw: Any) -> None: ...
    def __getattr__(self, name: str) -> Any: ...
    def __setattr__(self, name: str, value: Any) -> None: ...

class Union(_StructUnionBase): ...
class Structure(_StructUnionBase): ...

class Array(_CData, Generic[_CT]):
    @property
    @abstractmethod
    def _length_(self) -> int: ...
    @_length_.setter
    def _length_(self, value: int) -> None: ...
    @property
    @abstractmethod
    def _type_(self) -> type[_CT]: ...
    @_type_.setter
    def _type_(self, value: type[_CT]) -> None: ...
    # Note: only available if _CT == c_char
    @property
    def raw(self) -> bytes: ...
    @raw.setter
    def raw(self, value: ReadableBuffer) -> None: ...
    value: Any  # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise
    # TODO These methods cannot be annotated correctly at the moment.
    # All of these "Any"s stand for the array's element type, but it's not possible to use _CT
    # here, because of a special feature of ctypes.
    # By default, when accessing an element of an Array[_CT], the returned object has type _CT.
    # However, when _CT is a "simple type" like c_int, ctypes automatically "unboxes" the object
    # and converts it to the corresponding Python primitive. For example, when accessing an element
    # of an Array[c_int], a Python int object is returned, not a c_int.
    # This behavior does *not* apply to subclasses of "simple types".
    # If MyInt is a subclass of c_int, then accessing an element of an Array[MyInt] returns
    # a MyInt, not an int.
    # This special behavior is not easy to model in a stub, so for now all places where
    # the array element type would belong are annotated with Any instead.
    def __init__(self, *args: Any) -> None: ...
    @overload
    def __getitem__(self, key: int, /) -> Any: ...
    @overload
    def __getitem__(self, key: slice, /) -> list[Any]: ...
    @overload
    def __setitem__(self, key: int, value: Any, /) -> None: ...
    @overload
    def __setitem__(self, key: slice, value: Iterable[Any], /) -> None: ...
    def __iter__(self) -> Iterator[Any]: ...
    # Can't inherit from Sized because the metaclass conflict between
    # Sized and _CData prevents using _CDataMeta.
    def __len__(self) -> int: ...
    if sys.version_info >= (3, 9):
        def __class_getitem__(cls, item: Any) -> GenericAlias: ...

def addressof(obj: _CData) -> int: ...
def alignment(obj_or_type: _CData | type[_CData]) -> int: ...
def get_errno() -> int: ...
def resize(obj: _CData, size: int) -> None: ...
def set_errno(value: int) -> int: ...
def sizeof(obj_or_type: _CData | type[_CData]) -> int: ...
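The long "unboxing" comment in `Array` is easiest to see at runtime; a short sketch with standard ctypes (the `*` operator is the `_CDataMeta.__mul__` declared above):

from ctypes import c_int

IntArray = c_int * 3             # _CDataMeta.__mul__ builds an Array subtype
arr = IntArray(1, 2, 3)
assert isinstance(arr[0], int)   # c_int elements are "unboxed" to Python ints
assert arr[1:3] == [2, 3]        # slice access returns a plain list
# A subclass of c_int would *not* be unboxed: indexing an Array of that
# subclass yields subclass instances, which is why the stub falls back to Any.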
566
crates/red_knot/vendor/typeshed/stdlib/_curses.pyi
vendored
@@ -1,566 +0,0 @@
import sys
from _typeshed import ReadOnlyBuffer, SupportsRead
from typing import IO, Any, NamedTuple, final, overload
from typing_extensions import TypeAlias

# NOTE: This module is ordinarily only available on Unix, but the windows-curses
# package makes it available on Windows as well with the same contents.

# Handled by PyCurses_ConvertToChtype in _cursesmodule.c.
_ChType: TypeAlias = str | bytes | int

# ACS codes are only initialized after initscr is called
ACS_BBSS: int
ACS_BLOCK: int
ACS_BOARD: int
ACS_BSBS: int
ACS_BSSB: int
ACS_BSSS: int
ACS_BTEE: int
ACS_BULLET: int
ACS_CKBOARD: int
ACS_DARROW: int
ACS_DEGREE: int
ACS_DIAMOND: int
ACS_GEQUAL: int
ACS_HLINE: int
ACS_LANTERN: int
ACS_LARROW: int
ACS_LEQUAL: int
ACS_LLCORNER: int
ACS_LRCORNER: int
ACS_LTEE: int
ACS_NEQUAL: int
ACS_PI: int
ACS_PLMINUS: int
ACS_PLUS: int
ACS_RARROW: int
ACS_RTEE: int
ACS_S1: int
ACS_S3: int
ACS_S7: int
ACS_S9: int
ACS_SBBS: int
ACS_SBSB: int
ACS_SBSS: int
ACS_SSBB: int
ACS_SSBS: int
ACS_SSSB: int
ACS_SSSS: int
ACS_STERLING: int
ACS_TTEE: int
ACS_UARROW: int
ACS_ULCORNER: int
ACS_URCORNER: int
ACS_VLINE: int
ALL_MOUSE_EVENTS: int
A_ALTCHARSET: int
A_ATTRIBUTES: int
A_BLINK: int
A_BOLD: int
A_CHARTEXT: int
A_COLOR: int
A_DIM: int
A_HORIZONTAL: int
A_INVIS: int
if sys.platform != "darwin":
    A_ITALIC: int
A_LEFT: int
A_LOW: int
A_NORMAL: int
A_PROTECT: int
A_REVERSE: int
A_RIGHT: int
A_STANDOUT: int
A_TOP: int
A_UNDERLINE: int
A_VERTICAL: int
BUTTON1_CLICKED: int
BUTTON1_DOUBLE_CLICKED: int
BUTTON1_PRESSED: int
BUTTON1_RELEASED: int
BUTTON1_TRIPLE_CLICKED: int
BUTTON2_CLICKED: int
BUTTON2_DOUBLE_CLICKED: int
BUTTON2_PRESSED: int
BUTTON2_RELEASED: int
BUTTON2_TRIPLE_CLICKED: int
BUTTON3_CLICKED: int
BUTTON3_DOUBLE_CLICKED: int
BUTTON3_PRESSED: int
BUTTON3_RELEASED: int
BUTTON3_TRIPLE_CLICKED: int
BUTTON4_CLICKED: int
BUTTON4_DOUBLE_CLICKED: int
BUTTON4_PRESSED: int
BUTTON4_RELEASED: int
BUTTON4_TRIPLE_CLICKED: int
# Darwin ncurses doesn't provide BUTTON5_* constants
if sys.version_info >= (3, 10) and sys.platform != "darwin":
    BUTTON5_PRESSED: int
    BUTTON5_RELEASED: int
    BUTTON5_CLICKED: int
    BUTTON5_DOUBLE_CLICKED: int
    BUTTON5_TRIPLE_CLICKED: int
BUTTON_ALT: int
BUTTON_CTRL: int
BUTTON_SHIFT: int
COLOR_BLACK: int
COLOR_BLUE: int
COLOR_CYAN: int
COLOR_GREEN: int
COLOR_MAGENTA: int
COLOR_RED: int
COLOR_WHITE: int
COLOR_YELLOW: int
ERR: int
KEY_A1: int
KEY_A3: int
KEY_B2: int
KEY_BACKSPACE: int
KEY_BEG: int
KEY_BREAK: int
KEY_BTAB: int
KEY_C1: int
KEY_C3: int
KEY_CANCEL: int
KEY_CATAB: int
KEY_CLEAR: int
KEY_CLOSE: int
KEY_COMMAND: int
KEY_COPY: int
KEY_CREATE: int
KEY_CTAB: int
KEY_DC: int
KEY_DL: int
KEY_DOWN: int
KEY_EIC: int
KEY_END: int
KEY_ENTER: int
KEY_EOL: int
KEY_EOS: int
KEY_EXIT: int
KEY_F0: int
KEY_F1: int
KEY_F10: int
KEY_F11: int
KEY_F12: int
KEY_F13: int
KEY_F14: int
KEY_F15: int
KEY_F16: int
KEY_F17: int
KEY_F18: int
KEY_F19: int
KEY_F2: int
KEY_F20: int
KEY_F21: int
KEY_F22: int
KEY_F23: int
KEY_F24: int
KEY_F25: int
KEY_F26: int
KEY_F27: int
KEY_F28: int
KEY_F29: int
KEY_F3: int
KEY_F30: int
KEY_F31: int
KEY_F32: int
KEY_F33: int
KEY_F34: int
KEY_F35: int
KEY_F36: int
KEY_F37: int
KEY_F38: int
KEY_F39: int
KEY_F4: int
KEY_F40: int
KEY_F41: int
KEY_F42: int
KEY_F43: int
KEY_F44: int
KEY_F45: int
KEY_F46: int
KEY_F47: int
KEY_F48: int
KEY_F49: int
KEY_F5: int
KEY_F50: int
KEY_F51: int
KEY_F52: int
KEY_F53: int
KEY_F54: int
KEY_F55: int
KEY_F56: int
KEY_F57: int
KEY_F58: int
KEY_F59: int
KEY_F6: int
KEY_F60: int
KEY_F61: int
KEY_F62: int
KEY_F63: int
KEY_F7: int
KEY_F8: int
KEY_F9: int
KEY_FIND: int
KEY_HELP: int
KEY_HOME: int
KEY_IC: int
KEY_IL: int
KEY_LEFT: int
KEY_LL: int
KEY_MARK: int
KEY_MAX: int
KEY_MESSAGE: int
KEY_MIN: int
KEY_MOUSE: int
KEY_MOVE: int
KEY_NEXT: int
KEY_NPAGE: int
KEY_OPEN: int
KEY_OPTIONS: int
KEY_PPAGE: int
KEY_PREVIOUS: int
KEY_PRINT: int
KEY_REDO: int
KEY_REFERENCE: int
KEY_REFRESH: int
KEY_REPLACE: int
KEY_RESET: int
KEY_RESIZE: int
KEY_RESTART: int
KEY_RESUME: int
KEY_RIGHT: int
KEY_SAVE: int
KEY_SBEG: int
KEY_SCANCEL: int
KEY_SCOMMAND: int
KEY_SCOPY: int
KEY_SCREATE: int
KEY_SDC: int
KEY_SDL: int
KEY_SELECT: int
KEY_SEND: int
KEY_SEOL: int
KEY_SEXIT: int
KEY_SF: int
KEY_SFIND: int
KEY_SHELP: int
KEY_SHOME: int
KEY_SIC: int
KEY_SLEFT: int
KEY_SMESSAGE: int
KEY_SMOVE: int
KEY_SNEXT: int
KEY_SOPTIONS: int
KEY_SPREVIOUS: int
KEY_SPRINT: int
KEY_SR: int
KEY_SREDO: int
KEY_SREPLACE: int
KEY_SRESET: int
KEY_SRIGHT: int
KEY_SRSUME: int
KEY_SSAVE: int
KEY_SSUSPEND: int
KEY_STAB: int
KEY_SUNDO: int
KEY_SUSPEND: int
KEY_UNDO: int
KEY_UP: int
OK: int
REPORT_MOUSE_POSITION: int
_C_API: Any
version: bytes

def baudrate() -> int: ...
def beep() -> None: ...
def can_change_color() -> bool: ...
def cbreak(flag: bool = True, /) -> None: ...
def color_content(color_number: int, /) -> tuple[int, int, int]: ...
def color_pair(pair_number: int, /) -> int: ...
def curs_set(visibility: int, /) -> int: ...
def def_prog_mode() -> None: ...
def def_shell_mode() -> None: ...
def delay_output(ms: int, /) -> None: ...
def doupdate() -> None: ...
def echo(flag: bool = True, /) -> None: ...
def endwin() -> None: ...
def erasechar() -> bytes: ...
def filter() -> None: ...
def flash() -> None: ...
def flushinp() -> None: ...

if sys.version_info >= (3, 9):
    def get_escdelay() -> int: ...
    def get_tabsize() -> int: ...

def getmouse() -> tuple[int, int, int, int, int]: ...
def getsyx() -> tuple[int, int]: ...
def getwin(file: SupportsRead[bytes], /) -> _CursesWindow: ...
def halfdelay(tenths: int, /) -> None: ...
def has_colors() -> bool: ...

if sys.version_info >= (3, 10):
    def has_extended_color_support() -> bool: ...

def has_ic() -> bool: ...
def has_il() -> bool: ...
def has_key(key: int, /) -> bool: ...
def init_color(color_number: int, r: int, g: int, b: int, /) -> None: ...
def init_pair(pair_number: int, fg: int, bg: int, /) -> None: ...
def initscr() -> _CursesWindow: ...
def intrflush(flag: bool, /) -> None: ...
def is_term_resized(nlines: int, ncols: int, /) -> bool: ...
def isendwin() -> bool: ...
def keyname(key: int, /) -> bytes: ...
def killchar() -> bytes: ...
def longname() -> bytes: ...
def meta(yes: bool, /) -> None: ...
def mouseinterval(interval: int, /) -> None: ...
def mousemask(newmask: int, /) -> tuple[int, int]: ...
def napms(ms: int, /) -> int: ...
def newpad(nlines: int, ncols: int, /) -> _CursesWindow: ...
def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ..., /) -> _CursesWindow: ...
def nl(flag: bool = True, /) -> None: ...
def nocbreak() -> None: ...
def noecho() -> None: ...
def nonl() -> None: ...
def noqiflush() -> None: ...
def noraw() -> None: ...
def pair_content(pair_number: int, /) -> tuple[int, int]: ...
def pair_number(attr: int, /) -> int: ...
def putp(string: ReadOnlyBuffer, /) -> None: ...
def qiflush(flag: bool = True, /) -> None: ...
def raw(flag: bool = True, /) -> None: ...
def reset_prog_mode() -> None: ...
def reset_shell_mode() -> None: ...
def resetty() -> None: ...
def resize_term(nlines: int, ncols: int, /) -> None: ...
def resizeterm(nlines: int, ncols: int, /) -> None: ...
def savetty() -> None: ...

if sys.version_info >= (3, 9):
    def set_escdelay(ms: int, /) -> None: ...
    def set_tabsize(size: int, /) -> None: ...

def setsyx(y: int, x: int, /) -> None: ...
def setupterm(term: str | None = None, fd: int = -1) -> None: ...
def start_color() -> None: ...
def termattrs() -> int: ...
def termname() -> bytes: ...
def tigetflag(capname: str, /) -> int: ...
def tigetnum(capname: str, /) -> int: ...
def tigetstr(capname: str, /) -> bytes | None: ...
def tparm(
    str: ReadOnlyBuffer,
    i1: int = 0,
    i2: int = 0,
    i3: int = 0,
    i4: int = 0,
    i5: int = 0,
    i6: int = 0,
    i7: int = 0,
    i8: int = 0,
    i9: int = 0,
    /,
) -> bytes: ...
def typeahead(fd: int, /) -> None: ...
def unctrl(ch: _ChType, /) -> bytes: ...

if sys.version_info < (3, 12) or sys.platform != "darwin":
    # The support for macos was dropped in 3.12
    def unget_wch(ch: int | str, /) -> None: ...

def ungetch(ch: _ChType, /) -> None: ...
def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: ...
def update_lines_cols() -> None: ...
def use_default_colors() -> None: ...
def use_env(flag: bool, /) -> None: ...

class error(Exception): ...

@final
class _CursesWindow:
    encoding: str
    @overload
    def addch(self, ch: _ChType, attr: int = ...) -> None: ...
    @overload
    def addch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ...
    @overload
    def addnstr(self, str: str, n: int, attr: int = ...) -> None: ...
    @overload
    def addnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ...
    @overload
    def addstr(self, str: str, attr: int = ...) -> None: ...
    @overload
    def addstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ...
    def attroff(self, attr: int, /) -> None: ...
    def attron(self, attr: int, /) -> None: ...
    def attrset(self, attr: int, /) -> None: ...
    def bkgd(self, ch: _ChType, attr: int = ..., /) -> None: ...
    def bkgdset(self, ch: _ChType, attr: int = ..., /) -> None: ...
    def border(
        self,
        ls: _ChType = ...,
        rs: _ChType = ...,
        ts: _ChType = ...,
        bs: _ChType = ...,
        tl: _ChType = ...,
        tr: _ChType = ...,
        bl: _ChType = ...,
        br: _ChType = ...,
    ) -> None: ...
    @overload
    def box(self) -> None: ...
    @overload
    def box(self, vertch: _ChType = ..., horch: _ChType = ...) -> None: ...
    @overload
    def chgat(self, attr: int) -> None: ...
    @overload
    def chgat(self, num: int, attr: int) -> None: ...
    @overload
    def chgat(self, y: int, x: int, attr: int) -> None: ...
    @overload
    def chgat(self, y: int, x: int, num: int, attr: int) -> None: ...
    def clear(self) -> None: ...
    def clearok(self, yes: int) -> None: ...
    def clrtobot(self) -> None: ...
    def clrtoeol(self) -> None: ...
    def cursyncup(self) -> None: ...
    @overload
    def delch(self) -> None: ...
    @overload
    def delch(self, y: int, x: int) -> None: ...
    def deleteln(self) -> None: ...
    @overload
    def derwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
    @overload
    def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
    def echochar(self, ch: _ChType, attr: int = ..., /) -> None: ...
    def enclose(self, y: int, x: int, /) -> bool: ...
    def erase(self) -> None: ...
    def getbegyx(self) -> tuple[int, int]: ...
    def getbkgd(self) -> tuple[int, int]: ...
    @overload
    def getch(self) -> int: ...
    @overload
    def getch(self, y: int, x: int) -> int: ...
    if sys.version_info < (3, 12) or sys.platform != "darwin":
        # The support for macos was dropped in 3.12
        @overload
        def get_wch(self) -> int | str: ...
        @overload
        def get_wch(self, y: int, x: int) -> int | str: ...

    @overload
    def getkey(self) -> str: ...
    @overload
    def getkey(self, y: int, x: int) -> str: ...
    def getmaxyx(self) -> tuple[int, int]: ...
    def getparyx(self) -> tuple[int, int]: ...
    @overload
    def getstr(self) -> bytes: ...
    @overload
    def getstr(self, n: int) -> bytes: ...
    @overload
    def getstr(self, y: int, x: int) -> bytes: ...
    @overload
    def getstr(self, y: int, x: int, n: int) -> bytes: ...
    def getyx(self) -> tuple[int, int]: ...
    @overload
    def hline(self, ch: _ChType, n: int) -> None: ...
    @overload
    def hline(self, y: int, x: int, ch: _ChType, n: int) -> None: ...
    def idcok(self, flag: bool) -> None: ...
    def idlok(self, yes: bool) -> None: ...
    def immedok(self, flag: bool) -> None: ...
    @overload
    def inch(self) -> int: ...
    @overload
    def inch(self, y: int, x: int) -> int: ...
    @overload
    def insch(self, ch: _ChType, attr: int = ...) -> None: ...
    @overload
    def insch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ...
    def insdelln(self, nlines: int) -> None: ...
    def insertln(self) -> None: ...
    @overload
    def insnstr(self, str: str, n: int, attr: int = ...) -> None: ...
    @overload
    def insnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ...
    @overload
    def insstr(self, str: str, attr: int = ...) -> None: ...
    @overload
    def insstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ...
    @overload
    def instr(self, n: int = ...) -> bytes: ...
    @overload
    def instr(self, y: int, x: int, n: int = ...) -> bytes: ...
    def is_linetouched(self, line: int, /) -> bool: ...
    def is_wintouched(self) -> bool: ...
    def keypad(self, yes: bool) -> None: ...
    def leaveok(self, yes: bool) -> None: ...
    def move(self, new_y: int, new_x: int) -> None: ...
    def mvderwin(self, y: int, x: int) -> None: ...
    def mvwin(self, new_y: int, new_x: int) -> None: ...
    def nodelay(self, yes: bool) -> None: ...
    def notimeout(self, yes: bool) -> None: ...
    @overload
    def noutrefresh(self) -> None: ...
    @overload
    def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ...
    @overload
    def overlay(self, destwin: _CursesWindow) -> None: ...
    @overload
    def overlay(
        self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
    ) -> None: ...
    @overload
    def overwrite(self, destwin: _CursesWindow) -> None: ...
    @overload
    def overwrite(
        self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
    ) -> None: ...
    def putwin(self, file: IO[Any], /) -> None: ...
    def redrawln(self, beg: int, num: int, /) -> None: ...
    def redrawwin(self) -> None: ...
    @overload
    def refresh(self) -> None: ...
    @overload
    def refresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ...
    def resize(self, nlines: int, ncols: int) -> None: ...
    def scroll(self, lines: int = ...) -> None: ...
    def scrollok(self, flag: bool) -> None: ...
    def setscrreg(self, top: int, bottom: int, /) -> None: ...
    def standend(self) -> None: ...
    def standout(self) -> None: ...
    @overload
    def subpad(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
    @overload
    def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
    @overload
    def subwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
    @overload
    def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
    def syncdown(self) -> None: ...
    def syncok(self, flag: bool) -> None: ...
    def syncup(self) -> None: ...
    def timeout(self, delay: int) -> None: ...
    def touchline(self, start: int, count: int, changed: bool = ...) -> None: ...
    def touchwin(self) -> None: ...
    def untouchwin(self) -> None: ...
    @overload
    def vline(self, ch: _ChType, n: int) -> None: ...
    @overload
    def vline(self, y: int, x: int, ch: _ChType, n: int) -> None: ...

class _ncurses_version(NamedTuple):
    major: int
    minor: int
    patch: int

ncurses_version: _ncurses_version
window = _CursesWindow  # undocumented
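A minimal usage sketch tying a few of these signatures together; it needs an attached terminal, so it is shown rather than run:

import curses

def main(stdscr) -> None:
    curses.curs_set(0)            # curs_set(visibility, /) -> int
    stdscr.addstr(0, 0, "hello")  # one of the addstr overloads above
    stdscr.refresh()
    stdscr.getch()                # getch() -> int

# curses.wrapper(main)  # commented out: requires a real terminal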
281
crates/red_knot/vendor/typeshed/stdlib/_decimal.pyi
vendored
@@ -1,281 +0,0 @@
import numbers
import sys
from collections.abc import Container, Sequence
from types import TracebackType
from typing import Any, ClassVar, Final, Literal, NamedTuple, overload
from typing_extensions import Self, TypeAlias

_Decimal: TypeAlias = Decimal | int
_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int]
_ComparableNum: TypeAlias = Decimal | float | numbers.Rational

__version__: Final[str]
__libmpdec_version__: Final[str]

class DecimalTuple(NamedTuple):
    sign: int
    digits: tuple[int, ...]
    exponent: int | Literal["n", "N", "F"]

ROUND_DOWN: str
ROUND_HALF_UP: str
ROUND_HALF_EVEN: str
ROUND_CEILING: str
ROUND_FLOOR: str
ROUND_UP: str
ROUND_HALF_DOWN: str
ROUND_05UP: str
HAVE_CONTEXTVAR: bool
HAVE_THREADS: bool
MAX_EMAX: int
MAX_PREC: int
MIN_EMIN: int
MIN_ETINY: int

class DecimalException(ArithmeticError): ...
class Clamped(DecimalException): ...
class InvalidOperation(DecimalException): ...
class ConversionSyntax(InvalidOperation): ...
class DivisionByZero(DecimalException, ZeroDivisionError): ...
class DivisionImpossible(InvalidOperation): ...
class DivisionUndefined(InvalidOperation, ZeroDivisionError): ...
class Inexact(DecimalException): ...
class InvalidContext(InvalidOperation): ...
class Rounded(DecimalException): ...
class Subnormal(DecimalException): ...
class Overflow(Inexact, Rounded): ...
class Underflow(Inexact, Rounded, Subnormal): ...
class FloatOperation(DecimalException, TypeError): ...

def setcontext(context: Context, /) -> None: ...
def getcontext() -> Context: ...

if sys.version_info >= (3, 11):
    def localcontext(
        ctx: Context | None = None,
        *,
        prec: int | None = ...,
        rounding: str | None = ...,
        Emin: int | None = ...,
        Emax: int | None = ...,
        capitals: int | None = ...,
        clamp: int | None = ...,
        traps: dict[_TrapType, bool] | None = ...,
        flags: dict[_TrapType, bool] | None = ...,
    ) -> _ContextManager: ...

else:
    def localcontext(ctx: Context | None = None) -> _ContextManager: ...

class Decimal:
    def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ...
    @classmethod
    def from_float(cls, f: float, /) -> Self: ...
    def __bool__(self) -> bool: ...
    def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def __hash__(self) -> int: ...
    def as_tuple(self) -> DecimalTuple: ...
    def as_integer_ratio(self) -> tuple[int, int]: ...
    def to_eng_string(self, context: Context | None = None) -> str: ...
    def __abs__(self) -> Decimal: ...
    def __add__(self, value: _Decimal, /) -> Decimal: ...
    def __divmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: ...
    def __eq__(self, value: object, /) -> bool: ...
    def __floordiv__(self, value: _Decimal, /) -> Decimal: ...
    def __ge__(self, value: _ComparableNum, /) -> bool: ...
    def __gt__(self, value: _ComparableNum, /) -> bool: ...
    def __le__(self, value: _ComparableNum, /) -> bool: ...
    def __lt__(self, value: _ComparableNum, /) -> bool: ...
    def __mod__(self, value: _Decimal, /) -> Decimal: ...
    def __mul__(self, value: _Decimal, /) -> Decimal: ...
    def __neg__(self) -> Decimal: ...
    def __pos__(self) -> Decimal: ...
    def __pow__(self, value: _Decimal, mod: _Decimal | None = None, /) -> Decimal: ...
    def __radd__(self, value: _Decimal, /) -> Decimal: ...
    def __rdivmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: ...
    def __rfloordiv__(self, value: _Decimal, /) -> Decimal: ...
    def __rmod__(self, value: _Decimal, /) -> Decimal: ...
    def __rmul__(self, value: _Decimal, /) -> Decimal: ...
    def __rsub__(self, value: _Decimal, /) -> Decimal: ...
    def __rtruediv__(self, value: _Decimal, /) -> Decimal: ...
    def __sub__(self, value: _Decimal, /) -> Decimal: ...
    def __truediv__(self, value: _Decimal, /) -> Decimal: ...
    def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def __float__(self) -> float: ...
    def __int__(self) -> int: ...
    def __trunc__(self) -> int: ...
    @property
    def real(self) -> Decimal: ...
    @property
    def imag(self) -> Decimal: ...
    def conjugate(self) -> Decimal: ...
    def __complex__(self) -> complex: ...
    @overload
    def __round__(self) -> int: ...
    @overload
    def __round__(self, ndigits: int, /) -> Decimal: ...
    def __floor__(self) -> int: ...
    def __ceil__(self) -> int: ...
    def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: ...
    def __rpow__(self, value: _Decimal, mod: Context | None = None, /) -> Decimal: ...
    def normalize(self, context: Context | None = None) -> Decimal: ...
    def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
    def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: ...
    def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
    def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
    def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
    def sqrt(self, context: Context | None = None) -> Decimal: ...
    def max(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def min(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def adjusted(self) -> int: ...
    def canonical(self) -> Decimal: ...
    def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def copy_abs(self) -> Decimal: ...
    def copy_negate(self) -> Decimal: ...
    def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def exp(self, context: Context | None = None) -> Decimal: ...
    def is_canonical(self) -> bool: ...
    def is_finite(self) -> bool: ...
    def is_infinite(self) -> bool: ...
    def is_nan(self) -> bool: ...
    def is_normal(self, context: Context | None = None) -> bool: ...
    def is_qnan(self) -> bool: ...
    def is_signed(self) -> bool: ...
    def is_snan(self) -> bool: ...
    def is_subnormal(self, context: Context | None = None) -> bool: ...
    def is_zero(self) -> bool: ...
    def ln(self, context: Context | None = None) -> Decimal: ...
    def log10(self, context: Context | None = None) -> Decimal: ...
    def logb(self, context: Context | None = None) -> Decimal: ...
    def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def logical_invert(self, context: Context | None = None) -> Decimal: ...
    def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def next_minus(self, context: Context | None = None) -> Decimal: ...
    def next_plus(self, context: Context | None = None) -> Decimal: ...
    def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def number_class(self, context: Context | None = None) -> str: ...
    def radix(self) -> Decimal: ...
    def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
    def __reduce__(self) -> tuple[type[Self], tuple[str]]: ...
    def __copy__(self) -> Self: ...
    def __deepcopy__(self, memo: Any, /) -> Self: ...
    def __format__(self, specifier: str, context: Context | None = ..., /) -> str: ...

class _ContextManager:
    new_context: Context
    saved_context: Context
    def __init__(self, new_context: Context) -> None: ...
    def __enter__(self) -> Context: ...
    def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ...

_TrapType: TypeAlias = type[DecimalException]

class Context:
    # TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime,
    # even settable attributes like `prec` and `rounding`,
    # but that's inexpressable in the stub.
    # Type checkers either ignore it or misinterpret it
    # if you add a `def __delattr__(self, name: str, /) -> NoReturn` method to the stub
    prec: int
    rounding: str
    Emin: int
    Emax: int
    capitals: int
    clamp: int
    traps: dict[_TrapType, bool]
    flags: dict[_TrapType, bool]
    def __init__(
        self,
        prec: int | None = ...,
        rounding: str | None = ...,
        Emin: int | None = ...,
        Emax: int | None = ...,
        capitals: int | None = ...,
        clamp: int | None = ...,
        flags: None | dict[_TrapType, bool] | Container[_TrapType] = ...,
        traps: None | dict[_TrapType, bool] | Container[_TrapType] = ...,
        _ignored_flags: list[_TrapType] | None = ...,
    ) -> None: ...
    def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ...
    def clear_flags(self) -> None: ...
    def clear_traps(self) -> None: ...
    def copy(self) -> Context: ...
    def __copy__(self) -> Context: ...
    # see https://github.com/python/cpython/issues/94107
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def Etiny(self) -> int: ...
    def Etop(self) -> int: ...
    def create_decimal(self, num: _DecimalNew = "0", /) -> Decimal: ...
    def create_decimal_from_float(self, f: float, /) -> Decimal: ...
    def abs(self, x: _Decimal, /) -> Decimal: ...
    def add(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def canonical(self, x: Decimal, /) -> Decimal: ...
    def compare(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def compare_signal(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def compare_total(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def compare_total_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def copy_abs(self, x: _Decimal, /) -> Decimal: ...
    def copy_decimal(self, x: _Decimal, /) -> Decimal: ...
    def copy_negate(self, x: _Decimal, /) -> Decimal: ...
    def copy_sign(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def divide(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def divide_int(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def divmod(self, x: _Decimal, y: _Decimal, /) -> tuple[Decimal, Decimal]: ...
    def exp(self, x: _Decimal, /) -> Decimal: ...
    def fma(self, x: _Decimal, y: _Decimal, z: _Decimal, /) -> Decimal: ...
    def is_canonical(self, x: _Decimal, /) -> bool: ...
    def is_finite(self, x: _Decimal, /) -> bool: ...
    def is_infinite(self, x: _Decimal, /) -> bool: ...
    def is_nan(self, x: _Decimal, /) -> bool: ...
    def is_normal(self, x: _Decimal, /) -> bool: ...
    def is_qnan(self, x: _Decimal, /) -> bool: ...
    def is_signed(self, x: _Decimal, /) -> bool: ...
    def is_snan(self, x: _Decimal, /) -> bool: ...
    def is_subnormal(self, x: _Decimal, /) -> bool: ...
    def is_zero(self, x: _Decimal, /) -> bool: ...
    def ln(self, x: _Decimal, /) -> Decimal: ...
    def log10(self, x: _Decimal, /) -> Decimal: ...
    def logb(self, x: _Decimal, /) -> Decimal: ...
    def logical_and(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def logical_invert(self, x: _Decimal, /) -> Decimal: ...
    def logical_or(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def logical_xor(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def max(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def max_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def min(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def min_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def minus(self, x: _Decimal, /) -> Decimal: ...
    def multiply(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def next_minus(self, x: _Decimal, /) -> Decimal: ...
    def next_plus(self, x: _Decimal, /) -> Decimal: ...
    def next_toward(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def normalize(self, x: _Decimal, /) -> Decimal: ...
    def number_class(self, x: _Decimal, /) -> str: ...
    def plus(self, x: _Decimal, /) -> Decimal: ...
    def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: ...
    def quantize(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def radix(self) -> Decimal: ...
    def remainder(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def remainder_near(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def rotate(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def same_quantum(self, x: _Decimal, y: _Decimal, /) -> bool: ...
    def scaleb(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def shift(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def sqrt(self, x: _Decimal, /) -> Decimal: ...
    def subtract(self, x: _Decimal, y: _Decimal, /) -> Decimal: ...
    def to_eng_string(self, x: _Decimal, /) -> str: ...
    def to_sci_string(self, x: _Decimal, /) -> str: ...
    def to_integral_exact(self, x: _Decimal, /) -> Decimal: ...
    def to_integral_value(self, x: _Decimal, /) -> Decimal: ...
    def to_integral(self, x: _Decimal, /) -> Decimal: ...

DefaultContext: Context
BasicContext: Context
ExtendedContext: Context
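`localcontext` returns the `_ContextManager` declared above, whose `__enter__` hands back a `Context`; for example:

from decimal import Decimal, localcontext

with localcontext() as ctx:  # _ContextManager.__enter__ -> Context
    ctx.prec = 6
    assert str(Decimal(1) / Decimal(7)) == "0.142857"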
@@ -1,33 +0,0 @@
from collections.abc import Callable
from types import TracebackType
from typing import Any, NoReturn, overload
from typing_extensions import TypeVarTuple, Unpack

__all__ = ["error", "start_new_thread", "exit", "get_ident", "allocate_lock", "interrupt_main", "LockType", "RLock"]

_Ts = TypeVarTuple("_Ts")

TIMEOUT_MAX: int
error = RuntimeError

@overload
def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]]) -> None: ...
@overload
def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any]) -> None: ...
def exit() -> NoReturn: ...
def get_ident() -> int: ...
def allocate_lock() -> LockType: ...
def stack_size(size: int | None = None) -> int: ...

class LockType:
    locked_status: bool
    def acquire(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ...
    def __enter__(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ...
    def __exit__(self, typ: type[BaseException] | None, val: BaseException | None, tb: TracebackType | None) -> None: ...
    def release(self) -> bool: ...
    def locked(self) -> bool: ...

class RLock(LockType):
    def release(self) -> None: ...  # type: ignore[override]

def interrupt_main() -> None: ...
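The two `start_new_thread` overloads distinguish a precisely-typed argument tuple from the general `*args`/`**kwargs` case; a small timing-based sketch, purely illustrative:

import _thread
import time

results: list[int] = []
# Matches the TypeVarTuple overload: args is tuple[int] for an (int) -> object callable
_thread.start_new_thread(lambda x: results.append(x), (42,))
time.sleep(0.5)  # crude synchronization, adequate for a demo
assert results == [42]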
164 crates/red_knot/vendor/typeshed/stdlib/threading.pyi vendored
@@ -1,164 +0,0 @@
import sys
from _thread import _excepthook, _ExceptHookArgs
from _typeshed import ProfileFunction, TraceFunction
from collections.abc import Callable, Iterable, Mapping
from types import TracebackType
from typing import Any, TypeVar

_T = TypeVar("_T")

__all__ = [
    "get_ident",
    "active_count",
    "Condition",
    "current_thread",
    "enumerate",
    "main_thread",
    "TIMEOUT_MAX",
    "Event",
    "Lock",
    "RLock",
    "Semaphore",
    "BoundedSemaphore",
    "Thread",
    "Barrier",
    "BrokenBarrierError",
    "Timer",
    "ThreadError",
    "setprofile",
    "settrace",
    "local",
    "stack_size",
    "ExceptHookArgs",
    "excepthook",
]

def active_count() -> int: ...
def current_thread() -> Thread: ...
def currentThread() -> Thread: ...
def get_ident() -> int: ...
def enumerate() -> list[Thread]: ...
def main_thread() -> Thread: ...
def settrace(func: TraceFunction) -> None: ...
def setprofile(func: ProfileFunction | None) -> None: ...
def stack_size(size: int | None = None) -> int: ...

TIMEOUT_MAX: float

class ThreadError(Exception): ...

class local:
    def __getattribute__(self, name: str) -> Any: ...
    def __setattr__(self, name: str, value: Any) -> None: ...
    def __delattr__(self, name: str) -> None: ...

class Thread:
    name: str
    daemon: bool
    @property
    def ident(self) -> int | None: ...
    def __init__(
        self,
        group: None = None,
        target: Callable[..., object] | None = None,
        name: str | None = None,
        args: Iterable[Any] = (),
        kwargs: Mapping[str, Any] | None = None,
        *,
        daemon: bool | None = None,
    ) -> None: ...
    def start(self) -> None: ...
    def run(self) -> None: ...
    def join(self, timeout: float | None = None) -> None: ...
    def getName(self) -> str: ...
    def setName(self, name: str) -> None: ...
    @property
    def native_id(self) -> int | None: ...  # only available on some platforms
    def is_alive(self) -> bool: ...
    if sys.version_info < (3, 9):
        def isAlive(self) -> bool: ...

    def isDaemon(self) -> bool: ...
    def setDaemon(self, daemonic: bool) -> None: ...

class _DummyThread(Thread): ...

class Lock:
    def __enter__(self) -> bool: ...
    def __exit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
    ) -> bool | None: ...
    def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
    def release(self) -> None: ...
    def locked(self) -> bool: ...

class _RLock:
    def __enter__(self) -> bool: ...
    def __exit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
    ) -> bool | None: ...
    def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ...
    def release(self) -> None: ...

RLock = _RLock

class Condition:
    def __init__(self, lock: Lock | _RLock | None = None) -> None: ...
    def __enter__(self) -> bool: ...
    def __exit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
    ) -> bool | None: ...
    def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
    def release(self) -> None: ...
    def wait(self, timeout: float | None = None) -> bool: ...
    def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ...
    def notify(self, n: int = 1) -> None: ...
    def notify_all(self) -> None: ...
    def notifyAll(self) -> None: ...

class Semaphore:
    def __init__(self, value: int = 1) -> None: ...
    def __exit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
    ) -> bool | None: ...
    def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ...
    def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ...
    if sys.version_info >= (3, 9):
        def release(self, n: int = ...) -> None: ...
    else:
        def release(self) -> None: ...

class BoundedSemaphore(Semaphore): ...

class Event:
    def is_set(self) -> bool: ...
    def set(self) -> None: ...
    def clear(self) -> None: ...
    def wait(self, timeout: float | None = None) -> bool: ...

excepthook = _excepthook
ExceptHookArgs = _ExceptHookArgs

class Timer(Thread):
    def __init__(
        self,
        interval: float,
        function: Callable[..., object],
        args: Iterable[Any] | None = None,
        kwargs: Mapping[str, Any] | None = None,
    ) -> None: ...
    def cancel(self) -> None: ...

class Barrier:
    @property
    def parties(self) -> int: ...
    @property
    def n_waiting(self) -> int: ...
    @property
    def broken(self) -> bool: ...
    def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ...
    def wait(self, timeout: float | None = None) -> int: ...
    def reset(self) -> None: ...
    def abort(self) -> None: ...

class BrokenBarrierError(RuntimeError): ...
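A minimal usage sketch of the API typed above (an illustrative addition, not part of the vendored stub): Thread(target=...), Lock as a context manager, and Condition.wait_for all match the deleted signatures.

import threading

counter = 0
lock = threading.Lock()
cond = threading.Condition()

def work() -> None:
    global counter
    for _ in range(1000):
        with lock:  # Lock.__enter__ / __exit__, as in the stub
            counter += 1
    with cond:
        cond.notify()  # Condition.notify(n=1)

t = threading.Thread(target=work, name="worker", daemon=True)
t.start()
with cond:
    # wait_for(predicate, timeout) returns the predicate's last result
    cond.wait_for(lambda: counter == 1000, timeout=5.0)
t.join(timeout=1.0)
print(counter)  # 1000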
11 crates/red_knot/vendor/typeshed/stdlib/_heapq.pyi vendored
@@ -1,11 +0,0 @@
from typing import Any, Final, TypeVar

_T = TypeVar("_T")

__about__: Final[str]

def heapify(heap: list[Any], /) -> None: ...
def heappop(heap: list[_T], /) -> _T: ...
def heappush(heap: list[_T], item: _T, /) -> None: ...
def heappushpop(heap: list[_T], item: _T, /) -> _T: ...
def heapreplace(heap: list[_T], item: _T, /) -> _T: ...
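For orientation (editor's sketch, not part of the diff): _heapq is the C accelerator behind the public heapq module, and the `/` markers above mirror its positional-only C signatures.

import heapq

h: list[int] = []
for x in (5, 1, 4, 2):
    heapq.heappush(h, x)  # heappush(heap, item, /)
print(heapq.heappop(h))  # 1, the smallest element
print(heapq.heapreplace(h, 7))  # pops the new smallest (2), then pushes 7

data = [9, 3, 8]
heapq.heapify(data)  # in-place rearrangement into heap order
print(data[0])  # 3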
28 crates/red_knot/vendor/typeshed/stdlib/_imp.pyi vendored
@@ -1,28 +0,0 @@
import sys
import types
from _typeshed import ReadableBuffer
from importlib.machinery import ModuleSpec
from typing import Any

check_hash_based_pycs: str

def source_hash(key: int, source: ReadableBuffer) -> bytes: ...
def create_builtin(spec: ModuleSpec, /) -> types.ModuleType: ...
def create_dynamic(spec: ModuleSpec, file: Any = None, /) -> types.ModuleType: ...
def acquire_lock() -> None: ...
def exec_builtin(mod: types.ModuleType, /) -> int: ...
def exec_dynamic(mod: types.ModuleType, /) -> int: ...
def extension_suffixes() -> list[str]: ...
def init_frozen(name: str, /) -> types.ModuleType: ...
def is_builtin(name: str, /) -> int: ...
def is_frozen(name: str, /) -> bool: ...
def is_frozen_package(name: str, /) -> bool: ...
def lock_held() -> bool: ...
def release_lock() -> None: ...

if sys.version_info >= (3, 11):
    def find_frozen(name: str, /, *, withdata: bool = False) -> tuple[memoryview | None, bool, str | None] | None: ...
    def get_frozen_object(name: str, data: ReadableBuffer | None = None, /) -> types.CodeType: ...

else:
    def get_frozen_object(name: str, /) -> types.CodeType: ...
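_imp is internal import machinery; a small probe of the functions declared above (illustrative sketch only, based on documented CPython behavior):

import _imp

print(_imp.is_builtin("sys"))  # non-zero for built-in modules
print(_imp.is_frozen("zipimport"))  # whether the module is frozen into the binary
print(_imp.extension_suffixes())  # e.g. ['.cpython-312-x86_64-linux-gnu.so', ...]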
49 crates/red_knot/vendor/typeshed/stdlib/_json.pyi vendored
@@ -1,49 +0,0 @@
from collections.abc import Callable
from typing import Any, final

@final
class make_encoder:
    @property
    def sort_keys(self) -> bool: ...
    @property
    def skipkeys(self) -> bool: ...
    @property
    def key_separator(self) -> str: ...
    @property
    def indent(self) -> int | None: ...
    @property
    def markers(self) -> dict[int, Any] | None: ...
    @property
    def default(self) -> Callable[[Any], Any]: ...
    @property
    def encoder(self) -> Callable[[str], str]: ...
    @property
    def item_separator(self) -> str: ...
    def __init__(
        self,
        markers: dict[int, Any] | None,
        default: Callable[[Any], Any],
        encoder: Callable[[str], str],
        indent: int | None,
        key_separator: str,
        item_separator: str,
        sort_keys: bool,
        skipkeys: bool,
        allow_nan: bool,
    ) -> None: ...
    def __call__(self, obj: object, _current_indent_level: int) -> Any: ...

@final
class make_scanner:
    object_hook: Any
    object_pairs_hook: Any
    parse_int: Any
    parse_constant: Any
    parse_float: Any
    strict: bool
    # TODO: 'context' needs the attrs above (ducktype), but not __call__.
    def __init__(self, context: make_scanner) -> None: ...
    def __call__(self, string: str, index: int) -> tuple[Any, int]: ...

def encode_basestring_ascii(s: str) -> str: ...
def scanstring(string: str, end: int, strict: bool = ...) -> tuple[str, int]: ...
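_json is the C accelerator wired up by the json package: json.decoder builds a _json.make_scanner, and json.encoder re-exports encode_basestring_ascii when the C version is available. A sketch through the public API (not part of the diff):

import json

print(json.encoder.encode_basestring_ascii("héllo"))  # '"h\u00e9llo"', quoted and escaped
decoder = json.JSONDecoder()  # constructs the scanner from its own hook attributes
print(decoder.decode('{"a": [1, 2.5, null]}'))  # {'a': [1, 2.5, None]}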
100 crates/red_knot/vendor/typeshed/stdlib/_locale.pyi vendored
@@ -1,100 +0,0 @@
import sys
from _typeshed import StrPath
from collections.abc import Mapping

LC_CTYPE: int
LC_COLLATE: int
LC_TIME: int
LC_MONETARY: int
LC_NUMERIC: int
LC_ALL: int
CHAR_MAX: int

def setlocale(category: int, locale: str | None = None, /) -> str: ...
def localeconv() -> Mapping[str, int | str | list[int]]: ...

if sys.version_info >= (3, 11):
    def getencoding() -> str: ...

def strcoll(os1: str, os2: str, /) -> int: ...
def strxfrm(string: str, /) -> str: ...

# native gettext functions
# https://docs.python.org/3/library/locale.html#access-to-message-catalogs
# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626
if sys.platform != "win32":
    LC_MESSAGES: int

    ABDAY_1: int
    ABDAY_2: int
    ABDAY_3: int
    ABDAY_4: int
    ABDAY_5: int
    ABDAY_6: int
    ABDAY_7: int

    ABMON_1: int
    ABMON_2: int
    ABMON_3: int
    ABMON_4: int
    ABMON_5: int
    ABMON_6: int
    ABMON_7: int
    ABMON_8: int
    ABMON_9: int
    ABMON_10: int
    ABMON_11: int
    ABMON_12: int

    DAY_1: int
    DAY_2: int
    DAY_3: int
    DAY_4: int
    DAY_5: int
    DAY_6: int
    DAY_7: int

    ERA: int
    ERA_D_T_FMT: int
    ERA_D_FMT: int
    ERA_T_FMT: int

    MON_1: int
    MON_2: int
    MON_3: int
    MON_4: int
    MON_5: int
    MON_6: int
    MON_7: int
    MON_8: int
    MON_9: int
    MON_10: int
    MON_11: int
    MON_12: int

    CODESET: int
    D_T_FMT: int
    D_FMT: int
    T_FMT: int
    T_FMT_AMPM: int
    AM_STR: int
    PM_STR: int

    RADIXCHAR: int
    THOUSEP: int
    YESEXPR: int
    NOEXPR: int
    CRNCYSTR: int
    ALT_DIGITS: int

    def nl_langinfo(key: int, /) -> str: ...

    # This is dependent on `libintl.h` which is a part of the `gettext`
    # system dependency. These functions might be missing,
    # but we always say that they are present.
    def gettext(msg: str, /) -> str: ...
    def dgettext(domain: str | None, msg: str, /) -> str: ...
    def dcgettext(domain: str | None, msg: str, category: int, /) -> str: ...
    def textdomain(domain: str | None, /) -> str: ...
    def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: ...
    def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: ...
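The documented locale module wraps these primitives; a short sketch (illustrative, not from the diff), with the POSIX-only names guarded just as the stub guards them behind sys.platform:

import locale

locale.setlocale(locale.LC_ALL, "C")  # setlocale(category, locale, /)
conv = locale.localeconv()  # Mapping[str, int | str | list[int]]
print(conv["decimal_point"])  # '.'
if hasattr(locale, "nl_langinfo"):  # absent on Windows, per the platform guard above
    print(locale.nl_langinfo(locale.CODESET))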
35 crates/red_knot/vendor/typeshed/stdlib/_lsprof.pyi vendored
@@ -1,35 +0,0 @@
import sys
from _typeshed import structseq
from collections.abc import Callable
from types import CodeType
from typing import Any, Final, final

class Profiler:
    def __init__(
        self, timer: Callable[[], float] | None = None, timeunit: float = 0.0, subcalls: bool = True, builtins: bool = True
    ) -> None: ...
    def getstats(self) -> list[profiler_entry]: ...
    def enable(self, subcalls: bool = True, builtins: bool = True) -> None: ...
    def disable(self) -> None: ...
    def clear(self) -> None: ...

@final
class profiler_entry(structseq[Any], tuple[CodeType | str, int, int, float, float, list[profiler_subentry]]):
    if sys.version_info >= (3, 10):
        __match_args__: Final = ("code", "callcount", "reccallcount", "totaltime", "inlinetime", "calls")

    code: CodeType | str
    callcount: int
    reccallcount: int
    totaltime: float
    inlinetime: float
    calls: list[profiler_subentry]

@final
class profiler_subentry(structseq[Any], tuple[CodeType | str, int, int, float, float]):
    if sys.version_info >= (3, 10):
        __match_args__: Final = ("code", "callcount", "reccallcount", "totaltime", "inlinetime")

    code: CodeType | str
    callcount: int
    reccallcount: int
    totaltime: float
    inlinetime: float
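_lsprof.Profiler is the engine underneath cProfile (cProfile.Profile subclasses it); using it directly looks like this (editor's sketch, not part of the diff):

import _lsprof

def fib(n: int) -> int:
    return n if n < 2 else fib(n - 1) + fib(n - 2)

p = _lsprof.Profiler()
p.enable(subcalls=True, builtins=True)
fib(10)
p.disable()
for entry in p.getstats():  # list of profiler_entry structseqs, as typed above
    if getattr(entry.code, "co_name", "") == "fib":  # entry.code is CodeType | str
        print(entry.callcount, entry.reccallcount, entry.totaltime)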
16 crates/red_knot/vendor/typeshed/stdlib/_markupbase.pyi vendored
@@ -1,16 +0,0 @@
import sys
from typing import Any

class ParserBase:
    def reset(self) -> None: ...
    def getpos(self) -> tuple[int, int]: ...
    def unknown_decl(self, data: str) -> None: ...
    def parse_comment(self, i: int, report: int = 1) -> int: ...  # undocumented
    def parse_declaration(self, i: int) -> int: ...  # undocumented
    def parse_marked_section(self, i: int, report: int = 1) -> int: ...  # undocumented
    def updatepos(self, i: int, j: int) -> int: ...  # undocumented
    if sys.version_info < (3, 10):
        # Removed from ParserBase: https://bugs.python.org/issue31844
        def error(self, message: str) -> Any: ...  # undocumented

    lineno: int  # undocumented
    offset: int  # undocumented
92 crates/red_knot/vendor/typeshed/stdlib/_msi.pyi vendored
@@ -1,92 +0,0 @@
import sys

if sys.platform == "win32":
    class MSIError(Exception): ...
    # Actual typename View, not exposed by the implementation
    class _View:
        def Execute(self, params: _Record | None = ...) -> None: ...
        def GetColumnInfo(self, kind: int) -> _Record: ...
        def Fetch(self) -> _Record: ...
        def Modify(self, mode: int, record: _Record) -> None: ...
        def Close(self) -> None: ...
        # Don't exist at runtime
        __new__: None  # type: ignore[assignment]
        __init__: None  # type: ignore[assignment]

    # Actual typename SummaryInformation, not exposed by the implementation
    class _SummaryInformation:
        def GetProperty(self, field: int) -> int | bytes | None: ...
        def GetPropertyCount(self) -> int: ...
        def SetProperty(self, field: int, value: int | str) -> None: ...
        def Persist(self) -> None: ...
        # Don't exist at runtime
        __new__: None  # type: ignore[assignment]
        __init__: None  # type: ignore[assignment]

    # Actual typename Database, not exposed by the implementation
    class _Database:
        def OpenView(self, sql: str) -> _View: ...
        def Commit(self) -> None: ...
        def GetSummaryInformation(self, updateCount: int) -> _SummaryInformation: ...
        def Close(self) -> None: ...
        # Don't exist at runtime
        __new__: None  # type: ignore[assignment]
        __init__: None  # type: ignore[assignment]

    # Actual typename Record, not exposed by the implementation
    class _Record:
        def GetFieldCount(self) -> int: ...
        def GetInteger(self, field: int) -> int: ...
        def GetString(self, field: int) -> str: ...
        def SetString(self, field: int, str: str) -> None: ...
        def SetStream(self, field: int, stream: str) -> None: ...
        def SetInteger(self, field: int, int: int) -> None: ...
        def ClearData(self) -> None: ...
        # Don't exist at runtime
        __new__: None  # type: ignore[assignment]
        __init__: None  # type: ignore[assignment]

    def UuidCreate() -> str: ...
    def FCICreate(cabname: str, files: list[str], /) -> None: ...
    def OpenDatabase(path: str, persist: int, /) -> _Database: ...
    def CreateRecord(count: int, /) -> _Record: ...

    MSICOLINFO_NAMES: int
    MSICOLINFO_TYPES: int
    MSIDBOPEN_CREATE: int
    MSIDBOPEN_CREATEDIRECT: int
    MSIDBOPEN_DIRECT: int
    MSIDBOPEN_PATCHFILE: int
    MSIDBOPEN_READONLY: int
    MSIDBOPEN_TRANSACT: int
    MSIMODIFY_ASSIGN: int
    MSIMODIFY_DELETE: int
    MSIMODIFY_INSERT: int
    MSIMODIFY_INSERT_TEMPORARY: int
    MSIMODIFY_MERGE: int
    MSIMODIFY_REFRESH: int
    MSIMODIFY_REPLACE: int
    MSIMODIFY_SEEK: int
    MSIMODIFY_UPDATE: int
    MSIMODIFY_VALIDATE: int
    MSIMODIFY_VALIDATE_DELETE: int
    MSIMODIFY_VALIDATE_FIELD: int
    MSIMODIFY_VALIDATE_NEW: int

    PID_APPNAME: int
    PID_AUTHOR: int
    PID_CHARCOUNT: int
    PID_CODEPAGE: int
    PID_COMMENTS: int
    PID_CREATE_DTM: int
    PID_KEYWORDS: int
    PID_LASTAUTHOR: int
    PID_LASTPRINTED: int
    PID_LASTSAVE_DTM: int
    PID_PAGECOUNT: int
    PID_REVNUMBER: int
    PID_SECURITY: int
    PID_SUBJECT: int
    PID_TEMPLATE: int
    PID_TITLE: int
    PID_WORDCOUNT: int
147 crates/red_knot/vendor/typeshed/stdlib/_operator.pyi vendored
@@ -1,147 +0,0 @@
import sys
from _typeshed import SupportsGetItem
from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence
from typing import Any, AnyStr, Generic, Protocol, SupportsAbs, SupportsIndex, TypeVar, final, overload
from typing_extensions import ParamSpec, TypeAlias, TypeVarTuple, Unpack

_R = TypeVar("_R")
_T = TypeVar("_T")
_T_co = TypeVar("_T_co", covariant=True)
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_K = TypeVar("_K")
_V = TypeVar("_V")
_P = ParamSpec("_P")
_Ts = TypeVarTuple("_Ts")

# The following protocols return "Any" instead of bool, since the comparison
# operators can be overloaded to return an arbitrary object. For example,
# the numpy.array comparison dunders return another numpy.array.

class _SupportsDunderLT(Protocol):
    def __lt__(self, other: Any, /) -> Any: ...

class _SupportsDunderGT(Protocol):
    def __gt__(self, other: Any, /) -> Any: ...

class _SupportsDunderLE(Protocol):
    def __le__(self, other: Any, /) -> Any: ...

class _SupportsDunderGE(Protocol):
    def __ge__(self, other: Any, /) -> Any: ...

_SupportsComparison: TypeAlias = _SupportsDunderLE | _SupportsDunderGE | _SupportsDunderGT | _SupportsDunderLT

class _SupportsInversion(Protocol[_T_co]):
    def __invert__(self) -> _T_co: ...

class _SupportsNeg(Protocol[_T_co]):
    def __neg__(self) -> _T_co: ...

class _SupportsPos(Protocol[_T_co]):
    def __pos__(self) -> _T_co: ...

# All four comparison functions must have the same signature, or we get false-positive errors
def lt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ...
def le(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ...
def eq(a: object, b: object, /) -> Any: ...
def ne(a: object, b: object, /) -> Any: ...
def ge(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ...
def gt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ...
def not_(a: object, /) -> bool: ...
def truth(a: object, /) -> bool: ...
def is_(a: object, b: object, /) -> bool: ...
def is_not(a: object, b: object, /) -> bool: ...
def abs(a: SupportsAbs[_T], /) -> _T: ...
def add(a: Any, b: Any, /) -> Any: ...
def and_(a: Any, b: Any, /) -> Any: ...
def floordiv(a: Any, b: Any, /) -> Any: ...
def index(a: SupportsIndex, /) -> int: ...
def inv(a: _SupportsInversion[_T_co], /) -> _T_co: ...
def invert(a: _SupportsInversion[_T_co], /) -> _T_co: ...
def lshift(a: Any, b: Any, /) -> Any: ...
def mod(a: Any, b: Any, /) -> Any: ...
def mul(a: Any, b: Any, /) -> Any: ...
def matmul(a: Any, b: Any, /) -> Any: ...
def neg(a: _SupportsNeg[_T_co], /) -> _T_co: ...
def or_(a: Any, b: Any, /) -> Any: ...
def pos(a: _SupportsPos[_T_co], /) -> _T_co: ...
def pow(a: Any, b: Any, /) -> Any: ...
def rshift(a: Any, b: Any, /) -> Any: ...
def sub(a: Any, b: Any, /) -> Any: ...
def truediv(a: Any, b: Any, /) -> Any: ...
def xor(a: Any, b: Any, /) -> Any: ...
def concat(a: Sequence[_T], b: Sequence[_T], /) -> Sequence[_T]: ...
def contains(a: Container[object], b: object, /) -> bool: ...
def countOf(a: Iterable[object], b: object, /) -> int: ...
@overload
def delitem(a: MutableSequence[Any], b: SupportsIndex, /) -> None: ...
@overload
def delitem(a: MutableSequence[Any], b: slice, /) -> None: ...
@overload
def delitem(a: MutableMapping[_K, Any], b: _K, /) -> None: ...
@overload
def getitem(a: Sequence[_T], b: slice, /) -> Sequence[_T]: ...
@overload
def getitem(a: SupportsGetItem[_K, _V], b: _K, /) -> _V: ...
def indexOf(a: Iterable[_T], b: _T, /) -> int: ...
@overload
def setitem(a: MutableSequence[_T], b: SupportsIndex, c: _T, /) -> None: ...
@overload
def setitem(a: MutableSequence[_T], b: slice, c: Sequence[_T], /) -> None: ...
@overload
def setitem(a: MutableMapping[_K, _V], b: _K, c: _V, /) -> None: ...
def length_hint(obj: object, default: int = 0, /) -> int: ...
@final
class attrgetter(Generic[_T_co]):
    @overload
    def __new__(cls, attr: str, /) -> attrgetter[Any]: ...
    @overload
    def __new__(cls, attr: str, attr2: str, /) -> attrgetter[tuple[Any, Any]]: ...
    @overload
    def __new__(cls, attr: str, attr2: str, attr3: str, /) -> attrgetter[tuple[Any, Any, Any]]: ...
    @overload
    def __new__(cls, attr: str, attr2: str, attr3: str, attr4: str, /) -> attrgetter[tuple[Any, Any, Any, Any]]: ...
    @overload
    def __new__(cls, attr: str, /, *attrs: str) -> attrgetter[tuple[Any, ...]]: ...
    def __call__(self, obj: Any, /) -> _T_co: ...

@final
class itemgetter(Generic[_T_co]):
    @overload
    def __new__(cls, item: _T, /) -> itemgetter[_T]: ...
    @overload
    def __new__(cls, item1: _T1, item2: _T2, /, *items: Unpack[_Ts]) -> itemgetter[tuple[_T1, _T2, Unpack[_Ts]]]: ...
    # __key: _KT_contra in SupportsGetItem seems to be causing variance issues, ie:
    # TypeVar "_KT_contra@SupportsGetItem" is contravariant
    # "tuple[int, int]" is incompatible with protocol "SupportsIndex"
    # preventing [_T_co, ...] instead of [Any, ...]
    #
    # A suspected mypy issue prevents using [..., _T] instead of [..., Any] here.
    # https://github.com/python/mypy/issues/14032
    def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: ...

@final
class methodcaller:
    def __init__(self, name: str, /, *args: Any, **kwargs: Any) -> None: ...
    def __call__(self, obj: Any) -> Any: ...

def iadd(a: Any, b: Any, /) -> Any: ...
def iand(a: Any, b: Any, /) -> Any: ...
def iconcat(a: Any, b: Any, /) -> Any: ...
def ifloordiv(a: Any, b: Any, /) -> Any: ...
def ilshift(a: Any, b: Any, /) -> Any: ...
def imod(a: Any, b: Any, /) -> Any: ...
def imul(a: Any, b: Any, /) -> Any: ...
def imatmul(a: Any, b: Any, /) -> Any: ...
def ior(a: Any, b: Any, /) -> Any: ...
def ipow(a: Any, b: Any, /) -> Any: ...
def irshift(a: Any, b: Any, /) -> Any: ...
def isub(a: Any, b: Any, /) -> Any: ...
def itruediv(a: Any, b: Any, /) -> Any: ...
def ixor(a: Any, b: Any, /) -> Any: ...

if sys.version_info >= (3, 11):
    def call(obj: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...

def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ...
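The public operator module re-exports everything above; attrgetter, itemgetter, and methodcaller behave as their overloads suggest (sketch, not part of the diff):

from operator import attrgetter, itemgetter, methodcaller
from types import SimpleNamespace

pairs = [(2, "b"), (1, "a"), (3, "c")]
print(sorted(pairs, key=itemgetter(0)))  # sort by the first tuple item
print(itemgetter(1, 0)((10, 20)))  # (20, 10): the multi-item form returns a tuple

ns = SimpleNamespace(x=1, y=2)
print(attrgetter("x", "y")(ns))  # (1, 2)
print(methodcaller("upper")("abc"))  # 'ABC'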
34 crates/red_knot/vendor/typeshed/stdlib/_osx_support.pyi vendored
@@ -1,34 +0,0 @@
from collections.abc import Iterable, Sequence
from typing import TypeVar

_T = TypeVar("_T")
_K = TypeVar("_K")
_V = TypeVar("_V")

__all__ = ["compiler_fixup", "customize_config_vars", "customize_compiler", "get_platform_osx"]

_UNIVERSAL_CONFIG_VARS: tuple[str, ...]  # undocumented
_COMPILER_CONFIG_VARS: tuple[str, ...]  # undocumented
_INITPRE: str  # undocumented

def _find_executable(executable: str, path: str | None = None) -> str | None: ...  # undocumented
def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: ...  # undocumented
def _find_build_tool(toolname: str) -> str: ...  # undocumented

_SYSTEM_VERSION: str | None  # undocumented

def _get_system_version() -> str: ...  # undocumented
def _remove_original_values(_config_vars: dict[str, str]) -> None: ...  # undocumented
def _save_modified_value(_config_vars: dict[str, str], cv: str, newvalue: str) -> None: ...  # undocumented
def _supports_universal_builds() -> bool: ...  # undocumented
def _find_appropriate_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ...  # undocumented
def _remove_universal_flags(_config_vars: dict[str, str]) -> dict[str, str]: ...  # undocumented
def _remove_unsupported_archs(_config_vars: dict[str, str]) -> dict[str, str]: ...  # undocumented
def _override_all_archs(_config_vars: dict[str, str]) -> dict[str, str]: ...  # undocumented
def _check_for_unavailable_sdk(_config_vars: dict[str, str]) -> dict[str, str]: ...  # undocumented
def compiler_fixup(compiler_so: Iterable[str], cc_args: Sequence[str]) -> list[str]: ...
def customize_config_vars(_config_vars: dict[str, str]) -> dict[str, str]: ...
def customize_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ...
def get_platform_osx(
    _config_vars: dict[str, str], osname: _T, release: _K, machine: _V
) -> tuple[str | _T, str | _K, str | _V]: ...
33 crates/red_knot/vendor/typeshed/stdlib/_posixsubprocess.pyi vendored
@@ -1,33 +0,0 @@
import sys
from _typeshed import StrOrBytesPath
from collections.abc import Callable, Sequence
from typing import SupportsIndex

if sys.platform != "win32":
    def cloexec_pipe() -> tuple[int, int]: ...
    def fork_exec(
        args: Sequence[StrOrBytesPath] | None,
        executable_list: Sequence[bytes],
        close_fds: bool,
        pass_fds: tuple[int, ...],
        cwd: str,
        env: Sequence[bytes] | None,
        p2cread: int,
        p2cwrite: int,
        c2pread: int,
        c2pwrite: int,
        errread: int,
        errwrite: int,
        errpipe_read: int,
        errpipe_write: int,
        restore_signals: int,
        call_setsid: int,
        pgid_to_set: int,
        gid: SupportsIndex | None,
        extra_groups: list[int] | None,
        uid: SupportsIndex | None,
        child_umask: int,
        preexec_fn: Callable[[], None],
        allow_vfork: bool,
        /,
    ) -> int: ...
14 crates/red_knot/vendor/typeshed/stdlib/_py_abc.pyi vendored
@@ -1,14 +0,0 @@
import _typeshed
from typing import Any, NewType, TypeVar

_T = TypeVar("_T")

_CacheToken = NewType("_CacheToken", int)

def get_cache_token() -> _CacheToken: ...

class ABCMeta(type):
    def __new__(
        mcls: type[_typeshed.Self], name: str, bases: tuple[type[Any], ...], namespace: dict[str, Any], /
    ) -> _typeshed.Self: ...
    def register(cls, subclass: type[_T]) -> type[_T]: ...
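_py_abc is the pure-Python fallback for the C _abc module; abc picks one at import time. The register/get_cache_token pair typed above drives isinstance caching (editor's sketch):

import abc

class Sink(metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def write(self, data: str) -> None: ...

class FileLike:
    def write(self, data: str) -> None:
        print(data)

Sink.register(FileLike)  # register(cls, subclass) returns the subclass
print(issubclass(FileLike, Sink))  # True, via the virtual-subclass registry
print(abc.get_cache_token())  # opaque token, bumped by every register() call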
43 crates/red_knot/vendor/typeshed/stdlib/_pydecimal.pyi vendored
@@ -1,43 +0,0 @@
# This is a slight lie, the implementations aren't exactly identical
# However, in all likelihood, the differences are inconsequential
from _decimal import *

__all__ = [
    "Decimal",
    "Context",
    "DecimalTuple",
    "DefaultContext",
    "BasicContext",
    "ExtendedContext",
    "DecimalException",
    "Clamped",
    "InvalidOperation",
    "DivisionByZero",
    "Inexact",
    "Rounded",
    "Subnormal",
    "Overflow",
    "Underflow",
    "FloatOperation",
    "DivisionImpossible",
    "InvalidContext",
    "ConversionSyntax",
    "DivisionUndefined",
    "ROUND_DOWN",
    "ROUND_HALF_UP",
    "ROUND_HALF_EVEN",
    "ROUND_CEILING",
    "ROUND_FLOOR",
    "ROUND_UP",
    "ROUND_HALF_DOWN",
    "ROUND_05UP",
    "setcontext",
    "getcontext",
    "localcontext",
    "MAX_PREC",
    "MAX_EMAX",
    "MIN_EMIN",
    "MIN_ETINY",
    "HAVE_THREADS",
    "HAVE_CONTEXTVAR",
]
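_pydecimal re-exports _decimal's surface, as the leading comment admits; the public decimal module prefers the C version and falls back to this one. The context helpers from __all__ in action (sketch, not part of the diff):

from decimal import Decimal, getcontext, localcontext

getcontext().prec = 6
print(Decimal(1) / Decimal(7))  # 0.142857, six significant digits
with localcontext() as ctx:  # temporary context, restored on exit
    ctx.prec = 2
    print(Decimal(1) / Decimal(7))  # 0.14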
12 crates/red_knot/vendor/typeshed/stdlib/_random.pyi vendored
@@ -1,12 +0,0 @@
from typing_extensions import TypeAlias

# Actually Tuple[(int,) * 625]
_State: TypeAlias = tuple[int, ...]

class Random:
    def __init__(self, seed: object = ...) -> None: ...
    def seed(self, n: object = None, /) -> None: ...
    def getstate(self) -> _State: ...
    def setstate(self, state: _State, /) -> None: ...
    def random(self) -> float: ...
    def getrandbits(self, k: int, /) -> int: ...
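random.Random subclasses _random.Random; the getstate/setstate pair typed above allows exact replay of a generator (editor's sketch):

import random

rng = random.Random(1234)  # seeds via seed(object)
state = rng.getstate()  # wraps the Mersenne Twister _State tuple typed above
first = rng.random()
rng.setstate(state)  # rewinding reproduces the stream exactly
assert rng.random() == first
print(rng.getrandbits(16))  # k random bits as a non-negative int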
Some files were not shown because too many files have changed in this diff.