Compare commits
1 Commits
0.5.5
...
schemastor
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d65addffa0 |
@@ -2,9 +2,36 @@
|
|||||||
dev = "run --package ruff_dev --bin ruff_dev"
|
dev = "run --package ruff_dev --bin ruff_dev"
|
||||||
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
|
benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
|
||||||
|
|
||||||
# statically link the C runtime so the executable does not depend on
|
[target.'cfg(all())']
|
||||||
# that shared/dynamic library.
|
rustflags = [
|
||||||
#
|
# CLIPPY LINT SETTINGS
|
||||||
# See: https://github.com/astral-sh/ruff/issues/11503
|
# This is a workaround to configure lints for the entire workspace, pending the ability to configure this via TOML.
|
||||||
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
|
# See: `https://github.com/rust-lang/cargo/issues/5034`
|
||||||
rustflags = ["-C", "target-feature=+crt-static"]
|
# `https://github.com/EmbarkStudios/rust-ecosystem/issues/22#issuecomment-947011395`
|
||||||
|
"-Dunsafe_code",
|
||||||
|
"-Wclippy::pedantic",
|
||||||
|
# Allowed pedantic lints
|
||||||
|
"-Wclippy::char_lit_as_u8",
|
||||||
|
"-Aclippy::collapsible_else_if",
|
||||||
|
"-Aclippy::collapsible_if",
|
||||||
|
"-Aclippy::implicit_hasher",
|
||||||
|
"-Aclippy::match_same_arms",
|
||||||
|
"-Aclippy::missing_errors_doc",
|
||||||
|
"-Aclippy::missing_panics_doc",
|
||||||
|
"-Aclippy::module_name_repetitions",
|
||||||
|
"-Aclippy::must_use_candidate",
|
||||||
|
"-Aclippy::similar_names",
|
||||||
|
"-Aclippy::too_many_lines",
|
||||||
|
# Disallowed restriction lints
|
||||||
|
"-Wclippy::print_stdout",
|
||||||
|
"-Wclippy::print_stderr",
|
||||||
|
"-Wclippy::dbg_macro",
|
||||||
|
"-Wclippy::empty_drop",
|
||||||
|
"-Wclippy::empty_structs_with_brackets",
|
||||||
|
"-Wclippy::exit",
|
||||||
|
"-Wclippy::get_unwrap",
|
||||||
|
"-Wclippy::rc_buffer",
|
||||||
|
"-Wclippy::rc_mutex",
|
||||||
|
"-Wclippy::rest_pat_in_fully_bound_structs",
|
||||||
|
"-Wunreachable_pub"
|
||||||
|
]
|
||||||
|
|||||||
@@ -1,8 +0,0 @@
|
|||||||
[profile.ci]
|
|
||||||
# Print out output for failing tests as soon as they fail, and also at the end
|
|
||||||
# of the run (for easy scrollability).
|
|
||||||
failure-output = "immediate-final"
|
|
||||||
# Do not cancel the test run on the first failure.
|
|
||||||
fail-fast = false
|
|
||||||
|
|
||||||
status-level = "skip"
|
|
||||||
11
.gitattributes
vendored
11
.gitattributes
vendored
@@ -2,17 +2,6 @@
|
|||||||
|
|
||||||
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
|
crates/ruff_linter/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
|
||||||
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
|
crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
|
||||||
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py text eol=crlf
|
|
||||||
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf
|
|
||||||
|
|
||||||
crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
|
|
||||||
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
|
|
||||||
|
|
||||||
crates/ruff_python_parser/resources/invalid/re_lexing/line_continuation_windows_eol.py text eol=crlf
|
|
||||||
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_windows_eol.py text eol=crlf
|
|
||||||
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text eol=cr
|
|
||||||
|
|
||||||
crates/ruff_python_parser/resources/inline linguist-generated=true
|
|
||||||
|
|
||||||
ruff.schema.json linguist-generated=true text=auto eol=lf
|
ruff.schema.json linguist-generated=true text=auto eol=lf
|
||||||
*.md.snap linguist-language=Markdown
|
*.md.snap linguist-language=Markdown
|
||||||
|
|||||||
16
.github/CODEOWNERS
vendored
16
.github/CODEOWNERS
vendored
@@ -5,17 +5,5 @@
|
|||||||
# - The '*' pattern is global owners.
|
# - The '*' pattern is global owners.
|
||||||
# - Order is important. The last matching pattern has the most precedence.
|
# - Order is important. The last matching pattern has the most precedence.
|
||||||
|
|
||||||
/crates/ruff_notebook/ @dhruvmanila
|
# Jupyter
|
||||||
/crates/ruff_formatter/ @MichaReiser
|
/crates/ruff_linter/src/jupyter/ @dhruvmanila
|
||||||
/crates/ruff_python_formatter/ @MichaReiser
|
|
||||||
/crates/ruff_python_parser/ @MichaReiser @dhruvmanila
|
|
||||||
|
|
||||||
# flake8-pyi
|
|
||||||
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood
|
|
||||||
|
|
||||||
# Script for fuzzing the parser
|
|
||||||
/scripts/fuzz-parser/ @AlexWaygood
|
|
||||||
|
|
||||||
# red-knot
|
|
||||||
/crates/red_knot* @carljm @MichaReiser @AlexWaygood
|
|
||||||
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood
|
|
||||||
|
|||||||
2
.github/ISSUE_TEMPLATE.md
vendored
2
.github/ISSUE_TEMPLATE.md
vendored
@@ -3,8 +3,6 @@ Thank you for taking the time to report an issue! We're glad to have you involve
|
|||||||
|
|
||||||
If you're filing a bug report, please consider including the following information:
|
If you're filing a bug report, please consider including the following information:
|
||||||
|
|
||||||
* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
|
|
||||||
e.g. "RUF001", "unused variable", "Jupyter notebook"
|
|
||||||
* A minimal code snippet that reproduces the bug.
|
* A minimal code snippet that reproduces the bug.
|
||||||
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
|
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
|
||||||
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
|
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
|
||||||
|
|||||||
13
.github/dependabot.yml
vendored
Normal file
13
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: "github-actions"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
labels: ["internal"]
|
||||||
|
|
||||||
|
- package-ecosystem: "cargo"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
labels: ["internal"]
|
||||||
86
.github/renovate.json5
vendored
86
.github/renovate.json5
vendored
@@ -1,86 +0,0 @@
|
|||||||
{
|
|
||||||
$schema: "https://docs.renovatebot.com/renovate-schema.json",
|
|
||||||
dependencyDashboard: true,
|
|
||||||
suppressNotifications: ["prEditedNotification"],
|
|
||||||
extends: ["config:recommended"],
|
|
||||||
labels: ["internal"],
|
|
||||||
schedule: ["before 4am on Monday"],
|
|
||||||
semanticCommits: "disabled",
|
|
||||||
separateMajorMinor: false,
|
|
||||||
prHourlyLimit: 10,
|
|
||||||
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
|
|
||||||
cargo: {
|
|
||||||
// See https://docs.renovatebot.com/configuration-options/#rangestrategy
|
|
||||||
rangeStrategy: "update-lockfile",
|
|
||||||
},
|
|
||||||
pep621: {
|
|
||||||
fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
|
|
||||||
},
|
|
||||||
npm: {
|
|
||||||
fileMatch: ["^playground/.*package\\.json$"],
|
|
||||||
},
|
|
||||||
"pre-commit": {
|
|
||||||
enabled: true,
|
|
||||||
},
|
|
||||||
packageRules: [
|
|
||||||
{
|
|
||||||
// Group upload/download artifact updates, the versions are dependent
|
|
||||||
groupName: "Artifact GitHub Actions dependencies",
|
|
||||||
matchManagers: ["github-actions"],
|
|
||||||
matchDatasources: ["gitea-tags", "github-tags"],
|
|
||||||
matchPackagePatterns: ["actions/.*-artifact"],
|
|
||||||
description: "Weekly update of artifact-related GitHub Actions dependencies",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// This package rule disables updates for GitHub runners:
|
|
||||||
// we'd only pin them to a specific version
|
|
||||||
// if there was a deliberate reason to do so
|
|
||||||
groupName: "GitHub runners",
|
|
||||||
matchManagers: ["github-actions"],
|
|
||||||
matchDatasources: ["github-runners"],
|
|
||||||
description: "Disable PRs updating GitHub runners (e.g. 'runs-on: macos-14')",
|
|
||||||
enabled: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
|
|
||||||
// See: https://github.com/astral-sh/uv/issues/3642
|
|
||||||
matchPackagePatterns: ["zip"],
|
|
||||||
matchManagers: ["cargo"],
|
|
||||||
enabled: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
groupName: "pre-commit dependencies",
|
|
||||||
matchManagers: ["pre-commit"],
|
|
||||||
description: "Weekly update of pre-commit dependencies",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
groupName: "NPM Development dependencies",
|
|
||||||
matchManagers: ["npm"],
|
|
||||||
matchDepTypes: ["devDependencies"],
|
|
||||||
description: "Weekly update of NPM development dependencies",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
groupName: "Monaco",
|
|
||||||
matchManagers: ["npm"],
|
|
||||||
matchPackagePatterns: ["monaco"],
|
|
||||||
description: "Weekly update of the Monaco editor",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
groupName: "strum",
|
|
||||||
matchManagers: ["cargo"],
|
|
||||||
matchPackagePatterns: ["strum"],
|
|
||||||
description: "Weekly update of strum dependencies",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
groupName: "ESLint",
|
|
||||||
matchManagers: ["npm"],
|
|
||||||
matchPackageNames: ["eslint"],
|
|
||||||
allowedVersions: "<9",
|
|
||||||
description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
|
|
||||||
},
|
|
||||||
],
|
|
||||||
vulnerabilityAlerts: {
|
|
||||||
commitMessageSuffix: "",
|
|
||||||
labels: ["internal", "security"],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
68
.github/workflows/build-docker.yml
vendored
68
.github/workflows/build-docker.yml
vendored
@@ -1,68 +0,0 @@
|
|||||||
# Build and publish a Docker image.
|
|
||||||
#
|
|
||||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
|
|
||||||
# artifacts job within `cargo-dist`.
|
|
||||||
#
|
|
||||||
# TODO(charlie): Ideally, the publish step would happen as a publish job within `cargo-dist`, but
|
|
||||||
# sharing the built image as an artifact between jobs is challenging.
|
|
||||||
name: "[ruff] Build Docker image"
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
plan:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- .github/workflows/build-docker.yml
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
docker-publish:
|
|
||||||
name: Build Docker image (ghcr.io/astral-sh/ruff)
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
environment:
|
|
||||||
name: release
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
submodules: recursive
|
|
||||||
|
|
||||||
- uses: docker/setup-buildx-action@v3
|
|
||||||
|
|
||||||
- uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.repository_owner }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Extract metadata (tags, labels) for Docker
|
|
||||||
id: meta
|
|
||||||
uses: docker/metadata-action@v5
|
|
||||||
with:
|
|
||||||
images: ghcr.io/astral-sh/ruff
|
|
||||||
|
|
||||||
- name: Check tag consistency
|
|
||||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
|
||||||
run: |
|
|
||||||
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
|
||||||
if [ "${{ fromJson(inputs.plan).announcement_tag }}" != "${version}" ]; then
|
|
||||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
|
||||||
echo "${{ fromJson(inputs.plan).announcement_tag }}" >&2
|
|
||||||
echo "${version}" >&2
|
|
||||||
exit 1
|
|
||||||
else
|
|
||||||
echo "Releasing ${version}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: "Build and push Docker image"
|
|
||||||
uses: docker/build-push-action@v6
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
# Reuse the builder
|
|
||||||
cache-from: type=gha
|
|
||||||
cache-to: type=gha,mode=max
|
|
||||||
push: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
|
||||||
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || 'dry-run' }}
|
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
|
||||||
276
.github/workflows/ci.yaml
vendored
276
.github/workflows/ci.yaml
vendored
@@ -23,35 +23,17 @@ jobs:
|
|||||||
name: "Determine changes"
|
name: "Determine changes"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
outputs:
|
outputs:
|
||||||
# Flag that is raised when any code that affects parser is changed
|
|
||||||
parser: ${{ steps.changed.outputs.parser_any_changed }}
|
|
||||||
# Flag that is raised when any code that affects linter is changed
|
|
||||||
linter: ${{ steps.changed.outputs.linter_any_changed }}
|
linter: ${{ steps.changed.outputs.linter_any_changed }}
|
||||||
# Flag that is raised when any code that affects formatter is changed
|
|
||||||
formatter: ${{ steps.changed.outputs.formatter_any_changed }}
|
formatter: ${{ steps.changed.outputs.formatter_any_changed }}
|
||||||
# Flag that is raised when any code is changed
|
|
||||||
# This is superset of the linter and formatter
|
|
||||||
code: ${{ steps.changed.outputs.code_any_changed }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- uses: tj-actions/changed-files@v44
|
- uses: tj-actions/changed-files@v40
|
||||||
id: changed
|
id: changed
|
||||||
with:
|
with:
|
||||||
files_yaml: |
|
files_yaml: |
|
||||||
parser:
|
|
||||||
- Cargo.toml
|
|
||||||
- Cargo.lock
|
|
||||||
- crates/ruff_python_trivia/**
|
|
||||||
- crates/ruff_source_file/**
|
|
||||||
- crates/ruff_text_size/**
|
|
||||||
- crates/ruff_python_ast/**
|
|
||||||
- crates/ruff_python_parser/**
|
|
||||||
- scripts/fuzz-parser/**
|
|
||||||
- .github/workflows/ci.yaml
|
|
||||||
|
|
||||||
linter:
|
linter:
|
||||||
- Cargo.toml
|
- Cargo.toml
|
||||||
- Cargo.lock
|
- Cargo.lock
|
||||||
@@ -59,8 +41,8 @@ jobs:
|
|||||||
- "!crates/ruff_python_formatter/**"
|
- "!crates/ruff_python_formatter/**"
|
||||||
- "!crates/ruff_formatter/**"
|
- "!crates/ruff_formatter/**"
|
||||||
- "!crates/ruff_dev/**"
|
- "!crates/ruff_dev/**"
|
||||||
|
- "!crates/ruff_shrinking/**"
|
||||||
- scripts/*
|
- scripts/*
|
||||||
- python/**
|
|
||||||
- .github/workflows/ci.yaml
|
- .github/workflows/ci.yaml
|
||||||
|
|
||||||
formatter:
|
formatter:
|
||||||
@@ -76,19 +58,11 @@ jobs:
|
|||||||
- crates/ruff_python_parser/**
|
- crates/ruff_python_parser/**
|
||||||
- crates/ruff_dev/**
|
- crates/ruff_dev/**
|
||||||
- scripts/*
|
- scripts/*
|
||||||
- python/**
|
|
||||||
- .github/workflows/ci.yaml
|
- .github/workflows/ci.yaml
|
||||||
|
|
||||||
code:
|
|
||||||
- "**/*"
|
|
||||||
- "!**/*.md"
|
|
||||||
- "!docs/**"
|
|
||||||
- "!assets/**"
|
|
||||||
|
|
||||||
cargo-fmt:
|
cargo-fmt:
|
||||||
name: "cargo fmt"
|
name: "cargo fmt"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 10
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -98,9 +72,6 @@ jobs:
|
|||||||
cargo-clippy:
|
cargo-clippy:
|
||||||
name: "cargo clippy"
|
name: "cargo clippy"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -109,26 +80,41 @@ jobs:
|
|||||||
rustup target add wasm32-unknown-unknown
|
rustup target add wasm32-unknown-unknown
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Clippy"
|
- name: "Clippy"
|
||||||
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
|
run: cargo clippy --workspace --all-targets --all-features -- -D warnings
|
||||||
- name: "Clippy (wasm)"
|
- name: "Clippy (wasm)"
|
||||||
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
|
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings
|
||||||
|
|
||||||
cargo-test-linux:
|
cargo-test-linux:
|
||||||
name: "cargo test (linux)"
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
name: "cargo test (linux)"
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
- name: "Install mold"
|
- name: "Install cargo insta"
|
||||||
uses: rui314/setup-mold@v1
|
|
||||||
- name: "Install cargo nextest"
|
|
||||||
uses: taiki-e/install-action@v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
tool: cargo-nextest
|
tool: cargo-insta
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
- name: "Run tests"
|
||||||
|
run: cargo insta test --all --all-features --unreferenced reject
|
||||||
|
# Check for broken links in the documentation.
|
||||||
|
- run: cargo doc --all --no-deps
|
||||||
|
env:
|
||||||
|
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
|
||||||
|
RUSTDOCFLAGS: "-D warnings"
|
||||||
|
- uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: ruff
|
||||||
|
path: target/debug/ruff
|
||||||
|
|
||||||
|
cargo-test-windows:
|
||||||
|
runs-on: windows-latest
|
||||||
|
name: "cargo test (windows)"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: "Install Rust toolchain"
|
||||||
|
run: rustup show
|
||||||
- name: "Install cargo insta"
|
- name: "Install cargo insta"
|
||||||
uses: taiki-e/install-action@v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
@@ -136,50 +122,12 @@ jobs:
|
|||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Run tests"
|
- name: "Run tests"
|
||||||
shell: bash
|
shell: bash
|
||||||
env:
|
# We can't reject unreferenced snapshots on windows because flake8_executable can't run on windows
|
||||||
NEXTEST_PROFILE: "ci"
|
run: cargo insta test --all --all-features
|
||||||
run: cargo insta test --all-features --unreferenced reject --test-runner nextest
|
|
||||||
|
|
||||||
# Check for broken links in the documentation.
|
|
||||||
- run: cargo doc --all --no-deps
|
|
||||||
env:
|
|
||||||
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
|
|
||||||
RUSTDOCFLAGS: "-D warnings"
|
|
||||||
- uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ruff
|
|
||||||
path: target/debug/ruff
|
|
||||||
|
|
||||||
cargo-test-windows:
|
|
||||||
name: "cargo test (windows)"
|
|
||||||
runs-on: windows-latest
|
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- name: "Install Rust toolchain"
|
|
||||||
run: rustup show
|
|
||||||
- name: "Install cargo nextest"
|
|
||||||
uses: taiki-e/install-action@v2
|
|
||||||
with:
|
|
||||||
tool: cargo-nextest
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
- name: "Run tests"
|
|
||||||
shell: bash
|
|
||||||
env:
|
|
||||||
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
|
|
||||||
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
|
|
||||||
run: |
|
|
||||||
cargo nextest run --all-features --profile ci
|
|
||||||
cargo test --all-features --doc
|
|
||||||
|
|
||||||
cargo-test-wasm:
|
cargo-test-wasm:
|
||||||
name: "cargo test (wasm)"
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
name: "cargo test (wasm)"
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
timeout-minutes: 10
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -196,60 +144,9 @@ jobs:
|
|||||||
cd crates/ruff_wasm
|
cd crates/ruff_wasm
|
||||||
wasm-pack test --node
|
wasm-pack test --node
|
||||||
|
|
||||||
cargo-build-release:
|
|
||||||
name: "cargo build (release)"
|
|
||||||
runs-on: macos-latest
|
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- name: "Install Rust toolchain"
|
|
||||||
run: rustup show
|
|
||||||
- name: "Install mold"
|
|
||||||
uses: rui314/setup-mold@v1
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
- name: "Build"
|
|
||||||
run: cargo build --release --locked
|
|
||||||
|
|
||||||
cargo-build-msrv:
|
|
||||||
name: "cargo build (msrv)"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: SebRollen/toml-action@v1.2.0
|
|
||||||
id: msrv
|
|
||||||
with:
|
|
||||||
file: "Cargo.toml"
|
|
||||||
field: "workspace.package.rust-version"
|
|
||||||
- name: "Install Rust toolchain"
|
|
||||||
run: rustup default ${{ steps.msrv.outputs.value }}
|
|
||||||
- name: "Install mold"
|
|
||||||
uses: rui314/setup-mold@v1
|
|
||||||
- name: "Install cargo nextest"
|
|
||||||
uses: taiki-e/install-action@v2
|
|
||||||
with:
|
|
||||||
tool: cargo-nextest
|
|
||||||
- name: "Install cargo insta"
|
|
||||||
uses: taiki-e/install-action@v2
|
|
||||||
with:
|
|
||||||
tool: cargo-insta
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
- name: "Run tests"
|
|
||||||
shell: bash
|
|
||||||
env:
|
|
||||||
NEXTEST_PROFILE: "ci"
|
|
||||||
run: cargo +${{ steps.msrv.outputs.value }} insta test --all-features --unreferenced reject --test-runner nextest
|
|
||||||
|
|
||||||
cargo-fuzz:
|
cargo-fuzz:
|
||||||
name: "cargo fuzz"
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
name: "cargo fuzz"
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
timeout-minutes: 10
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -257,53 +154,15 @@ jobs:
|
|||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
with:
|
with:
|
||||||
workspaces: "fuzz -> target"
|
workspaces: "fuzz -> target"
|
||||||
- name: "Install cargo-binstall"
|
|
||||||
uses: cargo-bins/cargo-binstall@main
|
|
||||||
with:
|
|
||||||
tool: cargo-fuzz@0.11.2
|
|
||||||
- name: "Install cargo-fuzz"
|
- name: "Install cargo-fuzz"
|
||||||
# Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
|
uses: taiki-e/install-action@v2
|
||||||
run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
|
with:
|
||||||
|
tool: cargo-fuzz@0.11
|
||||||
- run: cargo fuzz build -s none
|
- run: cargo fuzz build -s none
|
||||||
|
|
||||||
fuzz-parser:
|
|
||||||
name: "Fuzz the parser"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs:
|
|
||||||
- cargo-test-linux
|
|
||||||
- determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.parser == 'true' }}
|
|
||||||
timeout-minutes: 20
|
|
||||||
env:
|
|
||||||
FORCE_COLOR: 1
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
|
||||||
- name: Install uv
|
|
||||||
run: curl -LsSf https://astral.sh/uv/install.sh | sh
|
|
||||||
- name: Install Python requirements
|
|
||||||
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
|
|
||||||
- uses: actions/download-artifact@v4
|
|
||||||
name: Download Ruff binary to test
|
|
||||||
id: download-cached-binary
|
|
||||||
with:
|
|
||||||
name: ruff
|
|
||||||
path: ruff-to-test
|
|
||||||
- name: Fuzz
|
|
||||||
run: |
|
|
||||||
# Make executable, since artifact download doesn't preserve this
|
|
||||||
chmod +x ${{ steps.download-cached-binary.outputs.download-path }}/ruff
|
|
||||||
|
|
||||||
python scripts/fuzz-parser/fuzz.py 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff
|
|
||||||
|
|
||||||
scripts:
|
scripts:
|
||||||
name: "test scripts"
|
name: "test scripts"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
timeout-minutes: 5
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -326,27 +185,28 @@ jobs:
|
|||||||
- determine_changes
|
- determine_changes
|
||||||
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
|
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
|
||||||
# Ecosystem check needs linter and/or formatter changes.
|
# Ecosystem check needs linter and/or formatter changes.
|
||||||
if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
|
if: github.event_name == 'pull_request' && ${{
|
||||||
timeout-minutes: 20
|
needs.determine_changes.outputs.linter == 'true' ||
|
||||||
|
needs.determine_changes.outputs.formatter == 'true'
|
||||||
|
}}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
- uses: actions/download-artifact@v4
|
- uses: actions/download-artifact@v3
|
||||||
name: Download comparison Ruff binary
|
name: Download comparison Ruff binary
|
||||||
id: ruff-target
|
id: ruff-target
|
||||||
with:
|
with:
|
||||||
name: ruff
|
name: ruff
|
||||||
path: target/debug
|
path: target/debug
|
||||||
|
|
||||||
- uses: dawidd6/action-download-artifact@v6
|
- uses: dawidd6/action-download-artifact@v2
|
||||||
name: Download baseline Ruff binary
|
name: Download baseline Ruff binary
|
||||||
with:
|
with:
|
||||||
name: ruff
|
name: ruff
|
||||||
branch: ${{ github.event.pull_request.base.ref }}
|
branch: ${{ github.event.pull_request.base.ref }}
|
||||||
workflow: "ci.yaml"
|
|
||||||
check_artifacts: true
|
check_artifacts: true
|
||||||
|
|
||||||
- name: Install ruff-ecosystem
|
- name: Install ruff-ecosystem
|
||||||
@@ -421,36 +281,38 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo ${{ github.event.number }} > pr-number
|
echo ${{ github.event.number }} > pr-number
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v4
|
- uses: actions/upload-artifact@v3
|
||||||
name: Upload PR Number
|
name: Upload PR Number
|
||||||
with:
|
with:
|
||||||
name: pr-number
|
name: pr-number
|
||||||
path: pr-number
|
path: pr-number
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v4
|
- uses: actions/upload-artifact@v3
|
||||||
name: Upload Results
|
name: Upload Results
|
||||||
with:
|
with:
|
||||||
name: ecosystem-result
|
name: ecosystem-result
|
||||||
path: ecosystem-result
|
path: ecosystem-result
|
||||||
|
|
||||||
cargo-shear:
|
cargo-udeps:
|
||||||
name: "cargo shear"
|
name: "cargo udeps"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: cargo-bins/cargo-binstall@main
|
- name: "Install nightly Rust toolchain"
|
||||||
- run: cargo binstall --no-confirm cargo-shear
|
# Only pinned to make caching work, update freely
|
||||||
- run: cargo shear
|
run: rustup toolchain install nightly-2023-10-15
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
- name: "Install cargo-udeps"
|
||||||
|
uses: taiki-e/install-action@cargo-udeps
|
||||||
|
- name: "Run cargo-udeps"
|
||||||
|
run: cargo +nightly-2023-10-15 udeps
|
||||||
|
|
||||||
python-package:
|
python-package:
|
||||||
name: "python package"
|
name: "python package"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -472,10 +334,9 @@ jobs:
|
|||||||
pre-commit:
|
pre-commit:
|
||||||
name: "pre-commit"
|
name: "pre-commit"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 10
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -484,7 +345,7 @@ jobs:
|
|||||||
- name: "Install pre-commit"
|
- name: "Install pre-commit"
|
||||||
run: pip install pre-commit
|
run: pip install pre-commit
|
||||||
- name: "Cache pre-commit"
|
- name: "Cache pre-commit"
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v3
|
||||||
with:
|
with:
|
||||||
path: ~/.cache/pre-commit
|
path: ~/.cache/pre-commit
|
||||||
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
@@ -501,15 +362,14 @@ jobs:
|
|||||||
docs:
|
docs:
|
||||||
name: "mkdocs"
|
name: "mkdocs"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 10
|
|
||||||
env:
|
env:
|
||||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
- name: "Add SSH key"
|
- name: "Add SSH key"
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||||
uses: webfactory/ssh-agent@v0.9.0
|
uses: webfactory/ssh-agent@v0.8.0
|
||||||
with:
|
with:
|
||||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -532,14 +392,13 @@ jobs:
|
|||||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||||
- name: "Build docs"
|
- name: "Build docs"
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||||
run: mkdocs build --strict -f mkdocs.public.yml
|
run: mkdocs build --strict -f mkdocs.generated.yml
|
||||||
|
|
||||||
check-formatter-instability-and-black-similarity:
|
check-formatter-instability-and-black-similarity:
|
||||||
name: "formatter instabilities and black similarity"
|
name: "formatter instabilities and black similarity"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
|
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
|
||||||
timeout-minutes: 10
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -556,13 +415,9 @@ jobs:
|
|||||||
check-ruff-lsp:
|
check-ruff-lsp:
|
||||||
name: "test ruff-lsp"
|
name: "test ruff-lsp"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 5
|
needs: cargo-test-linux
|
||||||
needs:
|
|
||||||
- cargo-test-linux
|
|
||||||
- determine_changes
|
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: extractions/setup-just@v2
|
- uses: extractions/setup-just@v1
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
@@ -571,11 +426,11 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
repository: "astral-sh/ruff-lsp"
|
repository: "astral-sh/ruff-lsp"
|
||||||
|
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
- uses: actions/download-artifact@v4
|
- uses: actions/download-artifact@v3
|
||||||
name: Download development ruff binary
|
name: Download development ruff binary
|
||||||
id: ruff-target
|
id: ruff-target
|
||||||
with:
|
with:
|
||||||
@@ -598,9 +453,6 @@ jobs:
|
|||||||
|
|
||||||
benchmarks:
|
benchmarks:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
|
||||||
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
steps:
|
||||||
- name: "Checkout Branch"
|
- name: "Checkout Branch"
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -619,7 +471,7 @@ jobs:
|
|||||||
run: cargo codspeed build --features codspeed -p ruff_benchmark
|
run: cargo codspeed build --features codspeed -p ruff_benchmark
|
||||||
|
|
||||||
- name: "Run benchmarks"
|
- name: "Run benchmarks"
|
||||||
uses: CodSpeedHQ/action@v2
|
uses: CodSpeedHQ/action@v1
|
||||||
with:
|
with:
|
||||||
run: cargo codspeed run
|
run: cargo codspeed run
|
||||||
token: ${{ secrets.CODSPEED_TOKEN }}
|
token: ${{ secrets.CODSPEED_TOKEN }}
|
||||||
|
|||||||
72
.github/workflows/daily_fuzz.yaml
vendored
72
.github/workflows/daily_fuzz.yaml
vendored
@@ -1,72 +0,0 @@
|
|||||||
name: Daily parser fuzz
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
schedule:
|
|
||||||
- cron: "0 0 * * *"
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- ".github/workflows/daily_fuzz.yaml"
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
env:
|
|
||||||
CARGO_INCREMENTAL: 0
|
|
||||||
CARGO_NET_RETRY: 10
|
|
||||||
CARGO_TERM_COLOR: always
|
|
||||||
RUSTUP_MAX_RETRIES: 10
|
|
||||||
PACKAGE_NAME: ruff
|
|
||||||
FORCE_COLOR: 1
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
fuzz:
|
|
||||||
name: Fuzz
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
timeout-minutes: 20
|
|
||||||
# Don't run the cron job on forks:
|
|
||||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: "3.12"
|
|
||||||
- name: Install uv
|
|
||||||
run: curl -LsSf https://astral.sh/uv/install.sh | sh
|
|
||||||
- name: Install Python requirements
|
|
||||||
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
|
|
||||||
- name: "Install Rust toolchain"
|
|
||||||
run: rustup show
|
|
||||||
- name: "Install mold"
|
|
||||||
uses: rui314/setup-mold@v1
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
- name: Build ruff
|
|
||||||
# A debug build means the script runs slower once it gets started,
|
|
||||||
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
|
|
||||||
run: cargo build --locked
|
|
||||||
- name: Fuzz
|
|
||||||
run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff
|
|
||||||
|
|
||||||
create-issue-on-failure:
|
|
||||||
name: Create an issue if the daily fuzz surfaced any bugs
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: fuzz
|
|
||||||
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.fuzz.result == 'failure' }}
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
steps:
|
|
||||||
- uses: actions/github-script@v7
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
script: |
|
|
||||||
await github.rest.issues.create({
|
|
||||||
owner: "astral-sh",
|
|
||||||
repo: "ruff",
|
|
||||||
title: `Daily parser fuzz failed on ${new Date().toDateString()}`,
|
|
||||||
body: "Runs listed here: https://github.com/astral-sh/ruff/actions/workflows/daily_fuzz.yml",
|
|
||||||
labels: ["bug", "parser", "fuzzer"],
|
|
||||||
})
|
|
||||||
55
.github/workflows/docs.yaml
vendored
Normal file
55
.github/workflows/docs.yaml
vendored
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
name: mkdocs
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
ref:
|
||||||
|
description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
|
||||||
|
default: ""
|
||||||
|
type: string
|
||||||
|
release:
|
||||||
|
types: [published]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
mkdocs:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||||
|
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.ref }}
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
- name: "Add SSH key"
|
||||||
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||||
|
uses: webfactory/ssh-agent@v0.8.0
|
||||||
|
with:
|
||||||
|
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||||
|
- name: "Install Rust toolchain"
|
||||||
|
run: rustup show
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
- name: "Install Insiders dependencies"
|
||||||
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||||
|
run: pip install -r docs/requirements-insiders.txt
|
||||||
|
- name: "Install dependencies"
|
||||||
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||||
|
run: pip install -r docs/requirements.txt
|
||||||
|
- name: "Copy README File"
|
||||||
|
run: |
|
||||||
|
python scripts/transform_readme.py --target mkdocs
|
||||||
|
python scripts/generate_mkdocs.py
|
||||||
|
- name: "Build Insiders docs"
|
||||||
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||||
|
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||||
|
- name: "Build docs"
|
||||||
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||||
|
run: mkdocs build --strict -f mkdocs.generated.yml
|
||||||
|
- name: "Deploy to Cloudflare Pages"
|
||||||
|
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||||
|
uses: cloudflare/wrangler-action@v3.3.2
|
||||||
|
with:
|
||||||
|
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||||
|
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||||
|
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
||||||
|
command: pages deploy site --project-name=astral-docs --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
||||||
247
.github/workflows/flake8-to-ruff.yaml
vendored
Normal file
247
.github/workflows/flake8-to-ruff.yaml
vendored
Normal file
@@ -0,0 +1,247 @@
|
|||||||
|
name: "[flake8-to-ruff] Release"
|
||||||
|
|
||||||
|
on: workflow_dispatch
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
env:
|
||||||
|
PACKAGE_NAME: flake8-to-ruff
|
||||||
|
CRATE_NAME: flake8_to_ruff
|
||||||
|
PYTHON_VERSION: "3.11"
|
||||||
|
CARGO_INCREMENTAL: 0
|
||||||
|
CARGO_NET_RETRY: 10
|
||||||
|
CARGO_TERM_COLOR: always
|
||||||
|
RUSTUP_MAX_RETRIES: 10
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
macos-x86_64:
|
||||||
|
runs-on: macos-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Install Rust toolchain"
|
||||||
|
run: rustup show
|
||||||
|
- name: "Build wheels - x86_64"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: x86_64
|
||||||
|
args: --release --out dist --sdist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel - x86_64"
|
||||||
|
run: |
|
||||||
|
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
macos-universal:
|
||||||
|
runs-on: macos-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Install Rust toolchain"
|
||||||
|
run: rustup show
|
||||||
|
- name: "Build wheels - universal2"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
args: --release --target universal2-apple-darwin --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel - universal2"
|
||||||
|
run: |
|
||||||
|
pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
windows:
|
||||||
|
runs-on: windows-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target: [x64, x86]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: ${{ matrix.target }}
|
||||||
|
- name: "Install Rust toolchain"
|
||||||
|
run: rustup show
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel"
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
python -m pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
linux:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target: [x86_64, i686]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
manylinux: auto
|
||||||
|
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel"
|
||||||
|
if: matrix.target == 'x86_64'
|
||||||
|
run: |
|
||||||
|
pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
linux-cross:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target: [aarch64, armv7, s390x, ppc64le, ppc64]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
manylinux: auto
|
||||||
|
args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
|
if: matrix.target != 'ppc64'
|
||||||
|
name: Install built wheel
|
||||||
|
with:
|
||||||
|
arch: ${{ matrix.target }}
|
||||||
|
distro: ubuntu20.04
|
||||||
|
githubToken: ${{ github.token }}
|
||||||
|
install: |
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y --no-install-recommends python3 python3-pip
|
||||||
|
pip3 install -U pip
|
||||||
|
run: |
|
||||||
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
musllinux:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target:
|
||||||
|
- x86_64-unknown-linux-musl
|
||||||
|
- i686-unknown-linux-musl
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
manylinux: musllinux_1_2
|
||||||
|
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- name: "Install built wheel"
|
||||||
|
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||||
|
uses: addnab/docker-run-action@v3
|
||||||
|
with:
|
||||||
|
image: alpine:latest
|
||||||
|
options: -v ${{ github.workspace }}:/io -w /io
|
||||||
|
run: |
|
||||||
|
apk add py3-pip
|
||||||
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
musllinux-cross:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
platform:
|
||||||
|
- target: aarch64-unknown-linux-musl
|
||||||
|
arch: aarch64
|
||||||
|
- target: armv7-unknown-linux-musleabihf
|
||||||
|
arch: armv7
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
- name: "Build wheels"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
target: ${{ matrix.platform.target }}
|
||||||
|
manylinux: musllinux_1_2
|
||||||
|
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
|
||||||
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
|
name: Install built wheel
|
||||||
|
with:
|
||||||
|
arch: ${{ matrix.platform.arch }}
|
||||||
|
distro: alpine_latest
|
||||||
|
githubToken: ${{ github.token }}
|
||||||
|
install: |
|
||||||
|
apk add py3-pip
|
||||||
|
run: |
|
||||||
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
release:
|
||||||
|
name: Release
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- macos-universal
|
||||||
|
- macos-x86_64
|
||||||
|
- windows
|
||||||
|
- linux
|
||||||
|
- linux-cross
|
||||||
|
- musllinux
|
||||||
|
- musllinux-cross
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
- name: "Publish to PyPi"
|
||||||
|
env:
|
||||||
|
TWINE_USERNAME: __token__
|
||||||
|
TWINE_PASSWORD: ${{ secrets.FLAKE8_TO_RUFF_TOKEN }}
|
||||||
|
run: |
|
||||||
|
pip install --upgrade twine
|
||||||
|
twine upload --skip-existing *
|
||||||
29
.github/workflows/notify-dependents.yml
vendored
29
.github/workflows/notify-dependents.yml
vendored
@@ -1,29 +0,0 @@
|
|||||||
# Notify downstream repositories of a new release.
|
|
||||||
#
|
|
||||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
|
|
||||||
# job within `cargo-dist`.
|
|
||||||
name: "[ruff] Notify dependents"
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
plan:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
update-dependents:
|
|
||||||
name: Notify dependents
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: "Update pre-commit mirror"
|
|
||||||
uses: actions/github-script@v7
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
|
|
||||||
script: |
|
|
||||||
github.rest.actions.createWorkflowDispatch({
|
|
||||||
owner: 'astral-sh',
|
|
||||||
repo: 'ruff-pre-commit',
|
|
||||||
workflow_id: 'main.yml',
|
|
||||||
ref: 'main',
|
|
||||||
})
|
|
||||||
@@ -1,16 +1,9 @@
|
|||||||
# Publish the Ruff playground.
|
|
||||||
#
|
|
||||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
|
|
||||||
# job within `cargo-dist`.
|
|
||||||
name: "[Playground] Release"
|
name: "[Playground] Release"
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
workflow_call:
|
release:
|
||||||
inputs:
|
types: [published]
|
||||||
plan:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
|
|
||||||
env:
|
env:
|
||||||
CARGO_INCREMENTAL: 0
|
CARGO_INCREMENTAL: 0
|
||||||
@@ -47,7 +40,7 @@ jobs:
|
|||||||
working-directory: playground
|
working-directory: playground
|
||||||
- name: "Deploy to Cloudflare Pages"
|
- name: "Deploy to Cloudflare Pages"
|
||||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||||
uses: cloudflare/wrangler-action@v3.7.0
|
uses: cloudflare/wrangler-action@v3.3.2
|
||||||
with:
|
with:
|
||||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||||
16
.github/workflows/pr-comment.yaml
vendored
16
.github/workflows/pr-comment.yaml
vendored
@@ -17,7 +17,7 @@ jobs:
|
|||||||
comment:
|
comment:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: dawidd6/action-download-artifact@v6
|
- uses: dawidd6/action-download-artifact@v2
|
||||||
name: Download pull request number
|
name: Download pull request number
|
||||||
with:
|
with:
|
||||||
name: pr-number
|
name: pr-number
|
||||||
@@ -32,7 +32,7 @@ jobs:
|
|||||||
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
|
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- uses: dawidd6/action-download-artifact@v6
|
- uses: dawidd6/action-download-artifact@v2
|
||||||
name: "Download ecosystem results"
|
name: "Download ecosystem results"
|
||||||
id: download-ecosystem-result
|
id: download-ecosystem-result
|
||||||
if: steps.pr-number.outputs.pr-number
|
if: steps.pr-number.outputs.pr-number
|
||||||
@@ -48,14 +48,6 @@ jobs:
|
|||||||
id: generate-comment
|
id: generate-comment
|
||||||
if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
|
if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
|
||||||
run: |
|
run: |
|
||||||
# Guard against malicious ecosystem results that symlink to a secret
|
|
||||||
# file on this runner
|
|
||||||
if [[ -L pr/ecosystem/ecosystem-result ]]
|
|
||||||
then
|
|
||||||
echo "Error: ecosystem-result cannot be a symlink"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Note this identifier is used to find the comment to update on
|
# Note this identifier is used to find the comment to update on
|
||||||
# subsequent runs
|
# subsequent runs
|
||||||
echo '<!-- generated-comment ecosystem -->' >> comment.txt
|
echo '<!-- generated-comment ecosystem -->' >> comment.txt
|
||||||
@@ -69,7 +61,7 @@ jobs:
|
|||||||
echo 'EOF' >> $GITHUB_OUTPUT
|
echo 'EOF' >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Find existing comment
|
- name: Find existing comment
|
||||||
uses: peter-evans/find-comment@v3
|
uses: peter-evans/find-comment@v2
|
||||||
if: steps.generate-comment.outcome == 'success'
|
if: steps.generate-comment.outcome == 'success'
|
||||||
id: find-comment
|
id: find-comment
|
||||||
with:
|
with:
|
||||||
@@ -79,7 +71,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Create or update comment
|
- name: Create or update comment
|
||||||
if: steps.find-comment.outcome == 'success'
|
if: steps.find-comment.outcome == 'success'
|
||||||
uses: peter-evans/create-or-update-comment@v4
|
uses: peter-evans/create-or-update-comment@v3
|
||||||
with:
|
with:
|
||||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||||
|
|||||||
151
.github/workflows/publish-docs.yml
vendored
151
.github/workflows/publish-docs.yml
vendored
@@ -1,151 +0,0 @@
|
|||||||
# Publish the Ruff documentation.
|
|
||||||
#
|
|
||||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
|
|
||||||
# job within `cargo-dist`.
|
|
||||||
name: mkdocs
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
ref:
|
|
||||||
description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
|
|
||||||
default: ""
|
|
||||||
type: string
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
plan:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
mkdocs:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ inputs.ref }}
|
|
||||||
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: 3.12
|
|
||||||
|
|
||||||
- name: "Set docs version"
|
|
||||||
run: |
|
|
||||||
version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
|
|
||||||
# if version is missing, exit with error
|
|
||||||
if [[ -z "$version" ]]; then
|
|
||||||
echo "Can't build docs without a version."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Use version as display name for now
|
|
||||||
display_name="$version"
|
|
||||||
|
|
||||||
echo "version=$version" >> $GITHUB_ENV
|
|
||||||
echo "display_name=$display_name" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: "Set branch name"
|
|
||||||
run: |
|
|
||||||
version="${{ env.version }}"
|
|
||||||
display_name="${{ env.display_name }}"
|
|
||||||
timestamp="$(date +%s)"
|
|
||||||
|
|
||||||
# create branch_display_name from display_name by replacing all
|
|
||||||
# characters disallowed in git branch names with hyphens
|
|
||||||
branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"
|
|
||||||
|
|
||||||
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> $GITHUB_ENV
|
|
||||||
echo "timestamp=$timestamp" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: "Add SSH key"
|
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
|
||||||
uses: webfactory/ssh-agent@v0.9.0
|
|
||||||
with:
|
|
||||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
|
||||||
|
|
||||||
- name: "Install Rust toolchain"
|
|
||||||
run: rustup show
|
|
||||||
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- name: "Install Insiders dependencies"
|
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
|
||||||
run: pip install -r docs/requirements-insiders.txt
|
|
||||||
|
|
||||||
- name: "Install dependencies"
|
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
|
||||||
run: pip install -r docs/requirements.txt
|
|
||||||
|
|
||||||
- name: "Copy README File"
|
|
||||||
run: |
|
|
||||||
python scripts/transform_readme.py --target mkdocs
|
|
||||||
python scripts/generate_mkdocs.py
|
|
||||||
|
|
||||||
- name: "Build Insiders docs"
|
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
|
||||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
|
||||||
|
|
||||||
- name: "Build docs"
|
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
|
||||||
run: mkdocs build --strict -f mkdocs.public.yml
|
|
||||||
|
|
||||||
- name: "Clone docs repo"
|
|
||||||
run: |
|
|
||||||
version="${{ env.version }}"
|
|
||||||
git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
|
|
||||||
|
|
||||||
- name: "Copy docs"
|
|
||||||
run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/
|
|
||||||
|
|
||||||
- name: "Commit docs"
|
|
||||||
working-directory: astral-docs
|
|
||||||
run: |
|
|
||||||
branch_name="${{ env.branch_name }}"
|
|
||||||
|
|
||||||
git config user.name "$GITHUB_ACTOR"
|
|
||||||
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
|
|
||||||
|
|
||||||
git checkout -b $branch_name
|
|
||||||
git add site/ruff
|
|
||||||
git commit -m "Update ruff documentation for $version"
|
|
||||||
|
|
||||||
- name: "Create Pull Request"
|
|
||||||
working-directory: astral-docs
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
|
||||||
run: |
|
|
||||||
version="${{ env.version }}"
|
|
||||||
display_name="${{ env.display_name }}"
|
|
||||||
branch_name="${{ env.branch_name }}"
|
|
||||||
|
|
||||||
# set the PR title
|
|
||||||
pull_request_title="Update ruff documentation for $display_name"
|
|
||||||
|
|
||||||
# Delete any existing pull requests that are open for this version
|
|
||||||
# by checking against pull_request_title because the new PR will
|
|
||||||
# supersede the old one.
|
|
||||||
gh pr list --state open --json title --jq '.[] | select(.title == "$pull_request_title") | .number' | \
|
|
||||||
xargs -I {} gh pr close {}
|
|
||||||
|
|
||||||
# push the branch to GitHub
|
|
||||||
git push origin $branch_name
|
|
||||||
|
|
||||||
# create the PR
|
|
||||||
gh pr create --base main --head $branch_name \
|
|
||||||
--title "$pull_request_title" \
|
|
||||||
--body "Automated documentation update for $display_name" \
|
|
||||||
--label "documentation"
|
|
||||||
|
|
||||||
- name: "Merge Pull Request"
|
|
||||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
|
||||||
working-directory: astral-docs
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
|
||||||
run: |
|
|
||||||
branch_name="${{ env.branch_name }}"
|
|
||||||
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
|
|
||||||
# give the PR a few seconds to be created before trying to auto-merge it
|
|
||||||
sleep 10
|
|
||||||
gh pr merge --squash $branch_name
|
|
||||||
34
.github/workflows/publish-pypi.yml
vendored
34
.github/workflows/publish-pypi.yml
vendored
@@ -1,34 +0,0 @@
|
|||||||
# Publish a release to PyPI.
|
|
||||||
#
|
|
||||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job
|
|
||||||
# within `cargo-dist`.
|
|
||||||
name: "[ruff] Publish to PyPI"
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
plan:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
pypi-publish:
|
|
||||||
name: Upload to PyPI
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
environment:
|
|
||||||
name: release
|
|
||||||
permissions:
|
|
||||||
# For PyPI's trusted publishing.
|
|
||||||
id-token: write
|
|
||||||
steps:
|
|
||||||
- uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
pattern: wheels-*
|
|
||||||
path: wheels
|
|
||||||
merge-multiple: true
|
|
||||||
- name: Publish to PyPi
|
|
||||||
uses: pypa/gh-action-pypi-publish@release/v1
|
|
||||||
with:
|
|
||||||
skip-existing: true
|
|
||||||
packages-dir: wheels
|
|
||||||
verbose: true
|
|
||||||
55
.github/workflows/publish-wasm.yml
vendored
55
.github/workflows/publish-wasm.yml
vendored
@@ -1,55 +0,0 @@
|
|||||||
# Build and publish ruff-api for wasm.
|
|
||||||
#
|
|
||||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish
|
|
||||||
# job within `cargo-dist`.
|
|
||||||
name: "Build and publish wasm"
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
plan:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
|
|
||||||
env:
|
|
||||||
CARGO_INCREMENTAL: 0
|
|
||||||
CARGO_NET_RETRY: 10
|
|
||||||
CARGO_TERM_COLOR: always
|
|
||||||
RUSTUP_MAX_RETRIES: 10
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
ruff_wasm:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
id-token: write
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
target: [web, bundler, nodejs]
|
|
||||||
fail-fast: false
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- name: "Install Rust toolchain"
|
|
||||||
run: rustup target add wasm32-unknown-unknown
|
|
||||||
- uses: jetli/wasm-pack-action@v0.4.0
|
|
||||||
- uses: jetli/wasm-bindgen-action@v0.2.0
|
|
||||||
- name: "Run wasm-pack build"
|
|
||||||
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
|
|
||||||
- name: "Rename generated package"
|
|
||||||
run: | # Replace the package name w/ jq
|
|
||||||
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json crates/ruff_wasm/pkg
|
|
||||||
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
|
|
||||||
- uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 18
|
|
||||||
registry-url: "https://registry.npmjs.org"
|
|
||||||
- name: "Publish (dry-run)"
|
|
||||||
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
|
|
||||||
run: npm publish --dry-run crates/ruff_wasm/pkg
|
|
||||||
- name: "Publish"
|
|
||||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
|
||||||
run: npm publish --provenance --access public crates/ruff_wasm/pkg
|
|
||||||
env:
|
|
||||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
|
||||||
@@ -1,23 +1,21 @@
|
|||||||
# Build ruff on all platforms.
|
name: "[ruff] Release"
|
||||||
#
|
|
||||||
# Generates both wheels (for PyPI) and archived binaries (for GitHub releases).
|
|
||||||
#
|
|
||||||
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
|
|
||||||
# artifacts job within `cargo-dist`.
|
|
||||||
name: "Build binaries"
|
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
plan:
|
tag:
|
||||||
required: true
|
description: "The version to tag, without the leading 'v'. If omitted, will initiate a dry run (no uploads)."
|
||||||
|
type: string
|
||||||
|
sha:
|
||||||
|
description: "The full sha of the commit to be released. If omitted, the latest commit on the default branch will be used."
|
||||||
|
default: ""
|
||||||
type: string
|
type: string
|
||||||
pull_request:
|
pull_request:
|
||||||
paths:
|
paths:
|
||||||
# When we change pyproject.toml, we want to ensure that the maturin builds still work.
|
# When we change pyproject.toml, we want to ensure that the maturin builds still work
|
||||||
- pyproject.toml
|
- pyproject.toml
|
||||||
# And when we change this workflow itself...
|
# And when we change this workflow itself...
|
||||||
- .github/workflows/build-binaries.yml
|
- .github/workflows/release.yaml
|
||||||
|
|
||||||
concurrency:
|
concurrency:
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
@@ -25,7 +23,6 @@ concurrency:
|
|||||||
|
|
||||||
env:
|
env:
|
||||||
PACKAGE_NAME: ruff
|
PACKAGE_NAME: ruff
|
||||||
MODULE_NAME: ruff
|
|
||||||
PYTHON_VERSION: "3.11"
|
PYTHON_VERSION: "3.11"
|
||||||
CARGO_INCREMENTAL: 0
|
CARGO_INCREMENTAL: 0
|
||||||
CARGO_NET_RETRY: 10
|
CARGO_NET_RETRY: 10
|
||||||
@@ -34,13 +31,12 @@ env:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
sdist:
|
sdist:
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
@@ -53,22 +49,21 @@ jobs:
|
|||||||
- name: "Test sdist"
|
- name: "Test sdist"
|
||||||
run: |
|
run: |
|
||||||
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
|
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
|
||||||
${{ env.MODULE_NAME }} --help
|
ruff --help
|
||||||
python -m ${{ env.MODULE_NAME }} --help
|
python -m ruff --help
|
||||||
- name: "Upload sdist"
|
- name: "Upload sdist"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-sdist
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
|
|
||||||
macos-x86_64:
|
macos-x86_64:
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
runs-on: macos-latest
|
||||||
runs-on: macos-12
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -78,78 +73,70 @@ jobs:
|
|||||||
uses: PyO3/maturin-action@v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: x86_64
|
target: x86_64
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- name: "Upload wheels"
|
- name: "Test wheel - x86_64"
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: wheels-macos-x86_64
|
|
||||||
path: dist
|
|
||||||
- name: "Archive binary"
|
|
||||||
run: |
|
|
||||||
TARGET=x86_64-apple-darwin
|
|
||||||
ARCHIVE_NAME=ruff-$TARGET
|
|
||||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
|
||||||
|
|
||||||
mkdir -p $ARCHIVE_NAME
|
|
||||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
|
||||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
|
||||||
- name: "Upload binary"
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: artifacts-macos-x86_64
|
|
||||||
path: |
|
|
||||||
*.tar.gz
|
|
||||||
*.sha256
|
|
||||||
|
|
||||||
macos-aarch64:
|
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
|
||||||
runs-on: macos-14
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
submodules: recursive
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
|
||||||
architecture: arm64
|
|
||||||
- name: "Prep README.md"
|
|
||||||
run: python scripts/transform_readme.py --target pypi
|
|
||||||
- name: "Build wheels - aarch64"
|
|
||||||
uses: PyO3/maturin-action@v1
|
|
||||||
with:
|
|
||||||
target: aarch64
|
|
||||||
args: --release --locked --out dist
|
|
||||||
- name: "Test wheel - aarch64"
|
|
||||||
run: |
|
run: |
|
||||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||||
ruff --help
|
ruff --help
|
||||||
python -m ruff --help
|
python -m ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-aarch64-apple-darwin
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
run: |
|
run: |
|
||||||
TARGET=aarch64-apple-darwin
|
ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz
|
||||||
ARCHIVE_NAME=ruff-$TARGET
|
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
|
||||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
|
||||||
|
|
||||||
mkdir -p $ARCHIVE_NAME
|
|
||||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
|
||||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: artifacts-aarch64-apple-darwin
|
name: binaries
|
||||||
|
path: |
|
||||||
|
*.tar.gz
|
||||||
|
*.sha256
|
||||||
|
|
||||||
|
macos-universal:
|
||||||
|
runs-on: macos-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.sha }}
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
architecture: x64
|
||||||
|
- name: "Prep README.md"
|
||||||
|
run: python scripts/transform_readme.py --target pypi
|
||||||
|
- name: "Build wheels - universal2"
|
||||||
|
uses: PyO3/maturin-action@v1
|
||||||
|
with:
|
||||||
|
args: --release --target universal2-apple-darwin --out dist
|
||||||
|
- name: "Test wheel - universal2"
|
||||||
|
run: |
|
||||||
|
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
|
||||||
|
ruff --help
|
||||||
|
python -m ruff --help
|
||||||
|
- name: "Upload wheels"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: dist
|
||||||
|
- name: "Archive binary"
|
||||||
|
run: |
|
||||||
|
ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz
|
||||||
|
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
|
||||||
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
|
- name: "Upload binary"
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.tar.gz
|
*.tar.gz
|
||||||
*.sha256
|
*.sha256
|
||||||
|
|
||||||
windows:
|
windows:
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
@@ -163,8 +150,8 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: ${{ matrix.platform.arch }}
|
architecture: ${{ matrix.platform.arch }}
|
||||||
@@ -174,21 +161,18 @@ jobs:
|
|||||||
uses: PyO3/maturin-action@v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: ${{ matrix.platform.target }}
|
target: ${{ matrix.platform.target }}
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
env:
|
|
||||||
# aarch64 build fails, see https://github.com/PyO3/maturin/issues/2110
|
|
||||||
XWIN_VERSION: 16
|
|
||||||
- name: "Test wheel"
|
- name: "Test wheel"
|
||||||
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||||
${{ env.MODULE_NAME }} --help
|
ruff --help
|
||||||
python -m ${{ env.MODULE_NAME }} --help
|
python -m ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.platform.target }}
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
shell: bash
|
shell: bash
|
||||||
@@ -197,15 +181,14 @@ jobs:
|
|||||||
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
||||||
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: artifacts-${{ matrix.platform.target }}
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.zip
|
*.zip
|
||||||
*.sha256
|
*.sha256
|
||||||
|
|
||||||
linux:
|
linux:
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
@@ -215,8 +198,8 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -227,41 +210,32 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
target: ${{ matrix.target }}
|
target: ${{ matrix.target }}
|
||||||
manylinux: auto
|
manylinux: auto
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- name: "Test wheel"
|
- name: "Test wheel"
|
||||||
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
if: ${{ startsWith(matrix.target, 'x86_64') }}
|
||||||
run: |
|
run: |
|
||||||
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
|
||||||
${{ env.MODULE_NAME }} --help
|
ruff --help
|
||||||
python -m ${{ env.MODULE_NAME }} --help
|
python -m ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.target }}
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
|
||||||
|
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||||
TARGET=${{ matrix.target }}
|
|
||||||
ARCHIVE_NAME=ruff-$TARGET
|
|
||||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
|
||||||
|
|
||||||
mkdir -p $ARCHIVE_NAME
|
|
||||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
|
||||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: artifacts-${{ matrix.target }}
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.tar.gz
|
*.tar.gz
|
||||||
*.sha256
|
*.sha256
|
||||||
|
|
||||||
linux-cross:
|
linux-cross:
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
@@ -277,20 +251,14 @@ jobs:
|
|||||||
arch: s390x
|
arch: s390x
|
||||||
- target: powerpc64le-unknown-linux-gnu
|
- target: powerpc64le-unknown-linux-gnu
|
||||||
arch: ppc64le
|
arch: ppc64le
|
||||||
# see https://github.com/astral-sh/ruff/issues/10073
|
|
||||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
|
||||||
- target: powerpc64-unknown-linux-gnu
|
- target: powerpc64-unknown-linux-gnu
|
||||||
arch: ppc64
|
arch: ppc64
|
||||||
# see https://github.com/astral-sh/ruff/issues/10073
|
|
||||||
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
|
|
||||||
- target: arm-unknown-linux-musleabihf
|
|
||||||
arch: arm
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
@@ -301,13 +269,13 @@ jobs:
|
|||||||
target: ${{ matrix.platform.target }}
|
target: ${{ matrix.platform.target }}
|
||||||
manylinux: auto
|
manylinux: auto
|
||||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- uses: uraimo/run-on-arch-action@v2
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
if: matrix.platform.arch != 'ppc64'
|
if: matrix.platform.arch != 'ppc64'
|
||||||
name: Test wheel
|
name: Test wheel
|
||||||
with:
|
with:
|
||||||
arch: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }}
|
arch: ${{ matrix.platform.arch }}
|
||||||
distro: ${{ matrix.platform.arch == 'arm' && 'bullseye' || 'ubuntu20.04' }}
|
distro: ubuntu20.04
|
||||||
githubToken: ${{ github.token }}
|
githubToken: ${{ github.token }}
|
||||||
install: |
|
install: |
|
||||||
apt-get update
|
apt-get update
|
||||||
@@ -317,33 +285,24 @@ jobs:
|
|||||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
ruff --help
|
ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.platform.target }}
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
|
||||||
|
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||||
TARGET=${{ matrix.platform.target }}
|
|
||||||
ARCHIVE_NAME=ruff-$TARGET
|
|
||||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
|
||||||
|
|
||||||
mkdir -p $ARCHIVE_NAME
|
|
||||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
|
||||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: artifacts-${{ matrix.platform.target }}
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.tar.gz
|
*.tar.gz
|
||||||
*.sha256
|
*.sha256
|
||||||
|
|
||||||
musllinux:
|
musllinux:
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
@@ -353,8 +312,8 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
@@ -365,7 +324,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
target: ${{ matrix.target }}
|
target: ${{ matrix.target }}
|
||||||
manylinux: musllinux_1_2
|
manylinux: musllinux_1_2
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
- name: "Test wheel"
|
- name: "Test wheel"
|
||||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||||
uses: addnab/docker-run-action@v3
|
uses: addnab/docker-run-action@v3
|
||||||
@@ -373,38 +332,29 @@ jobs:
|
|||||||
image: alpine:latest
|
image: alpine:latest
|
||||||
options: -v ${{ github.workspace }}:/io -w /io
|
options: -v ${{ github.workspace }}:/io -w /io
|
||||||
run: |
|
run: |
|
||||||
apk add python3
|
apk add py3-pip
|
||||||
python -m venv .venv
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
|
||||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
ruff --help
|
||||||
.venv/bin/${{ env.MODULE_NAME }} --help
|
python -m ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.target }}
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
|
||||||
|
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
|
||||||
TARGET=${{ matrix.target }}
|
|
||||||
ARCHIVE_NAME=ruff-$TARGET
|
|
||||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
|
||||||
|
|
||||||
mkdir -p $ARCHIVE_NAME
|
|
||||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
|
||||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: artifacts-${{ matrix.target }}
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.tar.gz
|
*.tar.gz
|
||||||
*.sha256
|
*.sha256
|
||||||
|
|
||||||
musllinux-cross:
|
musllinux-cross:
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
@@ -418,8 +368,8 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
ref: ${{ inputs.sha }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
@@ -429,7 +379,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
target: ${{ matrix.platform.target }}
|
target: ${{ matrix.platform.target }}
|
||||||
manylinux: musllinux_1_2
|
manylinux: musllinux_1_2
|
||||||
args: --release --locked --out dist
|
args: --release --out dist
|
||||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||||
- uses: uraimo/run-on-arch-action@v2
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
name: Test wheel
|
name: Test wheel
|
||||||
@@ -438,33 +388,149 @@ jobs:
|
|||||||
distro: alpine_latest
|
distro: alpine_latest
|
||||||
githubToken: ${{ github.token }}
|
githubToken: ${{ github.token }}
|
||||||
install: |
|
install: |
|
||||||
apk add python3
|
apk add py3-pip
|
||||||
run: |
|
run: |
|
||||||
python -m venv .venv
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
ruff check --help
|
||||||
.venv/bin/${{ env.MODULE_NAME }} --help
|
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.platform.target }}
|
name: wheels
|
||||||
path: dist
|
path: dist
|
||||||
- name: "Archive binary"
|
- name: "Archive binary"
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
|
||||||
|
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
|
||||||
TARGET=${{ matrix.platform.target }}
|
|
||||||
ARCHIVE_NAME=ruff-$TARGET
|
|
||||||
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
|
|
||||||
|
|
||||||
mkdir -p $ARCHIVE_NAME
|
|
||||||
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
|
|
||||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: artifacts-${{ matrix.platform.target }}
|
name: binaries
|
||||||
path: |
|
path: |
|
||||||
*.tar.gz
|
*.tar.gz
|
||||||
*.sha256
|
*.sha256
|
||||||
|
|
||||||
|
validate-tag:
|
||||||
|
name: Validate tag
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
# If you don't set an input tag, it's a dry run (no uploads).
|
||||||
|
if: ${{ inputs.tag }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: main # We checkout the main branch to check for the commit
|
||||||
|
- name: Check main branch
|
||||||
|
if: ${{ inputs.sha }}
|
||||||
|
run: |
|
||||||
|
# Fetch the main branch since a shallow checkout is used by default
|
||||||
|
git fetch origin main --unshallow
|
||||||
|
if ! git branch --contains ${{ inputs.sha }} | grep -E '(^|\s)main$'; then
|
||||||
|
echo "The specified sha is not on the main branch" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
- name: Check tag consistency
|
||||||
|
run: |
|
||||||
|
# Switch to the commit we want to release
|
||||||
|
git checkout ${{ inputs.sha }}
|
||||||
|
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||||
|
if [ "${{ inputs.tag }}" != "${version}" ]; then
|
||||||
|
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||||
|
echo "${{ inputs.tag }}" >&2
|
||||||
|
echo "${version}" >&2
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
echo "Releasing ${version}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
upload-release:
|
||||||
|
name: Upload to PyPI
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- macos-universal
|
||||||
|
- macos-x86_64
|
||||||
|
- windows
|
||||||
|
- linux
|
||||||
|
- linux-cross
|
||||||
|
- musllinux
|
||||||
|
- musllinux-cross
|
||||||
|
- validate-tag
|
||||||
|
# If you don't set an input tag, it's a dry run (no uploads).
|
||||||
|
if: ${{ inputs.tag }}
|
||||||
|
environment:
|
||||||
|
name: release
|
||||||
|
permissions:
|
||||||
|
# For pypi trusted publishing
|
||||||
|
id-token: write
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: wheels
|
||||||
|
path: wheels
|
||||||
|
- name: Publish to PyPi
|
||||||
|
uses: pypa/gh-action-pypi-publish@release/v1
|
||||||
|
with:
|
||||||
|
skip-existing: true
|
||||||
|
packages-dir: wheels
|
||||||
|
verbose: true
|
||||||
|
|
||||||
|
tag-release:
|
||||||
|
name: Tag release
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: upload-release
|
||||||
|
# If you don't set an input tag, it's a dry run (no uploads).
|
||||||
|
if: ${{ inputs.tag }}
|
||||||
|
permissions:
|
||||||
|
# For git tag
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.sha }}
|
||||||
|
- name: git tag
|
||||||
|
run: |
|
||||||
|
git config user.email "hey@astral.sh"
|
||||||
|
git config user.name "Ruff Release CI"
|
||||||
|
git tag -m "v${{ inputs.tag }}" "v${{ inputs.tag }}"
|
||||||
|
# If there is duplicate tag, this will fail. The publish to pypi action will have been a noop (due to skip
|
||||||
|
# existing), so we make a non-destructive exit here
|
||||||
|
git push --tags
|
||||||
|
|
||||||
|
publish-release:
|
||||||
|
name: Publish to GitHub
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: tag-release
|
||||||
|
# If you don't set an input tag, it's a dry run (no uploads).
|
||||||
|
if: ${{ inputs.tag }}
|
||||||
|
permissions:
|
||||||
|
# For GitHub release publishing
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: binaries
|
||||||
|
path: binaries
|
||||||
|
- name: "Publish to GitHub"
|
||||||
|
uses: softprops/action-gh-release@v1
|
||||||
|
with:
|
||||||
|
draft: true
|
||||||
|
files: binaries/*
|
||||||
|
tag_name: v${{ inputs.tag }}
|
||||||
|
|
||||||
|
# After the release has been published, we update downstream repositories
|
||||||
|
# This is separate because if this fails the release is still fine, we just need to do some manual workflow triggers
|
||||||
|
update-dependents:
|
||||||
|
name: Update dependents
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: publish-release
|
||||||
|
steps:
|
||||||
|
- name: "Update pre-commit mirror"
|
||||||
|
uses: actions/github-script@v6
|
||||||
|
with:
|
||||||
|
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
|
||||||
|
script: |
|
||||||
|
github.rest.actions.createWorkflowDispatch({
|
||||||
|
owner: 'astral-sh',
|
||||||
|
repo: 'ruff-pre-commit',
|
||||||
|
workflow_id: 'main.yml',
|
||||||
|
ref: 'main',
|
||||||
|
})
|
||||||
298
.github/workflows/release.yml
vendored
298
.github/workflows/release.yml
vendored
@@ -1,298 +0,0 @@
|
|||||||
# Copyright 2022-2024, axodotdev
|
|
||||||
# SPDX-License-Identifier: MIT or Apache-2.0
|
|
||||||
#
|
|
||||||
# CI that:
|
|
||||||
#
|
|
||||||
# * checks for a Git Tag that looks like a release
|
|
||||||
# * builds artifacts with cargo-dist (archives, installers, hashes)
|
|
||||||
# * uploads those artifacts to temporary workflow zip
|
|
||||||
# * on success, uploads the artifacts to a GitHub Release
|
|
||||||
#
|
|
||||||
# Note that the GitHub Release will be created with a generated
|
|
||||||
# title/body based on your changelogs.
|
|
||||||
|
|
||||||
name: Release
|
|
||||||
permissions:
|
|
||||||
"contents": "write"
|
|
||||||
|
|
||||||
# This task will run whenever you workflow_dispatch with a tag that looks like a version
|
|
||||||
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
|
|
||||||
# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
|
|
||||||
# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
|
|
||||||
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
|
|
||||||
#
|
|
||||||
# If PACKAGE_NAME is specified, then the announcement will be for that
|
|
||||||
# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
|
|
||||||
#
|
|
||||||
# If PACKAGE_NAME isn't specified, then the announcement will be for all
|
|
||||||
# (cargo-dist-able) packages in the workspace with that version (this mode is
|
|
||||||
# intended for workspaces with only one dist-able package, or with all dist-able
|
|
||||||
# packages versioned/released in lockstep).
|
|
||||||
#
|
|
||||||
# If you push multiple tags at once, separate instances of this workflow will
|
|
||||||
# spin up, creating an independent announcement for each one. However, GitHub
|
|
||||||
# will hard limit this to 3 tags per commit, as it will assume more tags is a
|
|
||||||
# mistake.
|
|
||||||
#
|
|
||||||
# If there's a prerelease-style suffix to the version, then the release(s)
|
|
||||||
# will be marked as a prerelease.
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
tag:
|
|
||||||
description: Release Tag
|
|
||||||
required: true
|
|
||||||
default: dry-run
|
|
||||||
type: string
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
# Run 'cargo dist plan' (or host) to determine what tasks we need to do
|
|
||||||
plan:
|
|
||||||
runs-on: "ubuntu-20.04"
|
|
||||||
outputs:
|
|
||||||
val: ${{ steps.plan.outputs.manifest }}
|
|
||||||
tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
|
|
||||||
tag-flag: ${{ inputs.tag && inputs.tag != 'dry-run' && format('--tag={0}', inputs.tag) || '' }}
|
|
||||||
publishing: ${{ inputs.tag && inputs.tag != 'dry-run' }}
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
submodules: recursive
|
|
||||||
- name: Install cargo-dist
|
|
||||||
# we specify bash to get pipefail; it guards against the `curl` command
|
|
||||||
# failing. otherwise `sh` won't catch that `curl` returned non-0
|
|
||||||
shell: bash
|
|
||||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.18.0/cargo-dist-installer.sh | sh"
|
|
||||||
- name: Cache cargo-dist
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: cargo-dist-cache
|
|
||||||
path: ~/.cargo/bin/cargo-dist
|
|
||||||
# sure would be cool if github gave us proper conditionals...
|
|
||||||
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
|
|
||||||
# functionality based on whether this is a pull_request, and whether it's from a fork.
|
|
||||||
# (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
|
|
||||||
# but also really annoying to build CI around when it needs secrets to work right.)
|
|
||||||
- id: plan
|
|
||||||
run: |
|
|
||||||
cargo dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
|
|
||||||
echo "cargo dist ran successfully"
|
|
||||||
cat plan-dist-manifest.json
|
|
||||||
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
|
||||||
- name: "Upload dist-manifest.json"
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: artifacts-plan-dist-manifest
|
|
||||||
path: plan-dist-manifest.json
|
|
||||||
|
|
||||||
custom-build-binaries:
|
|
||||||
needs:
|
|
||||||
- plan
|
|
||||||
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
|
|
||||||
uses: ./.github/workflows/build-binaries.yml
|
|
||||||
with:
|
|
||||||
plan: ${{ needs.plan.outputs.val }}
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
custom-build-docker:
|
|
||||||
needs:
|
|
||||||
- plan
|
|
||||||
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
|
|
||||||
uses: ./.github/workflows/build-docker.yml
|
|
||||||
with:
|
|
||||||
plan: ${{ needs.plan.outputs.val }}
|
|
||||||
secrets: inherit
|
|
||||||
permissions:
|
|
||||||
"contents": "read"
|
|
||||||
"packages": "write"
|
|
||||||
|
|
||||||
# Build and package all the platform-agnostic(ish) things
|
|
||||||
build-global-artifacts:
|
|
||||||
needs:
|
|
||||||
- plan
|
|
||||||
- custom-build-binaries
|
|
||||||
- custom-build-docker
|
|
||||||
runs-on: "ubuntu-20.04"
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
submodules: recursive
|
|
||||||
- name: Install cached cargo-dist
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: cargo-dist-cache
|
|
||||||
path: ~/.cargo/bin/
|
|
||||||
- run: chmod +x ~/.cargo/bin/cargo-dist
|
|
||||||
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
|
|
||||||
- name: Fetch local artifacts
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
pattern: artifacts-*
|
|
||||||
path: target/distrib/
|
|
||||||
merge-multiple: true
|
|
||||||
- id: cargo-dist
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
|
|
||||||
echo "cargo dist ran successfully"
|
|
||||||
|
|
||||||
# Parse out what we just built and upload it to scratch storage
|
|
||||||
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
|
|
||||||
jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
|
|
||||||
echo "EOF" >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
|
|
||||||
- name: "Upload artifacts"
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: artifacts-build-global
|
|
||||||
path: |
|
|
||||||
${{ steps.cargo-dist.outputs.paths }}
|
|
||||||
${{ env.BUILD_MANIFEST_NAME }}
|
|
||||||
# Determines if we should publish/announce
|
|
||||||
host:
|
|
||||||
needs:
|
|
||||||
- plan
|
|
||||||
- custom-build-binaries
|
|
||||||
- custom-build-docker
|
|
||||||
- build-global-artifacts
|
|
||||||
# Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
|
|
||||||
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
runs-on: "ubuntu-20.04"
|
|
||||||
outputs:
|
|
||||||
val: ${{ steps.host.outputs.manifest }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
submodules: recursive
|
|
||||||
- name: Install cached cargo-dist
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: cargo-dist-cache
|
|
||||||
path: ~/.cargo/bin/
|
|
||||||
- run: chmod +x ~/.cargo/bin/cargo-dist
|
|
||||||
# Fetch artifacts from scratch-storage
|
|
||||||
- name: Fetch artifacts
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
pattern: artifacts-*
|
|
||||||
path: target/distrib/
|
|
||||||
merge-multiple: true
|
|
||||||
# This is a harmless no-op for GitHub Releases, hosting for that happens in "announce"
|
|
||||||
- id: host
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
|
|
||||||
echo "artifacts uploaded and released successfully"
|
|
||||||
cat dist-manifest.json
|
|
||||||
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
|
||||||
- name: "Upload dist-manifest.json"
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
# Overwrite the previous copy
|
|
||||||
name: artifacts-dist-manifest
|
|
||||||
path: dist-manifest.json
|
|
||||||
|
|
||||||
custom-publish-pypi:
|
|
||||||
needs:
|
|
||||||
- plan
|
|
||||||
- host
|
|
||||||
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
|
|
||||||
uses: ./.github/workflows/publish-pypi.yml
|
|
||||||
with:
|
|
||||||
plan: ${{ needs.plan.outputs.val }}
|
|
||||||
secrets: inherit
|
|
||||||
# publish jobs get escalated permissions
|
|
||||||
permissions:
|
|
||||||
"id-token": "write"
|
|
||||||
"packages": "write"
|
|
||||||
|
|
||||||
custom-publish-wasm:
|
|
||||||
needs:
|
|
||||||
- plan
|
|
||||||
- host
|
|
||||||
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
|
|
||||||
uses: ./.github/workflows/publish-wasm.yml
|
|
||||||
with:
|
|
||||||
plan: ${{ needs.plan.outputs.val }}
|
|
||||||
secrets: inherit
|
|
||||||
# publish jobs get escalated permissions
|
|
||||||
permissions:
|
|
||||||
"contents": "read"
|
|
||||||
"id-token": "write"
|
|
||||||
"packages": "write"
|
|
||||||
|
|
||||||
# Create a GitHub Release while uploading all files to it
|
|
||||||
announce:
|
|
||||||
needs:
|
|
||||||
- plan
|
|
||||||
- host
|
|
||||||
- custom-publish-pypi
|
|
||||||
- custom-publish-wasm
|
|
||||||
# use "always() && ..." to allow us to wait for all publish jobs while
|
|
||||||
# still allowing individual publish jobs to skip themselves (for prereleases).
|
|
||||||
# "host" however must run to completion, no skipping allowed!
|
|
||||||
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
|
|
||||||
runs-on: "ubuntu-20.04"
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
submodules: recursive
|
|
||||||
# Create a GitHub Release while uploading all files to it
|
|
||||||
- name: "Download GitHub Artifacts"
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
pattern: artifacts-*
|
|
||||||
path: artifacts
|
|
||||||
merge-multiple: true
|
|
||||||
- name: Cleanup
|
|
||||||
run: |
|
|
||||||
# Remove the granular manifests
|
|
||||||
rm -f artifacts/*-dist-manifest.json
|
|
||||||
- name: Create GitHub Release
|
|
||||||
env:
|
|
||||||
PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"
|
|
||||||
ANNOUNCEMENT_TITLE: "${{ fromJson(needs.host.outputs.val).announcement_title }}"
|
|
||||||
ANNOUNCEMENT_BODY: "${{ fromJson(needs.host.outputs.val).announcement_github_body }}"
|
|
||||||
RELEASE_COMMIT: "${{ github.sha }}"
|
|
||||||
run: |
|
|
||||||
# Write and read notes from a file to avoid quoting breaking things
|
|
||||||
echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt
|
|
||||||
|
|
||||||
gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*
|
|
||||||
|
|
||||||
custom-notify-dependents:
|
|
||||||
needs:
|
|
||||||
- plan
|
|
||||||
- announce
|
|
||||||
uses: ./.github/workflows/notify-dependents.yml
|
|
||||||
with:
|
|
||||||
plan: ${{ needs.plan.outputs.val }}
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
custom-publish-docs:
|
|
||||||
needs:
|
|
||||||
- plan
|
|
||||||
- announce
|
|
||||||
uses: ./.github/workflows/publish-docs.yml
|
|
||||||
with:
|
|
||||||
plan: ${{ needs.plan.outputs.val }}
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
custom-publish-playground:
|
|
||||||
needs:
|
|
||||||
- plan
|
|
||||||
- announce
|
|
||||||
uses: ./.github/workflows/publish-playground.yml
|
|
||||||
with:
|
|
||||||
plan: ${{ needs.plan.outputs.val }}
|
|
||||||
secrets: inherit
|
|
||||||
80
.github/workflows/sync_typeshed.yaml
vendored
80
.github/workflows/sync_typeshed.yaml
vendored
@@ -1,80 +0,0 @@
|
|||||||
name: Sync typeshed
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
schedule:
|
|
||||||
# Run on the 1st and the 15th of every month:
|
|
||||||
- cron: "0 0 1,15 * *"
|
|
||||||
|
|
||||||
env:
|
|
||||||
FORCE_COLOR: 1
|
|
||||||
GH_TOKEN: ${{ github.token }}
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
sync:
|
|
||||||
name: Sync typeshed
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
timeout-minutes: 20
|
|
||||||
# Don't run the cron job on forks:
|
|
||||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
pull-requests: write
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
name: Checkout Ruff
|
|
||||||
with:
|
|
||||||
path: ruff
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
name: Checkout typeshed
|
|
||||||
with:
|
|
||||||
repository: python/typeshed
|
|
||||||
path: typeshed
|
|
||||||
- name: Setup git
|
|
||||||
run: |
|
|
||||||
git config --global user.name typeshedbot
|
|
||||||
git config --global user.email '<>'
|
|
||||||
- name: Sync typeshed
|
|
||||||
id: sync
|
|
||||||
run: |
|
|
||||||
rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed
|
|
||||||
mkdir ruff/crates/red_knot_module_resolver/vendor/typeshed
|
|
||||||
cp typeshed/README.md ruff/crates/red_knot_module_resolver/vendor/typeshed
|
|
||||||
cp typeshed/LICENSE ruff/crates/red_knot_module_resolver/vendor/typeshed
|
|
||||||
cp -r typeshed/stdlib ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib
|
|
||||||
rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib/@tests
|
|
||||||
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
|
|
||||||
- name: Commit the changes
|
|
||||||
id: commit
|
|
||||||
if: ${{ steps.sync.outcome == 'success' }}
|
|
||||||
run: |
|
|
||||||
cd ruff
|
|
||||||
git checkout -b typeshedbot/sync-typeshed
|
|
||||||
git add .
|
|
||||||
git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
|
|
||||||
- name: Create a PR
|
|
||||||
if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
|
|
||||||
run: |
|
|
||||||
cd ruff
|
|
||||||
git push --force origin typeshedbot/sync-typeshed
|
|
||||||
gh pr list --repo $GITHUB_REPOSITORY --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
|
|
||||||
gh pr create --title "Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "internal"
|
|
||||||
|
|
||||||
create-issue-on-failure:
|
|
||||||
name: Create an issue if the typeshed sync failed
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [sync]
|
|
||||||
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
steps:
|
|
||||||
- uses: actions/github-script@v7
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
script: |
|
|
||||||
await github.rest.issues.create({
|
|
||||||
owner: "astral-sh",
|
|
||||||
repo: "ruff",
|
|
||||||
title: `Automated typeshed sync failed on ${new Date().toDateString()}`,
|
|
||||||
body: "Runs are listed here: https://github.com/astral-sh/ruff/actions/workflows/sync_typeshed.yaml",
|
|
||||||
})
|
|
||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -92,7 +92,6 @@ coverage.xml
|
|||||||
.hypothesis/
|
.hypothesis/
|
||||||
.pytest_cache/
|
.pytest_cache/
|
||||||
cover/
|
cover/
|
||||||
repos/
|
|
||||||
|
|
||||||
# Translations
|
# Translations
|
||||||
*.mo
|
*.mo
|
||||||
|
|||||||
@@ -17,4 +17,4 @@ MD013: false
|
|||||||
# MD024/no-duplicate-heading
|
# MD024/no-duplicate-heading
|
||||||
MD024:
|
MD024:
|
||||||
# Allow when nested under different parents e.g. CHANGELOG.md
|
# Allow when nested under different parents e.g. CHANGELOG.md
|
||||||
siblings_only: true
|
allow_different_nesting: true
|
||||||
|
|||||||
@@ -2,10 +2,9 @@ fail_fast: true
|
|||||||
|
|
||||||
exclude: |
|
exclude: |
|
||||||
(?x)^(
|
(?x)^(
|
||||||
crates/red_knot_module_resolver/vendor/.*|
|
|
||||||
crates/ruff_linter/resources/.*|
|
crates/ruff_linter/resources/.*|
|
||||||
crates/ruff_linter/src/rules/.*/snapshots/.*|
|
crates/ruff_linter/src/rules/.*/snapshots/.*|
|
||||||
crates/ruff/resources/.*|
|
crates/ruff_cli/resources/.*|
|
||||||
crates/ruff_python_formatter/resources/.*|
|
crates/ruff_python_formatter/resources/.*|
|
||||||
crates/ruff_python_formatter/tests/snapshots/.*|
|
crates/ruff_python_formatter/tests/snapshots/.*|
|
||||||
crates/ruff_python_resolver/resources/.*|
|
crates/ruff_python_resolver/resources/.*|
|
||||||
@@ -14,7 +13,7 @@ exclude: |
|
|||||||
|
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/abravalheri/validate-pyproject
|
- repo: https://github.com/abravalheri/validate-pyproject
|
||||||
rev: v0.18
|
rev: v0.15
|
||||||
hooks:
|
hooks:
|
||||||
- id: validate-pyproject
|
- id: validate-pyproject
|
||||||
|
|
||||||
@@ -32,7 +31,7 @@ repos:
|
|||||||
)$
|
)$
|
||||||
|
|
||||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||||
rev: v0.41.0
|
rev: v0.37.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: markdownlint-fix
|
- id: markdownlint-fix
|
||||||
exclude: |
|
exclude: |
|
||||||
@@ -42,7 +41,7 @@ repos:
|
|||||||
)$
|
)$
|
||||||
|
|
||||||
- repo: https://github.com/crate-ci/typos
|
- repo: https://github.com/crate-ci/typos
|
||||||
rev: v1.23.2
|
rev: v1.16.22
|
||||||
hooks:
|
hooks:
|
||||||
- id: typos
|
- id: typos
|
||||||
|
|
||||||
@@ -56,7 +55,7 @@ repos:
|
|||||||
pass_filenames: false # This makes it a lot faster
|
pass_filenames: false # This makes it a lot faster
|
||||||
|
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
rev: v0.5.4
|
rev: v0.1.4
|
||||||
hooks:
|
hooks:
|
||||||
- id: ruff-format
|
- id: ruff-format
|
||||||
- id: ruff
|
- id: ruff
|
||||||
@@ -71,7 +70,7 @@ repos:
|
|||||||
|
|
||||||
# Prettier
|
# Prettier
|
||||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||||
rev: v3.1.0
|
rev: v3.0.3
|
||||||
hooks:
|
hooks:
|
||||||
- id: prettier
|
- id: prettier
|
||||||
types: [yaml]
|
types: [yaml]
|
||||||
|
|||||||
@@ -1,2 +0,0 @@
|
|||||||
# Auto-generated by `cargo-dist`.
|
|
||||||
.github/workflows/release.yml
|
|
||||||
5
.vscode/extensions.json
vendored
5
.vscode/extensions.json
vendored
@@ -1,5 +0,0 @@
|
|||||||
{
|
|
||||||
"recommendations": [
|
|
||||||
"rust-lang.rust-analyzer"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
6
.vscode/settings.json
vendored
6
.vscode/settings.json
vendored
@@ -1,6 +0,0 @@
|
|||||||
{
|
|
||||||
"rust-analyzer.check.extraArgs": [
|
|
||||||
"--all-features"
|
|
||||||
],
|
|
||||||
"rust-analyzer.check.command": "clippy",
|
|
||||||
}
|
|
||||||
@@ -1,100 +1,5 @@
|
|||||||
# Breaking Changes
|
# Breaking Changes
|
||||||
|
|
||||||
## 0.5.0
|
|
||||||
|
|
||||||
- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
|
|
||||||
- Selecting `ALL` now excludes deprecated rules
|
|
||||||
- The released archives now include an extra level of nesting, which can be removed with `--strip-components=1` when untarring.
|
|
||||||
- The release artifact's file name no longer includes the version tag. This enables users to install via `/latest` URLs on GitHub.
|
|
||||||
|
|
||||||
## 0.3.0
|
|
||||||
|
|
||||||
### Ruff 2024.2 style
|
|
||||||
|
|
||||||
The formatter now formats code according to the Ruff 2024.2 style guide. Read the [changelog](./CHANGELOG.md#030) for a detailed list of stabilized style changes.
|
|
||||||
|
|
||||||
### `isort`: Use one blank line after imports in typing stub files ([#9971](https://github.com/astral-sh/ruff/pull/9971))
|
|
||||||
|
|
||||||
Previously, Ruff used one or two blank lines (or the number configured by `isort.lines-after-imports`) after imports in typing stub files (`.pyi` files).
|
|
||||||
The [typing style guide for stubs](https://typing.readthedocs.io/en/latest/source/stubs.html#style-guide) recommends using at most 1 blank line for grouping.
|
|
||||||
As of this release, `isort` now always uses one blank line after imports in stub files, the same as the formatter.
|
|
||||||
|
|
||||||
### `build` is no longer excluded by default ([#10093](https://github.com/astral-sh/ruff/pull/10093))
|
|
||||||
|
|
||||||
Ruff maintains a list of directories and files that are excluded by default. This list now consists of the following patterns:
|
|
||||||
|
|
||||||
- `.bzr`
|
|
||||||
- `.direnv`
|
|
||||||
- `.eggs`
|
|
||||||
- `.git`
|
|
||||||
- `.git-rewrite`
|
|
||||||
- `.hg`
|
|
||||||
- `.ipynb_checkpoints`
|
|
||||||
- `.mypy_cache`
|
|
||||||
- `.nox`
|
|
||||||
- `.pants.d`
|
|
||||||
- `.pyenv`
|
|
||||||
- `.pytest_cache`
|
|
||||||
- `.pytype`
|
|
||||||
- `.ruff_cache`
|
|
||||||
- `.svn`
|
|
||||||
- `.tox`
|
|
||||||
- `.venv`
|
|
||||||
- `.vscode`
|
|
||||||
- `__pypackages__`
|
|
||||||
- `_build`
|
|
||||||
- `buck-out`
|
|
||||||
- `dist`
|
|
||||||
- `node_modules`
|
|
||||||
- `site-packages`
|
|
||||||
- `venv`
|
|
||||||
|
|
||||||
Previously, the `build` directory was included in this list. However, the `build` directory tends to be a not-unpopular directory
|
|
||||||
name, and excluding it by default caused confusion. Ruff now no longer excludes `build` except if it is excluded by a `.gitignore` file
|
|
||||||
or because it is listed in `extend-exclude`.
|
|
||||||
|
|
||||||
### `--format` is no longer a valid `rule` or `linter` command option
|
|
||||||
|
|
||||||
Previously, `ruff rule` and `ruff linter` accepted the `--format <FORMAT>` option as an alias for `--output-format`. Ruff no longer
|
|
||||||
supports this alias. Please use `ruff rule --output-format <FORMAT>` and `ruff linter --output-format <FORMAT>` instead.
|
|
||||||
|
|
||||||
## 0.1.9
|
|
||||||
|
|
||||||
### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))
|
|
||||||
|
|
||||||
Ruff maintains a list of default exclusions, which now consists of the following patterns:
|
|
||||||
|
|
||||||
- `.bzr`
|
|
||||||
- `.direnv`
|
|
||||||
- `.eggs`
|
|
||||||
- `.git-rewrite`
|
|
||||||
- `.git`
|
|
||||||
- `.hg`
|
|
||||||
- `.ipynb_checkpoints`
|
|
||||||
- `.mypy_cache`
|
|
||||||
- `.nox`
|
|
||||||
- `.pants.d`
|
|
||||||
- `.pyenv`
|
|
||||||
- `.pytest_cache`
|
|
||||||
- `.pytype`
|
|
||||||
- `.ruff_cache`
|
|
||||||
- `.svn`
|
|
||||||
- `.tox`
|
|
||||||
- `.venv`
|
|
||||||
- `.vscode`
|
|
||||||
- `__pypackages__`
|
|
||||||
- `_build`
|
|
||||||
- `buck-out`
|
|
||||||
- `build`
|
|
||||||
- `dist`
|
|
||||||
- `node_modules`
|
|
||||||
- `site-packages`
|
|
||||||
- `venv`
|
|
||||||
|
|
||||||
Previously, the `site-packages` directory was not excluded by default. While `site-packages` tends
|
|
||||||
to be excluded anyway by virtue of the `.venv` exclusion, this may not be the case when using Ruff
|
|
||||||
from VS Code outside a virtual environment.
|
|
||||||
|
|
||||||
## 0.1.0
|
## 0.1.0
|
||||||
|
|
||||||
### The deprecated `format` setting has been removed
|
### The deprecated `format` setting has been removed
|
||||||
|
|||||||
1811
CHANGELOG.md
1811
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
152
CONTRIBUTING.md
152
CONTRIBUTING.md
@@ -26,25 +26,30 @@ Welcome! We're happy to have you here. Thank you in advance for your contributio
|
|||||||
- [`cargo dev`](#cargo-dev)
|
- [`cargo dev`](#cargo-dev)
|
||||||
- [Subsystems](#subsystems)
|
- [Subsystems](#subsystems)
|
||||||
- [Compilation Pipeline](#compilation-pipeline)
|
- [Compilation Pipeline](#compilation-pipeline)
|
||||||
- [Import Categorization](#import-categorization)
|
|
||||||
- [Project root](#project-root)
|
|
||||||
- [Package root](#package-root)
|
|
||||||
- [Import categorization](#import-categorization-1)
|
|
||||||
|
|
||||||
## The Basics
|
## The Basics
|
||||||
|
|
||||||
Ruff welcomes contributions in the form of pull requests.
|
Ruff welcomes contributions in the form of Pull Requests.
|
||||||
|
|
||||||
For small changes (e.g., bug fixes), feel free to submit a PR.
|
For small changes (e.g., bug fixes), feel free to submit a PR.
|
||||||
|
|
||||||
For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
|
For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
|
||||||
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
|
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
|
||||||
You can also join us on [Discord](https://discord.com/invite/astral-sh) to discuss your idea with the
|
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5) to discuss your idea with the
|
||||||
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
|
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
|
||||||
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
|
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
|
||||||
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
|
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
|
||||||
that are ready for contributions.
|
that are ready for contributions.
|
||||||
|
|
||||||
|
If you're looking for a place to start, we recommend implementing a new lint rule (see:
|
||||||
|
[_Adding a new lint rule_](#example-adding-a-new-lint-rule), which will allow you to learn from and
|
||||||
|
pattern-match against the examples in the existing codebase. Many lint rules are inspired by
|
||||||
|
existing Python plugins, which can be used as a reference implementation.
|
||||||
|
|
||||||
|
As a concrete example: consider taking on one of the rules from the [`flake8-pyi`](https://github.com/astral-sh/ruff/issues/848)
|
||||||
|
plugin, and looking to the originating [Python source](https://github.com/PyCQA/flake8-pyi) for
|
||||||
|
guidance.
|
||||||
|
|
||||||
If you have suggestions on how we might improve the contributing documentation, [let us know](https://github.com/astral-sh/ruff/discussions/5693)!
|
If you have suggestions on how we might improve the contributing documentation, [let us know](https://github.com/astral-sh/ruff/discussions/5693)!
|
||||||
|
|
||||||
### Prerequisites
|
### Prerequisites
|
||||||
@@ -58,7 +63,7 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
|
|||||||
cargo install cargo-insta
|
cargo install cargo-insta
|
||||||
```
|
```
|
||||||
|
|
||||||
And you'll need pre-commit to run some validation checks:
|
and pre-commit to run some validation checks:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
|
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
|
||||||
@@ -71,22 +76,12 @@ when making a commit:
|
|||||||
pre-commit install
|
pre-commit install
|
||||||
```
|
```
|
||||||
|
|
||||||
We recommend [nextest](https://nexte.st/) to run Ruff's test suite (via `cargo nextest run`),
|
|
||||||
though it's not strictly necessary:
|
|
||||||
|
|
||||||
```shell
|
|
||||||
cargo install cargo-nextest --locked
|
|
||||||
```
|
|
||||||
|
|
||||||
Throughout this guide, any usages of `cargo test` can be replaced with `cargo nextest run`,
|
|
||||||
if you choose to install `nextest`.
|
|
||||||
|
|
||||||
### Development
|
### Development
|
||||||
|
|
||||||
After cloning the repository, run Ruff locally from the repository root with:
|
After cloning the repository, run Ruff locally from the repository root with:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo run -p ruff -- check /path/to/file.py --no-cache
|
cargo run -p ruff_cli -- check /path/to/file.py --no-cache
|
||||||
```
|
```
|
||||||
|
|
||||||
Prior to opening a pull request, ensure that your code has been auto-formatted,
|
Prior to opening a pull request, ensure that your code has been auto-formatted,
|
||||||
@@ -98,11 +93,9 @@ RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
|
|||||||
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
|
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
|
||||||
```
|
```
|
||||||
|
|
||||||
These checks will run on GitHub Actions when you open your pull request, but running them locally
|
These checks will run on GitHub Actions when you open your Pull Request, but running them locally
|
||||||
will save you time and expedite the merge process.
|
will save you time and expedite the merge process.
|
||||||
|
|
||||||
If you're using VS Code, you can also install the recommended [rust-analyzer](https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer) extension to get these checks while editing.
|
|
||||||
|
|
||||||
Note that many code changes also require updating the snapshot tests, which is done interactively
|
Note that many code changes also require updating the snapshot tests, which is done interactively
|
||||||
after running `cargo test` like so:
|
after running `cargo test` like so:
|
||||||
|
|
||||||
@@ -110,14 +103,7 @@ after running `cargo test` like so:
|
|||||||
cargo insta review
|
cargo insta review
|
||||||
```
|
```
|
||||||
|
|
||||||
If your pull request relates to a specific lint rule, include the category and rule code in the
|
Your Pull Request will be reviewed by a maintainer, which may involve a few rounds of iteration
|
||||||
title, as in the following examples:
|
|
||||||
|
|
||||||
- \[`flake8-bugbear`\] Avoid false positive for usage after `continue` (`B031`)
|
|
||||||
- \[`flake8-simplify`\] Detect implicit `else` cases in `needless-bool` (`SIM103`)
|
|
||||||
- \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)
|
|
||||||
|
|
||||||
Your pull request will be reviewed by a maintainer, which may involve a few rounds of iteration
|
|
||||||
prior to merging.
|
prior to merging.
|
||||||
|
|
||||||
### Project Structure
|
### Project Structure
|
||||||
@@ -125,8 +111,8 @@ prior to merging.
|
|||||||
Ruff is structured as a monorepo with a [flat crate structure](https://matklad.github.io/2021/08/22/large-rust-workspaces.html),
|
Ruff is structured as a monorepo with a [flat crate structure](https://matklad.github.io/2021/08/22/large-rust-workspaces.html),
|
||||||
such that all crates are contained in a flat `crates` directory.
|
such that all crates are contained in a flat `crates` directory.
|
||||||
|
|
||||||
The vast majority of the code, including all lint rules, lives in the `ruff_linter` crate (located
|
The vast majority of the code, including all lint rules, lives in the `ruff` crate (located at
|
||||||
at `crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
|
`crates/ruff_linter`). As a contributor, that's the crate that'll be most relevant to you.
|
||||||
|
|
||||||
At the time of writing, the repository includes the following crates:
|
At the time of writing, the repository includes the following crates:
|
||||||
|
|
||||||
@@ -134,7 +120,7 @@ At the time of writing, the repository includes the following crates:
|
|||||||
If you're working on a rule, this is the crate for you.
|
If you're working on a rule, this is the crate for you.
|
||||||
- `crates/ruff_benchmark`: binary crate for running micro-benchmarks.
|
- `crates/ruff_benchmark`: binary crate for running micro-benchmarks.
|
||||||
- `crates/ruff_cache`: library crate for caching lint results.
|
- `crates/ruff_cache`: library crate for caching lint results.
|
||||||
- `crates/ruff`: binary crate containing Ruff's command-line interface.
|
- `crates/ruff_cli`: binary crate containing Ruff's command-line interface.
|
||||||
- `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g.,
|
- `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g.,
|
||||||
`cargo dev generate-all`), see the [`cargo dev`](#cargo-dev) section below.
|
`cargo dev generate-all`), see the [`cargo dev`](#cargo-dev) section below.
|
||||||
- `crates/ruff_diagnostics`: library crate for the rule-independent abstractions in the lint
|
- `crates/ruff_diagnostics`: library crate for the rule-independent abstractions in the lint
|
||||||
@@ -199,14 +185,11 @@ and calling out to lint rule analyzer functions as it goes.
|
|||||||
If you need to inspect the AST, you can run `cargo dev print-ast` with a Python file. Grep
|
If you need to inspect the AST, you can run `cargo dev print-ast` with a Python file. Grep
|
||||||
for the `Diagnostic::new` invocations to understand how other, similar rules are implemented.
|
for the `Diagnostic::new` invocations to understand how other, similar rules are implemented.
|
||||||
|
|
||||||
Once you're satisfied with your code, add tests for your rule
|
Once you're satisfied with your code, add tests for your rule. See [rule testing](#rule-testing-fixtures-and-snapshots)
|
||||||
(see: [rule testing](#rule-testing-fixtures-and-snapshots)), and regenerate the documentation and
|
for more details.
|
||||||
associated assets (like our JSON Schema) with `cargo dev generate-all`.
|
|
||||||
|
|
||||||
Finally, submit a pull request, and include the category, rule name, and rule code in the title, as
|
Finally, regenerate the documentation and other generated assets (like our JSON Schema) with:
|
||||||
in:
|
`cargo dev generate-all`.
|
||||||
|
|
||||||
> \[`pycodestyle`\] Implement `redundant-backslash` (`E502`)
|
|
||||||
|
|
||||||
#### Rule naming convention
|
#### Rule naming convention
|
||||||
|
|
||||||
@@ -248,7 +231,7 @@ Once you've completed the code for the rule itself, you can define tests with th
|
|||||||
For example, if you're adding a new rule named `E402`, you would run:
|
For example, if you're adding a new rule named `E402`, you would run:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo run -p ruff -- check crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py --no-cache --preview --select E402
|
cargo run -p ruff_cli -- check crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402
|
||||||
```
|
```
|
||||||
|
|
||||||
**Note:** Only a subset of rules are enabled by default. When testing a new rule, ensure that
|
**Note:** Only a subset of rules are enabled by default. When testing a new rule, ensure that
|
||||||
@@ -269,7 +252,7 @@ Once you've completed the code for the rule itself, you can define tests with th
|
|||||||
|
|
||||||
Ruff's user-facing settings live in a few different places.
|
Ruff's user-facing settings live in a few different places.
|
||||||
|
|
||||||
First, the command-line options are defined via the `Args` struct in `crates/ruff/src/args.rs`.
|
First, the command-line options are defined via the `Args` struct in `crates/ruff_cli/src/args.rs`.
|
||||||
|
|
||||||
Second, the `pyproject.toml` options are defined in `crates/ruff_workspace/src/options.rs` (via the
|
Second, the `pyproject.toml` options are defined in `crates/ruff_workspace/src/options.rs` (via the
|
||||||
`Options` struct), `crates/ruff_workspace/src/configuration.rs` (via the `Configuration` struct),
|
`Options` struct), `crates/ruff_workspace/src/configuration.rs` (via the `Configuration` struct),
|
||||||
@@ -280,7 +263,7 @@ These represent, respectively: the schema used to parse the `pyproject.toml` fil
|
|||||||
intermediate representation; and the final, internal representation used to power Ruff.
|
intermediate representation; and the final, internal representation used to power Ruff.
|
||||||
|
|
||||||
To add a new configuration option, you'll likely want to modify these latter few files (along with
|
To add a new configuration option, you'll likely want to modify these latter few files (along with
|
||||||
`args.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
|
`arg.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
|
||||||
`dummy_variable_rgx`, which defines a regular expression to match against acceptable unused
|
`dummy_variable_rgx`, which defines a regular expression to match against acceptable unused
|
||||||
variables (e.g., `_`).
|
variables (e.g., `_`).
|
||||||
|
|
||||||
@@ -312,14 +295,14 @@ To preview any changes to the documentation locally:
|
|||||||
|
|
||||||
```shell
|
```shell
|
||||||
# For contributors.
|
# For contributors.
|
||||||
mkdocs serve -f mkdocs.public.yml
|
mkdocs serve -f mkdocs.generated.yml
|
||||||
|
|
||||||
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
|
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
|
||||||
mkdocs serve -f mkdocs.insiders.yml
|
mkdocs serve -f mkdocs.insiders.yml
|
||||||
```
|
```
|
||||||
|
|
||||||
The documentation should then be available locally at
|
The documentation should then be available locally at
|
||||||
[http://127.0.0.1:8000/ruff/](http://127.0.0.1:8000/ruff/).
|
[http://127.0.0.1:8000/docs/](http://127.0.0.1:8000/docs/).
|
||||||
|
|
||||||
## Release Process
|
## Release Process
|
||||||
|
|
||||||
@@ -332,40 +315,36 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
|||||||
|
|
||||||
### Creating a new release
|
### Creating a new release
|
||||||
|
|
||||||
1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
|
We use an experimental in-house tool for managing releases.
|
||||||
1. Run `./scripts/release.sh`; this command will:
|
|
||||||
- Generate a temporary virtual environment with `rooster`
|
1. Install `rooster`: `pip install git+https://github.com/zanieb/rooster@main`
|
||||||
|
1. Run `rooster release`; this command will:
|
||||||
- Generate a changelog entry in `CHANGELOG.md`
|
- Generate a changelog entry in `CHANGELOG.md`
|
||||||
- Update versions in `pyproject.toml` and `Cargo.toml`
|
- Update versions in `pyproject.toml` and `Cargo.toml`
|
||||||
- Update references to versions in the `README.md` and documentation
|
- Update references to versions in the `README.md` and documentation
|
||||||
- Display contributors for the release
|
|
||||||
1. The changelog should then be editorialized for consistency
|
1. The changelog should then be editorialized for consistency
|
||||||
- Often labels will be missing from pull requests they will need to be manually organized into the proper section
|
- Often labels will be missing from pull requests they will need to be manually organized into the proper section
|
||||||
- Changes should be edited to be user-facing descriptions, avoiding internal details
|
- Changes should be edited to be user-facing descriptions, avoiding internal details
|
||||||
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
|
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
|
||||||
1. Run `cargo check`. This should update the lock file with new versions.
|
|
||||||
1. Create a pull request with the changelog and version updates
|
1. Create a pull request with the changelog and version updates
|
||||||
1. Merge the PR
|
1. Merge the PR
|
||||||
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:
|
1. Run the release workflow with the version number (without starting `v`) as input. Make sure
|
||||||
- The new version number (without starting `v`)
|
main has your merged PR as last commit
|
||||||
1. The release workflow will do the following:
|
1. The release workflow will do the following:
|
||||||
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
|
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
|
||||||
uploaded anything, you can restart after pushing a fix. If you just need to rerun the build,
|
uploaded anything, you can restart after pushing a fix.
|
||||||
make sure you're [re-running all the failed
|
|
||||||
jobs](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-failed-jobs-in-a-workflow) and not just a single failed job.
|
|
||||||
1. Upload to PyPI.
|
1. Upload to PyPI.
|
||||||
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
|
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
|
||||||
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
|
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/charliermarsh/ruff/issues/4468)).
|
||||||
1. Attach artifacts to draft GitHub release
|
1. Attach artifacts to draft GitHub release
|
||||||
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
|
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
|
||||||
downstream jobs manually if needed.
|
downstream jobs manually if needed.
|
||||||
1. Verify the GitHub release:
|
1. Publish the GitHub release
|
||||||
1. The Changelog should match the content of `CHANGELOG.md`
|
1. Open the draft release in the GitHub release section
|
||||||
1. Append the contributors from the `bump.sh` script
|
1. Copy the changelog for the release into the GitHub release
|
||||||
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
|
- See previous releases for formatting of section headers
|
||||||
1. One can determine if an update is needed when
|
1. Generate the contributor list with `rooster contributors` and add to the release notes
|
||||||
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
|
1. If needed, [update the schemastore](https://github.com/charliermarsh/ruff/blob/main/scripts/update_schemastore.py)
|
||||||
1. Once run successfully, you should follow the link in the output to create a PR.
|
|
||||||
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
|
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
|
||||||
|
|
||||||
## Ecosystem CI
|
## Ecosystem CI
|
||||||
@@ -386,14 +365,9 @@ See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/pyt
|
|||||||
We have several ways of benchmarking and profiling Ruff:
|
We have several ways of benchmarking and profiling Ruff:
|
||||||
|
|
||||||
- Our main performance benchmark comparing Ruff with other tools on the CPython codebase
|
- Our main performance benchmark comparing Ruff with other tools on the CPython codebase
|
||||||
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
|
- Microbenchmarks which the linter or the formatter on individual files. There run on pull requests.
|
||||||
- Profiling the linter on either the microbenchmarks or entire projects
|
- Profiling the linter on either the microbenchmarks or entire projects
|
||||||
|
|
||||||
> \[!NOTE\]
|
|
||||||
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
|
|
||||||
> applications, like web browsers). You may also want to switch your CPU to a "performance"
|
|
||||||
> mode, if it exists, especially when benchmarking short-lived processes.
|
|
||||||
|
|
||||||
### CPython Benchmark
|
### CPython Benchmark
|
||||||
|
|
||||||
First, clone [CPython](https://github.com/python/cpython). It's a large and diverse Python codebase,
|
First, clone [CPython](https://github.com/python/cpython). It's a large and diverse Python codebase,
|
||||||
@@ -539,10 +513,10 @@ if the benchmark improved/regressed compared to that baseline.
|
|||||||
|
|
||||||
```shell
|
```shell
|
||||||
# Run once on your "baseline" code
|
# Run once on your "baseline" code
|
||||||
cargo bench -p ruff_benchmark -- --save-baseline=main
|
cargo benchmark --save-baseline=main
|
||||||
|
|
||||||
# Then iterate with
|
# Then iterate with
|
||||||
cargo bench -p ruff_benchmark -- --baseline=main
|
cargo benchmark --baseline=main
|
||||||
```
|
```
|
||||||
|
|
||||||
#### PR Summary
|
#### PR Summary
|
||||||
@@ -552,10 +526,10 @@ This is useful to illustrate the improvements of a PR.
|
|||||||
|
|
||||||
```shell
|
```shell
|
||||||
# On main
|
# On main
|
||||||
cargo bench -p ruff_benchmark -- --save-baseline=main
|
cargo benchmark --save-baseline=main
|
||||||
|
|
||||||
# After applying your changes
|
# After applying your changes
|
||||||
cargo bench -p ruff_benchmark -- --save-baseline=pr
|
cargo benchmark --save-baseline=pr
|
||||||
|
|
||||||
critcmp main pr
|
critcmp main pr
|
||||||
```
|
```
|
||||||
@@ -568,10 +542,10 @@ cargo install critcmp
|
|||||||
|
|
||||||
#### Tips
|
#### Tips
|
||||||
|
|
||||||
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
|
- Use `cargo benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark linter/pydantic`
|
||||||
to only run the lexer benchmarks.
|
to only run the pydantic tests.
|
||||||
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
|
- Use `cargo benchmark --quiet` for a more cleaned up output (without statistical relevance)
|
||||||
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
|
- Use `cargo benchmark --quick` to get faster results (more prone to noise)
|
||||||
|
|
||||||
### Profiling Projects
|
### Profiling Projects
|
||||||
|
|
||||||
@@ -582,10 +556,10 @@ examples.
|
|||||||
|
|
||||||
#### Linux
|
#### Linux
|
||||||
|
|
||||||
Install `perf` and build `ruff_benchmark` with the `profiling` profile and then run it with perf
|
Install `perf` and build `ruff_benchmark` with the `release-debug` profile and then run it with perf
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo bench -p ruff_benchmark --no-run --profile=profiling && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=profiling -- --profile-time=1
|
cargo bench -p ruff_benchmark --no-run --profile=release-debug && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=release-debug -- --profile-time=1
|
||||||
```
|
```
|
||||||
|
|
||||||
You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to
|
You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to
|
||||||
@@ -593,8 +567,8 @@ gather enough samples for a good flamegraph (change the 999, the sample rate, an
|
|||||||
of checks, to your liking)
|
of checks, to your liking)
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo build --bin ruff_dev --profile=profiling
|
cargo build --bin ruff_dev --profile=release-debug
|
||||||
perf record -g -F 999 target/profiling/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
|
perf record -g -F 999 target/release-debug/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
|
||||||
```
|
```
|
||||||
|
|
||||||
Then convert the recorded profile
|
Then convert the recorded profile
|
||||||
@@ -624,7 +598,7 @@ cargo install cargo-instruments
|
|||||||
Then run the profiler with
|
Then run the profiler with
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo instruments -t time --bench linter --profile profiling -p ruff_benchmark -- --profile-time=1
|
cargo instruments -t time --bench linter --profile release-debug -p ruff_benchmark -- --profile-time=1
|
||||||
```
|
```
|
||||||
|
|
||||||
- `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc`
|
- `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc`
|
||||||
@@ -638,11 +612,11 @@ Otherwise, follow the instructions from the linux section.
|
|||||||
`cargo dev` is a shortcut for `cargo run --package ruff_dev --bin ruff_dev`. You can run some useful
|
`cargo dev` is a shortcut for `cargo run --package ruff_dev --bin ruff_dev`. You can run some useful
|
||||||
utils with it:
|
utils with it:
|
||||||
|
|
||||||
- `cargo dev print-ast <file>`: Print the AST of a python file using Ruff's
|
- `cargo dev print-ast <file>`: Print the AST of a python file using the
|
||||||
[Python parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser).
|
[RustPython parser](https://github.com/astral-sh/RustPython-Parser/tree/main/parser) that is
|
||||||
For `if True: pass # comment`, you can see the syntax tree, the byte offsets for start and
|
mainly used in Ruff. For `if True: pass # comment`, you can see the syntax tree, the byte offsets
|
||||||
stop of each node and also how the `:` token, the comment and whitespace are not represented
|
for start and stop of each node and also how the `:` token, the comment and whitespace are not
|
||||||
anymore:
|
represented anymore:
|
||||||
|
|
||||||
```text
|
```text
|
||||||
[
|
[
|
||||||
@@ -815,8 +789,8 @@ To understand Ruff's import categorization system, we first need to define two c
|
|||||||
"project root".)
|
"project root".)
|
||||||
- "Package root": The top-most directory defining the Python package that includes a given Python
|
- "Package root": The top-most directory defining the Python package that includes a given Python
|
||||||
file. To find the package root for a given Python file, traverse up its parent directories until
|
file. To find the package root for a given Python file, traverse up its parent directories until
|
||||||
you reach a parent directory that doesn't contain an `__init__.py` file (and isn't in a subtree
|
you reach a parent directory that doesn't contain an `__init__.py` file (and isn't marked as
|
||||||
marked as a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
|
a [namespace package](https://docs.astral.sh/ruff/settings/#namespace-packages)); take the directory
|
||||||
just before that, i.e., the first directory in the package.
|
just before that, i.e., the first directory in the package.
|
||||||
|
|
||||||
For example, given:
|
For example, given:
|
||||||
@@ -905,7 +879,7 @@ There are three ways in which an import can be categorized as "first-party":
|
|||||||
package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
|
package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
|
||||||
automatically. This check is as simple as comparing the first segment of the current file's
|
automatically. This check is as simple as comparing the first segment of the current file's
|
||||||
module path to the first segment of the import.
|
module path to the first segment of the import.
|
||||||
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
|
1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
|
||||||
sets the directories to scan when identifying first-party imports. The algorithm is
|
sets the directories to scan when identifying first-party imports. The algorithm is
|
||||||
straightforward: given an import, like `import foo`, iterate over the directories enumerated in
|
straightforward: given an import, like `import foo`, iterate over the directories enumerated in
|
||||||
the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
|
the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
|
||||||
|
|||||||
2202
Cargo.lock
generated
2202
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
276
Cargo.toml
276
Cargo.toml
@@ -4,7 +4,7 @@ resolver = "2"
|
|||||||
|
|
||||||
[workspace.package]
|
[workspace.package]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.75"
|
rust-version = "1.71"
|
||||||
homepage = "https://docs.astral.sh/ruff"
|
homepage = "https://docs.astral.sh/ruff"
|
||||||
documentation = "https://docs.astral.sh/ruff"
|
documentation = "https://docs.astral.sh/ruff"
|
||||||
repository = "https://github.com/astral-sh/ruff"
|
repository = "https://github.com/astral-sh/ruff"
|
||||||
@@ -12,196 +12,51 @@ authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
|||||||
license = "MIT"
|
license = "MIT"
|
||||||
|
|
||||||
[workspace.dependencies]
|
[workspace.dependencies]
|
||||||
ruff = { path = "crates/ruff" }
|
anyhow = { version = "1.0.69" }
|
||||||
ruff_cache = { path = "crates/ruff_cache" }
|
bitflags = { version = "2.4.1" }
|
||||||
ruff_db = { path = "crates/ruff_db" }
|
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
|
||||||
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
|
clap = { version = "4.4.7", features = ["derive"] }
|
||||||
ruff_formatter = { path = "crates/ruff_formatter" }
|
colored = { version = "2.0.0" }
|
||||||
ruff_index = { path = "crates/ruff_index" }
|
filetime = { version = "0.2.20" }
|
||||||
ruff_linter = { path = "crates/ruff_linter" }
|
|
||||||
ruff_macros = { path = "crates/ruff_macros" }
|
|
||||||
ruff_notebook = { path = "crates/ruff_notebook" }
|
|
||||||
ruff_python_ast = { path = "crates/ruff_python_ast" }
|
|
||||||
ruff_python_codegen = { path = "crates/ruff_python_codegen" }
|
|
||||||
ruff_python_formatter = { path = "crates/ruff_python_formatter" }
|
|
||||||
ruff_python_index = { path = "crates/ruff_python_index" }
|
|
||||||
ruff_python_literal = { path = "crates/ruff_python_literal" }
|
|
||||||
ruff_python_parser = { path = "crates/ruff_python_parser" }
|
|
||||||
ruff_python_semantic = { path = "crates/ruff_python_semantic" }
|
|
||||||
ruff_python_stdlib = { path = "crates/ruff_python_stdlib" }
|
|
||||||
ruff_python_trivia = { path = "crates/ruff_python_trivia" }
|
|
||||||
ruff_server = { path = "crates/ruff_server" }
|
|
||||||
ruff_source_file = { path = "crates/ruff_source_file" }
|
|
||||||
ruff_text_size = { path = "crates/ruff_text_size" }
|
|
||||||
ruff_workspace = { path = "crates/ruff_workspace" }
|
|
||||||
|
|
||||||
red_knot = { path = "crates/red_knot" }
|
|
||||||
red_knot_module_resolver = { path = "crates/red_knot_module_resolver" }
|
|
||||||
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
|
|
||||||
|
|
||||||
aho-corasick = { version = "1.1.3" }
|
|
||||||
annotate-snippets = { version = "0.9.2", features = ["color"] }
|
|
||||||
anyhow = { version = "1.0.80" }
|
|
||||||
argfile = { version = "0.2.0" }
|
|
||||||
bincode = { version = "1.3.3" }
|
|
||||||
bitflags = { version = "2.5.0" }
|
|
||||||
bstr = { version = "1.9.1" }
|
|
||||||
cachedir = { version = "0.3.1" }
|
|
||||||
camino = { version = "1.1.7" }
|
|
||||||
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
|
|
||||||
clap = { version = "4.5.3", features = ["derive"] }
|
|
||||||
clap_complete_command = { version = "0.6.0" }
|
|
||||||
clearscreen = { version = "3.0.0" }
|
|
||||||
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
|
|
||||||
colored = { version = "2.1.0" }
|
|
||||||
console_error_panic_hook = { version = "0.1.7" }
|
|
||||||
console_log = { version = "1.0.0" }
|
|
||||||
countme = { version = "3.0.1" }
|
|
||||||
compact_str = "0.8.0"
|
|
||||||
criterion = { version = "0.5.1", default-features = false }
|
|
||||||
crossbeam = { version = "0.8.4" }
|
|
||||||
dashmap = { version = "6.0.1" }
|
|
||||||
drop_bomb = { version = "0.1.5" }
|
|
||||||
env_logger = { version = "0.11.0" }
|
|
||||||
etcetera = { version = "0.8.0" }
|
|
||||||
fern = { version = "0.6.1" }
|
|
||||||
filetime = { version = "0.2.23" }
|
|
||||||
glob = { version = "0.3.1" }
|
glob = { version = "0.3.1" }
|
||||||
globset = { version = "0.4.14" }
|
globset = { version = "0.4.10" }
|
||||||
hashbrown = "0.14.3"
|
ignore = { version = "0.4.20" }
|
||||||
ignore = { version = "0.4.22" }
|
insta = { version = "1.34.0", feature = ["filters", "glob"] }
|
||||||
imara-diff = { version = "0.1.5" }
|
is-macro = { version = "0.3.0" }
|
||||||
imperative = { version = "1.0.4" }
|
itertools = { version = "0.11.0" }
|
||||||
indicatif = { version = "0.17.8" }
|
|
||||||
indoc = { version = "2.0.4" }
|
|
||||||
insta = { version = "1.35.1" }
|
|
||||||
insta-cmd = { version = "0.6.0" }
|
|
||||||
is-macro = { version = "0.3.5" }
|
|
||||||
is-wsl = { version = "0.4.0" }
|
|
||||||
itertools = { version = "0.13.0" }
|
|
||||||
js-sys = { version = "0.3.69" }
|
|
||||||
jod-thread = { version = "0.1.2" }
|
|
||||||
libc = { version = "0.2.153" }
|
|
||||||
libcst = { version = "1.1.0", default-features = false }
|
libcst = { version = "1.1.0", default-features = false }
|
||||||
log = { version = "0.4.17" }
|
log = { version = "0.4.17" }
|
||||||
lsp-server = { version = "0.7.6" }
|
memchr = { version = "2.6.4" }
|
||||||
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
|
once_cell = { version = "1.17.1" }
|
||||||
"proposed",
|
|
||||||
] }
|
|
||||||
matchit = { version = "0.8.1" }
|
|
||||||
memchr = { version = "2.7.1" }
|
|
||||||
mimalloc = { version = "0.1.39" }
|
|
||||||
natord = { version = "1.0.9" }
|
|
||||||
notify = { version = "6.1.1" }
|
|
||||||
once_cell = { version = "1.19.0" }
|
|
||||||
ordermap = { version = "0.5.0" }
|
|
||||||
path-absolutize = { version = "3.1.1" }
|
path-absolutize = { version = "3.1.1" }
|
||||||
path-slash = { version = "0.2.1" }
|
proc-macro2 = { version = "1.0.69" }
|
||||||
pathdiff = { version = "0.2.1" }
|
|
||||||
pep440_rs = { version = "0.6.0", features = ["serde"] }
|
|
||||||
pretty_assertions = "1.3.0"
|
|
||||||
proc-macro2 = { version = "1.0.79" }
|
|
||||||
pyproject-toml = { version = "0.9.0" }
|
|
||||||
quick-junit = { version = "0.4.0" }
|
|
||||||
quote = { version = "1.0.23" }
|
quote = { version = "1.0.23" }
|
||||||
rand = { version = "0.8.5" }
|
|
||||||
rayon = { version = "1.10.0" }
|
|
||||||
regex = { version = "1.10.2" }
|
regex = { version = "1.10.2" }
|
||||||
rustc-hash = { version = "2.0.0" }
|
rustc-hash = { version = "1.1.0" }
|
||||||
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "a1bf3a613f451af7fc0a59411c56abc47fe8e8e1" }
|
schemars = { version = "0.8.15" }
|
||||||
schemars = { version = "0.8.16" }
|
serde = { version = "1.0.190", features = ["derive"] }
|
||||||
seahash = { version = "4.1.0" }
|
serde_json = { version = "1.0.108" }
|
||||||
serde = { version = "1.0.197", features = ["derive"] }
|
|
||||||
serde-wasm-bindgen = { version = "0.6.4" }
|
|
||||||
serde_json = { version = "1.0.113" }
|
|
||||||
serde_test = { version = "1.0.152" }
|
|
||||||
serde_with = { version = "3.6.0", default-features = false, features = [
|
|
||||||
"macros",
|
|
||||||
] }
|
|
||||||
shellexpand = { version = "3.0.0" }
|
shellexpand = { version = "3.0.0" }
|
||||||
similar = { version = "2.4.0", features = ["inline"] }
|
similar = { version = "2.3.0", features = ["inline"] }
|
||||||
smallvec = { version = "1.13.2" }
|
smallvec = { version = "1.11.1" }
|
||||||
static_assertions = "1.1.0"
|
static_assertions = "1.1.0"
|
||||||
strum = { version = "0.26.0", features = ["strum_macros"] }
|
strum = { version = "0.25.0", features = ["strum_macros"] }
|
||||||
strum_macros = { version = "0.26.0" }
|
strum_macros = { version = "0.25.3" }
|
||||||
syn = { version = "2.0.55" }
|
syn = { version = "2.0.39" }
|
||||||
tempfile = { version = "3.9.0" }
|
test-case = { version = "3.2.1" }
|
||||||
test-case = { version = "3.3.1" }
|
thiserror = { version = "1.0.50" }
|
||||||
thiserror = { version = "1.0.58" }
|
toml = { version = "0.7.8" }
|
||||||
tikv-jemallocator = { version = "0.6.0" }
|
|
||||||
toml = { version = "0.8.11" }
|
|
||||||
tracing = { version = "0.1.40" }
|
tracing = { version = "0.1.40" }
|
||||||
tracing-indicatif = { version = "0.3.6" }
|
tracing-indicatif = { version = "0.3.4" }
|
||||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
|
||||||
tracing-tree = { version = "0.4.0" }
|
|
||||||
typed-arena = { version = "2.0.2" }
|
|
||||||
unic-ucd-category = { version = "0.9" }
|
|
||||||
unicode-ident = { version = "1.0.12" }
|
unicode-ident = { version = "1.0.12" }
|
||||||
|
unicode_names2 = { version = "1.2.0" }
|
||||||
unicode-width = { version = "0.1.11" }
|
unicode-width = { version = "0.1.11" }
|
||||||
unicode_names2 = { version = "1.2.2" }
|
uuid = { version = "1.5.0", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
|
||||||
unicode-normalization = { version = "0.1.23" }
|
wsl = { version = "0.1.0" }
|
||||||
ureq = { version = "2.9.6" }
|
|
||||||
url = { version = "2.5.0" }
|
|
||||||
uuid = { version = "1.6.1", features = [
|
|
||||||
"v4",
|
|
||||||
"fast-rng",
|
|
||||||
"macro-diagnostics",
|
|
||||||
"js",
|
|
||||||
] }
|
|
||||||
walkdir = { version = "2.3.2" }
|
|
||||||
wasm-bindgen = { version = "0.2.92" }
|
|
||||||
wasm-bindgen-test = { version = "0.3.42" }
|
|
||||||
wild = { version = "2" }
|
|
||||||
zip = { version = "0.6.6", default-features = false, features = ["zstd"] }
|
|
||||||
|
|
||||||
[workspace.lints.rust]
|
|
||||||
unsafe_code = "warn"
|
|
||||||
unreachable_pub = "warn"
|
|
||||||
|
|
||||||
[workspace.lints.clippy]
|
|
||||||
pedantic = { level = "warn", priority = -2 }
|
|
||||||
# Allowed pedantic lints
|
|
||||||
char_lit_as_u8 = "allow"
|
|
||||||
collapsible_else_if = "allow"
|
|
||||||
collapsible_if = "allow"
|
|
||||||
implicit_hasher = "allow"
|
|
||||||
map_unwrap_or = "allow"
|
|
||||||
match_same_arms = "allow"
|
|
||||||
missing_errors_doc = "allow"
|
|
||||||
missing_panics_doc = "allow"
|
|
||||||
module_name_repetitions = "allow"
|
|
||||||
must_use_candidate = "allow"
|
|
||||||
similar_names = "allow"
|
|
||||||
too_many_lines = "allow"
|
|
||||||
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
|
|
||||||
needless_raw_string_hashes = "allow"
|
|
||||||
# Disallowed restriction lints
|
|
||||||
print_stdout = "warn"
|
|
||||||
print_stderr = "warn"
|
|
||||||
dbg_macro = "warn"
|
|
||||||
empty_drop = "warn"
|
|
||||||
empty_structs_with_brackets = "warn"
|
|
||||||
exit = "warn"
|
|
||||||
get_unwrap = "warn"
|
|
||||||
rc_buffer = "warn"
|
|
||||||
rc_mutex = "warn"
|
|
||||||
rest_pat_in_fully_bound_structs = "warn"
|
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
# Note that we set these explicitly, and these values
|
lto = "fat"
|
||||||
# were chosen based on a trade-off between compile times
|
|
||||||
# and runtime performance[1].
|
|
||||||
#
|
|
||||||
# [1]: https://github.com/astral-sh/ruff/pull/9031
|
|
||||||
lto = "thin"
|
|
||||||
codegen-units = 16
|
|
||||||
|
|
||||||
# Some crates don't change as much but benefit more from
|
|
||||||
# more expensive optimization passes, so we selectively
|
|
||||||
# decrease codegen-units in some cases.
|
|
||||||
[profile.release.package.ruff_python_parser]
|
|
||||||
codegen-units = 1
|
|
||||||
[profile.release.package.ruff_python_ast]
|
|
||||||
codegen-units = 1
|
codegen-units = 1
|
||||||
|
|
||||||
[profile.dev.package.insta]
|
[profile.dev.package.insta]
|
||||||
@@ -215,67 +70,8 @@ opt-level = 3
|
|||||||
[profile.dev.package.ruff_python_parser]
|
[profile.dev.package.ruff_python_parser]
|
||||||
opt-level = 1
|
opt-level = 1
|
||||||
|
|
||||||
# Use the `--profile profiling` flag to show symbols in release mode.
|
# Use the `--profile release-debug` flag to show symbols in release mode.
|
||||||
# e.g. `cargo build --profile profiling`
|
# e.g. `cargo build --profile release-debug`
|
||||||
[profile.profiling]
|
[profile.release-debug]
|
||||||
inherits = "release"
|
inherits = "release"
|
||||||
debug = 1
|
debug = 1
|
||||||
|
|
||||||
# The profile that 'cargo dist' will build with.
|
|
||||||
[profile.dist]
|
|
||||||
inherits = "release"
|
|
||||||
|
|
||||||
# Config for 'cargo dist'
|
|
||||||
[workspace.metadata.dist]
|
|
||||||
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
|
|
||||||
cargo-dist-version = "0.18.0"
|
|
||||||
# CI backends to support
|
|
||||||
ci = ["github"]
|
|
||||||
# The installers to generate for each app
|
|
||||||
installers = ["shell", "powershell"]
|
|
||||||
# The archive format to use for windows builds (defaults .zip)
|
|
||||||
windows-archive = ".zip"
|
|
||||||
# The archive format to use for non-windows builds (defaults .tar.xz)
|
|
||||||
unix-archive = ".tar.gz"
|
|
||||||
# Target platforms to build apps for (Rust target-triple syntax)
|
|
||||||
targets = [
|
|
||||||
"aarch64-apple-darwin",
|
|
||||||
"aarch64-pc-windows-msvc",
|
|
||||||
"aarch64-unknown-linux-gnu",
|
|
||||||
"aarch64-unknown-linux-musl",
|
|
||||||
"arm-unknown-linux-musleabihf",
|
|
||||||
"armv7-unknown-linux-gnueabihf",
|
|
||||||
"armv7-unknown-linux-musleabihf",
|
|
||||||
"i686-pc-windows-msvc",
|
|
||||||
"i686-unknown-linux-gnu",
|
|
||||||
"i686-unknown-linux-musl",
|
|
||||||
"powerpc64-unknown-linux-gnu",
|
|
||||||
"powerpc64le-unknown-linux-gnu",
|
|
||||||
"s390x-unknown-linux-gnu",
|
|
||||||
"x86_64-apple-darwin",
|
|
||||||
"x86_64-pc-windows-msvc",
|
|
||||||
"x86_64-unknown-linux-gnu",
|
|
||||||
"x86_64-unknown-linux-musl",
|
|
||||||
]
|
|
||||||
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
|
|
||||||
auto-includes = false
|
|
||||||
# Whether cargo-dist should create a GitHub Release or use an existing draft
|
|
||||||
create-release = true
|
|
||||||
# Publish jobs to run in CI
|
|
||||||
pr-run-mode = "skip"
|
|
||||||
# Whether CI should trigger releases with dispatches instead of tag pushes
|
|
||||||
dispatch-releases = true
|
|
||||||
# The stage during which the GitHub Release should be created
|
|
||||||
github-release = "announce"
|
|
||||||
# Whether CI should include auto-generated code to build local artifacts
|
|
||||||
build-local-artifacts = false
|
|
||||||
# Local artifacts jobs to run in CI
|
|
||||||
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
|
|
||||||
# Publish jobs to run in CI
|
|
||||||
publish-jobs = ["./publish-pypi", "./publish-wasm"]
|
|
||||||
# Announcement jobs to run in CI
|
|
||||||
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
|
|
||||||
# Custom permissions for GitHub Jobs
|
|
||||||
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
|
|
||||||
# Whether to install an updater program
|
|
||||||
install-updater = false
|
|
||||||
|
|||||||
38
Dockerfile
38
Dockerfile
@@ -1,38 +0,0 @@
|
|||||||
FROM --platform=$BUILDPLATFORM ubuntu as build
|
|
||||||
ENV HOME="/root"
|
|
||||||
WORKDIR $HOME
|
|
||||||
|
|
||||||
RUN apt update && apt install -y build-essential curl python3-venv
|
|
||||||
|
|
||||||
# Setup zig as cross compiling linker
|
|
||||||
RUN python3 -m venv $HOME/.venv
|
|
||||||
RUN .venv/bin/pip install cargo-zigbuild
|
|
||||||
ENV PATH="$HOME/.venv/bin:$PATH"
|
|
||||||
|
|
||||||
# Install rust
|
|
||||||
ARG TARGETPLATFORM
|
|
||||||
RUN case "$TARGETPLATFORM" in \
|
|
||||||
"linux/arm64") echo "aarch64-unknown-linux-musl" > rust_target.txt ;; \
|
|
||||||
"linux/amd64") echo "x86_64-unknown-linux-musl" > rust_target.txt ;; \
|
|
||||||
*) exit 1 ;; \
|
|
||||||
esac
|
|
||||||
# Update rustup whenever we bump the rust version
|
|
||||||
COPY rust-toolchain.toml rust-toolchain.toml
|
|
||||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
|
|
||||||
ENV PATH="$HOME/.cargo/bin:$PATH"
|
|
||||||
# Installs the correct toolchain version from rust-toolchain.toml and then the musl target
|
|
||||||
RUN rustup target add $(cat rust_target.txt)
|
|
||||||
|
|
||||||
# Build
|
|
||||||
COPY crates crates
|
|
||||||
COPY Cargo.toml Cargo.toml
|
|
||||||
COPY Cargo.lock Cargo.lock
|
|
||||||
RUN cargo zigbuild --bin ruff --target $(cat rust_target.txt) --release
|
|
||||||
RUN cp target/$(cat rust_target.txt)/release/ruff /ruff
|
|
||||||
# TODO: Optimize binary size, with a version that also works when cross compiling
|
|
||||||
# RUN strip --strip-all /ruff
|
|
||||||
|
|
||||||
FROM scratch
|
|
||||||
COPY --from=build /ruff /ruff
|
|
||||||
WORKDIR /io
|
|
||||||
ENTRYPOINT ["/ruff"]
|
|
||||||
25
LICENSE
25
LICENSE
@@ -1371,28 +1371,3 @@ are:
|
|||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
SOFTWARE.
|
SOFTWARE.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
- pydoclint, licensed as follows:
|
|
||||||
"""
|
|
||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2023 jsh9
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
"""
|
|
||||||
|
|||||||
115
README.md
115
README.md
@@ -4,12 +4,11 @@
|
|||||||
|
|
||||||
[](https://github.com/astral-sh/ruff)
|
[](https://github.com/astral-sh/ruff)
|
||||||
[](https://pypi.python.org/pypi/ruff)
|
[](https://pypi.python.org/pypi/ruff)
|
||||||
[](https://github.com/astral-sh/ruff/blob/main/LICENSE)
|
[](https://pypi.python.org/pypi/ruff)
|
||||||
[](https://pypi.python.org/pypi/ruff)
|
[](https://pypi.python.org/pypi/ruff)
|
||||||
[](https://github.com/astral-sh/ruff/actions)
|
[](https://github.com/astral-sh/ruff/actions)
|
||||||
[](https://discord.com/invite/astral-sh)
|
|
||||||
|
|
||||||
[**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
|
[**Discord**](https://discord.gg/c9MhzV8aU5) | [**Docs**](https://docs.astral.sh/ruff/) | [**Playground**](https://play.ruff.rs/)
|
||||||
|
|
||||||
An extremely fast Python linter and code formatter, written in Rust.
|
An extremely fast Python linter and code formatter, written in Rust.
|
||||||
|
|
||||||
@@ -28,11 +27,11 @@ An extremely fast Python linter and code formatter, written in Rust.
|
|||||||
- ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
|
- ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
|
||||||
- 🐍 Installable via `pip`
|
- 🐍 Installable via `pip`
|
||||||
- 🛠️ `pyproject.toml` support
|
- 🛠️ `pyproject.toml` support
|
||||||
- 🤝 Python 3.13 compatibility
|
- 🤝 Python 3.12 compatibility
|
||||||
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
|
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
|
||||||
- 📦 Built-in caching, to avoid re-analyzing unchanged files
|
- 📦 Built-in caching, to avoid re-analyzing unchanged files
|
||||||
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
|
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
|
||||||
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
|
- 📏 Over [700 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
|
||||||
of popular Flake8 plugins, like flake8-bugbear
|
of popular Flake8 plugins, like flake8-bugbear
|
||||||
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
|
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
|
||||||
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
|
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
|
||||||
@@ -50,13 +49,12 @@ times faster than any individual tool.
|
|||||||
Ruff is extremely actively developed and used in major open-source projects like:
|
Ruff is extremely actively developed and used in major open-source projects like:
|
||||||
|
|
||||||
- [Apache Airflow](https://github.com/apache/airflow)
|
- [Apache Airflow](https://github.com/apache/airflow)
|
||||||
- [Apache Superset](https://github.com/apache/superset)
|
|
||||||
- [FastAPI](https://github.com/tiangolo/fastapi)
|
- [FastAPI](https://github.com/tiangolo/fastapi)
|
||||||
- [Hugging Face](https://github.com/huggingface/transformers)
|
- [Hugging Face](https://github.com/huggingface/transformers)
|
||||||
- [Pandas](https://github.com/pandas-dev/pandas)
|
- [Pandas](https://github.com/pandas-dev/pandas)
|
||||||
- [SciPy](https://github.com/scipy/scipy)
|
- [SciPy](https://github.com/scipy/scipy)
|
||||||
|
|
||||||
...and [many more](#whos-using-ruff).
|
...and many more.
|
||||||
|
|
||||||
Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
|
Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
|
||||||
or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
|
or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
|
||||||
@@ -119,25 +117,7 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
|
|||||||
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
|
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
# With pip.
|
|
||||||
pip install ruff
|
pip install ruff
|
||||||
|
|
||||||
# With pipx.
|
|
||||||
pipx install ruff
|
|
||||||
```
|
|
||||||
|
|
||||||
Starting with version `0.5.0`, Ruff can be installed with our standalone installers:
|
|
||||||
|
|
||||||
```shell
|
|
||||||
# On macOS and Linux.
|
|
||||||
curl -LsSf https://astral.sh/ruff/install.sh | sh
|
|
||||||
|
|
||||||
# On Windows.
|
|
||||||
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
|
|
||||||
|
|
||||||
# For a specific version.
|
|
||||||
curl -LsSf https://astral.sh/ruff/0.5.5/install.sh | sh
|
|
||||||
powershell -c "irm https://astral.sh/ruff/0.5.5/install.ps1 | iex"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||||
@@ -148,7 +128,7 @@ and with [a variety of other package managers](https://docs.astral.sh/ruff/insta
|
|||||||
To run Ruff as a linter, try any of the following:
|
To run Ruff as a linter, try any of the following:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
ruff check # Lint all files in the current directory (and any subdirectories).
|
ruff check . # Lint all files in the current directory (and any subdirectories).
|
||||||
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories).
|
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories).
|
||||||
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`.
|
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`.
|
||||||
ruff check path/to/code/to/file.py # Lint `file.py`.
|
ruff check path/to/code/to/file.py # Lint `file.py`.
|
||||||
@@ -158,7 +138,7 @@ ruff check @arguments.txt # Lint using an input file, treating its con
|
|||||||
Or, to run Ruff as a formatter:
|
Or, to run Ruff as a formatter:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
ruff format # Format all files in the current directory (and any subdirectories).
|
ruff format . # Format all files in the current directory (and any subdirectories).
|
||||||
ruff format path/to/code/ # Format all files in `/path/to/code` (and any subdirectories).
|
ruff format path/to/code/ # Format all files in `/path/to/code` (and any subdirectories).
|
||||||
ruff format path/to/code/*.py # Format all `.py` files in `/path/to/code`.
|
ruff format path/to/code/*.py # Format all `.py` files in `/path/to/code`.
|
||||||
ruff format path/to/code/to/file.py # Format `file.py`.
|
ruff format path/to/code/to/file.py # Format `file.py`.
|
||||||
@@ -170,7 +150,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
|||||||
```yaml
|
```yaml
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
# Ruff version.
|
# Ruff version.
|
||||||
rev: v0.5.5
|
rev: v0.1.5
|
||||||
hooks:
|
hooks:
|
||||||
# Run the linter.
|
# Run the linter.
|
||||||
- id: ruff
|
- id: ruff
|
||||||
@@ -192,7 +172,7 @@ jobs:
|
|||||||
ruff:
|
ruff:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- uses: chartboost/ruff-action@v1
|
- uses: chartboost/ruff-action@v1
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -202,9 +182,10 @@ Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml`
|
|||||||
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
|
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
|
||||||
for a complete list of all configuration options).
|
for a complete list of all configuration options).
|
||||||
|
|
||||||
If left unspecified, Ruff's default configuration is equivalent to the following `ruff.toml` file:
|
If left unspecified, Ruff's default configuration is equivalent to:
|
||||||
|
|
||||||
```toml
|
```toml
|
||||||
|
[tool.ruff]
|
||||||
# Exclude a variety of commonly ignored directories.
|
# Exclude a variety of commonly ignored directories.
|
||||||
exclude = [
|
exclude = [
|
||||||
".bzr",
|
".bzr",
|
||||||
@@ -213,25 +194,20 @@ exclude = [
|
|||||||
".git",
|
".git",
|
||||||
".git-rewrite",
|
".git-rewrite",
|
||||||
".hg",
|
".hg",
|
||||||
".ipynb_checkpoints",
|
|
||||||
".mypy_cache",
|
".mypy_cache",
|
||||||
".nox",
|
".nox",
|
||||||
".pants.d",
|
".pants.d",
|
||||||
".pyenv",
|
|
||||||
".pytest_cache",
|
|
||||||
".pytype",
|
".pytype",
|
||||||
".ruff_cache",
|
".ruff_cache",
|
||||||
".svn",
|
".svn",
|
||||||
".tox",
|
".tox",
|
||||||
".venv",
|
".venv",
|
||||||
".vscode",
|
|
||||||
"__pypackages__",
|
"__pypackages__",
|
||||||
"_build",
|
"_build",
|
||||||
"buck-out",
|
"buck-out",
|
||||||
"build",
|
"build",
|
||||||
"dist",
|
"dist",
|
||||||
"node_modules",
|
"node_modules",
|
||||||
"site-packages",
|
|
||||||
"venv",
|
"venv",
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -242,7 +218,7 @@ indent-width = 4
|
|||||||
# Assume Python 3.8
|
# Assume Python 3.8
|
||||||
target-version = "py38"
|
target-version = "py38"
|
||||||
|
|
||||||
[lint]
|
[tool.ruff.lint]
|
||||||
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
||||||
select = ["E4", "E7", "E9", "F"]
|
select = ["E4", "E7", "E9", "F"]
|
||||||
ignore = []
|
ignore = []
|
||||||
@@ -254,7 +230,7 @@ unfixable = []
|
|||||||
# Allow unused variables when underscore-prefixed.
|
# Allow unused variables when underscore-prefixed.
|
||||||
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
|
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
|
||||||
|
|
||||||
[format]
|
[tool.ruff.format]
|
||||||
# Like Black, use double quotes for strings.
|
# Like Black, use double quotes for strings.
|
||||||
quote-style = "double"
|
quote-style = "double"
|
||||||
|
|
||||||
@@ -268,27 +244,13 @@ skip-magic-trailing-comma = false
|
|||||||
line-ending = "auto"
|
line-ending = "auto"
|
||||||
```
|
```
|
||||||
|
|
||||||
Note that, in a `pyproject.toml`, each section header should be prefixed with `tool.ruff`. For
|
Some configuration options can be provided via the command-line, such as those related to
|
||||||
example, `[lint]` should be replaced with `[tool.ruff.lint]`.
|
rule enablement and disablement, file discovery, and logging level:
|
||||||
|
|
||||||
Some configuration options can be provided via dedicated command-line arguments, such as those
|
|
||||||
related to rule enablement and disablement, file discovery, and logging level:
|
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
ruff check --select F401 --select F403 --quiet
|
ruff check path/to/code/ --select F401 --select F403 --quiet
|
||||||
```
|
```
|
||||||
|
|
||||||
The remaining configuration options can be provided through a catch-all `--config` argument:
|
|
||||||
|
|
||||||
```shell
|
|
||||||
ruff check --config "lint.per-file-ignores = {'some_file.py' = ['F841']}"
|
|
||||||
```
|
|
||||||
|
|
||||||
To opt in to the latest lint rules, formatter style changes, interface updates, and more, enable
|
|
||||||
[preview mode](https://docs.astral.sh/ruff/rules/) by setting `preview = true` in your configuration
|
|
||||||
file or passing `--preview` on the command line. Preview mode enables a collection of unstable
|
|
||||||
features that may change prior to stabilization.
|
|
||||||
|
|
||||||
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
|
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
|
||||||
for more on the linting and formatting commands, respectively.
|
for more on the linting and formatting commands, respectively.
|
||||||
|
|
||||||
@@ -296,7 +258,7 @@ for more on the linting and formatting commands, respectively.
|
|||||||
|
|
||||||
<!-- Begin section: Rules -->
|
<!-- Begin section: Rules -->
|
||||||
|
|
||||||
**Ruff supports over 800 lint rules**, many of which are inspired by popular tools like Flake8,
|
**Ruff supports over 700 lint rules**, many of which are inspired by popular tools like Flake8,
|
||||||
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
|
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
|
||||||
Rust as a first-party feature.
|
Rust as a first-party feature.
|
||||||
|
|
||||||
@@ -352,6 +314,7 @@ quality tools, including:
|
|||||||
- [flake8-super](https://pypi.org/project/flake8-super/)
|
- [flake8-super](https://pypi.org/project/flake8-super/)
|
||||||
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
|
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
|
||||||
- [flake8-todos](https://pypi.org/project/flake8-todos/)
|
- [flake8-todos](https://pypi.org/project/flake8-todos/)
|
||||||
|
- [flake8-trio](https://pypi.org/project/flake8-trio/)
|
||||||
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
|
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
|
||||||
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
|
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
|
||||||
- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/astral-sh/ruff/issues/2102))
|
- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/astral-sh/ruff/issues/2102))
|
||||||
@@ -373,14 +336,14 @@ For a complete enumeration of the supported rules, see [_Rules_](https://docs.as
|
|||||||
Contributions are welcome and highly appreciated. To get started, check out the
|
Contributions are welcome and highly appreciated. To get started, check out the
|
||||||
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
|
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
|
||||||
|
|
||||||
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
|
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||||
|
|
||||||
## Support
|
## Support
|
||||||
|
|
||||||
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
|
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
|
||||||
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
|
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
|
||||||
|
|
||||||
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
|
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||||
|
|
||||||
## Acknowledgements
|
## Acknowledgements
|
||||||
|
|
||||||
@@ -410,38 +373,31 @@ Ruff is released under the MIT license.
|
|||||||
|
|
||||||
Ruff is used by a number of major open-source projects and companies, including:
|
Ruff is used by a number of major open-source projects and companies, including:
|
||||||
|
|
||||||
- [Albumentations](https://github.com/albumentations-team/albumentations)
|
|
||||||
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
|
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
|
||||||
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
|
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
|
||||||
- [Apache Airflow](https://github.com/apache/airflow)
|
- [Apache Airflow](https://github.com/apache/airflow)
|
||||||
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
|
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
|
||||||
- [Babel](https://github.com/python-babel/babel)
|
|
||||||
- Benchling ([Refac](https://github.com/benchling/refac))
|
- Benchling ([Refac](https://github.com/benchling/refac))
|
||||||
|
- [Babel](https://github.com/python-babel/babel)
|
||||||
- [Bokeh](https://github.com/bokeh/bokeh)
|
- [Bokeh](https://github.com/bokeh/bokeh)
|
||||||
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
||||||
- CERN ([Indico](https://getindico.io/))
|
|
||||||
- [DVC](https://github.com/iterative/dvc)
|
- [DVC](https://github.com/iterative/dvc)
|
||||||
- [Dagger](https://github.com/dagger/dagger)
|
- [Dagger](https://github.com/dagger/dagger)
|
||||||
- [Dagster](https://github.com/dagster-io/dagster)
|
- [Dagster](https://github.com/dagster-io/dagster)
|
||||||
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
|
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
|
||||||
- [FastAPI](https://github.com/tiangolo/fastapi)
|
- [FastAPI](https://github.com/tiangolo/fastapi)
|
||||||
- [Godot](https://github.com/godotengine/godot)
|
|
||||||
- [Gradio](https://github.com/gradio-app/gradio)
|
- [Gradio](https://github.com/gradio-app/gradio)
|
||||||
- [Great Expectations](https://github.com/great-expectations/great_expectations)
|
- [Great Expectations](https://github.com/great-expectations/great_expectations)
|
||||||
- [HTTPX](https://github.com/encode/httpx)
|
- [HTTPX](https://github.com/encode/httpx)
|
||||||
- [Hatch](https://github.com/pypa/hatch)
|
|
||||||
- [Home Assistant](https://github.com/home-assistant/core)
|
|
||||||
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
|
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
|
||||||
[Datasets](https://github.com/huggingface/datasets),
|
[Datasets](https://github.com/huggingface/datasets),
|
||||||
[Diffusers](https://github.com/huggingface/diffusers))
|
[Diffusers](https://github.com/huggingface/diffusers))
|
||||||
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
|
- [Hatch](https://github.com/pypa/hatch)
|
||||||
|
- [Home Assistant](https://github.com/home-assistant/core)
|
||||||
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
||||||
- [Ibis](https://github.com/ibis-project/ibis)
|
- [Ibis](https://github.com/ibis-project/ibis)
|
||||||
- [ivy](https://github.com/unifyai/ivy)
|
|
||||||
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||||
- [Kraken Tech](https://kraken.tech/)
|
|
||||||
- [LangChain](https://github.com/hwchase17/langchain)
|
- [LangChain](https://github.com/hwchase17/langchain)
|
||||||
- [Litestar](https://litestar.dev/)
|
|
||||||
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
|
- [LlamaIndex](https://github.com/jerryjliu/llama_index)
|
||||||
- Matrix ([Synapse](https://github.com/matrix-org/synapse))
|
- Matrix ([Synapse](https://github.com/matrix-org/synapse))
|
||||||
- [MegaLinter](https://github.com/oxsecurity/megalinter)
|
- [MegaLinter](https://github.com/oxsecurity/megalinter)
|
||||||
@@ -449,18 +405,14 @@ Ruff is used by a number of major open-source projects and companies, including:
|
|||||||
- Microsoft ([Semantic Kernel](https://github.com/microsoft/semantic-kernel),
|
- Microsoft ([Semantic Kernel](https://github.com/microsoft/semantic-kernel),
|
||||||
[ONNX Runtime](https://github.com/microsoft/onnxruntime),
|
[ONNX Runtime](https://github.com/microsoft/onnxruntime),
|
||||||
[LightGBM](https://github.com/microsoft/LightGBM))
|
[LightGBM](https://github.com/microsoft/LightGBM))
|
||||||
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python))
|
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python-sdk))
|
||||||
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
|
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
|
||||||
- [Mypy](https://github.com/python/mypy)
|
- [Mypy](https://github.com/python/mypy)
|
||||||
- [Nautobot](https://github.com/nautobot/nautobot)
|
|
||||||
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
|
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
|
||||||
- [Neon](https://github.com/neondatabase/neon)
|
- [Neon](https://github.com/neondatabase/neon)
|
||||||
- [Nokia](https://nokia.com/)
|
|
||||||
- [NoneBot](https://github.com/nonebot/nonebot2)
|
- [NoneBot](https://github.com/nonebot/nonebot2)
|
||||||
- [NumPyro](https://github.com/pyro-ppl/numpyro)
|
|
||||||
- [ONNX](https://github.com/onnx/onnx)
|
- [ONNX](https://github.com/onnx/onnx)
|
||||||
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
||||||
- [Open Wine Components](https://github.com/Open-Wine-Components/umu-launcher)
|
|
||||||
- [PDM](https://github.com/pdm-project/pdm)
|
- [PDM](https://github.com/pdm-project/pdm)
|
||||||
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
|
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
|
||||||
- [Pandas](https://github.com/pandas-dev/pandas)
|
- [Pandas](https://github.com/pandas-dev/pandas)
|
||||||
@@ -470,25 +422,20 @@ Ruff is used by a number of major open-source projects and companies, including:
|
|||||||
- [PostHog](https://github.com/PostHog/posthog)
|
- [PostHog](https://github.com/PostHog/posthog)
|
||||||
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
|
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
|
||||||
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
|
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
|
||||||
- [PyMC](https://github.com/pymc-devs/pymc/)
|
|
||||||
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
|
|
||||||
- [pytest](https://github.com/pytest-dev/pytest)
|
|
||||||
- [PyTorch](https://github.com/pytorch/pytorch)
|
- [PyTorch](https://github.com/pytorch/pytorch)
|
||||||
- [Pydantic](https://github.com/pydantic/pydantic)
|
- [Pydantic](https://github.com/pydantic/pydantic)
|
||||||
- [Pylint](https://github.com/PyCQA/pylint)
|
- [Pylint](https://github.com/PyCQA/pylint)
|
||||||
- [PyVista](https://github.com/pyvista/pyvista)
|
- [PyMC-Marketing](https://github.com/pymc-labs/pymc-marketing)
|
||||||
- [Reflex](https://github.com/reflex-dev/reflex)
|
- [Reflex](https://github.com/reflex-dev/reflex)
|
||||||
- [River](https://github.com/online-ml/river)
|
|
||||||
- [Rippling](https://rippling.com)
|
- [Rippling](https://rippling.com)
|
||||||
- [Robyn](https://github.com/sansyrox/robyn)
|
- [Robyn](https://github.com/sansyrox/robyn)
|
||||||
- [Saleor](https://github.com/saleor/saleor)
|
|
||||||
- Scale AI ([Launch SDK](https://github.com/scaleapi/launch-python-client))
|
- Scale AI ([Launch SDK](https://github.com/scaleapi/launch-python-client))
|
||||||
- [SciPy](https://github.com/scipy/scipy)
|
|
||||||
- Snowflake ([SnowCLI](https://github.com/Snowflake-Labs/snowcli))
|
- Snowflake ([SnowCLI](https://github.com/Snowflake-Labs/snowcli))
|
||||||
|
- [Saleor](https://github.com/saleor/saleor)
|
||||||
|
- [SciPy](https://github.com/scipy/scipy)
|
||||||
- [Sphinx](https://github.com/sphinx-doc/sphinx)
|
- [Sphinx](https://github.com/sphinx-doc/sphinx)
|
||||||
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
|
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
|
||||||
- [Starlette](https://github.com/encode/starlette)
|
- [Litestar](https://litestar.dev/)
|
||||||
- [Streamlit](https://github.com/streamlit/streamlit)
|
|
||||||
- [The Algorithms](https://github.com/TheAlgorithms/Python)
|
- [The Algorithms](https://github.com/TheAlgorithms/Python)
|
||||||
- [Vega-Altair](https://github.com/altair-viz/altair)
|
- [Vega-Altair](https://github.com/altair-viz/altair)
|
||||||
- WordPress ([Openverse](https://github.com/WordPress/openverse))
|
- WordPress ([Openverse](https://github.com/WordPress/openverse))
|
||||||
@@ -504,7 +451,7 @@ Ruff is used by a number of major open-source projects and companies, including:
|
|||||||
|
|
||||||
### Show Your Support
|
### Show Your Support
|
||||||
|
|
||||||
If you're using Ruff, consider adding the Ruff badge to your project's `README.md`:
|
If you're using Ruff, consider adding the Ruff badge to project's `README.md`:
|
||||||
|
|
||||||
```md
|
```md
|
||||||
[](https://github.com/astral-sh/ruff)
|
[](https://github.com/astral-sh/ruff)
|
||||||
@@ -526,10 +473,10 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
|
|||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
|
MIT
|
||||||
|
|
||||||
<div align="center">
|
<div align="center">
|
||||||
<a target="_blank" href="https://astral.sh" style="background:none">
|
<a target="_blank" href="https://astral.sh" style="background:none">
|
||||||
<img src="https://raw.githubusercontent.com/astral-sh/ruff/main/assets/svg/Astral.svg" alt="Made by Astral">
|
<img src="https://raw.githubusercontent.com/astral-sh/ruff/main/assets/svg/Astral.svg">
|
||||||
</a>
|
</a>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
12
_typos.toml
12
_typos.toml
@@ -1,21 +1,11 @@
|
|||||||
[files]
|
[files]
|
||||||
# https://github.com/crate-ci/typos/issues/868
|
# https://github.com/crate-ci/typos/issues/868
|
||||||
extend-exclude = ["crates/red_knot_module_resolver/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
|
extend-exclude = ["**/resources/**/*", "**/snapshots/**/*"]
|
||||||
|
|
||||||
[default.extend-words]
|
[default.extend-words]
|
||||||
"arange" = "arange" # e.g. `numpy.arange`
|
|
||||||
hel = "hel"
|
hel = "hel"
|
||||||
whos = "whos"
|
whos = "whos"
|
||||||
spawnve = "spawnve"
|
spawnve = "spawnve"
|
||||||
ned = "ned"
|
ned = "ned"
|
||||||
pn = "pn" # `import panel as pd` is a thing
|
|
||||||
poit = "poit"
|
poit = "poit"
|
||||||
BA = "BA" # acronym for "Bad Allowed", used in testing.
|
BA = "BA" # acronym for "Bad Allowed", used in testing.
|
||||||
jod = "jod" # e.g., `jod-thread`
|
|
||||||
|
|
||||||
[default]
|
|
||||||
extend-ignore-re = [
|
|
||||||
# Line ignore with trailing "spellchecker:disable-line"
|
|
||||||
"(?Rm)^.*#\\s*spellchecker:disable-line$",
|
|
||||||
"LICENSEs",
|
|
||||||
]
|
|
||||||
|
|||||||
14
clippy.toml
14
clippy.toml
@@ -1,13 +1,7 @@
|
|||||||
doc-valid-idents = [
|
doc-valid-idents = [
|
||||||
"..",
|
|
||||||
"CodeQL",
|
|
||||||
"FastAPI",
|
|
||||||
"IPython",
|
|
||||||
"LangChain",
|
|
||||||
"LibCST",
|
|
||||||
"McCabe",
|
|
||||||
"NumPy",
|
|
||||||
"SCREAMING_SNAKE_CASE",
|
|
||||||
"SQLAlchemy",
|
|
||||||
"StackOverflow",
|
"StackOverflow",
|
||||||
|
"CodeQL",
|
||||||
|
"IPython",
|
||||||
|
"NumPy",
|
||||||
|
"..",
|
||||||
]
|
]
|
||||||
|
|||||||
36
crates/flake8_to_ruff/Cargo.toml
Normal file
36
crates/flake8_to_ruff/Cargo.toml
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
[package]
|
||||||
|
name = "flake8-to-ruff"
|
||||||
|
version = "0.1.5"
|
||||||
|
description = """
|
||||||
|
Convert Flake8 configuration files to Ruff configuration files.
|
||||||
|
"""
|
||||||
|
authors = { workspace = true }
|
||||||
|
edition = { workspace = true }
|
||||||
|
rust-version = { workspace = true }
|
||||||
|
homepage = { workspace = true }
|
||||||
|
documentation = { workspace = true }
|
||||||
|
repository = { workspace = true }
|
||||||
|
license = { workspace = true }
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
ruff_linter = { path = "../ruff_linter", default-features = false }
|
||||||
|
ruff_workspace = { path = "../ruff_workspace" }
|
||||||
|
|
||||||
|
anyhow = { workspace = true }
|
||||||
|
clap = { workspace = true }
|
||||||
|
colored = { workspace = true }
|
||||||
|
configparser = { version = "3.0.2" }
|
||||||
|
itertools = { workspace = true }
|
||||||
|
log = { workspace = true }
|
||||||
|
once_cell = { workspace = true }
|
||||||
|
pep440_rs = { version = "0.3.12", features = ["serde"] }
|
||||||
|
regex = { workspace = true }
|
||||||
|
rustc-hash = { workspace = true }
|
||||||
|
serde = { workspace = true }
|
||||||
|
serde_json = { workspace = true }
|
||||||
|
strum = { workspace = true }
|
||||||
|
strum_macros = { workspace = true }
|
||||||
|
toml = { workspace = true }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
pretty_assertions = "1.3.0"
|
||||||
99
crates/flake8_to_ruff/README.md
Normal file
99
crates/flake8_to_ruff/README.md
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
# flake8-to-ruff
|
||||||
|
|
||||||
|
Convert existing Flake8 configuration files (`setup.cfg`, `tox.ini`, or `.flake8`) for use with
|
||||||
|
[Ruff](https://github.com/astral-sh/ruff).
|
||||||
|
|
||||||
|
Generates a Ruff-compatible `pyproject.toml` section.
|
||||||
|
|
||||||
|
## Installation and Usage
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
|
||||||
|
Available as [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) on PyPI:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pip install flake8-to-ruff
|
||||||
|
```
|
||||||
|
|
||||||
|
### Usage
|
||||||
|
|
||||||
|
To run `flake8-to-ruff`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
flake8-to-ruff path/to/setup.cfg
|
||||||
|
flake8-to-ruff path/to/tox.ini
|
||||||
|
flake8-to-ruff path/to/.flake8
|
||||||
|
```
|
||||||
|
|
||||||
|
`flake8-to-ruff` will print the relevant `pyproject.toml` sections to standard output, like so:
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[tool.ruff]
|
||||||
|
exclude = [
|
||||||
|
'.svn',
|
||||||
|
'CVS',
|
||||||
|
'.bzr',
|
||||||
|
'.hg',
|
||||||
|
'.git',
|
||||||
|
'__pycache__',
|
||||||
|
'.tox',
|
||||||
|
'.idea',
|
||||||
|
'.mypy_cache',
|
||||||
|
'.venv',
|
||||||
|
'node_modules',
|
||||||
|
'_state_machine.py',
|
||||||
|
'test_fstring.py',
|
||||||
|
'bad_coding2.py',
|
||||||
|
'badsyntax_*.py',
|
||||||
|
]
|
||||||
|
select = [
|
||||||
|
'A',
|
||||||
|
'E',
|
||||||
|
'F',
|
||||||
|
'Q',
|
||||||
|
]
|
||||||
|
ignore = []
|
||||||
|
|
||||||
|
[tool.ruff.flake8-quotes]
|
||||||
|
inline-quotes = 'single'
|
||||||
|
|
||||||
|
[tool.ruff.pep8-naming]
|
||||||
|
ignore-names = [
|
||||||
|
'foo',
|
||||||
|
'bar',
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Plugins
|
||||||
|
|
||||||
|
`flake8-to-ruff` will attempt to infer any activated plugins based on the settings provided in your
|
||||||
|
configuration file.
|
||||||
|
|
||||||
|
For example, if your `.flake8` file includes a `docstring-convention` property, `flake8-to-ruff`
|
||||||
|
will enable the appropriate [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
|
||||||
|
checks.
|
||||||
|
|
||||||
|
Alternatively, you can manually specify plugins on the command-line:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
|
||||||
|
```
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
1. Ruff only supports a subset of the Flake configuration options. `flake8-to-ruff` will warn on and
|
||||||
|
ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
|
||||||
|
configuration options that don't exist in Flake8.)
|
||||||
|
1. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
|
||||||
|
codes from unsupported plugins. (See the
|
||||||
|
[documentation](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8) for the complete
|
||||||
|
list of supported plugins.)
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
Contributions are welcome and hugely appreciated. To get started, check out the
|
||||||
|
[contributing guidelines](https://github.com/astral-sh/ruff/blob/main/CONTRIBUTING.md).
|
||||||
65
crates/flake8_to_ruff/examples/cryptography/pyproject.toml
Normal file
65
crates/flake8_to_ruff/examples/cryptography/pyproject.toml
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
[build-system]
|
||||||
|
requires = [
|
||||||
|
# The minimum setuptools version is specific to the PEP 517 backend,
|
||||||
|
# and may be stricter than the version required in `setup.cfg`
|
||||||
|
"setuptools>=40.6.0,!=60.9.0",
|
||||||
|
"wheel",
|
||||||
|
# Must be kept in sync with the `install_requirements` in `setup.cfg`
|
||||||
|
"cffi>=1.12; platform_python_implementation != 'PyPy'",
|
||||||
|
"setuptools-rust>=0.11.4",
|
||||||
|
]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[tool.black]
|
||||||
|
line-length = 79
|
||||||
|
target-version = ["py36"]
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
addopts = "-r s --capture=no --strict-markers --benchmark-disable"
|
||||||
|
markers = [
|
||||||
|
"skip_fips: this test is not executed in FIPS mode",
|
||||||
|
"supported: parametrized test requiring only_if and skip_message",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
show_error_codes = true
|
||||||
|
check_untyped_defs = true
|
||||||
|
no_implicit_reexport = true
|
||||||
|
warn_redundant_casts = true
|
||||||
|
warn_unused_ignores = true
|
||||||
|
warn_unused_configs = true
|
||||||
|
strict_equality = true
|
||||||
|
|
||||||
|
[[tool.mypy.overrides]]
|
||||||
|
module = [
|
||||||
|
"pretend"
|
||||||
|
]
|
||||||
|
ignore_missing_imports = true
|
||||||
|
|
||||||
|
[tool.coverage.run]
|
||||||
|
branch = true
|
||||||
|
relative_files = true
|
||||||
|
source = [
|
||||||
|
"cryptography",
|
||||||
|
"tests/",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.coverage.paths]
|
||||||
|
source = [
|
||||||
|
"src/cryptography",
|
||||||
|
"*.tox/*/lib*/python*/site-packages/cryptography",
|
||||||
|
"*.tox\\*\\Lib\\site-packages\\cryptography",
|
||||||
|
"*.tox/pypy/site-packages/cryptography",
|
||||||
|
]
|
||||||
|
tests =[
|
||||||
|
"tests/",
|
||||||
|
"*tests\\",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.coverage.report]
|
||||||
|
exclude_lines = [
|
||||||
|
"@abc.abstractmethod",
|
||||||
|
"@abc.abstractproperty",
|
||||||
|
"@typing.overload",
|
||||||
|
"if typing.TYPE_CHECKING",
|
||||||
|
]
|
||||||
91
crates/flake8_to_ruff/examples/cryptography/setup.cfg
Normal file
91
crates/flake8_to_ruff/examples/cryptography/setup.cfg
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
[metadata]
|
||||||
|
name = cryptography
|
||||||
|
version = attr: cryptography.__version__
|
||||||
|
description = cryptography is a package which provides cryptographic recipes and primitives to Python developers.
|
||||||
|
long_description = file: README.rst
|
||||||
|
long_description_content_type = text/x-rst
|
||||||
|
license = BSD-3-Clause OR Apache-2.0
|
||||||
|
url = https://github.com/pyca/cryptography
|
||||||
|
author = The Python Cryptographic Authority and individual contributors
|
||||||
|
author_email = cryptography-dev@python.org
|
||||||
|
project_urls =
|
||||||
|
Documentation=https://cryptography.io/
|
||||||
|
Source=https://github.com/pyca/cryptography/
|
||||||
|
Issues=https://github.com/pyca/cryptography/issues
|
||||||
|
Changelog=https://cryptography.io/en/latest/changelog/
|
||||||
|
classifiers =
|
||||||
|
Development Status :: 5 - Production/Stable
|
||||||
|
Intended Audience :: Developers
|
||||||
|
License :: OSI Approved :: Apache Software License
|
||||||
|
License :: OSI Approved :: BSD License
|
||||||
|
Natural Language :: English
|
||||||
|
Operating System :: MacOS :: MacOS X
|
||||||
|
Operating System :: POSIX
|
||||||
|
Operating System :: POSIX :: BSD
|
||||||
|
Operating System :: POSIX :: Linux
|
||||||
|
Operating System :: Microsoft :: Windows
|
||||||
|
Programming Language :: Python
|
||||||
|
Programming Language :: Python :: 3
|
||||||
|
Programming Language :: Python :: 3 :: Only
|
||||||
|
Programming Language :: Python :: 3.6
|
||||||
|
Programming Language :: Python :: 3.7
|
||||||
|
Programming Language :: Python :: 3.8
|
||||||
|
Programming Language :: Python :: 3.9
|
||||||
|
Programming Language :: Python :: 3.10
|
||||||
|
Programming Language :: Python :: 3.11
|
||||||
|
Programming Language :: Python :: Implementation :: CPython
|
||||||
|
Programming Language :: Python :: Implementation :: PyPy
|
||||||
|
Topic :: Security :: Cryptography
|
||||||
|
|
||||||
|
[options]
|
||||||
|
python_requires = >=3.6
|
||||||
|
include_package_data = True
|
||||||
|
zip_safe = False
|
||||||
|
package_dir =
|
||||||
|
=src
|
||||||
|
packages = find:
|
||||||
|
# `install_requires` must be kept in sync with `pyproject.toml`
|
||||||
|
install_requires =
|
||||||
|
cffi >=1.12
|
||||||
|
|
||||||
|
[options.packages.find]
|
||||||
|
where = src
|
||||||
|
exclude =
|
||||||
|
_cffi_src
|
||||||
|
_cffi_src.*
|
||||||
|
|
||||||
|
[options.extras_require]
|
||||||
|
test =
|
||||||
|
pytest>=6.2.0
|
||||||
|
pytest-benchmark
|
||||||
|
pytest-cov
|
||||||
|
pytest-subtests
|
||||||
|
pytest-xdist
|
||||||
|
pretend
|
||||||
|
iso8601
|
||||||
|
pytz
|
||||||
|
hypothesis>=1.11.4,!=3.79.2
|
||||||
|
docs =
|
||||||
|
sphinx >= 1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0
|
||||||
|
sphinx_rtd_theme
|
||||||
|
docstest =
|
||||||
|
pyenchant >= 1.6.11
|
||||||
|
twine >= 1.12.0
|
||||||
|
sphinxcontrib-spelling >= 4.0.1
|
||||||
|
sdist =
|
||||||
|
setuptools_rust >= 0.11.4
|
||||||
|
pep8test =
|
||||||
|
black
|
||||||
|
flake8
|
||||||
|
flake8-import-order
|
||||||
|
pep8-naming
|
||||||
|
# This extra is for OpenSSH private keys that use bcrypt KDF
|
||||||
|
# Versions: v3.1.3 - ignore_few_rounds, v3.1.5 - abi3
|
||||||
|
ssh =
|
||||||
|
bcrypt >= 3.1.5
|
||||||
|
|
||||||
|
[flake8]
|
||||||
|
ignore = E203,E211,W503,W504,N818
|
||||||
|
exclude = .tox,*.egg,.git,_build,.hypothesis
|
||||||
|
select = E,W,F,N,I
|
||||||
|
application-import-names = cryptography,cryptography_vectors,tests
|
||||||
19
crates/flake8_to_ruff/examples/jupyterhub.ini
Normal file
19
crates/flake8_to_ruff/examples/jupyterhub.ini
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
[flake8]
|
||||||
|
# Ignore style and complexity
|
||||||
|
# E: style errors
|
||||||
|
# W: style warnings
|
||||||
|
# C: complexity
|
||||||
|
# D: docstring warnings (unused pydocstyle extension)
|
||||||
|
# F841: local variable assigned but never used
|
||||||
|
ignore = E, C, W, D, F841
|
||||||
|
builtins = c, get_config
|
||||||
|
exclude =
|
||||||
|
.cache,
|
||||||
|
.github,
|
||||||
|
docs,
|
||||||
|
jupyterhub/alembic*,
|
||||||
|
onbuild,
|
||||||
|
scripts,
|
||||||
|
share,
|
||||||
|
tools,
|
||||||
|
setup.py
|
||||||
43
crates/flake8_to_ruff/examples/manim.ini
Normal file
43
crates/flake8_to_ruff/examples/manim.ini
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
[flake8]
|
||||||
|
# Exclude the grpc generated code
|
||||||
|
exclude = ./manim/grpc/gen/*
|
||||||
|
max-complexity = 15
|
||||||
|
max-line-length = 88
|
||||||
|
statistics = True
|
||||||
|
# Prevents some flake8-rst-docstrings errors
|
||||||
|
rst-roles = attr,class,func,meth,mod,obj,ref,doc,exc
|
||||||
|
rst-directives = manim, SEEALSO, seealso
|
||||||
|
docstring-convention=numpy
|
||||||
|
|
||||||
|
select = A,A00,B,B9,C4,C90,D,E,F,F,PT,RST,SIM,W
|
||||||
|
|
||||||
|
# General Compatibility
|
||||||
|
extend-ignore = E203, W503, D202, D212, D213, D404
|
||||||
|
|
||||||
|
# Misc
|
||||||
|
F401, F403, F405, F841, E501, E731, E402, F811, F821,
|
||||||
|
|
||||||
|
# Plug-in: flake8-builtins
|
||||||
|
A001, A002, A003,
|
||||||
|
|
||||||
|
# Plug-in: flake8-bugbear
|
||||||
|
B006, B007, B008, B009, B010, B903, B950,
|
||||||
|
|
||||||
|
# Plug-in: flake8-simplify
|
||||||
|
SIM105, SIM106, SIM119,
|
||||||
|
|
||||||
|
# Plug-in: flake8-comprehensions
|
||||||
|
C901
|
||||||
|
|
||||||
|
# Plug-in: flake8-pytest-style
|
||||||
|
PT001, PT004, PT006, PT011, PT018, PT022, PT023,
|
||||||
|
|
||||||
|
# Plug-in: flake8-docstrings
|
||||||
|
D100, D101, D102, D103, D104, D105, D106, D107,
|
||||||
|
D200, D202, D204, D205, D209,
|
||||||
|
D301,
|
||||||
|
D400, D401, D402, D403, D405, D406, D407, D409, D411, D412, D414,
|
||||||
|
|
||||||
|
# Plug-in: flake8-rst-docstrings
|
||||||
|
RST201, RST203, RST210, RST212, RST213, RST215,
|
||||||
|
RST301, RST303,
|
||||||
36
crates/flake8_to_ruff/examples/poetry.ini
Normal file
36
crates/flake8_to_ruff/examples/poetry.ini
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
[flake8]
|
||||||
|
min_python_version = 3.7.0
|
||||||
|
max-line-length = 88
|
||||||
|
ban-relative-imports = true
|
||||||
|
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
|
||||||
|
format-greedy = 1
|
||||||
|
inline-quotes = double
|
||||||
|
enable-extensions = TC, TC1
|
||||||
|
type-checking-strict = true
|
||||||
|
eradicate-whitelist-extend = ^-.*;
|
||||||
|
extend-ignore =
|
||||||
|
# E203: Whitespace before ':' (pycqa/pycodestyle#373)
|
||||||
|
E203,
|
||||||
|
# SIM106: Handle error-cases first
|
||||||
|
SIM106,
|
||||||
|
# ANN101: Missing type annotation for self in method
|
||||||
|
ANN101,
|
||||||
|
# ANN102: Missing type annotation for cls in classmethod
|
||||||
|
ANN102,
|
||||||
|
# PIE781: assign-and-return
|
||||||
|
PIE781,
|
||||||
|
# PIE798 no-unnecessary-class: Consider using a module for namespacing instead
|
||||||
|
PIE798,
|
||||||
|
per-file-ignores =
|
||||||
|
# TC002: Move third-party import '...' into a type-checking block
|
||||||
|
__init__.py:TC002,
|
||||||
|
# ANN201: Missing return type annotation for public function
|
||||||
|
tests/test_*:ANN201
|
||||||
|
tests/**/test_*:ANN201
|
||||||
|
extend-exclude =
|
||||||
|
# Frozen and not subject to change in this repo:
|
||||||
|
get-poetry.py,
|
||||||
|
install-poetry.py,
|
||||||
|
# External to the project's coding standards:
|
||||||
|
tests/fixtures/*,
|
||||||
|
tests/**/fixtures/*,
|
||||||
19
crates/flake8_to_ruff/examples/python-discord.ini
Normal file
19
crates/flake8_to_ruff/examples/python-discord.ini
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
[flake8]
|
||||||
|
max-line-length=120
|
||||||
|
docstring-convention=all
|
||||||
|
import-order-style=pycharm
|
||||||
|
application_import_names=bot,tests
|
||||||
|
exclude=.cache,.venv,.git,constants.py
|
||||||
|
extend-ignore=
|
||||||
|
B311,W503,E226,S311,T000,E731
|
||||||
|
# Missing Docstrings
|
||||||
|
D100,D104,D105,D107,
|
||||||
|
# Docstring Whitespace
|
||||||
|
D203,D212,D214,D215,
|
||||||
|
# Docstring Quotes
|
||||||
|
D301,D302,
|
||||||
|
# Docstring Content
|
||||||
|
D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
|
||||||
|
# Type Annotations
|
||||||
|
ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
|
||||||
|
per-file-ignores=tests/*:D,ANN
|
||||||
6
crates/flake8_to_ruff/examples/requests.ini
Normal file
6
crates/flake8_to_ruff/examples/requests.ini
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
[flake8]
|
||||||
|
ignore = E203, E501, W503
|
||||||
|
per-file-ignores =
|
||||||
|
requests/__init__.py:E402, F401
|
||||||
|
requests/compat.py:E402, F401
|
||||||
|
tests/compat.py:F401
|
||||||
34
crates/flake8_to_ruff/pyproject.toml
Normal file
34
crates/flake8_to_ruff/pyproject.toml
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
[project]
|
||||||
|
name = "flake8-to-ruff"
|
||||||
|
keywords = ["automation", "flake8", "pycodestyle", "pyflakes", "pylint", "clippy"]
|
||||||
|
classifiers = [
|
||||||
|
"Development Status :: 3 - Alpha",
|
||||||
|
"Environment :: Console",
|
||||||
|
"Intended Audience :: Developers",
|
||||||
|
"License :: OSI Approved :: MIT License",
|
||||||
|
"Operating System :: OS Independent",
|
||||||
|
"Programming Language :: Python",
|
||||||
|
"Programming Language :: Python :: 3.7",
|
||||||
|
"Programming Language :: Python :: 3.8",
|
||||||
|
"Programming Language :: Python :: 3.9",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
|
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||||
|
"Topic :: Software Development :: Quality Assurance",
|
||||||
|
]
|
||||||
|
author = "Charlie Marsh"
|
||||||
|
author_email = "charlie.r.marsh@gmail.com"
|
||||||
|
description = "Convert existing Flake8 configuration to Ruff."
|
||||||
|
requires-python = ">=3.7"
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
repository = "https://github.com/astral-sh/ruff#subdirectory=crates/flake8_to_ruff"
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["maturin>=1.0,<2.0"]
|
||||||
|
build-backend = "maturin"
|
||||||
|
|
||||||
|
[tool.maturin]
|
||||||
|
bindings = "bin"
|
||||||
|
strip = true
|
||||||
13
crates/flake8_to_ruff/src/black.rs
Normal file
13
crates/flake8_to_ruff/src/black.rs
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
//! Extract Black configuration settings from a pyproject.toml.
|
||||||
|
|
||||||
|
use ruff_linter::line_width::LineLength;
|
||||||
|
use ruff_linter::settings::types::PythonVersion;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
|
||||||
|
pub(crate) struct Black {
|
||||||
|
#[serde(alias = "line-length", alias = "line_length")]
|
||||||
|
pub(crate) line_length: Option<LineLength>,
|
||||||
|
#[serde(alias = "target-version", alias = "target_version")]
|
||||||
|
pub(crate) target_version: Option<Vec<PythonVersion>>,
|
||||||
|
}
|
||||||
687
crates/flake8_to_ruff/src/converter.rs
Normal file
687
crates/flake8_to_ruff/src/converter.rs
Normal file
@@ -0,0 +1,687 @@
|
|||||||
|
use std::collections::{HashMap, HashSet};
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use itertools::Itertools;
|
||||||
|
|
||||||
|
use ruff_linter::line_width::LineLength;
|
||||||
|
use ruff_linter::registry::Linter;
|
||||||
|
use ruff_linter::rule_selector::RuleSelector;
|
||||||
|
use ruff_linter::rules::flake8_pytest_style::types::{
|
||||||
|
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
|
||||||
|
};
|
||||||
|
use ruff_linter::rules::flake8_quotes::settings::Quote;
|
||||||
|
use ruff_linter::rules::flake8_tidy_imports::settings::Strictness;
|
||||||
|
use ruff_linter::rules::pydocstyle::settings::Convention;
|
||||||
|
use ruff_linter::settings::types::PythonVersion;
|
||||||
|
use ruff_linter::settings::DEFAULT_SELECTORS;
|
||||||
|
use ruff_linter::warn_user;
|
||||||
|
use ruff_workspace::options::{
|
||||||
|
Flake8AnnotationsOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, Flake8ErrMsgOptions,
|
||||||
|
Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, LintCommonOptions,
|
||||||
|
LintOptions, McCabeOptions, Options, Pep8NamingOptions, PydocstyleOptions,
|
||||||
|
};
|
||||||
|
use ruff_workspace::pyproject::Pyproject;
|
||||||
|
|
||||||
|
use super::external_config::ExternalConfig;
|
||||||
|
use super::plugin::Plugin;
|
||||||
|
use super::{parser, plugin};
|
||||||
|
|
||||||
|
pub(crate) fn convert(
|
||||||
|
config: &HashMap<String, HashMap<String, Option<String>>>,
|
||||||
|
external_config: &ExternalConfig,
|
||||||
|
plugins: Option<Vec<Plugin>>,
|
||||||
|
) -> Pyproject {
|
||||||
|
// Extract the Flake8 section.
|
||||||
|
let flake8 = config
|
||||||
|
.get("flake8")
|
||||||
|
.expect("Unable to find flake8 section in INI file");
|
||||||
|
|
||||||
|
// Extract all referenced rule code prefixes, to power plugin inference.
|
||||||
|
let mut referenced_codes: HashSet<RuleSelector> = HashSet::default();
|
||||||
|
for (key, value) in flake8 {
|
||||||
|
if let Some(value) = value {
|
||||||
|
match key.as_str() {
|
||||||
|
"select" | "ignore" | "extend-select" | "extend_select" | "extend-ignore"
|
||||||
|
| "extend_ignore" => {
|
||||||
|
referenced_codes.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||||
|
}
|
||||||
|
"per-file-ignores" | "per_file_ignores" => {
|
||||||
|
if let Ok(per_file_ignores) =
|
||||||
|
parser::parse_files_to_codes_mapping(value.as_ref())
|
||||||
|
{
|
||||||
|
for (_, codes) in parser::collect_per_file_ignores(per_file_ignores) {
|
||||||
|
referenced_codes.extend(codes);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Infer plugins, if not provided.
|
||||||
|
let plugins = plugins.unwrap_or_else(|| {
|
||||||
|
let from_options = plugin::infer_plugins_from_options(flake8);
|
||||||
|
if !from_options.is_empty() {
|
||||||
|
#[allow(clippy::print_stderr)]
|
||||||
|
{
|
||||||
|
eprintln!("Inferred plugins from settings: {from_options:#?}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let from_codes = plugin::infer_plugins_from_codes(&referenced_codes);
|
||||||
|
if !from_codes.is_empty() {
|
||||||
|
#[allow(clippy::print_stderr)]
|
||||||
|
{
|
||||||
|
eprintln!("Inferred plugins from referenced codes: {from_codes:#?}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
from_options.into_iter().chain(from_codes).collect()
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check if the user has specified a `select`. If not, we'll add our own
|
||||||
|
// default `select`, and populate it based on user plugins.
|
||||||
|
let mut select = flake8
|
||||||
|
.get("select")
|
||||||
|
.and_then(|value| {
|
||||||
|
value
|
||||||
|
.as_ref()
|
||||||
|
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
|
||||||
|
})
|
||||||
|
.unwrap_or_else(|| resolve_select(&plugins));
|
||||||
|
let mut ignore: HashSet<RuleSelector> = flake8
|
||||||
|
.get("ignore")
|
||||||
|
.and_then(|value| {
|
||||||
|
value
|
||||||
|
.as_ref()
|
||||||
|
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
|
||||||
|
})
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
// Parse each supported option.
|
||||||
|
let mut options = Options::default();
|
||||||
|
let mut lint_options = LintCommonOptions::default();
|
||||||
|
let mut flake8_annotations = Flake8AnnotationsOptions::default();
|
||||||
|
let mut flake8_bugbear = Flake8BugbearOptions::default();
|
||||||
|
let mut flake8_builtins = Flake8BuiltinsOptions::default();
|
||||||
|
let mut flake8_errmsg = Flake8ErrMsgOptions::default();
|
||||||
|
let mut flake8_pytest_style = Flake8PytestStyleOptions::default();
|
||||||
|
let mut flake8_quotes = Flake8QuotesOptions::default();
|
||||||
|
let mut flake8_tidy_imports = Flake8TidyImportsOptions::default();
|
||||||
|
let mut mccabe = McCabeOptions::default();
|
||||||
|
let mut pep8_naming = Pep8NamingOptions::default();
|
||||||
|
let mut pydocstyle = PydocstyleOptions::default();
|
||||||
|
for (key, value) in flake8 {
|
||||||
|
if let Some(value) = value {
|
||||||
|
match key.as_str() {
|
||||||
|
// flake8
|
||||||
|
"builtins" => {
|
||||||
|
options.builtins = Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"max-line-length" | "max_line_length" => match LineLength::from_str(value) {
|
||||||
|
Ok(line_length) => options.line_length = Some(line_length),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"select" => {
|
||||||
|
// No-op (handled above).
|
||||||
|
select.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||||
|
}
|
||||||
|
"ignore" => {
|
||||||
|
// No-op (handled above).
|
||||||
|
}
|
||||||
|
"extend-select" | "extend_select" => {
|
||||||
|
// Unlike Flake8, use a single explicit `select`.
|
||||||
|
select.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||||
|
}
|
||||||
|
"extend-ignore" | "extend_ignore" => {
|
||||||
|
// Unlike Flake8, use a single explicit `ignore`.
|
||||||
|
ignore.extend(parser::parse_prefix_codes(value.as_ref()));
|
||||||
|
}
|
||||||
|
"exclude" => {
|
||||||
|
options.exclude = Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"extend-exclude" | "extend_exclude" => {
|
||||||
|
options.extend_exclude = Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"per-file-ignores" | "per_file_ignores" => {
|
||||||
|
match parser::parse_files_to_codes_mapping(value.as_ref()) {
|
||||||
|
Ok(per_file_ignores) => {
|
||||||
|
lint_options.per_file_ignores =
|
||||||
|
Some(parser::collect_per_file_ignores(per_file_ignores));
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// flake8-bugbear
|
||||||
|
"extend-immutable-calls" | "extend_immutable_calls" => {
|
||||||
|
flake8_bugbear.extend_immutable_calls =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
// flake8-builtins
|
||||||
|
"builtins-ignorelist" | "builtins_ignorelist" => {
|
||||||
|
flake8_builtins.builtins_ignorelist =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
// flake8-annotations
|
||||||
|
"suppress-none-returning" | "suppress_none_returning" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_annotations.suppress_none_returning = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"suppress-dummy-args" | "suppress_dummy_args" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_annotations.suppress_dummy_args = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"mypy-init-return" | "mypy_init_return" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_annotations.mypy_init_return = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"allow-star-arg-any" | "allow_star_arg_any" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_annotations.allow_star_arg_any = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// flake8-quotes
|
||||||
|
"quotes" | "inline-quotes" | "inline_quotes" => match value.trim() {
|
||||||
|
"'" | "single" => flake8_quotes.inline_quotes = Some(Quote::Single),
|
||||||
|
"\"" | "double" => flake8_quotes.inline_quotes = Some(Quote::Double),
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"multiline-quotes" | "multiline_quotes" => match value.trim() {
|
||||||
|
"'" | "single" => flake8_quotes.multiline_quotes = Some(Quote::Single),
|
||||||
|
"\"" | "double" => flake8_quotes.multiline_quotes = Some(Quote::Double),
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"docstring-quotes" | "docstring_quotes" => match value.trim() {
|
||||||
|
"'" | "single" => flake8_quotes.docstring_quotes = Some(Quote::Single),
|
||||||
|
"\"" | "double" => flake8_quotes.docstring_quotes = Some(Quote::Double),
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"avoid-escape" | "avoid_escape" => match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_quotes.avoid_escape = Some(bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// pep8-naming
|
||||||
|
"ignore-names" | "ignore_names" => {
|
||||||
|
pep8_naming.ignore_names = Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"classmethod-decorators" | "classmethod_decorators" => {
|
||||||
|
pep8_naming.classmethod_decorators =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"staticmethod-decorators" | "staticmethod_decorators" => {
|
||||||
|
pep8_naming.staticmethod_decorators =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
// flake8-tidy-imports
|
||||||
|
"ban-relative-imports" | "ban_relative_imports" => match value.trim() {
|
||||||
|
"true" => flake8_tidy_imports.ban_relative_imports = Some(Strictness::All),
|
||||||
|
"parents" => {
|
||||||
|
flake8_tidy_imports.ban_relative_imports = Some(Strictness::Parents);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// flake8-docstrings
|
||||||
|
"docstring-convention" => match value.trim() {
|
||||||
|
"google" => pydocstyle.convention = Some(Convention::Google),
|
||||||
|
"numpy" => pydocstyle.convention = Some(Convention::Numpy),
|
||||||
|
"pep257" => pydocstyle.convention = Some(Convention::Pep257),
|
||||||
|
"all" => pydocstyle.convention = None,
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// mccabe
|
||||||
|
"max-complexity" | "max_complexity" => match value.parse::<usize>() {
|
||||||
|
Ok(max_complexity) => mccabe.max_complexity = Some(max_complexity),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// flake8-errmsg
|
||||||
|
"errmsg-max-string-length" | "errmsg_max_string_length" => {
|
||||||
|
match value.parse::<usize>() {
|
||||||
|
Ok(max_string_length) => {
|
||||||
|
flake8_errmsg.max_string_length = Some(max_string_length);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// flake8-pytest-style
|
||||||
|
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_pytest_style.fixture_parentheses = Some(!bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
|
||||||
|
match value.trim() {
|
||||||
|
"csv" => {
|
||||||
|
flake8_pytest_style.parametrize_names_type =
|
||||||
|
Some(ParametrizeNameType::Csv);
|
||||||
|
}
|
||||||
|
"tuple" => {
|
||||||
|
flake8_pytest_style.parametrize_names_type =
|
||||||
|
Some(ParametrizeNameType::Tuple);
|
||||||
|
}
|
||||||
|
"list" => {
|
||||||
|
flake8_pytest_style.parametrize_names_type =
|
||||||
|
Some(ParametrizeNameType::List);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
|
||||||
|
match value.trim() {
|
||||||
|
"tuple" => {
|
||||||
|
flake8_pytest_style.parametrize_values_type =
|
||||||
|
Some(ParametrizeValuesType::Tuple);
|
||||||
|
}
|
||||||
|
"list" => {
|
||||||
|
flake8_pytest_style.parametrize_values_type =
|
||||||
|
Some(ParametrizeValuesType::List);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
|
||||||
|
match value.trim() {
|
||||||
|
"tuple" => {
|
||||||
|
flake8_pytest_style.parametrize_values_row_type =
|
||||||
|
Some(ParametrizeValuesRowType::Tuple);
|
||||||
|
}
|
||||||
|
"list" => {
|
||||||
|
flake8_pytest_style.parametrize_values_row_type =
|
||||||
|
Some(ParametrizeValuesRowType::List);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
warn_user!("Unexpected '{key}' value: {value}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
|
||||||
|
flake8_pytest_style.raises_require_match_for =
|
||||||
|
Some(parser::parse_strings(value.as_ref()));
|
||||||
|
}
|
||||||
|
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
|
||||||
|
match parser::parse_bool(value.as_ref()) {
|
||||||
|
Ok(bool) => flake8_pytest_style.mark_parentheses = Some(!bool),
|
||||||
|
Err(e) => {
|
||||||
|
warn_user!("Unable to parse '{key}' property: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Unknown
|
||||||
|
_ => {
|
||||||
|
warn_user!("Skipping unsupported property: {}", key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deduplicate and sort.
|
||||||
|
lint_options.select = Some(
|
||||||
|
select
|
||||||
|
.into_iter()
|
||||||
|
.sorted_by_key(RuleSelector::prefix_and_code)
|
||||||
|
.collect(),
|
||||||
|
);
|
||||||
|
lint_options.ignore = Some(
|
||||||
|
ignore
|
||||||
|
.into_iter()
|
||||||
|
.sorted_by_key(RuleSelector::prefix_and_code)
|
||||||
|
.collect(),
|
||||||
|
);
|
||||||
|
if flake8_annotations != Flake8AnnotationsOptions::default() {
|
||||||
|
lint_options.flake8_annotations = Some(flake8_annotations);
|
||||||
|
}
|
||||||
|
if flake8_bugbear != Flake8BugbearOptions::default() {
|
||||||
|
lint_options.flake8_bugbear = Some(flake8_bugbear);
|
||||||
|
}
|
||||||
|
if flake8_builtins != Flake8BuiltinsOptions::default() {
|
||||||
|
lint_options.flake8_builtins = Some(flake8_builtins);
|
||||||
|
}
|
||||||
|
if flake8_errmsg != Flake8ErrMsgOptions::default() {
|
||||||
|
lint_options.flake8_errmsg = Some(flake8_errmsg);
|
||||||
|
}
|
||||||
|
if flake8_pytest_style != Flake8PytestStyleOptions::default() {
|
||||||
|
lint_options.flake8_pytest_style = Some(flake8_pytest_style);
|
||||||
|
}
|
||||||
|
if flake8_quotes != Flake8QuotesOptions::default() {
|
||||||
|
lint_options.flake8_quotes = Some(flake8_quotes);
|
||||||
|
}
|
||||||
|
if flake8_tidy_imports != Flake8TidyImportsOptions::default() {
|
||||||
|
lint_options.flake8_tidy_imports = Some(flake8_tidy_imports);
|
||||||
|
}
|
||||||
|
if mccabe != McCabeOptions::default() {
|
||||||
|
lint_options.mccabe = Some(mccabe);
|
||||||
|
}
|
||||||
|
if pep8_naming != Pep8NamingOptions::default() {
|
||||||
|
lint_options.pep8_naming = Some(pep8_naming);
|
||||||
|
}
|
||||||
|
if pydocstyle != PydocstyleOptions::default() {
|
||||||
|
lint_options.pydocstyle = Some(pydocstyle);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract any settings from the existing `pyproject.toml`.
|
||||||
|
if let Some(black) = &external_config.black {
|
||||||
|
if let Some(line_length) = &black.line_length {
|
||||||
|
options.line_length = Some(*line_length);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(target_version) = &black.target_version {
|
||||||
|
if let Some(target_version) = target_version.iter().min() {
|
||||||
|
options.target_version = Some(*target_version);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(isort) = &external_config.isort {
|
||||||
|
if let Some(src_paths) = &isort.src_paths {
|
||||||
|
match options.src.as_mut() {
|
||||||
|
Some(src) => {
|
||||||
|
src.extend_from_slice(src_paths);
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
options.src = Some(src_paths.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(project) = &external_config.project {
|
||||||
|
if let Some(requires_python) = &project.requires_python {
|
||||||
|
if options.target_version.is_none() {
|
||||||
|
options.target_version =
|
||||||
|
PythonVersion::get_minimum_supported_version(requires_python);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if lint_options != LintCommonOptions::default() {
|
||||||
|
options.lint = Some(LintOptions {
|
||||||
|
common: lint_options,
|
||||||
|
..LintOptions::default()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the pyproject.toml.
|
||||||
|
Pyproject::new(options)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Resolve the set of enabled `RuleSelector` values for the given
|
||||||
|
/// plugins.
|
||||||
|
fn resolve_select(plugins: &[Plugin]) -> HashSet<RuleSelector> {
|
||||||
|
let mut select: HashSet<_> = DEFAULT_SELECTORS.iter().cloned().collect();
|
||||||
|
select.extend(plugins.iter().map(|p| Linter::from(p).into()));
|
||||||
|
select
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use std::str::FromStr;

    use anyhow::Result;
    use itertools::Itertools;
    use pep440_rs::VersionSpecifiers;

    use pretty_assertions::assert_eq;
    use ruff_linter::line_width::LineLength;
    use ruff_linter::registry::Linter;
    use ruff_linter::rule_selector::RuleSelector;
    use ruff_linter::rules::flake8_quotes;
    use ruff_linter::rules::pydocstyle::settings::Convention;
    use ruff_linter::settings::types::PythonVersion;
    use ruff_workspace::options::{
        Flake8QuotesOptions, LintCommonOptions, LintOptions, Options, PydocstyleOptions,
    };
    use ruff_workspace::pyproject::Pyproject;

    use crate::converter::DEFAULT_SELECTORS;
    use crate::pep621::Project;
    use crate::ExternalConfig;

    use super::super::plugin::Plugin;
    use super::convert;

    /// The `LintCommonOptions` that `convert` produces when the input carries
    /// no plugin-specific settings, given the selectors for the enabled
    /// plugins.
    fn lint_default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> LintCommonOptions {
        LintCommonOptions {
            ignore: Some(vec![]),
            select: Some(
                DEFAULT_SELECTORS
                    .iter()
                    .cloned()
                    .chain(plugins)
                    .sorted_by_key(RuleSelector::prefix_and_code)
                    .collect(),
            ),
            ..LintCommonOptions::default()
        }
    }

    #[test]
    fn it_converts_empty() {
        // An empty `[flake8]` section maps to the default lint options.
        let actual = convert(
            &HashMap::from([("flake8".to_string(), HashMap::default())]),
            &ExternalConfig::default(),
            None,
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_converts_dashes() {
        // Dashed option names (`max-line-length`) are recognized.
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("max-line-length".to_string(), Some("100".to_string()))]),
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            line_length: Some(LineLength::try_from(100).unwrap()),
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_converts_underscores() {
        // Underscored option names (`max_line_length`) are recognized too.
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("max_line_length".to_string(), Some("100".to_string()))]),
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            line_length: Some(LineLength::try_from(100).unwrap()),
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_ignores_parse_errors() {
        // Unparseable values are warned about and skipped, not fatal.
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("max_line_length".to_string(), Some("abc".to_string()))]),
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_converts_plugin_options() {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: LintCommonOptions {
                    flake8_quotes: Some(Flake8QuotesOptions {
                        inline_quotes: Some(flake8_quotes::settings::Quote::Single),
                        multiline_quotes: None,
                        docstring_quotes: None,
                        avoid_escape: None,
                    }),
                    ..lint_default_options([])
                },
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_converts_docstring_conventions() {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([(
                    "docstring-convention".to_string(),
                    Some("numpy".to_string()),
                )]),
            )]),
            &ExternalConfig::default(),
            Some(vec![Plugin::Flake8Docstrings]),
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: LintCommonOptions {
                    pydocstyle: Some(PydocstyleOptions {
                        convention: Some(Convention::Numpy),
                        ignore_decorators: None,
                        property_decorators: None,
                    }),
                    ..lint_default_options([Linter::Pydocstyle.into()])
                },
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_infers_plugins_if_omitted() {
        // With no explicit plugin list, the presence of `inline-quotes`
        // implies the flake8-quotes plugin.
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),
                HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
            )]),
            &ExternalConfig::default(),
            None,
        );
        let expected = Pyproject::new(Options {
            lint: Some(LintOptions {
                common: LintCommonOptions {
                    flake8_quotes: Some(Flake8QuotesOptions {
                        inline_quotes: Some(flake8_quotes::settings::Quote::Single),
                        multiline_quotes: None,
                        docstring_quotes: None,
                        avoid_escape: None,
                    }),
                    ..lint_default_options([Linter::Flake8Quotes.into()])
                },
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_converts_project_requires_python() -> Result<()> {
        // The minimum version satisfying `requires-python` becomes the
        // target version.
        let actual = convert(
            &HashMap::from([("flake8".to_string(), HashMap::default())]),
            &ExternalConfig {
                project: Some(&Project {
                    requires_python: Some(VersionSpecifiers::from_str(">=3.8.16, <3.11")?),
                }),
                ..ExternalConfig::default()
            },
            Some(vec![]),
        );
        let expected = Pyproject::new(Options {
            target_version: Some(PythonVersion::Py38),
            lint: Some(LintOptions {
                common: lint_default_options([]),
                ..LintOptions::default()
            }),
            ..Options::default()
        });
        assert_eq!(actual, expected);

        Ok(())
    }
}
|
||||||
10
crates/flake8_to_ruff/src/external_config.rs
Normal file
10
crates/flake8_to_ruff/src/external_config.rs
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
use super::black::Black;
|
||||||
|
use super::isort::Isort;
|
||||||
|
use super::pep621::Project;
|
||||||
|
|
||||||
|
#[derive(Default)]
|
||||||
|
pub(crate) struct ExternalConfig<'a> {
|
||||||
|
pub(crate) black: Option<&'a Black>,
|
||||||
|
pub(crate) isort: Option<&'a Isort>,
|
||||||
|
pub(crate) project: Option<&'a Project>,
|
||||||
|
}
|
||||||
10
crates/flake8_to_ruff/src/isort.rs
Normal file
10
crates/flake8_to_ruff/src/isort.rs
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
//! Extract isort configuration settings from a pyproject.toml.
|
||||||
|
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// The [isort configuration](https://pycqa.github.io/isort/docs/configuration/config_files.html).
|
||||||
|
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
|
||||||
|
pub(crate) struct Isort {
|
||||||
|
#[serde(alias = "src-paths", alias = "src_paths")]
|
||||||
|
pub(crate) src_paths: Option<Vec<String>>,
|
||||||
|
}
|
||||||
80
crates/flake8_to_ruff/src/main.rs
Normal file
80
crates/flake8_to_ruff/src/main.rs
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.
|
||||||
|
|
||||||
|
mod black;
|
||||||
|
mod converter;
|
||||||
|
mod external_config;
|
||||||
|
mod isort;
|
||||||
|
mod parser;
|
||||||
|
mod pep621;
|
||||||
|
mod plugin;
|
||||||
|
mod pyproject;
|
||||||
|
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use clap::Parser;
|
||||||
|
use configparser::ini::Ini;
|
||||||
|
|
||||||
|
use crate::converter::convert;
|
||||||
|
use crate::external_config::ExternalConfig;
|
||||||
|
use crate::plugin::Plugin;
|
||||||
|
use crate::pyproject::parse;
|
||||||
|
use ruff_linter::logging::{set_up_logging, LogLevel};
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(
|
||||||
|
about = "Convert existing Flake8 configuration to Ruff.",
|
||||||
|
long_about = None
|
||||||
|
)]
|
||||||
|
struct Args {
|
||||||
|
/// Path to the Flake8 configuration file (e.g., `setup.cfg`, `tox.ini`, or
|
||||||
|
/// `.flake8`).
|
||||||
|
#[arg(required = true)]
|
||||||
|
file: PathBuf,
|
||||||
|
/// Optional path to a `pyproject.toml` file, used to ensure compatibility
|
||||||
|
/// with Black.
|
||||||
|
#[arg(long)]
|
||||||
|
pyproject: Option<PathBuf>,
|
||||||
|
/// List of plugins to enable.
|
||||||
|
#[arg(long, value_delimiter = ',')]
|
||||||
|
plugin: Option<Vec<Plugin>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() -> Result<()> {
|
||||||
|
set_up_logging(&LogLevel::Default)?;
|
||||||
|
|
||||||
|
let args = Args::parse();
|
||||||
|
|
||||||
|
// Read the INI file.
|
||||||
|
let mut ini = Ini::new_cs();
|
||||||
|
ini.set_multiline(true);
|
||||||
|
let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;
|
||||||
|
|
||||||
|
// Read the pyproject.toml file.
|
||||||
|
let pyproject = args.pyproject.map(parse).transpose()?;
|
||||||
|
let external_config = pyproject
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|pyproject| pyproject.tool.as_ref())
|
||||||
|
.map(|tool| ExternalConfig {
|
||||||
|
black: tool.black.as_ref(),
|
||||||
|
isort: tool.isort.as_ref(),
|
||||||
|
..Default::default()
|
||||||
|
})
|
||||||
|
.unwrap_or_default();
|
||||||
|
let external_config = ExternalConfig {
|
||||||
|
project: pyproject
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|pyproject| pyproject.project.as_ref()),
|
||||||
|
..external_config
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create Ruff's pyproject.toml section.
|
||||||
|
let pyproject = convert(&config, &external_config, args.plugin);
|
||||||
|
|
||||||
|
#[allow(clippy::print_stdout)]
|
||||||
|
{
|
||||||
|
println!("{}", toml::to_string_pretty(&pyproject)?);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
391
crates/flake8_to_ruff/src/parser.rs
Normal file
391
crates/flake8_to_ruff/src/parser.rs
Normal file
@@ -0,0 +1,391 @@
|
|||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use anyhow::{bail, Result};
|
||||||
|
use once_cell::sync::Lazy;
|
||||||
|
use regex::Regex;
|
||||||
|
use rustc_hash::FxHashMap;
|
||||||
|
|
||||||
|
use ruff_linter::settings::types::PatternPrefixPair;
|
||||||
|
use ruff_linter::{warn_user, RuleSelector};
|
||||||
|
|
||||||
|
/// Matches a single separator character (comma or whitespace) between entries
/// of a Flake8-style list value.
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
|
||||||
|
|
||||||
|
/// Parse a comma-separated list of `RuleSelector` values (e.g.,
|
||||||
|
/// "F401,E501").
|
||||||
|
pub(crate) fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
|
||||||
|
let mut codes: Vec<RuleSelector> = vec![];
|
||||||
|
for code in COMMA_SEPARATED_LIST_RE.split(value) {
|
||||||
|
let code = code.trim();
|
||||||
|
if code.is_empty() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if let Ok(code) = RuleSelector::from_str(code) {
|
||||||
|
codes.push(code);
|
||||||
|
} else {
|
||||||
|
warn_user!("Unsupported prefix code: {code}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
codes
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a comma-separated list of strings (e.g., "__init__.py,__main__.py").
|
||||||
|
pub(crate) fn parse_strings(value: &str) -> Vec<String> {
|
||||||
|
COMMA_SEPARATED_LIST_RE
|
||||||
|
.split(value)
|
||||||
|
.map(str::trim)
|
||||||
|
.filter(|part| !part.is_empty())
|
||||||
|
.map(String::from)
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a boolean.
|
||||||
|
pub(crate) fn parse_bool(value: &str) -> Result<bool> {
|
||||||
|
match value.trim() {
|
||||||
|
"true" => Ok(true),
|
||||||
|
"false" => Ok(false),
|
||||||
|
_ => bail!("Unexpected boolean value: {value}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
struct Token {
|
||||||
|
token_name: TokenType,
|
||||||
|
src: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The kinds of lexemes recognized in a 'files-to-codes' mapping.
#[derive(Debug, Copy, Clone)]
enum TokenType {
    /// A rule code like `F401`.
    Code,
    /// A filename or glob pattern.
    File,
    Colon,
    Comma,
    /// A run of whitespace.
    Ws,
    /// End of input (synthesized by the tokenizer).
    Eof,
}
|
||||||
|
|
||||||
|
/// Parser accumulator for a single 'files-to-codes' entry: the filenames and
/// codes collected so far, plus flags tracking separator and colon position.
struct State {
    seen_sep: bool,
    seen_colon: bool,
    filenames: Vec<String>,
    codes: Vec<String>,
}
|
||||||
|
|
||||||
|
impl State {
|
||||||
|
const fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
seen_sep: true,
|
||||||
|
seen_colon: false,
|
||||||
|
filenames: vec![],
|
||||||
|
codes: vec![],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate the list of `StrRuleCodePair` pairs for the current
|
||||||
|
/// state.
|
||||||
|
fn parse(&self) -> Vec<PatternPrefixPair> {
|
||||||
|
let mut codes: Vec<PatternPrefixPair> = vec![];
|
||||||
|
for code in &self.codes {
|
||||||
|
if let Ok(code) = RuleSelector::from_str(code) {
|
||||||
|
for filename in &self.filenames {
|
||||||
|
codes.push(PatternPrefixPair {
|
||||||
|
pattern: filename.clone(),
|
||||||
|
prefix: code.clone(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
warn_user!("Unsupported prefix code: {code}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
codes
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Tokenize the raw 'files-to-codes' mapping.
|
||||||
|
fn tokenize_files_to_codes_mapping(value: &str) -> Vec<Token> {
|
||||||
|
let mut tokens = vec![];
|
||||||
|
let mut i = 0;
|
||||||
|
while i < value.len() {
|
||||||
|
for (token_re, token_name) in [
|
||||||
|
(
|
||||||
|
Regex::new(r"([A-Z]+[0-9]*)(?:$|\s|,)").unwrap(),
|
||||||
|
TokenType::Code,
|
||||||
|
),
|
||||||
|
(Regex::new(r"([^\s:,]+)").unwrap(), TokenType::File),
|
||||||
|
(Regex::new(r"(\s*:\s*)").unwrap(), TokenType::Colon),
|
||||||
|
(Regex::new(r"(\s*,\s*)").unwrap(), TokenType::Comma),
|
||||||
|
(Regex::new(r"(\s+)").unwrap(), TokenType::Ws),
|
||||||
|
] {
|
||||||
|
if let Some(cap) = token_re.captures(&value[i..]) {
|
||||||
|
let mat = cap.get(1).unwrap();
|
||||||
|
if mat.start() == 0 {
|
||||||
|
tokens.push(Token {
|
||||||
|
token_name,
|
||||||
|
src: mat.as_str().trim().to_string(),
|
||||||
|
});
|
||||||
|
i += mat.end();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tokens.push(Token {
|
||||||
|
token_name: TokenType::Eof,
|
||||||
|
src: String::new(),
|
||||||
|
});
|
||||||
|
tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a 'files-to-codes' mapping, mimicking Flake8's internal logic.
|
||||||
|
/// See: <https://github.com/PyCQA/flake8/blob/7dfe99616fc2f07c0017df2ba5fa884158f3ea8a/src/flake8/utils.py#L45>
|
||||||
|
pub(crate) fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
|
||||||
|
if value.trim().is_empty() {
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
let mut codes: Vec<PatternPrefixPair> = vec![];
|
||||||
|
let mut state = State::new();
|
||||||
|
for token in tokenize_files_to_codes_mapping(value) {
|
||||||
|
if matches!(token.token_name, TokenType::Comma | TokenType::Ws) {
|
||||||
|
state.seen_sep = true;
|
||||||
|
} else if !state.seen_colon {
|
||||||
|
if matches!(token.token_name, TokenType::Colon) {
|
||||||
|
state.seen_colon = true;
|
||||||
|
state.seen_sep = true;
|
||||||
|
} else if state.seen_sep && matches!(token.token_name, TokenType::File) {
|
||||||
|
state.filenames.push(token.src);
|
||||||
|
state.seen_sep = false;
|
||||||
|
} else {
|
||||||
|
bail!("Unexpected token: {:?}", token.token_name);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if matches!(token.token_name, TokenType::Eof) {
|
||||||
|
codes.extend(state.parse());
|
||||||
|
state = State::new();
|
||||||
|
} else if state.seen_sep && matches!(token.token_name, TokenType::Code) {
|
||||||
|
state.codes.push(token.src);
|
||||||
|
state.seen_sep = false;
|
||||||
|
} else if state.seen_sep && matches!(token.token_name, TokenType::File) {
|
||||||
|
codes.extend(state.parse());
|
||||||
|
state = State::new();
|
||||||
|
state.filenames.push(token.src);
|
||||||
|
state.seen_sep = false;
|
||||||
|
} else {
|
||||||
|
bail!("Unexpected token: {:?}", token.token_name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(codes)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Collect a list of `PatternPrefixPair` structs as a `BTreeMap`.
|
||||||
|
pub(crate) fn collect_per_file_ignores(
|
||||||
|
pairs: Vec<PatternPrefixPair>,
|
||||||
|
) -> FxHashMap<String, Vec<RuleSelector>> {
|
||||||
|
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
|
||||||
|
for pair in pairs {
|
||||||
|
per_file_ignores
|
||||||
|
.entry(pair.pattern)
|
||||||
|
.or_default()
|
||||||
|
.push(pair.prefix);
|
||||||
|
}
|
||||||
|
per_file_ignores
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use anyhow::Result;

    use ruff_linter::codes;
    use ruff_linter::registry::Linter;
    use ruff_linter::settings::types::PatternPrefixPair;
    use ruff_linter::RuleSelector;

    use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};

    #[test]
    fn it_parses_prefix_codes() {
        // Empty and whitespace-only inputs yield no selectors.
        let actual = parse_prefix_codes("");
        let expected: Vec<RuleSelector> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes(" ");
        let expected: Vec<RuleSelector> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401");
        let expected = vec![codes::Pyflakes::_401.into()];
        assert_eq!(actual, expected);

        // Trailing separators are ignored.
        let actual = parse_prefix_codes("F401,");
        let expected = vec![codes::Pyflakes::_401.into()];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401,E501");
        let expected = vec![
            codes::Pyflakes::_401.into(),
            codes::Pycodestyle::E501.into(),
        ];
        assert_eq!(actual, expected);

        // Whitespace around the comma is tolerated.
        let actual = parse_prefix_codes("F401, E501");
        let expected = vec![
            codes::Pyflakes::_401.into(),
            codes::Pycodestyle::E501.into(),
        ];
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_parses_strings() {
        let actual = parse_strings("");
        let expected: Vec<String> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_strings(" ");
        let expected: Vec<String> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py");
        let expected = vec!["__init__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py,");
        let expected = vec!["__init__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py,__main__.py");
        let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py, __main__.py");
        let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_parse_files_to_codes_mapping() -> Result<()> {
        let actual = parse_files_to_codes_mapping("")?;
        let expected: Vec<PatternPrefixPair> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_files_to_codes_mapping(" ")?;
        let expected: Vec<PatternPrefixPair> = vec![];
        assert_eq!(actual, expected);

        // Ex) locust
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
            locust/test/*: F841
            examples/*: F841
            *.pyi: E302,E704"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "locust/test/*".to_string(),
                prefix: codes::Pyflakes::_841.into(),
            },
            PatternPrefixPair {
                pattern: "examples/*".to_string(),
                prefix: codes::Pyflakes::_841.into(),
            },
        ];
        assert_eq!(actual, expected);

        // Ex) celery
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
            t/*,setup.py,examples/*,docs/*,extra/*:
                D,"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "t/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "setup.py".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "examples/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "docs/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "extra/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
        ];
        assert_eq!(actual, expected);

        // Ex) scrapy
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
            scrapy/__init__.py:E402
            scrapy/core/downloader/handlers/http.py:F401
            scrapy/http/__init__.py:F401
            scrapy/linkextractors/__init__.py:E402,F401
            scrapy/selector/__init__.py:F401
            scrapy/spiders/__init__.py:E402,F401
            scrapy/utils/url.py:F403,F405
            tests/test_loader.py:E741"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "scrapy/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/http/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/linkextractors/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/linkextractors/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/selector/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/spiders/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/spiders/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/utils/url.py".to_string(),
                prefix: codes::Pyflakes::_403.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/utils/url.py".to_string(),
                prefix: codes::Pyflakes::_405.into(),
            },
            PatternPrefixPair {
                pattern: "tests/test_loader.py".to_string(),
                prefix: codes::Pycodestyle::E741.into(),
            },
        ];
        assert_eq!(actual, expected);

        Ok(())
    }
}
|
||||||
10
crates/flake8_to_ruff/src/pep621.rs
Normal file
10
crates/flake8_to_ruff/src/pep621.rs
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
//! Extract PEP 621 configuration settings from a pyproject.toml.
|
||||||
|
|
||||||
|
use pep440_rs::VersionSpecifiers;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
|
||||||
|
pub(crate) struct Project {
|
||||||
|
#[serde(alias = "requires-python", alias = "requires_python")]
|
||||||
|
pub(crate) requires_python: Option<VersionSpecifiers>,
|
||||||
|
}
|
||||||
368
crates/flake8_to_ruff/src/plugin.rs
Normal file
368
crates/flake8_to_ruff/src/plugin.rs
Normal file
@@ -0,0 +1,368 @@
|
|||||||
|
use std::collections::{BTreeSet, HashMap, HashSet};
|
||||||
|
use std::fmt;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use anyhow::anyhow;
|
||||||
|
use ruff_linter::registry::Linter;
|
||||||
|
use ruff_linter::rule_selector::PreviewOptions;
|
||||||
|
use ruff_linter::RuleSelector;
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
|
||||||
|
pub enum Plugin {
|
||||||
|
Flake82020,
|
||||||
|
Flake8Annotations,
|
||||||
|
Flake8Bandit,
|
||||||
|
Flake8BlindExcept,
|
||||||
|
Flake8BooleanTrap,
|
||||||
|
Flake8Bugbear,
|
||||||
|
Flake8Builtins,
|
||||||
|
Flake8Commas,
|
||||||
|
Flake8Comprehensions,
|
||||||
|
Flake8Datetimez,
|
||||||
|
Flake8Debugger,
|
||||||
|
Flake8Docstrings,
|
||||||
|
Flake8Eradicate,
|
||||||
|
Flake8ErrMsg,
|
||||||
|
Flake8Executable,
|
||||||
|
Flake8ImplicitStrConcat,
|
||||||
|
Flake8ImportConventions,
|
||||||
|
Flake8NoPep420,
|
||||||
|
Flake8Pie,
|
||||||
|
Flake8Print,
|
||||||
|
Flake8PytestStyle,
|
||||||
|
Flake8Quotes,
|
||||||
|
Flake8Return,
|
||||||
|
Flake8Simplify,
|
||||||
|
Flake8TidyImports,
|
||||||
|
Flake8TypeChecking,
|
||||||
|
Flake8UnusedArguments,
|
||||||
|
Flake8UsePathlib,
|
||||||
|
McCabe,
|
||||||
|
PEP8Naming,
|
||||||
|
PandasVet,
|
||||||
|
Pyupgrade,
|
||||||
|
Tryceratops,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromStr for Plugin {
|
||||||
|
type Err = anyhow::Error;
|
||||||
|
|
||||||
|
fn from_str(string: &str) -> Result<Self, Self::Err> {
|
||||||
|
match string {
|
||||||
|
"flake8-2020" => Ok(Plugin::Flake82020),
|
||||||
|
"flake8-annotations" => Ok(Plugin::Flake8Annotations),
|
||||||
|
"flake8-bandit" => Ok(Plugin::Flake8Bandit),
|
||||||
|
"flake8-blind-except" => Ok(Plugin::Flake8BlindExcept),
|
||||||
|
"flake8-boolean-trap" => Ok(Plugin::Flake8BooleanTrap),
|
||||||
|
"flake8-bugbear" => Ok(Plugin::Flake8Bugbear),
|
||||||
|
"flake8-builtins" => Ok(Plugin::Flake8Builtins),
|
||||||
|
"flake8-commas" => Ok(Plugin::Flake8Commas),
|
||||||
|
"flake8-comprehensions" => Ok(Plugin::Flake8Comprehensions),
|
||||||
|
"flake8-datetimez" => Ok(Plugin::Flake8Datetimez),
|
||||||
|
"flake8-debugger" => Ok(Plugin::Flake8Debugger),
|
||||||
|
"flake8-docstrings" => Ok(Plugin::Flake8Docstrings),
|
||||||
|
"flake8-eradicate" => Ok(Plugin::Flake8Eradicate),
|
||||||
|
"flake8-errmsg" => Ok(Plugin::Flake8ErrMsg),
|
||||||
|
"flake8-executable" => Ok(Plugin::Flake8Executable),
|
||||||
|
"flake8-implicit-str-concat" => Ok(Plugin::Flake8ImplicitStrConcat),
|
||||||
|
"flake8-import-conventions" => Ok(Plugin::Flake8ImportConventions),
|
||||||
|
"flake8-no-pep420" => Ok(Plugin::Flake8NoPep420),
|
||||||
|
"flake8-pie" => Ok(Plugin::Flake8Pie),
|
||||||
|
"flake8-print" => Ok(Plugin::Flake8Print),
|
||||||
|
"flake8-pytest-style" => Ok(Plugin::Flake8PytestStyle),
|
||||||
|
"flake8-quotes" => Ok(Plugin::Flake8Quotes),
|
||||||
|
"flake8-return" => Ok(Plugin::Flake8Return),
|
||||||
|
"flake8-simplify" => Ok(Plugin::Flake8Simplify),
|
||||||
|
"flake8-tidy-imports" => Ok(Plugin::Flake8TidyImports),
|
||||||
|
"flake8-type-checking" => Ok(Plugin::Flake8TypeChecking),
|
||||||
|
"flake8-unused-arguments" => Ok(Plugin::Flake8UnusedArguments),
|
||||||
|
"flake8-use-pathlib" => Ok(Plugin::Flake8UsePathlib),
|
||||||
|
"mccabe" => Ok(Plugin::McCabe),
|
||||||
|
"pep8-naming" => Ok(Plugin::PEP8Naming),
|
||||||
|
"pandas-vet" => Ok(Plugin::PandasVet),
|
||||||
|
"pyupgrade" => Ok(Plugin::Pyupgrade),
|
||||||
|
"tryceratops" => Ok(Plugin::Tryceratops),
|
||||||
|
_ => Err(anyhow!("Unknown plugin: {string}")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Debug for Plugin {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"{}",
|
||||||
|
match self {
|
||||||
|
Plugin::Flake82020 => "flake8-2020",
|
||||||
|
Plugin::Flake8Annotations => "flake8-annotations",
|
||||||
|
Plugin::Flake8Bandit => "flake8-bandit",
|
||||||
|
Plugin::Flake8BlindExcept => "flake8-blind-except",
|
||||||
|
Plugin::Flake8BooleanTrap => "flake8-boolean-trap",
|
||||||
|
Plugin::Flake8Bugbear => "flake8-bugbear",
|
||||||
|
Plugin::Flake8Builtins => "flake8-builtins",
|
||||||
|
Plugin::Flake8Commas => "flake8-commas",
|
||||||
|
Plugin::Flake8Comprehensions => "flake8-comprehensions",
|
||||||
|
Plugin::Flake8Datetimez => "flake8-datetimez",
|
||||||
|
Plugin::Flake8Debugger => "flake8-debugger",
|
||||||
|
Plugin::Flake8Docstrings => "flake8-docstrings",
|
||||||
|
Plugin::Flake8Eradicate => "flake8-eradicate",
|
||||||
|
Plugin::Flake8ErrMsg => "flake8-errmsg",
|
||||||
|
Plugin::Flake8Executable => "flake8-executable",
|
||||||
|
Plugin::Flake8ImplicitStrConcat => "flake8-implicit-str-concat",
|
||||||
|
Plugin::Flake8ImportConventions => "flake8-import-conventions",
|
||||||
|
Plugin::Flake8NoPep420 => "flake8-no-pep420",
|
||||||
|
Plugin::Flake8Pie => "flake8-pie",
|
||||||
|
Plugin::Flake8Print => "flake8-print",
|
||||||
|
Plugin::Flake8PytestStyle => "flake8-pytest-style",
|
||||||
|
Plugin::Flake8Quotes => "flake8-quotes",
|
||||||
|
Plugin::Flake8Return => "flake8-return",
|
||||||
|
Plugin::Flake8Simplify => "flake8-simplify",
|
||||||
|
Plugin::Flake8TidyImports => "flake8-tidy-imports",
|
||||||
|
Plugin::Flake8TypeChecking => "flake8-type-checking",
|
||||||
|
Plugin::Flake8UnusedArguments => "flake8-unused-arguments",
|
||||||
|
Plugin::Flake8UsePathlib => "flake8-use-pathlib",
|
||||||
|
Plugin::McCabe => "mccabe",
|
||||||
|
Plugin::PEP8Naming => "pep8-naming",
|
||||||
|
Plugin::PandasVet => "pandas-vet",
|
||||||
|
Plugin::Pyupgrade => "pyupgrade",
|
||||||
|
Plugin::Tryceratops => "tryceratops",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&Plugin> for Linter {
|
||||||
|
fn from(plugin: &Plugin) -> Self {
|
||||||
|
match plugin {
|
||||||
|
Plugin::Flake82020 => Linter::Flake82020,
|
||||||
|
Plugin::Flake8Annotations => Linter::Flake8Annotations,
|
||||||
|
Plugin::Flake8Bandit => Linter::Flake8Bandit,
|
||||||
|
Plugin::Flake8BlindExcept => Linter::Flake8BlindExcept,
|
||||||
|
Plugin::Flake8BooleanTrap => Linter::Flake8BooleanTrap,
|
||||||
|
Plugin::Flake8Bugbear => Linter::Flake8Bugbear,
|
||||||
|
Plugin::Flake8Builtins => Linter::Flake8Builtins,
|
||||||
|
Plugin::Flake8Commas => Linter::Flake8Commas,
|
||||||
|
Plugin::Flake8Comprehensions => Linter::Flake8Comprehensions,
|
||||||
|
Plugin::Flake8Datetimez => Linter::Flake8Datetimez,
|
||||||
|
Plugin::Flake8Debugger => Linter::Flake8Debugger,
|
||||||
|
Plugin::Flake8Docstrings => Linter::Pydocstyle,
|
||||||
|
Plugin::Flake8Eradicate => Linter::Eradicate,
|
||||||
|
Plugin::Flake8ErrMsg => Linter::Flake8ErrMsg,
|
||||||
|
Plugin::Flake8Executable => Linter::Flake8Executable,
|
||||||
|
Plugin::Flake8ImplicitStrConcat => Linter::Flake8ImplicitStrConcat,
|
||||||
|
Plugin::Flake8ImportConventions => Linter::Flake8ImportConventions,
|
||||||
|
Plugin::Flake8NoPep420 => Linter::Flake8NoPep420,
|
||||||
|
Plugin::Flake8Pie => Linter::Flake8Pie,
|
||||||
|
Plugin::Flake8Print => Linter::Flake8Print,
|
||||||
|
Plugin::Flake8PytestStyle => Linter::Flake8PytestStyle,
|
||||||
|
Plugin::Flake8Quotes => Linter::Flake8Quotes,
|
||||||
|
Plugin::Flake8Return => Linter::Flake8Return,
|
||||||
|
Plugin::Flake8Simplify => Linter::Flake8Simplify,
|
||||||
|
Plugin::Flake8TidyImports => Linter::Flake8TidyImports,
|
||||||
|
Plugin::Flake8TypeChecking => Linter::Flake8TypeChecking,
|
||||||
|
Plugin::Flake8UnusedArguments => Linter::Flake8UnusedArguments,
|
||||||
|
Plugin::Flake8UsePathlib => Linter::Flake8UsePathlib,
|
||||||
|
Plugin::McCabe => Linter::McCabe,
|
||||||
|
Plugin::PEP8Naming => Linter::PEP8Naming,
|
||||||
|
Plugin::PandasVet => Linter::PandasVet,
|
||||||
|
Plugin::Pyupgrade => Linter::Pyupgrade,
|
||||||
|
Plugin::Tryceratops => Linter::Tryceratops,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Infer the enabled plugins based on user-provided options.
|
||||||
|
///
|
||||||
|
/// For example, if the user specified a `mypy-init-return` setting, we should
|
||||||
|
/// infer that `flake8-annotations` is active.
|
||||||
|
pub(crate) fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> Vec<Plugin> {
|
||||||
|
let mut plugins = BTreeSet::new();
|
||||||
|
for key in flake8.keys() {
|
||||||
|
match key.as_str() {
|
||||||
|
// flake8-annotations
|
||||||
|
"suppress-none-returning" | "suppress_none_returning" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"suppress-dummy-args" | "suppress_dummy_args" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"allow-untyped-defs" | "allow_untyped_defs" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"allow-untyped-nested" | "allow_untyped_nested" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"mypy-init-return" | "mypy_init_return" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"dispatch-decorators" | "dispatch_decorators" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"overload-decorators" | "overload_decorators" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
"allow-star-arg-any" | "allow_star_arg_any" => {
|
||||||
|
plugins.insert(Plugin::Flake8Annotations);
|
||||||
|
}
|
||||||
|
// flake8-bugbear
|
||||||
|
"extend-immutable-calls" | "extend_immutable_calls" => {
|
||||||
|
plugins.insert(Plugin::Flake8Bugbear);
|
||||||
|
}
|
||||||
|
// flake8-builtins
|
||||||
|
"builtins-ignorelist" | "builtins_ignorelist" => {
|
||||||
|
plugins.insert(Plugin::Flake8Builtins);
|
||||||
|
}
|
||||||
|
// flake8-docstrings
|
||||||
|
"docstring-convention" | "docstring_convention" => {
|
||||||
|
plugins.insert(Plugin::Flake8Docstrings);
|
||||||
|
}
|
||||||
|
// flake8-eradicate
|
||||||
|
"eradicate-aggressive" | "eradicate_aggressive" => {
|
||||||
|
plugins.insert(Plugin::Flake8Eradicate);
|
||||||
|
}
|
||||||
|
"eradicate-whitelist" | "eradicate_whitelist" => {
|
||||||
|
plugins.insert(Plugin::Flake8Eradicate);
|
||||||
|
}
|
||||||
|
"eradicate-whitelist-extend" | "eradicate_whitelist_extend" => {
|
||||||
|
plugins.insert(Plugin::Flake8Eradicate);
|
||||||
|
}
|
||||||
|
// flake8-pytest-style
|
||||||
|
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
|
||||||
|
plugins.insert(Plugin::Flake8PytestStyle);
|
||||||
|
}
|
||||||
|
// flake8-quotes
|
||||||
|
"quotes" | "inline-quotes" | "inline_quotes" => {
|
||||||
|
plugins.insert(Plugin::Flake8Quotes);
|
||||||
|
}
|
||||||
|
"multiline-quotes" | "multiline_quotes" => {
|
||||||
|
plugins.insert(Plugin::Flake8Quotes);
|
||||||
|
}
|
||||||
|
"docstring-quotes" | "docstring_quotes" => {
|
||||||
|
plugins.insert(Plugin::Flake8Quotes);
|
||||||
|
}
|
||||||
|
"avoid-escape" | "avoid_escape" => {
|
||||||
|
plugins.insert(Plugin::Flake8Quotes);
|
||||||
|
}
|
||||||
|
// flake8-tidy-imports
|
||||||
|
"ban-relative-imports" | "ban_relative_imports" => {
|
||||||
|
plugins.insert(Plugin::Flake8TidyImports);
|
||||||
|
}
|
||||||
|
"banned-modules" | "banned_modules" => {
|
||||||
|
plugins.insert(Plugin::Flake8TidyImports);
|
||||||
|
}
|
||||||
|
// mccabe
|
||||||
|
"max-complexity" | "max_complexity" => {
|
||||||
|
plugins.insert(Plugin::McCabe);
|
||||||
|
}
|
||||||
|
// pep8-naming
|
||||||
|
"ignore-names" | "ignore_names" => {
|
||||||
|
plugins.insert(Plugin::PEP8Naming);
|
||||||
|
}
|
||||||
|
"classmethod-decorators" | "classmethod_decorators" => {
|
||||||
|
plugins.insert(Plugin::PEP8Naming);
|
||||||
|
}
|
||||||
|
"staticmethod-decorators" | "staticmethod_decorators" => {
|
||||||
|
plugins.insert(Plugin::PEP8Naming);
|
||||||
|
}
|
||||||
|
"max-string-length" | "max_string_length" => {
|
||||||
|
plugins.insert(Plugin::Flake8ErrMsg);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Vec::from_iter(plugins)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Infer the enabled plugins based on the referenced prefixes.
|
||||||
|
///
|
||||||
|
/// For example, if the user ignores `ANN101`, we should infer that
|
||||||
|
/// `flake8-annotations` is active.
|
||||||
|
pub(crate) fn infer_plugins_from_codes(selectors: &HashSet<RuleSelector>) -> Vec<Plugin> {
|
||||||
|
// Ignore cases in which we've knowingly changed rule prefixes.
|
||||||
|
[
|
||||||
|
Plugin::Flake82020,
|
||||||
|
Plugin::Flake8Annotations,
|
||||||
|
Plugin::Flake8Bandit,
|
||||||
|
// Plugin::Flake8BlindExcept,
|
||||||
|
Plugin::Flake8BooleanTrap,
|
||||||
|
Plugin::Flake8Bugbear,
|
||||||
|
Plugin::Flake8Builtins,
|
||||||
|
// Plugin::Flake8Commas,
|
||||||
|
Plugin::Flake8Comprehensions,
|
||||||
|
Plugin::Flake8Datetimez,
|
||||||
|
Plugin::Flake8Debugger,
|
||||||
|
Plugin::Flake8Docstrings,
|
||||||
|
// Plugin::Flake8Eradicate,
|
||||||
|
Plugin::Flake8ErrMsg,
|
||||||
|
Plugin::Flake8Executable,
|
||||||
|
Plugin::Flake8ImplicitStrConcat,
|
||||||
|
// Plugin::Flake8ImportConventions,
|
||||||
|
Plugin::Flake8NoPep420,
|
||||||
|
Plugin::Flake8Pie,
|
||||||
|
Plugin::Flake8Print,
|
||||||
|
Plugin::Flake8PytestStyle,
|
||||||
|
Plugin::Flake8Quotes,
|
||||||
|
Plugin::Flake8Return,
|
||||||
|
Plugin::Flake8Simplify,
|
||||||
|
// Plugin::Flake8TidyImports,
|
||||||
|
// Plugin::Flake8TypeChecking,
|
||||||
|
Plugin::Flake8UnusedArguments,
|
||||||
|
// Plugin::Flake8UsePathlib,
|
||||||
|
Plugin::McCabe,
|
||||||
|
Plugin::PEP8Naming,
|
||||||
|
Plugin::PandasVet,
|
||||||
|
Plugin::Tryceratops,
|
||||||
|
]
|
||||||
|
.into_iter()
|
||||||
|
.filter(|plugin| {
|
||||||
|
for selector in selectors {
|
||||||
|
if selector
|
||||||
|
.rules(&PreviewOptions::default())
|
||||||
|
.any(|rule| Linter::from(plugin).rules().any(|r| r == rule))
|
||||||
|
{
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
false
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use super::{infer_plugins_from_options, Plugin};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn it_infers_plugins() {
|
||||||
|
let actual = infer_plugins_from_options(&HashMap::from([(
|
||||||
|
"inline-quotes".to_string(),
|
||||||
|
Some("single".to_string()),
|
||||||
|
)]));
|
||||||
|
let expected = vec![Plugin::Flake8Quotes];
|
||||||
|
assert_eq!(actual, expected);
|
||||||
|
|
||||||
|
let actual = infer_plugins_from_options(&HashMap::from([(
|
||||||
|
"staticmethod-decorators".to_string(),
|
||||||
|
Some("[]".to_string()),
|
||||||
|
)]));
|
||||||
|
let expected = vec![Plugin::PEP8Naming];
|
||||||
|
assert_eq!(actual, expected);
|
||||||
|
}
|
||||||
|
}
|
||||||
26
crates/flake8_to_ruff/src/pyproject.rs
Normal file
26
crates/flake8_to_ruff/src/pyproject.rs
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use super::black::Black;
|
||||||
|
use super::isort::Isort;
|
||||||
|
use super::pep621::Project;
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub(crate) struct Tools {
|
||||||
|
pub(crate) black: Option<Black>,
|
||||||
|
pub(crate) isort: Option<Isort>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub(crate) struct Pyproject {
|
||||||
|
pub(crate) tool: Option<Tools>,
|
||||||
|
pub(crate) project: Option<Project>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
|
||||||
|
let contents = std::fs::read_to_string(path)?;
|
||||||
|
let pyproject = toml::from_str::<Pyproject>(&contents)?;
|
||||||
|
Ok(pyproject)
|
||||||
|
}
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "red_knot"
|
|
||||||
version = "0.0.0"
|
|
||||||
edition.workspace = true
|
|
||||||
rust-version.workspace = true
|
|
||||||
homepage.workspace = true
|
|
||||||
documentation.workspace = true
|
|
||||||
repository.workspace = true
|
|
||||||
authors.workspace = true
|
|
||||||
license.workspace = true
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
red_knot_module_resolver = { workspace = true }
|
|
||||||
red_knot_python_semantic = { workspace = true }
|
|
||||||
|
|
||||||
ruff_db = { workspace = true, features = ["os", "cache"] }
|
|
||||||
ruff_python_ast = { workspace = true }
|
|
||||||
|
|
||||||
anyhow = { workspace = true }
|
|
||||||
clap = { workspace = true, features = ["wrap_help"] }
|
|
||||||
countme = { workspace = true, features = ["enable"] }
|
|
||||||
crossbeam = { workspace = true }
|
|
||||||
ctrlc = { version = "3.4.4" }
|
|
||||||
notify = { workspace = true }
|
|
||||||
rayon = { workspace = true }
|
|
||||||
rustc-hash = { workspace = true }
|
|
||||||
salsa = { workspace = true }
|
|
||||||
tracing = { workspace = true }
|
|
||||||
tracing-subscriber = { workspace = true }
|
|
||||||
tracing-tree = { workspace = true }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
tempfile = { workspace = true }
|
|
||||||
|
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
pub(crate) mod target_version;
|
|
||||||
pub(crate) mod verbosity;
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
/// Enumeration of all supported Python versions
|
|
||||||
///
|
|
||||||
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
|
|
||||||
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
|
|
||||||
pub enum TargetVersion {
|
|
||||||
Py37,
|
|
||||||
#[default]
|
|
||||||
Py38,
|
|
||||||
Py39,
|
|
||||||
Py310,
|
|
||||||
Py311,
|
|
||||||
Py312,
|
|
||||||
Py313,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for TargetVersion {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
ruff_db::program::TargetVersion::from(*self).fmt(f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<TargetVersion> for ruff_db::program::TargetVersion {
|
|
||||||
fn from(value: TargetVersion) -> Self {
|
|
||||||
match value {
|
|
||||||
TargetVersion::Py37 => Self::Py37,
|
|
||||||
TargetVersion::Py38 => Self::Py38,
|
|
||||||
TargetVersion::Py39 => Self::Py39,
|
|
||||||
TargetVersion::Py310 => Self::Py310,
|
|
||||||
TargetVersion::Py311 => Self::Py311,
|
|
||||||
TargetVersion::Py312 => Self::Py312,
|
|
||||||
TargetVersion::Py313 => Self::Py313,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
|
||||||
pub(crate) enum VerbosityLevel {
|
|
||||||
Info,
|
|
||||||
Debug,
|
|
||||||
Trace,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Logging flags to `#[command(flatten)]` into your CLI
|
|
||||||
#[derive(clap::Args, Debug, Clone, Default)]
|
|
||||||
#[command(about = None, long_about = None)]
|
|
||||||
pub(crate) struct Verbosity {
|
|
||||||
#[arg(
|
|
||||||
long,
|
|
||||||
short = 'v',
|
|
||||||
help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
|
|
||||||
action = clap::ArgAction::Count,
|
|
||||||
global = true,
|
|
||||||
)]
|
|
||||||
verbose: u8,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Verbosity {
|
|
||||||
/// Returns the verbosity level based on the number of `-v` flags.
|
|
||||||
///
|
|
||||||
/// Returns `None` if the user did not specify any verbosity flags.
|
|
||||||
pub(crate) fn level(&self) -> Option<VerbosityLevel> {
|
|
||||||
match self.verbose {
|
|
||||||
0 => None,
|
|
||||||
1 => Some(VerbosityLevel::Info),
|
|
||||||
2 => Some(VerbosityLevel::Debug),
|
|
||||||
_ => Some(VerbosityLevel::Trace),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,260 +0,0 @@
|
|||||||
use std::panic::{AssertUnwindSafe, RefUnwindSafe};
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use salsa::{Cancelled, Database, DbWithJar};
|
|
||||||
|
|
||||||
use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
|
|
||||||
use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
|
|
||||||
use ruff_db::files::{File, Files};
|
|
||||||
use ruff_db::program::{Program, ProgramSettings};
|
|
||||||
use ruff_db::system::System;
|
|
||||||
use ruff_db::vendored::VendoredFileSystem;
|
|
||||||
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
|
|
||||||
|
|
||||||
use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled, Diagnostics};
|
|
||||||
use crate::workspace::{check_file, Package, Package_files, Workspace, WorkspaceMetadata};
|
|
||||||
|
|
||||||
mod changes;
|
|
||||||
|
|
||||||
pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}
|
|
||||||
|
|
||||||
#[salsa::jar(db=Db)]
|
|
||||||
pub struct Jar(
|
|
||||||
Workspace,
|
|
||||||
Package,
|
|
||||||
Package_files,
|
|
||||||
lint_syntax,
|
|
||||||
lint_semantic,
|
|
||||||
unwind_if_cancelled,
|
|
||||||
);
|
|
||||||
|
|
||||||
#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
|
|
||||||
pub struct RootDatabase {
|
|
||||||
workspace: Option<Workspace>,
|
|
||||||
storage: salsa::Storage<RootDatabase>,
|
|
||||||
files: Files,
|
|
||||||
system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl RootDatabase {
|
|
||||||
pub fn new<S>(workspace: WorkspaceMetadata, settings: ProgramSettings, system: S) -> Self
|
|
||||||
where
|
|
||||||
S: System + 'static + Send + Sync + RefUnwindSafe,
|
|
||||||
{
|
|
||||||
let mut db = Self {
|
|
||||||
workspace: None,
|
|
||||||
storage: salsa::Storage::default(),
|
|
||||||
files: Files::default(),
|
|
||||||
system: Arc::new(system),
|
|
||||||
};
|
|
||||||
|
|
||||||
let workspace = Workspace::from_metadata(&db, workspace);
|
|
||||||
// Initialize the `Program` singleton
|
|
||||||
Program::from_settings(&db, settings);
|
|
||||||
|
|
||||||
db.workspace = Some(workspace);
|
|
||||||
db
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn workspace(&self) -> Workspace {
|
|
||||||
// SAFETY: The workspace is always initialized in `new`.
|
|
||||||
self.workspace.unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Checks all open files in the workspace and its dependencies.
|
|
||||||
pub fn check(&self) -> Result<Vec<String>, Cancelled> {
|
|
||||||
self.with_db(|db| db.workspace().check(db))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
|
|
||||||
self.with_db(|db| check_file(db, file))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
|
|
||||||
where
|
|
||||||
F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
|
|
||||||
{
|
|
||||||
// The `AssertUnwindSafe` here looks scary, but is a consequence of Salsa's design.
|
|
||||||
// Salsa uses panics to implement cancellation and to recover from cycles. However, the Salsa
|
|
||||||
// storage isn't `UnwindSafe` or `RefUnwindSafe` because its dependencies `DashMap` and `parking_lot::*` aren't
|
|
||||||
// unwind safe.
|
|
||||||
//
|
|
||||||
// Having to use `AssertUnwindSafe` isn't as big as a deal as it might seem because
|
|
||||||
// the `UnwindSafe` and `RefUnwindSafe` traits are designed to catch logical bugs.
|
|
||||||
// They don't protect against [UB](https://internals.rust-lang.org/t/pre-rfc-deprecating-unwindsafe/15974).
|
|
||||||
// On top of that, `Cancelled` only catches specific Salsa-panics and propagates all other panics.
|
|
||||||
//
|
|
||||||
// That still leaves us with possible logical bugs in two sources:
|
|
||||||
// * In Salsa itself: This must be considered a bug in Salsa and needs fixing upstream.
|
|
||||||
// Reviewing Salsa code specifically around unwind safety seems doable.
|
|
||||||
// * Our code: This is the main concern. Luckily, it only involves code that uses internal mutability
|
|
||||||
// and calls into Salsa queries when mutating the internal state. Using `AssertUnwindSafe`
|
|
||||||
// certainly makes it harder to catch these issues in our user code.
|
|
||||||
//
|
|
||||||
// For now, this is the only solution at hand unless Salsa decides to change its design.
|
|
||||||
// [Zulip support thread](https://salsa.zulipchat.com/#narrow/stream/145099-general/topic/How.20to.20use.20.60Cancelled.3A.3Acatch.60)
|
|
||||||
let db = &AssertUnwindSafe(self);
|
|
||||||
Cancelled::catch(|| f(db))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Upcast<dyn SemanticDb> for RootDatabase {
|
|
||||||
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
fn upcast_mut(&mut self) -> &mut (dyn SemanticDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Upcast<dyn SourceDb> for RootDatabase {
|
|
||||||
fn upcast(&self) -> &(dyn SourceDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Upcast<dyn ResolverDb> for RootDatabase {
|
|
||||||
fn upcast(&self) -> &(dyn ResolverDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
fn upcast_mut(&mut self) -> &mut (dyn ResolverDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ResolverDb for RootDatabase {}
|
|
||||||
|
|
||||||
impl SemanticDb for RootDatabase {}
|
|
||||||
|
|
||||||
impl SourceDb for RootDatabase {
|
|
||||||
fn vendored(&self) -> &VendoredFileSystem {
|
|
||||||
vendored_typeshed_stubs()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn system(&self) -> &dyn System {
|
|
||||||
&*self.system
|
|
||||||
}
|
|
||||||
|
|
||||||
fn files(&self) -> &Files {
|
|
||||||
&self.files
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Database for RootDatabase {}
|
|
||||||
|
|
||||||
impl Db for RootDatabase {}
|
|
||||||
|
|
||||||
impl salsa::ParallelDatabase for RootDatabase {
|
|
||||||
fn snapshot(&self) -> salsa::Snapshot<Self> {
|
|
||||||
salsa::Snapshot::new(Self {
|
|
||||||
workspace: self.workspace,
|
|
||||||
storage: self.storage.snapshot(),
|
|
||||||
files: self.files.snapshot(),
|
|
||||||
system: self.system.clone(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
pub(crate) mod tests {
|
|
||||||
use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
|
|
||||||
use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
|
|
||||||
use ruff_db::files::Files;
|
|
||||||
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
|
|
||||||
use ruff_db::vendored::VendoredFileSystem;
|
|
||||||
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
|
|
||||||
|
|
||||||
use super::{Db, Jar};
|
|
||||||
|
|
||||||
#[salsa::db(Jar, SemanticJar, ResolverJar, SourceJar)]
|
|
||||||
pub(crate) struct TestDb {
|
|
||||||
storage: salsa::Storage<Self>,
|
|
||||||
files: Files,
|
|
||||||
system: TestSystem,
|
|
||||||
vendored: VendoredFileSystem,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TestDb {
|
|
||||||
pub(crate) fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
storage: salsa::Storage::default(),
|
|
||||||
system: TestSystem::default(),
|
|
||||||
vendored: vendored_typeshed_stubs().snapshot(),
|
|
||||||
files: Files::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DbWithTestSystem for TestDb {
|
|
||||||
fn test_system(&self) -> &TestSystem {
|
|
||||||
&self.system
|
|
||||||
}
|
|
||||||
|
|
||||||
fn test_system_mut(&mut self) -> &mut TestSystem {
|
|
||||||
&mut self.system
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SourceDb for TestDb {
|
|
||||||
fn vendored(&self) -> &VendoredFileSystem {
|
|
||||||
&self.vendored
|
|
||||||
}
|
|
||||||
|
|
||||||
fn system(&self) -> &dyn System {
|
|
||||||
&self.system
|
|
||||||
}
|
|
||||||
|
|
||||||
fn files(&self) -> &Files {
|
|
||||||
&self.files
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Upcast<dyn SemanticDb> for TestDb {
|
|
||||||
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
fn upcast_mut(&mut self) -> &mut (dyn SemanticDb + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Allows queries written against `dyn SourceDb` to accept a `TestDb`.
impl Upcast<dyn SourceDb> for TestDb {
    fn upcast(&self) -> &(dyn SourceDb + 'static) {
        self
    }
    fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
        self
    }
}
|
|
||||||
|
|
||||||
// Allows queries written against `dyn ResolverDb` to accept a `TestDb`.
impl Upcast<dyn ResolverDb> for TestDb {
    fn upcast(&self) -> &(dyn ResolverDb + 'static) {
        self
    }
    fn upcast_mut(&mut self) -> &mut (dyn ResolverDb + 'static) {
        self
    }
}
|
|
||||||
|
|
||||||
// Marker-trait impls wiring `TestDb` into each crate's `Db` hierarchy;
// all required behavior comes from the supertrait impls above.
impl red_knot_module_resolver::Db for TestDb {}
impl red_knot_python_semantic::Db for TestDb {}
impl Db for TestDb {}

impl salsa::Database for TestDb {}
|
|
||||||
|
|
||||||
impl salsa::ParallelDatabase for TestDb {
    /// Creates a read-only snapshot that can be moved to another thread.
    ///
    /// Each component provides its own cheap `snapshot`; salsa's storage
    /// snapshot blocks until pending mutations settle.
    fn snapshot(&self) -> salsa::Snapshot<Self> {
        salsa::Snapshot::new(Self {
            storage: self.storage.snapshot(),
            files: self.files.snapshot(),
            system: self.system.snapshot(),
            vendored: self.vendored.snapshot(),
        })
    }
}
|
|
||||||
}
|
|
||||||
@@ -1,190 +0,0 @@
|
|||||||
use rustc_hash::FxHashSet;
|
|
||||||
|
|
||||||
use ruff_db::files::{system_path_to_file, File, Files};
|
|
||||||
use ruff_db::system::walk_directory::WalkState;
|
|
||||||
use ruff_db::system::SystemPath;
|
|
||||||
use ruff_db::Db;
|
|
||||||
|
|
||||||
use crate::db::RootDatabase;
|
|
||||||
use crate::watch;
|
|
||||||
use crate::watch::{CreatedKind, DeletedKind};
|
|
||||||
use crate::workspace::WorkspaceMetadata;
|
|
||||||
|
|
||||||
impl RootDatabase {
    /// Applies a batch of file-watcher events to the database.
    ///
    /// Syncs changed paths with salsa's file table, tracks which packages
    /// need their file sets reloaded, and — for structural changes (config
    /// files, deletions outside any package, rescans) — reloads the whole
    /// workspace metadata.
    #[tracing::instrument(level = "debug", skip(self, changes))]
    pub fn apply_changes(&mut self, changes: Vec<watch::ChangeEvent>) {
        let workspace = self.workspace();
        let workspace_path = workspace.root(self).to_path_buf();

        // Set when the workspace structure itself must be re-discovered.
        let mut workspace_change = false;
        // Packages that need reloading
        let mut changed_packages = FxHashSet::default();
        // Paths that were added
        let mut added_paths = FxHashSet::default();

        // Deduplicate the `sync` calls. Many file watchers emit multiple events for the same path.
        let mut synced_files = FxHashSet::default();
        let mut synced_recursively = FxHashSet::default();

        let mut sync_path = |db: &mut RootDatabase, path: &SystemPath| {
            if synced_files.insert(path.to_path_buf()) {
                File::sync_path(db, path);
            }
        };

        let mut sync_recursively = |db: &mut RootDatabase, path: &SystemPath| {
            if synced_recursively.insert(path.to_path_buf()) {
                Files::sync_recursively(db, path);
            }
        };

        for change in changes {
            if let Some(path) = change.path() {
                if matches!(
                    path.file_name(),
                    Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
                ) {
                    // Changes to ignore files or settings can change the workspace structure or add/remove files
                    // from packages.
                    if let Some(package) = workspace.package(self, path) {
                        changed_packages.insert(package);
                    } else {
                        workspace_change = true;
                    }

                    continue;
                }
            }

            match change {
                watch::ChangeEvent::Changed { path, kind: _ } => sync_path(self, &path),

                watch::ChangeEvent::Created { kind, path } => {
                    match kind {
                        CreatedKind::File => sync_path(self, &path),
                        CreatedKind::Directory | CreatedKind::Any => {
                            sync_recursively(self, &path);
                        }
                    }

                    if self.system().is_file(&path) {
                        // Add the parent directory because `walkdir` always visits explicitly passed files
                        // even if they match an exclude filter.
                        added_paths.insert(path.parent().unwrap().to_path_buf());
                    } else {
                        added_paths.insert(path);
                    }
                }

                watch::ChangeEvent::Deleted { kind, path } => {
                    let is_file = match kind {
                        DeletedKind::File => true,
                        DeletedKind::Directory => {
                            // file watchers emit an event for every deleted file. No need to scan the entire dir.
                            continue;
                        }
                        // `Any`: treat as a file only if we previously knew
                        // the path as an existing file.
                        DeletedKind::Any => self
                            .files
                            .try_system(self, &path)
                            .is_some_and(|file| file.exists(self)),
                    };

                    if is_file {
                        sync_path(self, &path);

                        if let Some(package) = workspace.package(self, &path) {
                            if let Some(file) = self.files().try_system(self, &path) {
                                package.remove_file(self, file);
                            }
                        }
                    } else {
                        sync_recursively(self, &path);

                        // TODO: Remove after converting `package.files()` to a salsa query.
                        if let Some(package) = workspace.package(self, &path) {
                            changed_packages.insert(package);
                        } else {
                            workspace_change = true;
                        }
                    }
                }

                watch::ChangeEvent::Rescan => {
                    // Watcher lost events; a full resync supersedes anything else.
                    workspace_change = true;
                    Files::sync_all(self);
                    break;
                }
            }
        }

        if workspace_change {
            match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
                Ok(metadata) => {
                    tracing::debug!("Reload workspace after structural change.");
                    // TODO: Handle changes in the program settings.
                    workspace.reload(self, metadata);
                }
                Err(error) => {
                    tracing::error!("Failed to load workspace, keep old workspace: {error}");
                }
            }

            // A full workspace reload subsumes the per-package work below.
            return;
        }

        let mut added_paths = added_paths.into_iter().filter(|path| {
            let Some(package) = workspace.package(self, path) else {
                return false;
            };

            // Skip packages that need reloading
            !changed_packages.contains(&package)
        });

        // Use directory walking to discover newly added files.
        if let Some(path) = added_paths.next() {
            let mut walker = self.system().walk_directory(&path);

            for extra_path in added_paths {
                walker = walker.add(&extra_path);
            }

            // Collected from the (possibly parallel) walker callbacks.
            let added_paths = std::sync::Mutex::new(Vec::default());

            walker.run(|| {
                Box::new(|entry| {
                    let Ok(entry) = entry else {
                        return WalkState::Continue;
                    };

                    if !entry.file_type().is_file() {
                        return WalkState::Continue;
                    }

                    let mut paths = added_paths.lock().unwrap();

                    paths.push(entry.into_path());

                    WalkState::Continue
                })
            });

            for path in added_paths.into_inner().unwrap() {
                let package = workspace.package(self, &path);
                let file = system_path_to_file(self, &path);

                if let (Some(package), Some(file)) = (package, file) {
                    package.add_file(self, file);
                }
            }
        }

        // Reload
        for package in changed_packages {
            package.reload_files(self);
        }
    }
}
|
|
||||||
|
|
||||||
// Placeholder test module; no watcher unit tests exist yet.
#[cfg(test)]
mod tests {}
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
use crate::db::Jar;
|
|
||||||
|
|
||||||
pub mod db;
|
|
||||||
pub mod lint;
|
|
||||||
pub mod watch;
|
|
||||||
pub mod workspace;
|
|
||||||
@@ -1,360 +0,0 @@
|
|||||||
use std::cell::RefCell;
|
|
||||||
use std::ops::Deref;
|
|
||||||
use std::time::Duration;
|
|
||||||
|
|
||||||
use tracing::trace_span;
|
|
||||||
|
|
||||||
use red_knot_module_resolver::ModuleName;
|
|
||||||
use red_knot_python_semantic::types::Type;
|
|
||||||
use red_knot_python_semantic::{HasTy, SemanticModel};
|
|
||||||
use ruff_db::files::File;
|
|
||||||
use ruff_db::parsed::{parsed_module, ParsedModule};
|
|
||||||
use ruff_db::source::{source_text, SourceText};
|
|
||||||
use ruff_python_ast as ast;
|
|
||||||
use ruff_python_ast::visitor::{walk_expr, walk_stmt, Visitor};
|
|
||||||
|
|
||||||
use crate::db::Db;
|
|
||||||
|
|
||||||
/// Workaround query to test for if the computation should be cancelled.
/// Ideally, push for Salsa to expose an API for testing if cancellation was requested.
///
/// Calling any salsa query unwinds the thread if a cancellation is pending,
/// which is exactly the side effect long-running lints rely on here.
#[salsa::tracked]
#[allow(unused_variables)]
pub(crate) fn unwind_if_cancelled(db: &dyn Db) {}
|
|
||||||
|
|
||||||
/// Runs the purely syntactic lints (line length, string quote style) over
/// `file_id` and returns the collected diagnostics, memoized by salsa.
#[salsa::tracked(return_ref)]
pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Diagnostics {
    // Debugging aid: artificially slow this query down to exercise cancellation.
    #[allow(clippy::print_stdout)]
    if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
        for i in 0..10 {
            // Unwinds early if salsa requested cancellation meanwhile.
            unwind_if_cancelled(db);

            println!("RED_KNOT_SLOW_LINT is set, sleeping for {i}/10 seconds");
            std::thread::sleep(Duration::from_secs(1));
        }
    }

    let mut diagnostics = Vec::new();

    let source = source_text(db.upcast(), file_id);
    lint_lines(&source, &mut diagnostics);

    let parsed = parsed_module(db.upcast(), file_id);

    if parsed.errors().is_empty() {
        let ast = parsed.syntax();

        let mut visitor = SyntaxLintVisitor {
            diagnostics,
            source: &source,
        };
        visitor.visit_body(&ast.body);
        diagnostics = visitor.diagnostics;
    } else {
        // Don't lint a broken tree; surface the parse errors instead.
        diagnostics.extend(parsed.errors().iter().map(ToString::to_string));
    }

    Diagnostics::from(diagnostics)
}
|
|
||||||
|
|
||||||
/// Appends a diagnostic for every line of `source` longer than 88 characters
/// (counted as `char`s, not bytes).
fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
    for (index, line) in source.lines().enumerate() {
        // Fast path: fewer than 88 bytes can never be 88+ characters,
        // so skip the O(n) `chars().count()` below.
        if line.len() < 88 {
            continue;
        }

        let char_count = line.chars().count();
        if char_count <= 88 {
            continue;
        }

        diagnostics.push(format!(
            "Line {} is too long ({} characters)",
            index + 1,
            char_count
        ));
    }
}
|
|
||||||
|
|
||||||
/// Runs the semantic lints (unresolved imports, possibly-undefined names,
/// bad `typing.override` usage) over `file_id`, memoized by salsa.
#[salsa::tracked(return_ref)]
pub(crate) fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
    let _span = trace_span!("lint_semantic", ?file_id).entered();

    let source = source_text(db.upcast(), file_id);
    let parsed = parsed_module(db.upcast(), file_id);
    let semantic = SemanticModel::new(db.upcast(), file_id);

    // Semantic analysis is only meaningful on a syntactically valid module.
    if !parsed.is_valid() {
        return Diagnostics::Empty;
    }

    let context = SemanticLintContext {
        source,
        parsed,
        semantic,
        diagnostics: RefCell::new(Vec::new()),
    };

    SemanticVisitor { context: &context }.visit_body(parsed.suite());

    Diagnostics::from(context.diagnostics.take())
}
|
|
||||||
|
|
||||||
fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef) {
|
|
||||||
match import {
|
|
||||||
AnyImportRef::Import(import) => {
|
|
||||||
for alias in &import.names {
|
|
||||||
let ty = alias.ty(&context.semantic);
|
|
||||||
|
|
||||||
if ty.is_unbound() {
|
|
||||||
context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
AnyImportRef::ImportFrom(import) => {
|
|
||||||
for alias in &import.names {
|
|
||||||
let ty = alias.ty(&context.semantic);
|
|
||||||
|
|
||||||
if ty.is_unbound() {
|
|
||||||
context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Reports loads of names that are unbound (or only bound on some control-flow
/// paths) at the use site.
fn lint_maybe_undefined(context: &SemanticLintContext, name: &ast::ExprName) {
    // Only reads can observe an undefined name; stores/deletes create or
    // remove bindings.
    if !matches!(name.ctx, ast::ExprContext::Load) {
        return;
    }
    let semantic = &context.semantic;
    match name.ty(semantic) {
        Type::Unbound => {
            context.push_diagnostic(format!("Name '{}' used when not defined.", &name.id));
        }
        // A union containing `Unbound` means the name is bound on some paths
        // but not all of them.
        Type::Union(union) if union.contains(semantic.db(), Type::Unbound) => {
            context.push_diagnostic(format!(
                "Name '{}' used when possibly not defined.",
                &name.id
            ));
        }
        _ => {}
    }
}
|
|
||||||
|
|
||||||
/// Reports methods decorated with `typing.override` that do not actually
/// override any member inherited from a base class.
fn lint_bad_override(context: &SemanticLintContext, class: &ast::StmtClassDef) {
    let semantic = &context.semantic;

    // TODO we should have a special marker on the real typing module (from typeshed) so if you
    // have your own "typing" module in your project, we don't consider it THE typing module (and
    // same for other stdlib modules that our lint rules care about)
    let Some(typing) = semantic.resolve_module(ModuleName::new("typing").unwrap()) else {
        return;
    };

    let override_ty = semantic.global_symbol_ty(&typing, "override");

    let Type::Class(class_ty) = class.ty(semantic) else {
        return;
    };

    for function in class
        .body
        .iter()
        .filter_map(|stmt| stmt.as_function_def_stmt())
    {
        // NOTE(review): this `return` aborts the whole class as soon as one
        // member's type isn't a function — `continue` looks like the intent;
        // confirm before changing.
        let Type::Function(ty) = function.ty(semantic) else {
            return;
        };

        // TODO this shouldn't make direct use of the Db; see comment on SemanticModel::db
        let db = semantic.db();

        if ty.has_decorator(db, override_ty) {
            let method_name = ty.name(db);
            // An unbound inherited member means no base class defines it.
            if class_ty
                .inherited_class_member(db, &method_name)
                .is_unbound()
            {
                // TODO should have a qualname() method to support nested classes
                context.push_diagnostic(
                    format!(
                        "Method {}.{} is decorated with `typing.override` but does not override any base class method",
                        class_ty.name(db),
                        method_name,
                    ));
            }
        }
    }
}
|
|
||||||
|
|
||||||
/// Shared state threaded through all semantic lint rules for one file.
pub(crate) struct SemanticLintContext<'a> {
    // Raw source text of the file being linted.
    source: SourceText,
    // Parsed module, borrowed from the salsa cache.
    parsed: &'a ParsedModule,
    // Semantic model used to resolve names, modules and types.
    semantic: SemanticModel<'a>,
    // Collected messages; `RefCell` so rules can push through `&self`.
    diagnostics: RefCell<Vec<String>>,
}
|
|
||||||
|
|
||||||
impl<'db> SemanticLintContext<'db> {
    /// Returns the source code of the file being linted.
    #[allow(unused)]
    pub(crate) fn source_text(&self) -> &str {
        self.source.as_str()
    }

    /// Returns the parsed module's AST root.
    #[allow(unused)]
    pub(crate) fn ast(&self) -> &'db ast::ModModule {
        self.parsed.syntax()
    }

    /// Appends a single diagnostic message (usable through `&self` thanks to
    /// the interior `RefCell`).
    pub(crate) fn push_diagnostic(&self, diagnostic: String) {
        self.diagnostics.borrow_mut().push(diagnostic);
    }

    /// Appends several diagnostics at once; takes `&mut self`, so it can
    /// bypass the runtime borrow check via `get_mut`.
    #[allow(unused)]
    pub(crate) fn extend_diagnostics(&mut self, diagnostics: impl IntoIterator<Item = String>) {
        self.diagnostics.get_mut().extend(diagnostics);
    }
}
|
|
||||||
|
|
||||||
/// AST visitor that accumulates diagnostics for the syntax-only lint rules.
#[derive(Debug)]
struct SyntaxLintVisitor<'a> {
    // Messages collected so far; handed back to `lint_syntax` when done.
    diagnostics: Vec<String>,
    // Source text, needed to inspect the raw token text behind a node.
    source: &'a str,
}
|
|
||||||
|
|
||||||
impl Visitor<'_> for SyntaxLintVisitor<'_> {
|
|
||||||
fn visit_string_literal(&mut self, string_literal: &'_ ast::StringLiteral) {
|
|
||||||
// A very naive implementation of use double quotes
|
|
||||||
let text = &self.source[string_literal.range];
|
|
||||||
|
|
||||||
if text.starts_with('\'') {
|
|
||||||
self.diagnostics
|
|
||||||
.push("Use double quotes for strings".to_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// AST visitor that dispatches each statement/expression to the relevant
/// semantic lint rule.
struct SemanticVisitor<'a> {
    // Shared lint state (semantic model + diagnostic sink).
    context: &'a SemanticLintContext<'a>,
}
|
|
||||||
|
|
||||||
impl Visitor<'_> for SemanticVisitor<'_> {
|
|
||||||
fn visit_stmt(&mut self, stmt: &ast::Stmt) {
|
|
||||||
match stmt {
|
|
||||||
ast::Stmt::ClassDef(class) => {
|
|
||||||
lint_bad_override(self.context, class);
|
|
||||||
}
|
|
||||||
ast::Stmt::Import(import) => {
|
|
||||||
lint_unresolved_imports(self.context, AnyImportRef::Import(import));
|
|
||||||
}
|
|
||||||
ast::Stmt::ImportFrom(import) => {
|
|
||||||
lint_unresolved_imports(self.context, AnyImportRef::ImportFrom(import));
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
|
|
||||||
walk_stmt(self, stmt);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_expr(&mut self, expr: &ast::Expr) {
|
|
||||||
match expr {
|
|
||||||
ast::Expr::Name(name) if matches!(name.ctx, ast::ExprContext::Load) => {
|
|
||||||
lint_maybe_undefined(self.context, name);
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
|
|
||||||
walk_expr(self, expr);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Result of a lint query: either no messages, or a non-empty list.
///
/// The dedicated `Empty` variant keeps the (very common) clean-file result
/// allocation-free.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Diagnostics {
    Empty,
    List(Vec<String>),
}
|
|
||||||
|
|
||||||
impl Diagnostics {
|
|
||||||
pub fn as_slice(&self) -> &[String] {
|
|
||||||
match self {
|
|
||||||
Diagnostics::Empty => &[],
|
|
||||||
Diagnostics::List(list) => list.as_slice(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Lets callers treat `Diagnostics` directly as a `&[String]`
// (indexing, iteration, `join`, ...).
impl Deref for Diagnostics {
    type Target = [String];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}
|
|
||||||
|
|
||||||
impl From<Vec<String>> for Diagnostics {
|
|
||||||
fn from(value: Vec<String>) -> Self {
|
|
||||||
if value.is_empty() {
|
|
||||||
Diagnostics::Empty
|
|
||||||
} else {
|
|
||||||
Diagnostics::List(value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Borrowed reference to either form of Python import statement, so one lint
/// rule can handle both `import x` and `from x import y`.
#[derive(Copy, Clone, Debug)]
enum AnyImportRef<'a> {
    Import(&'a ast::StmtImport),
    ImportFrom(&'a ast::StmtImportFrom),
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use ruff_db::files::system_path_to_file;
    use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

    use super::{lint_semantic, Diagnostics};
    use crate::db::tests::TestDb;

    /// Creates a test database with `/src` configured as the workspace root.
    fn setup_db() -> TestDb {
        let db = TestDb::new();

        Program::new(
            &db,
            TargetVersion::Py38,
            SearchPathSettings {
                extra_paths: Vec::new(),
                workspace_root: SystemPathBuf::from("/src"),
                site_packages: None,
                custom_typeshed: None,
            },
        );

        db
    }

    /// `lint_semantic` reports both an unconditionally undefined name (`flag`)
    /// and a conditionally bound one (`y`).
    #[test]
    fn undefined_variable() {
        let mut db = setup_db();

        db.write_dedented(
            "/src/a.py",
            "
            x = int
            if flag:
                y = x
            y
            ",
        )
        .unwrap();

        let file = system_path_to_file(&db, "/src/a.py").expect("file to exist");
        let Diagnostics::List(messages) = lint_semantic(&db, file) else {
            panic!("expected some diagnostics");
        };

        assert_eq!(
            *messages,
            vec![
                "Name 'flag' used when not defined.",
                "Name 'y' used when possibly not defined."
            ]
        );
    }
}
|
|
||||||
@@ -1,323 +0,0 @@
|
|||||||
use std::sync::Mutex;
|
|
||||||
|
|
||||||
use clap::Parser;
|
|
||||||
use crossbeam::channel as crossbeam_channel;
|
|
||||||
use salsa::ParallelDatabase;
|
|
||||||
use tracing::subscriber::Interest;
|
|
||||||
use tracing::{Level, Metadata};
|
|
||||||
use tracing_subscriber::filter::LevelFilter;
|
|
||||||
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
|
|
||||||
use tracing_subscriber::{Layer, Registry};
|
|
||||||
use tracing_tree::time::Uptime;
|
|
||||||
|
|
||||||
use red_knot::db::RootDatabase;
|
|
||||||
use red_knot::watch;
|
|
||||||
use red_knot::watch::WorkspaceWatcher;
|
|
||||||
use red_knot::workspace::WorkspaceMetadata;
|
|
||||||
use ruff_db::program::{ProgramSettings, SearchPathSettings};
|
|
||||||
use ruff_db::system::{OsSystem, System, SystemPathBuf};
|
|
||||||
|
|
||||||
use cli::target_version::TargetVersion;
|
|
||||||
use cli::verbosity::{Verbosity, VerbosityLevel};
|
|
||||||
|
|
||||||
mod cli;
|
|
||||||
|
|
||||||
/// Command-line arguments for the `red-knot` binary, parsed by clap.
#[derive(Debug, Parser)]
#[command(
    author,
    name = "red-knot",
    about = "An experimental multifile analysis backend for Ruff"
)]
#[command(version)]
struct Args {
    /// Working directory used for workspace and configuration discovery.
    #[arg(
        long,
        help = "Changes the current working directory.",
        long_help = "Changes the current working directory before any specified operations. This affects the workspace and configuration discovery.",
        value_name = "PATH"
    )]
    current_directory: Option<SystemPathBuf>,

    /// Overrides the bundled typeshed stubs with a local directory.
    #[arg(
        long,
        value_name = "DIRECTORY",
        help = "Custom directory to use for stdlib typeshed stubs"
    )]
    custom_typeshed_dir: Option<SystemPathBuf>,

    /// Extra module-resolution roots, searched in addition to the workspace.
    #[arg(
        long,
        value_name = "PATH",
        help = "Additional path to use as a module-resolution source (can be passed multiple times)"
    )]
    extra_search_path: Vec<SystemPathBuf>,

    /// Python version assumed during type resolution.
    #[arg(long, help = "Python version to assume when resolving types", default_value_t = TargetVersion::default(), value_name="VERSION")]
    target_version: TargetVersion,

    /// Shared `-v`/`-vv`/`-vvv` verbosity flags.
    #[clap(flatten)]
    verbosity: Verbosity,

    /// Keep running and re-check whenever watched files change.
    #[arg(
        long,
        help = "Run in watch mode by re-running whenever files change",
        short = 'W'
    )]
    watch: bool,
}
|
|
||||||
|
|
||||||
#[allow(
    clippy::print_stdout,
    clippy::unnecessary_wraps,
    clippy::print_stderr,
    clippy::dbg_macro
)]
/// CLI entry point: parses arguments, builds the workspace database, installs
/// a Ctrl+C handler, and runs either a single check or the watch loop.
pub fn main() -> anyhow::Result<()> {
    let Args {
        current_directory,
        custom_typeshed_dir,
        extra_search_path: extra_paths,
        target_version,
        verbosity,
        watch,
    } = Args::parse_from(std::env::args().collect::<Vec<_>>());

    let verbosity = verbosity.level();
    // `countme` instance counting is only worth its overhead at trace level.
    countme::enable(verbosity == Some(VerbosityLevel::Trace));
    setup_tracing(verbosity);

    // Resolve the effective working directory (explicit flag wins over CWD).
    let cwd = if let Some(cwd) = current_directory {
        let canonicalized = cwd.as_utf8_path().canonicalize_utf8().unwrap();
        SystemPathBuf::from_utf8_path_buf(canonicalized)
    } else {
        let cwd = std::env::current_dir().unwrap();
        SystemPathBuf::from_path_buf(cwd).unwrap()
    };

    let system = OsSystem::new(cwd.clone());
    let workspace_metadata =
        WorkspaceMetadata::from_path(system.current_directory(), &system).unwrap();

    // TODO: Respect the settings from the workspace metadata. when resolving the program settings.
    let program_settings = ProgramSettings {
        target_version: target_version.into(),
        search_paths: SearchPathSettings {
            extra_paths,
            workspace_root: workspace_metadata.root().to_path_buf(),
            custom_typeshed: custom_typeshed_dir,
            site_packages: None,
        },
    };

    // TODO: Use the `program_settings` to compute the key for the database's persistent
    // cache and load the cache if it exists.
    let mut db = RootDatabase::new(workspace_metadata, program_settings, system);

    let (main_loop, main_loop_cancellation_token) = MainLoop::new(verbosity);

    // Listen to Ctrl+C and abort the watch mode.
    // The token is single-use, hence the `Option` behind a `Mutex`.
    let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
    ctrlc::set_handler(move || {
        let mut lock = main_loop_cancellation_token.lock().unwrap();

        if let Some(token) = lock.take() {
            token.stop();
        }
    })?;

    if watch {
        main_loop.watch(&mut db)?;
    } else {
        main_loop.run(&mut db);
    }

    Ok(())
}
|
|
||||||
|
|
||||||
/// Drives check runs and (optionally) file watching for the CLI.
struct MainLoop {
    /// Sender that can be used to send messages to the main loop.
    sender: crossbeam_channel::Sender<MainLoopMessage>,

    /// Receiver for the messages sent **to** the main loop.
    receiver: crossbeam_channel::Receiver<MainLoopMessage>,

    /// The file system watcher, if running in watch mode.
    watcher: Option<WorkspaceWatcher>,

    /// Verbosity selected on the command line; gates `countme` reporting.
    verbosity: Option<VerbosityLevel>,
}
|
|
||||||
|
|
||||||
impl MainLoop {
    /// Creates the loop plus a cancellation token that can stop it from
    /// another thread (e.g. the Ctrl+C handler).
    fn new(verbosity: Option<VerbosityLevel>) -> (Self, MainLoopCancellationToken) {
        let (sender, receiver) = crossbeam_channel::bounded(10);

        (
            Self {
                sender: sender.clone(),
                receiver,
                watcher: None,
                verbosity,
            },
            MainLoopCancellationToken { sender },
        )
    }

    /// Starts the file watcher, then runs the loop until cancelled.
    fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<()> {
        let sender = self.sender.clone();
        // Forward watcher events into the main-loop channel.
        let watcher = watch::directory_watcher(move |event| {
            sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
        })?;

        self.watcher = Some(WorkspaceWatcher::new(watcher, db));
        self.run(db);

        Ok(())
    }

    /// Message pump: schedules checks, applies file changes, prints results.
    #[allow(clippy::print_stderr)]
    fn run(mut self, db: &mut RootDatabase) {
        // Schedule the first check.
        self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
        // Incremented on every change batch; used to discard stale results.
        let mut revision = 0usize;

        while let Ok(message) = self.receiver.recv() {
            tracing::trace!("Main Loop: Tick");

            match message {
                MainLoopMessage::CheckWorkspace => {
                    let db = db.snapshot();
                    let sender = self.sender.clone();

                    // Spawn a new task that checks the workspace. This needs to be done in a separate thread
                    // to prevent blocking the main loop here.
                    rayon::spawn(move || {
                        if let Ok(result) = db.check() {
                            // Send the result back to the main loop for printing.
                            sender
                                .send(MainLoopMessage::CheckCompleted { result, revision })
                                .ok();
                        }
                    });
                }

                MainLoopMessage::CheckCompleted {
                    result,
                    revision: check_revision,
                } => {
                    // Only print results that belong to the current revision;
                    // results from a superseded check are dropped.
                    if check_revision == revision {
                        eprintln!("{}", result.join("\n"));

                        if self.verbosity == Some(VerbosityLevel::Trace) {
                            eprintln!("{}", countme::get_all());
                        }
                    }

                    // Without a watcher there is nothing more to wait for.
                    if self.watcher.is_none() {
                        return self.exit();
                    }
                }

                MainLoopMessage::ApplyChanges(changes) => {
                    revision += 1;
                    // Automatically cancels any pending queries and waits for them to complete.
                    db.apply_changes(changes);
                    if let Some(watcher) = self.watcher.as_mut() {
                        watcher.update(db);
                    }
                    self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
                }
                MainLoopMessage::Exit => {
                    return self.exit();
                }
            }
        }
    }

    /// Final bookkeeping before the process ends (trace-level stats only).
    #[allow(clippy::print_stderr, clippy::unused_self)]
    fn exit(self) {
        if self.verbosity == Some(VerbosityLevel::Trace) {
            eprintln!("Exit");
            eprintln!("{}", countme::get_all());
        }
    }
}
|
|
||||||
|
|
||||||
/// Single-use handle that asks the main loop to exit.
#[derive(Debug)]
struct MainLoopCancellationToken {
    // Same channel the main loop reads from.
    sender: crossbeam_channel::Sender<MainLoopMessage>,
}
|
|
||||||
|
|
||||||
impl MainLoopCancellationToken {
    /// Signals the main loop to exit. Consumes the token, so it can only
    /// fire once.
    fn stop(self) {
        self.sender.send(MainLoopMessage::Exit).unwrap();
    }
}
|
|
||||||
|
|
||||||
/// Message sent from the orchestrator to the main loop.
#[derive(Debug)]
enum MainLoopMessage {
    /// Kick off a (background) check of the whole workspace.
    CheckWorkspace,
    /// A background check finished; `revision` identifies which change batch
    /// it was computed against so stale results can be discarded.
    CheckCompleted {
        result: Vec<String>,
        revision: usize,
    },
    /// File-watcher events that need to be applied to the database.
    ApplyChanges(Vec<watch::ChangeEvent>),
    /// Shut the loop down.
    Exit,
}
|
|
||||||
|
|
||||||
/// Installs the global tracing subscriber: a hierarchical tree printer on
/// stderr, filtered according to the CLI verbosity.
fn setup_tracing(verbosity: Option<VerbosityLevel>) {
    // Map CLI verbosity onto a maximum tracing level (default: warnings only).
    let trace_level = match verbosity {
        None => Level::WARN,
        Some(VerbosityLevel::Info) => Level::INFO,
        Some(VerbosityLevel::Debug) => Level::DEBUG,
        Some(VerbosityLevel::Trace) => Level::TRACE,
    };

    let subscriber = Registry::default().with(
        tracing_tree::HierarchicalLayer::default()
            .with_indent_lines(true)
            .with_indent_amount(2)
            .with_bracketed_fields(true)
            .with_thread_ids(true)
            .with_targets(true)
            // Keep stdout clean for check results; logs go to stderr.
            .with_writer(|| Box::new(std::io::stderr()))
            .with_timer(Uptime::default())
            .with_filter(LoggingFilter { trace_level }),
    );

    tracing::subscriber::set_global_default(subscriber).unwrap();
}
|
|
||||||
|
|
||||||
/// Tracing filter that applies the user-selected level to our own crates and
/// caps third-party crates at `INFO`.
struct LoggingFilter {
    // Maximum level for `red_knot*`/`ruff*` targets.
    trace_level: Level,
}
|
|
||||||
|
|
||||||
impl LoggingFilter {
|
|
||||||
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
|
|
||||||
let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
|
|
||||||
self.trace_level
|
|
||||||
} else {
|
|
||||||
Level::INFO
|
|
||||||
};
|
|
||||||
|
|
||||||
meta.level() <= &filter
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<S> Filter<S> for LoggingFilter {
|
|
||||||
fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
|
|
||||||
self.is_enabled(meta)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
|
|
||||||
if self.is_enabled(meta) {
|
|
||||||
Interest::always()
|
|
||||||
} else {
|
|
||||||
Interest::never()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn max_level_hint(&self) -> Option<LevelFilter> {
|
|
||||||
Some(LevelFilter::from_level(self.trace_level))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,94 +0,0 @@
|
|||||||
use ruff_db::system::{SystemPath, SystemPathBuf};
|
|
||||||
pub use watcher::{directory_watcher, EventHandler, Watcher};
|
|
||||||
pub use workspace_watcher::WorkspaceWatcher;
|
|
||||||
|
|
||||||
mod watcher;
|
|
||||||
mod workspace_watcher;
|
|
||||||
|
|
||||||
/// Classification of a file system change event.
///
/// ## Renaming a path
/// Renaming a path creates a [`ChangeEvent::Deleted`] event for the old path and/or a [`ChangeEvent::Created`] for the new location.
/// Whether both events are created or just one of them depends on where the path was moved:
///
/// * Inside the watched directory: Both events are created.
/// * From a watched directory to a non-watched directory: Only a [`ChangeEvent::Deleted`] event is created.
/// * From a non-watched directory to a watched directory: Only a [`ChangeEvent::Created`] event is created.
///
/// ## Renaming a directory
/// It's up to the file watcher implementation to aggregate the rename event for a directory to a single rename
/// event instead of emitting an event for each file or subdirectory in that path.
#[derive(Debug, PartialEq, Eq)]
pub enum ChangeEvent {
    /// A new path was created
    Created {
        path: SystemPathBuf,
        kind: CreatedKind,
    },

    /// The content or metadata of a path was changed.
    Changed {
        path: SystemPathBuf,
        kind: ChangedKind,
    },

    /// A path was deleted.
    Deleted {
        path: SystemPathBuf,
        kind: DeletedKind,
    },

    /// The file watcher failed to observe some changes and now is out of sync with the file system.
    ///
    /// This can happen if many files are changed at once. The consumer should rescan all files to catch up
    /// with the file system.
    Rescan,
}
|
|
||||||
|
|
||||||
impl ChangeEvent {
|
|
||||||
pub fn file_name(&self) -> Option<&str> {
|
|
||||||
self.path().and_then(|path| path.file_name())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn path(&self) -> Option<&SystemPath> {
|
|
||||||
match self {
|
|
||||||
ChangeEvent::Created { path, .. }
|
|
||||||
| ChangeEvent::Changed { path, .. }
|
|
||||||
| ChangeEvent::Deleted { path, .. } => Some(path),
|
|
||||||
ChangeEvent::Rescan => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Classification of an event that creates a new path.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum CreatedKind {
    /// A file was created.
    File,

    /// A directory was created.
    Directory,

    /// A file, directory, or any other kind of path was created.
    Any,
}
|
|
||||||
|
|
||||||
/// Classification of an event related to a content or metadata change.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum ChangedKind {
    /// The content of a file was changed.
    FileContent,

    /// The metadata of a file was changed.
    FileMetadata,

    /// Either the content or the metadata of a path was changed.
    Any,
}
|
|
||||||
|
|
||||||
/// Classification of an event that removes a path.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum DeletedKind {
    /// A file was deleted.
    File,

    /// A directory was deleted.
    Directory,

    /// A file, directory, or any other kind of path was deleted.
    Any,
}
|
|
||||||
@@ -1,393 +0,0 @@
|
|||||||
use notify::event::{CreateKind, MetadataKind, ModifyKind, RemoveKind, RenameMode};
|
|
||||||
use notify::{recommended_watcher, EventKind, RecommendedWatcher, RecursiveMode, Watcher as _};
|
|
||||||
|
|
||||||
use ruff_db::system::{SystemPath, SystemPathBuf};
|
|
||||||
|
|
||||||
use crate::watch::{ChangeEvent, ChangedKind, CreatedKind, DeletedKind};
|
|
||||||
|
|
||||||
/// Creates a new watcher observing file system changes.
|
|
||||||
///
|
|
||||||
/// The watcher debounces events, but guarantees to send all changes eventually (even if the file system keeps changing).
|
|
||||||
pub fn directory_watcher<H>(handler: H) -> notify::Result<Watcher>
|
|
||||||
where
|
|
||||||
H: EventHandler,
|
|
||||||
{
|
|
||||||
let (sender, receiver) = crossbeam::channel::bounded(20);
|
|
||||||
|
|
||||||
let debouncer = std::thread::Builder::new()
|
|
||||||
.name("watcher::debouncer".to_string())
|
|
||||||
.spawn(move || {
|
|
||||||
// Wait for the next set of changes
|
|
||||||
for message in &receiver {
|
|
||||||
let event = match message {
|
|
||||||
DebouncerMessage::Event(event) => event,
|
|
||||||
DebouncerMessage::Flush => {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
DebouncerMessage::Exit => {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut debouncer = Debouncer::default();
|
|
||||||
|
|
||||||
debouncer.add_result(event);
|
|
||||||
|
|
||||||
// Debounce any new incoming changes:
|
|
||||||
// * Take any new incoming change events and merge them with the previous change events
|
|
||||||
// * If there are no new incoming change events after 10 ms, flush the changes and wait for the next notify event.
|
|
||||||
// * Flush no later than after 3s.
|
|
||||||
loop {
|
|
||||||
let start = std::time::Instant::now();
|
|
||||||
|
|
||||||
crossbeam::select! {
|
|
||||||
recv(receiver) -> message => {
|
|
||||||
match message {
|
|
||||||
Ok(DebouncerMessage::Event(event)) => {
|
|
||||||
debouncer.add_result(event);
|
|
||||||
|
|
||||||
// Ensure that we flush the changes eventually.
|
|
||||||
if start.elapsed() > std::time::Duration::from_secs(3) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(DebouncerMessage::Flush) => {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(DebouncerMessage::Exit) => {
|
|
||||||
return;
|
|
||||||
},
|
|
||||||
|
|
||||||
Err(_) => {
|
|
||||||
// There are no more senders. There's no point in waiting for more messages
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
default(std::time::Duration::from_millis(10)) => {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// No more file changes after 10 ms, send the changes and schedule a new analysis
|
|
||||||
let events = debouncer.into_events();
|
|
||||||
|
|
||||||
if !events.is_empty() {
|
|
||||||
handler.handle(events);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let debouncer_sender = sender.clone();
|
|
||||||
let watcher =
|
|
||||||
recommended_watcher(move |event| sender.send(DebouncerMessage::Event(event)).unwrap())?;
|
|
||||||
|
|
||||||
Ok(Watcher {
|
|
||||||
watcher,
|
|
||||||
debouncer_sender,
|
|
||||||
debouncer_thread: Some(debouncer),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum DebouncerMessage {
|
|
||||||
/// A new file system event.
|
|
||||||
Event(notify::Result<notify::Event>),
|
|
||||||
|
|
||||||
Flush,
|
|
||||||
|
|
||||||
/// Exit the debouncer thread.
|
|
||||||
Exit,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct Watcher {
|
|
||||||
watcher: RecommendedWatcher,
|
|
||||||
debouncer_sender: crossbeam::channel::Sender<DebouncerMessage>,
|
|
||||||
debouncer_thread: Option<std::thread::JoinHandle<()>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Watcher {
|
|
||||||
/// Sets up file watching for `path`.
|
|
||||||
pub fn watch(&mut self, path: &SystemPath) -> notify::Result<()> {
|
|
||||||
self.watcher
|
|
||||||
.watch(path.as_std_path(), RecursiveMode::Recursive)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Stops file watching for `path`.
|
|
||||||
pub fn unwatch(&mut self, path: &SystemPath) -> notify::Result<()> {
|
|
||||||
self.watcher.unwatch(path.as_std_path())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Stops the file watcher.
|
|
||||||
///
|
|
||||||
/// Pending events will be discarded.
|
|
||||||
///
|
|
||||||
/// The call blocks until the watcher has stopped.
|
|
||||||
pub fn stop(mut self) {
|
|
||||||
self.set_stop();
|
|
||||||
if let Some(debouncher) = self.debouncer_thread.take() {
|
|
||||||
debouncher.join().unwrap();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Flushes any pending events.
|
|
||||||
pub fn flush(&self) {
|
|
||||||
self.debouncer_sender.send(DebouncerMessage::Flush).unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn set_stop(&mut self) {
|
|
||||||
self.debouncer_sender.send(DebouncerMessage::Exit).ok();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Drop for Watcher {
|
|
||||||
fn drop(&mut self) {
|
|
||||||
self.set_stop();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
struct Debouncer {
|
|
||||||
events: Vec<ChangeEvent>,
|
|
||||||
rescan_event: Option<ChangeEvent>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Debouncer {
|
|
||||||
#[tracing::instrument(level = "trace", skip(self))]
|
|
||||||
fn add_result(&mut self, result: notify::Result<notify::Event>) {
|
|
||||||
match result {
|
|
||||||
Ok(event) => self.add_event(event),
|
|
||||||
Err(error) => self.add_error(error),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(clippy::unused_self, clippy::needless_pass_by_value)]
|
|
||||||
fn add_error(&mut self, error: notify::Error) {
|
|
||||||
// Micha: I skimmed through some of notify's source code and it seems the most common errors
|
|
||||||
// are IO errors. All other errors should really only happen when adding or removing a watched folders.
|
|
||||||
// It's not clear what an upstream handler should do in the case of an IOError (other than logging it).
|
|
||||||
// That's what we do for now as well.
|
|
||||||
tracing::warn!("File watcher error: {error:?}.");
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_event(&mut self, event: notify::Event) {
|
|
||||||
if self.rescan_event.is_some() {
|
|
||||||
// We're already in a rescan state, ignore all other events
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the file watcher is out of sync or we observed too many changes, trigger a full rescan
|
|
||||||
if event.need_rescan() || self.events.len() > 10000 {
|
|
||||||
self.events = Vec::new();
|
|
||||||
self.rescan_event = Some(ChangeEvent::Rescan);
|
|
||||||
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
let kind = event.kind;
|
|
||||||
let path = match SystemPathBuf::from_path_buf(event.paths.into_iter().next().unwrap()) {
|
|
||||||
Ok(path) => path,
|
|
||||||
Err(path) => {
|
|
||||||
tracing::debug!(
|
|
||||||
"Ignore change to non-UTF8 path '{path}': {kind:?}",
|
|
||||||
path = path.display()
|
|
||||||
);
|
|
||||||
|
|
||||||
// Ignore non-UTF8 paths because they aren't handled by the rest of the system.
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let event = match kind {
|
|
||||||
EventKind::Create(create) => {
|
|
||||||
let kind = match create {
|
|
||||||
CreateKind::File => CreatedKind::File,
|
|
||||||
CreateKind::Folder => CreatedKind::Directory,
|
|
||||||
CreateKind::Any | CreateKind::Other => {
|
|
||||||
CreatedKind::from(FileType::from_path(&path))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
ChangeEvent::Created { path, kind }
|
|
||||||
}
|
|
||||||
|
|
||||||
EventKind::Modify(modify) => match modify {
|
|
||||||
ModifyKind::Metadata(metadata) => {
|
|
||||||
if FileType::from_path(&path) != FileType::File {
|
|
||||||
// Only interested in file metadata events.
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
match metadata {
|
|
||||||
MetadataKind::Any | MetadataKind::Permissions | MetadataKind::Other => {
|
|
||||||
ChangeEvent::Changed {
|
|
||||||
path,
|
|
||||||
kind: ChangedKind::FileMetadata,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
MetadataKind::AccessTime
|
|
||||||
| MetadataKind::WriteTime
|
|
||||||
| MetadataKind::Ownership
|
|
||||||
| MetadataKind::Extended => {
|
|
||||||
// We're not interested in these metadata changes
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
ModifyKind::Data(_) => ChangeEvent::Changed {
|
|
||||||
kind: ChangedKind::FileMetadata,
|
|
||||||
path,
|
|
||||||
},
|
|
||||||
|
|
||||||
ModifyKind::Name(rename) => match rename {
|
|
||||||
RenameMode::From => {
|
|
||||||
// TODO: notify_debouncer_full matches the `RenameMode::From` and `RenameMode::To` events.
|
|
||||||
// Matching the from and to event would have the added advantage that we know the
|
|
||||||
// type of the path that was renamed, allowing `apply_changes` to avoid traversing the
|
|
||||||
// entire package.
|
|
||||||
// https://github.com/notify-rs/notify/blob/128bf6230c03d39dbb7f301ff7b20e594e34c3a2/notify-debouncer-full/src/lib.rs#L293-L297
|
|
||||||
ChangeEvent::Deleted {
|
|
||||||
kind: DeletedKind::Any,
|
|
||||||
path,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
RenameMode::To => ChangeEvent::Created {
|
|
||||||
kind: CreatedKind::from(FileType::from_path(&path)),
|
|
||||||
path,
|
|
||||||
},
|
|
||||||
|
|
||||||
RenameMode::Both => {
|
|
||||||
// Both is only emitted when moving a path from within a watched directory
|
|
||||||
// to another watched directory. The event is not emitted if the `to` or `from` path
|
|
||||||
// lay outside the watched directory. However, the `To` and `From` events are always emitted.
|
|
||||||
// That's why we ignore `Both` and instead rely on `To` and `From`.
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
RenameMode::Other => {
|
|
||||||
// Skip over any other rename events
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
RenameMode::Any => {
|
|
||||||
// Guess the action based on the current file system state
|
|
||||||
if path.as_std_path().exists() {
|
|
||||||
let file_type = FileType::from_path(&path);
|
|
||||||
|
|
||||||
ChangeEvent::Created {
|
|
||||||
kind: file_type.into(),
|
|
||||||
path,
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
ChangeEvent::Deleted {
|
|
||||||
kind: DeletedKind::Any,
|
|
||||||
path,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
ModifyKind::Other => {
|
|
||||||
// Skip other modification events that are not content or metadata related
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
ModifyKind::Any => {
|
|
||||||
if !path.as_std_path().is_file() {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
ChangeEvent::Changed {
|
|
||||||
path,
|
|
||||||
kind: ChangedKind::Any,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
EventKind::Access(_) => {
|
|
||||||
// We're not interested in any access events
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
EventKind::Remove(kind) => {
|
|
||||||
let kind = match kind {
|
|
||||||
RemoveKind::File => DeletedKind::File,
|
|
||||||
RemoveKind::Folder => DeletedKind::Directory,
|
|
||||||
RemoveKind::Any | RemoveKind::Other => DeletedKind::Any,
|
|
||||||
};
|
|
||||||
|
|
||||||
ChangeEvent::Deleted { path, kind }
|
|
||||||
}
|
|
||||||
|
|
||||||
EventKind::Other => {
|
|
||||||
// Skip over meta events
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
EventKind::Any => {
|
|
||||||
tracing::debug!("Skip any FS event for {path}.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
self.events.push(event);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn into_events(self) -> Vec<ChangeEvent> {
|
|
||||||
if let Some(rescan_event) = self.rescan_event {
|
|
||||||
vec![rescan_event]
|
|
||||||
} else {
|
|
||||||
self.events
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait EventHandler: Send + 'static {
|
|
||||||
fn handle(&self, changes: Vec<ChangeEvent>);
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<F> EventHandler for F
|
|
||||||
where
|
|
||||||
F: Fn(Vec<ChangeEvent>) + Send + 'static,
|
|
||||||
{
|
|
||||||
fn handle(&self, changes: Vec<ChangeEvent>) {
|
|
||||||
let f = self;
|
|
||||||
f(changes);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The kind of path an event refers to, probed from the file system.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum FileType {
    /// The event is related to a file.
    /// (FIX: this variant was previously mis-documented as "a directory".)
    File,

    /// The event is related to a directory.
    Directory,

    /// It's unknown whether the event is related to a file or a directory or if it is any other file type.
    Any,
}
|
|
||||||
|
|
||||||
impl FileType {
|
|
||||||
fn from_path(path: &SystemPath) -> FileType {
|
|
||||||
match path.as_std_path().metadata() {
|
|
||||||
Ok(metadata) if metadata.is_file() => FileType::File,
|
|
||||||
Ok(metadata) if metadata.is_dir() => FileType::Directory,
|
|
||||||
Ok(_) | Err(_) => FileType::Any,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<FileType> for CreatedKind {
|
|
||||||
fn from(value: FileType) -> Self {
|
|
||||||
match value {
|
|
||||||
FileType::File => Self::File,
|
|
||||||
FileType::Directory => Self::Directory,
|
|
||||||
FileType::Any => Self::Any,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,112 +0,0 @@
|
|||||||
use crate::db::RootDatabase;
|
|
||||||
use crate::watch::Watcher;
|
|
||||||
use ruff_db::system::SystemPathBuf;
|
|
||||||
use rustc_hash::FxHashSet;
|
|
||||||
use std::fmt::{Formatter, Write};
|
|
||||||
use tracing::info;
|
|
||||||
|
|
||||||
/// Wrapper around a [`Watcher`] that watches the relevant paths of a workspace.
|
|
||||||
pub struct WorkspaceWatcher {
|
|
||||||
watcher: Watcher,
|
|
||||||
|
|
||||||
/// The paths that need to be watched. This includes paths for which setting up file watching failed.
|
|
||||||
watched_paths: FxHashSet<SystemPathBuf>,
|
|
||||||
|
|
||||||
/// Paths that should be watched but setting up the watcher failed for some reason.
|
|
||||||
/// This should be rare.
|
|
||||||
errored_paths: Vec<SystemPathBuf>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl WorkspaceWatcher {
|
|
||||||
/// Create a new workspace watcher.
|
|
||||||
pub fn new(watcher: Watcher, db: &RootDatabase) -> Self {
|
|
||||||
let mut watcher = Self {
|
|
||||||
watcher,
|
|
||||||
watched_paths: FxHashSet::default(),
|
|
||||||
errored_paths: Vec::new(),
|
|
||||||
};
|
|
||||||
|
|
||||||
watcher.update(db);
|
|
||||||
|
|
||||||
watcher
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn update(&mut self, db: &RootDatabase) {
|
|
||||||
let new_watch_paths = db.workspace().paths_to_watch(db);
|
|
||||||
|
|
||||||
let mut added_folders = new_watch_paths.difference(&self.watched_paths).peekable();
|
|
||||||
let mut removed_folders = self.watched_paths.difference(&new_watch_paths).peekable();
|
|
||||||
|
|
||||||
if added_folders.peek().is_none() && removed_folders.peek().is_none() {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
for added_folder in added_folders {
|
|
||||||
// Log a warning. It's not worth aborting if registering a single folder fails because
|
|
||||||
// Ruff otherwise stills works as expected.
|
|
||||||
if let Err(error) = self.watcher.watch(added_folder) {
|
|
||||||
// TODO: Log a user-facing warning.
|
|
||||||
tracing::warn!("Failed to setup watcher for path '{added_folder}': {error}. You have to restart Ruff after making changes to files under this path or you might see stale results.");
|
|
||||||
self.errored_paths.push(added_folder.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for removed_path in removed_folders {
|
|
||||||
if let Some(index) = self
|
|
||||||
.errored_paths
|
|
||||||
.iter()
|
|
||||||
.position(|path| path == removed_path)
|
|
||||||
{
|
|
||||||
self.errored_paths.swap_remove(index);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Err(error) = self.watcher.unwatch(removed_path) {
|
|
||||||
info!("Failed to remove the file watcher for the path '{removed_path}: {error}.");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
info!(
|
|
||||||
"Set up file watchers for {}",
|
|
||||||
DisplayWatchedPaths {
|
|
||||||
paths: &new_watch_paths
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
self.watched_paths = new_watch_paths;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns `true` if setting up watching for any path failed.
|
|
||||||
pub fn has_errored_paths(&self) -> bool {
|
|
||||||
!self.errored_paths.is_empty()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn flush(&self) {
|
|
||||||
self.watcher.flush();
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn stop(self) {
|
|
||||||
self.watcher.stop();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
struct DisplayWatchedPaths<'a> {
|
|
||||||
paths: &'a FxHashSet<SystemPathBuf>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for DisplayWatchedPaths<'_> {
|
|
||||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
|
||||||
f.write_char('[')?;
|
|
||||||
|
|
||||||
let mut iter = self.paths.iter();
|
|
||||||
if let Some(first) = iter.next() {
|
|
||||||
write!(f, "\"{first}\"")?;
|
|
||||||
|
|
||||||
for path in iter {
|
|
||||||
write!(f, ", \"{path}\"")?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
f.write_char(']')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,379 +0,0 @@
|
|||||||
// TODO: Fix clippy warnings created by salsa macros
|
|
||||||
#![allow(clippy::used_underscore_binding, unreachable_pub)]
|
|
||||||
|
|
||||||
use std::{collections::BTreeMap, sync::Arc};
|
|
||||||
|
|
||||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
|
||||||
|
|
||||||
pub use metadata::{PackageMetadata, WorkspaceMetadata};
|
|
||||||
use red_knot_module_resolver::system_module_search_paths;
|
|
||||||
use ruff_db::{
|
|
||||||
files::{system_path_to_file, File},
|
|
||||||
system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
|
|
||||||
};
|
|
||||||
use ruff_python_ast::{name::Name, PySourceType};
|
|
||||||
|
|
||||||
use crate::workspace::files::{Index, IndexedFiles, PackageFiles};
|
|
||||||
use crate::{
|
|
||||||
db::Db,
|
|
||||||
lint::{lint_semantic, lint_syntax, Diagnostics},
|
|
||||||
};
|
|
||||||
|
|
||||||
mod files;
|
|
||||||
mod metadata;
|
|
||||||
|
|
||||||
/// The project workspace as a Salsa ingredient.
|
|
||||||
///
|
|
||||||
/// A workspace consists of one or multiple packages. Packages can be nested. A file in a workspace
|
|
||||||
/// belongs to no or exactly one package (files can't belong to multiple packages).
|
|
||||||
///
|
|
||||||
/// How workspaces and packages are discovered is TBD. For now, a workspace can be any directory,
|
|
||||||
/// and it always contains a single package which has the same root as the workspace.
|
|
||||||
///
|
|
||||||
/// ## Examples
|
|
||||||
///
|
|
||||||
/// ```text
|
|
||||||
/// app-1/
|
|
||||||
/// pyproject.toml
|
|
||||||
/// src/
|
|
||||||
/// ... python files
|
|
||||||
///
|
|
||||||
/// app-2/
|
|
||||||
/// pyproject.toml
|
|
||||||
/// src/
|
|
||||||
/// ... python files
|
|
||||||
///
|
|
||||||
/// shared/
|
|
||||||
/// pyproject.toml
|
|
||||||
/// src/
|
|
||||||
/// ... python files
|
|
||||||
///
|
|
||||||
/// pyproject.toml
|
|
||||||
/// ```
|
|
||||||
///
|
|
||||||
/// The above project structure has three packages: `app-1`, `app-2`, and `shared`.
|
|
||||||
/// Each of the packages can define their own settings in their `pyproject.toml` file, but
|
|
||||||
/// they must be compatible. For example, each package can define a different `requires-python` range,
|
|
||||||
/// but the ranges must overlap.
|
|
||||||
///
|
|
||||||
/// ## How is a workspace different from a program?
|
|
||||||
/// There are two (related) motivations:
|
|
||||||
///
|
|
||||||
/// 1. Program is defined in `ruff_db` and it can't reference the settings types for the linter and formatter
|
|
||||||
/// without introducing a cyclic dependency. The workspace is defined in a higher level crate
|
|
||||||
/// where it can reference these setting types.
|
|
||||||
/// 2. Running `ruff check` with different target versions results in different programs (settings) but
|
|
||||||
/// it remains the same workspace. That's why program is a narrowed view of the workspace only
|
|
||||||
/// holding on to the most fundamental settings required for checking.
|
|
||||||
#[salsa::input]
|
|
||||||
pub struct Workspace {
|
|
||||||
#[id]
|
|
||||||
#[return_ref]
|
|
||||||
root_buf: SystemPathBuf,
|
|
||||||
|
|
||||||
/// The files that are open in the workspace.
|
|
||||||
///
|
|
||||||
/// Setting the open files to a non-`None` value changes `check` to only check the
|
|
||||||
/// open files rather than all files in the workspace.
|
|
||||||
#[return_ref]
|
|
||||||
open_file_set: Option<Arc<FxHashSet<File>>>,
|
|
||||||
|
|
||||||
/// The (first-party) packages in this workspace.
|
|
||||||
#[return_ref]
|
|
||||||
package_tree: BTreeMap<SystemPathBuf, Package>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A first-party package in a workspace.
|
|
||||||
#[salsa::input]
|
|
||||||
pub struct Package {
|
|
||||||
#[return_ref]
|
|
||||||
pub name: Name,
|
|
||||||
|
|
||||||
/// The path to the root directory of the package.
|
|
||||||
#[id]
|
|
||||||
#[return_ref]
|
|
||||||
root_buf: SystemPathBuf,
|
|
||||||
|
|
||||||
/// The files that are part of this package.
|
|
||||||
#[return_ref]
|
|
||||||
file_set: PackageFiles,
|
|
||||||
// TODO: Add the loaded settings.
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Workspace {
|
|
||||||
/// Discovers the closest workspace at `path` and returns its metadata.
|
|
||||||
pub fn from_metadata(db: &dyn Db, metadata: WorkspaceMetadata) -> Self {
|
|
||||||
let mut packages = BTreeMap::new();
|
|
||||||
|
|
||||||
for package in metadata.packages {
|
|
||||||
packages.insert(package.root.clone(), Package::from_metadata(db, package));
|
|
||||||
}
|
|
||||||
|
|
||||||
Workspace::new(db, metadata.root, None, packages)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn root(self, db: &dyn Db) -> &SystemPath {
|
|
||||||
self.root_buf(db)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn packages(self, db: &dyn Db) -> impl Iterator<Item = Package> + '_ {
|
|
||||||
self.package_tree(db).values().copied()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
|
||||||
pub fn reload(self, db: &mut dyn Db, metadata: WorkspaceMetadata) {
|
|
||||||
assert_eq!(self.root(db), metadata.root());
|
|
||||||
|
|
||||||
let mut old_packages = self.package_tree(db).clone();
|
|
||||||
let mut new_packages = BTreeMap::new();
|
|
||||||
|
|
||||||
for package_metadata in metadata.packages {
|
|
||||||
let path = package_metadata.root().to_path_buf();
|
|
||||||
|
|
||||||
let package = if let Some(old_package) = old_packages.remove(&path) {
|
|
||||||
old_package.update(db, package_metadata);
|
|
||||||
old_package
|
|
||||||
} else {
|
|
||||||
Package::from_metadata(db, package_metadata)
|
|
||||||
};
|
|
||||||
|
|
||||||
new_packages.insert(path, package);
|
|
||||||
}
|
|
||||||
|
|
||||||
self.set_package_tree(db).to(new_packages);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", skip_all)]
|
|
||||||
pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {
|
|
||||||
let path = metadata.root().to_path_buf();
|
|
||||||
|
|
||||||
if let Some(package) = self.package_tree(db).get(&path).copied() {
|
|
||||||
package.update(db, metadata);
|
|
||||||
Ok(())
|
|
||||||
} else {
|
|
||||||
Err(anyhow::anyhow!("Package {path} not found"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the closest package to which the first-party `path` belongs.
|
|
||||||
///
|
|
||||||
/// Returns `None` if the `path` is outside of any package or if `file` isn't a first-party file
|
|
||||||
/// (e.g. third-party dependencies or `excluded`).
|
|
||||||
pub fn package(self, db: &dyn Db, path: &SystemPath) -> Option<Package> {
|
|
||||||
let packages = self.package_tree(db);
|
|
||||||
|
|
||||||
let (package_path, package) = packages.range(..=path.to_path_buf()).next_back()?;
|
|
||||||
|
|
||||||
if path.starts_with(package_path) {
|
|
||||||
Some(*package)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Checks all open files in the workspace and its dependencies.
|
|
||||||
#[tracing::instrument(level = "debug", skip_all)]
|
|
||||||
pub fn check(self, db: &dyn Db) -> Vec<String> {
|
|
||||||
let mut result = Vec::new();
|
|
||||||
|
|
||||||
if let Some(open_files) = self.open_files(db) {
|
|
||||||
for file in open_files {
|
|
||||||
result.extend_from_slice(&check_file(db, *file));
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
for package in self.packages(db) {
|
|
||||||
result.extend(package.check(db));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Opens a file in the workspace.
|
|
||||||
///
|
|
||||||
/// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
|
|
||||||
#[tracing::instrument(level = "debug", skip(self, db))]
|
|
||||||
pub fn open_file(self, db: &mut dyn Db, file: File) {
|
|
||||||
let mut open_files = self.take_open_files(db);
|
|
||||||
open_files.insert(file);
|
|
||||||
self.set_open_files(db, open_files);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Closes a file in the workspace.
|
|
||||||
#[tracing::instrument(level = "debug", skip(self, db))]
|
|
||||||
pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
|
|
||||||
let mut open_files = self.take_open_files(db);
|
|
||||||
let removed = open_files.remove(&file);
|
|
||||||
|
|
||||||
if removed {
|
|
||||||
self.set_open_files(db, open_files);
|
|
||||||
}
|
|
||||||
|
|
||||||
removed
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the open files in the workspace or `None` if the entire workspace should be checked.
|
|
||||||
pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
|
|
||||||
self.open_file_set(db).as_deref()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Sets the open files in the workspace.
|
|
||||||
///
|
|
||||||
/// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
|
|
||||||
#[tracing::instrument(level = "debug", skip(self, db))]
|
|
||||||
pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
|
|
||||||
self.set_open_file_set(db).to(Some(Arc::new(open_files)));
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This takes the open files from the workspace and returns them.
|
|
||||||
///
|
|
||||||
/// This changes the behavior of `check` to check all files in the workspace instead of just the open files.
|
|
||||||
pub fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
|
|
||||||
let open_files = self.open_file_set(db).clone();
|
|
||||||
|
|
||||||
if let Some(open_files) = open_files {
|
|
||||||
// Salsa will cancel any pending queries and remove its own reference to `open_files`
|
|
||||||
// so that the reference counter to `open_files` now drops to 1.
|
|
||||||
self.set_open_file_set(db).to(None);
|
|
||||||
|
|
||||||
Arc::try_unwrap(open_files).unwrap()
|
|
||||||
} else {
|
|
||||||
FxHashSet::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the paths that should be watched.
|
|
||||||
///
|
|
||||||
/// The paths that require watching might change with every revision.
|
|
||||||
pub fn paths_to_watch(self, db: &dyn Db) -> FxHashSet<SystemPathBuf> {
|
|
||||||
ruff_db::system::deduplicate_nested_paths(
|
|
||||||
std::iter::once(self.root(db)).chain(system_module_search_paths(db.upcast())),
|
|
||||||
)
|
|
||||||
.map(SystemPath::to_path_buf)
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[salsa::tracked]
impl Package {
    /// Returns the root directory of this package.
    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.root_buf(db)
    }

    /// Returns `true` if `file` is a first-party file part of this package.
    pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
        self.files(db).read().contains(&file)
    }

    /// Removes `file` from this package's file index.
    ///
    /// A no-op when the index hasn't been populated yet (`indexed_mut` returns
    /// `None` for a lazy index; the next `files` call re-discovers from disk anyway).
    #[tracing::instrument(level = "debug", skip(db))]
    pub fn remove_file(self, db: &mut dyn Db, file: File) {
        let Some(mut index) = PackageFiles::indexed_mut(db, self) else {
            return;
        };

        index.remove(file);
    }

    /// Adds `file` to this package's file index.
    ///
    /// A no-op when the index hasn't been populated yet.
    #[tracing::instrument(level = "debug", skip(db))]
    pub fn add_file(self, db: &mut dyn Db, file: File) {
        let Some(mut index) = PackageFiles::indexed_mut(db, self) else {
            return;
        };

        index.insert(file);
    }

    /// Checks every file of this package and returns the collected diagnostics.
    #[tracing::instrument(level = "debug", skip(db))]
    pub(crate) fn check(self, db: &dyn Db) -> Vec<String> {
        let mut result = Vec::new();
        // Iterating through the guard holds the file-index read lock for the loop.
        for file in &self.files(db).read() {
            let diagnostics = check_file(db, file);
            result.extend_from_slice(&diagnostics);
        }

        result
    }

    /// Returns the files belonging to this package.
    #[salsa::tracked]
    pub fn files(self, db: &dyn Db) -> IndexedFiles {
        let files = self.file_set(db);

        let indexed = match files.get() {
            // First access: walk the package root and populate the index.
            Index::Lazy(vacant) => {
                let files = discover_package_files(db, self.root(db));
                vacant.set(files)
            }
            Index::Indexed(indexed) => indexed,
        };

        indexed
    }

    /// Creates a package from discovered metadata with a lazy (unpopulated) file index.
    fn from_metadata(db: &dyn Db, metadata: PackageMetadata) -> Self {
        Self::new(db, metadata.name, metadata.root, PackageFiles::default())
    }

    /// Updates this package in place from refreshed metadata.
    ///
    /// The root must be unchanged; only the name is written back.
    fn update(self, db: &mut dyn Db, metadata: PackageMetadata) {
        let root = self.root(db);
        assert_eq!(root, metadata.root());

        self.set_name(db).to(metadata.name);
    }

    /// Discards the file index so it is rebuilt from disk on next access.
    #[tracing::instrument(level = "debug", skip(db))]
    pub fn reload_files(self, db: &mut dyn Db) {
        // Force a re-index of the files in the next revision.
        self.set_file_set(db).to(PackageFiles::lazy());
    }
}
|
|
||||||
|
|
||||||
pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
|
|
||||||
let mut diagnostics = Vec::new();
|
|
||||||
diagnostics.extend_from_slice(lint_syntax(db, file));
|
|
||||||
diagnostics.extend_from_slice(lint_semantic(db, file));
|
|
||||||
Diagnostics::from(diagnostics)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Walks `path` recursively and returns the Python files found below it.
fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
    // NOTE(review): the visitor-builder closure shape suggests the walk may run
    // on multiple threads — hence the mutex around the collected paths. Confirm
    // against `walk_directory`'s contract.
    let paths = std::sync::Mutex::new(Vec::new());

    db.system().walk_directory(path).run(|| {
        Box::new(|entry| {
            match entry {
                Ok(entry) => {
                    // Skip over any non python files to avoid creating too many entries in `Files`.
                    if entry.file_type().is_file()
                        && entry
                            .path()
                            .extension()
                            .and_then(PySourceType::try_from_extension)
                            .is_some()
                    {
                        let mut paths = paths.lock().unwrap();
                        paths.push(entry.into_path());
                    }
                }
                Err(error) => {
                    // TODO Handle error
                    tracing::error!("Failed to walk path: {error}");
                }
            }

            WalkState::Continue
        })
    });

    // The walk has finished; reclaim the collected paths from the mutex.
    let paths = paths.into_inner().unwrap();
    let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);

    for path in paths {
        // If this returns `None`, then the file was deleted between the `walk_directory` call and now.
        // We can ignore this.
        if let Some(file) = system_path_to_file(db.upcast(), &path) {
            files.insert(file);
        }
    }

    files
}
|
|
||||||
@@ -1,252 +0,0 @@
|
|||||||
use std::iter::FusedIterator;
|
|
||||||
use std::ops::Deref;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use rustc_hash::FxHashSet;
|
|
||||||
|
|
||||||
use crate::db::Db;
|
|
||||||
use crate::workspace::Package;
|
|
||||||
use ruff_db::files::File;
|
|
||||||
|
|
||||||
/// The indexed files of a package.
///
/// The indexing happens lazily, but the files are then cached for subsequent reads.
///
/// ## Implementation
/// The implementation uses internal mutability to transition between the lazy and indexed state
/// without triggering a new salsa revision. This is safe because the initial indexing happens on first access,
/// so no query can be depending on the contents of the indexed files before that. All subsequent mutations to
/// the indexed files must go through `IndexedFilesMut`, which uses the Salsa setter `package.set_file_set` to
/// ensure that Salsa always knows when the set of indexed files have changed.
#[derive(Debug)]
pub struct PackageFiles {
    // Guards the lazy → indexed transition (see `Implementation` above).
    state: std::sync::Mutex<State>,
}
|
|
||||||
|
|
||||||
impl PackageFiles {
    /// Creates a file set in the lazy state: files are indexed on first access.
    pub fn lazy() -> Self {
        Self {
            state: std::sync::Mutex::new(State::Lazy),
        }
    }

    /// Creates a file set that is already populated with `indexed_files`.
    fn indexed(indexed_files: IndexedFiles) -> Self {
        Self {
            state: std::sync::Mutex::new(State::Indexed(indexed_files)),
        }
    }

    /// Returns the current index state.
    ///
    /// A returned [`Index::Lazy`] keeps holding the state lock so the caller can
    /// populate the index without racing another initializer.
    pub fn get(&self) -> Index {
        let state = self.state.lock().unwrap();

        match &*state {
            State::Lazy => Index::Lazy(LazyFiles { files: state }),
            State::Indexed(files) => Index::Indexed(files.clone()),
        }
    }

    /// Returns a mutable view on the index that allows cheap in-place mutations.
    ///
    /// The changes are automatically written back to the database once the view is dropped.
    pub fn indexed_mut(db: &mut dyn Db, package: Package) -> Option<IndexedFilesMut> {
        // Calling `runtime_mut` cancels all pending salsa queries. This ensures that there are no pending
        // reads to the file set.
        let _ = db.runtime_mut();

        let files = package.file_set(db);

        // Nothing to mutate while the index is still lazy.
        let indexed = match &*files.state.lock().unwrap() {
            State::Lazy => return None,
            State::Indexed(indexed) => indexed.clone(),
        };

        Some(IndexedFilesMut {
            db: Some(db),
            package,
            // Starts equal to the current revision; bumped per mutation.
            new_revision: indexed.revision,
            indexed,
        })
    }
}
|
|
||||||
|
|
||||||
impl Default for PackageFiles {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::lazy()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Internal state of a [`PackageFiles`] set.
#[derive(Debug)]
enum State {
    /// The files of a package haven't been indexed yet.
    Lazy,

    /// The files are indexed. Stores the known files of a package.
    Indexed(IndexedFiles),
}
|
|
||||||
|
|
||||||
/// View into a [`PackageFiles`] set, obtained via [`PackageFiles::get`].
pub enum Index<'a> {
    /// The index has not yet been computed. Allows inserting the files.
    Lazy(LazyFiles<'a>),

    /// The index is populated and can be read directly.
    Indexed(IndexedFiles),
}
|
|
||||||
|
|
||||||
/// Package files that have not been indexed yet.
pub struct LazyFiles<'a> {
    // Holds the `PackageFiles` state lock until the index is populated via `set`.
    files: std::sync::MutexGuard<'a, State>,
}
|
|
||||||
|
|
||||||
impl<'a> LazyFiles<'a> {
|
|
||||||
/// Sets the indexed files of a package to `files`.
|
|
||||||
pub fn set(mut self, files: FxHashSet<File>) -> IndexedFiles {
|
|
||||||
let files = IndexedFiles::new(files);
|
|
||||||
*self.files = State::Indexed(files.clone());
|
|
||||||
files
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The indexed files of a package.
///
/// # Salsa integration
/// The type is cheap clonable and allows for in-place mutation of the files. The in-place mutation requires
/// extra care because the type is used as the result of Salsa queries and Salsa relies on a type's equality
/// to determine if the output has changed. This is accomplished by using a `revision` that gets incremented
/// whenever the files are changed. The revision ensures that salsa's comparison of the
/// previous [`IndexedFiles`] with the next [`IndexedFiles`] returns false even though they both
/// point to the same underlying hash set.
///
/// # Equality
/// Two [`IndexedFiles`] are only equal if they have the same revision and point to the **same** (identity) hash set.
#[derive(Debug, Clone)]
pub struct IndexedFiles {
    // Bumped on every write-back so Salsa observes a change (see type docs).
    revision: u64,
    // Shared, lock-protected file set; clones alias the same underlying set.
    files: Arc<std::sync::Mutex<FxHashSet<File>>>,
}
|
|
||||||
|
|
||||||
impl IndexedFiles {
|
|
||||||
fn new(files: FxHashSet<File>) -> Self {
|
|
||||||
Self {
|
|
||||||
files: Arc::new(std::sync::Mutex::new(files)),
|
|
||||||
revision: 0,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Locks the file index for reading.
|
|
||||||
pub fn read(&self) -> IndexedFilesGuard {
|
|
||||||
IndexedFilesGuard {
|
|
||||||
guard: self.files.lock().unwrap(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq for IndexedFiles {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.revision == other.revision && Arc::ptr_eq(&self.files, &other.files)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Pointer identity plus revision is a full equivalence relation.
impl Eq for IndexedFiles {}
|
|
||||||
|
|
||||||
/// Read guard over the file index; the set stays locked while the guard is alive.
pub struct IndexedFilesGuard<'a> {
    guard: std::sync::MutexGuard<'a, FxHashSet<File>>,
}
|
|
||||||
|
|
||||||
impl Deref for IndexedFilesGuard<'_> {
|
|
||||||
type Target = FxHashSet<File>;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.guard
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> IntoIterator for &'a IndexedFilesGuard<'a> {
    type Item = File;
    type IntoIter = IndexedFilesIter<'a>;

    // Iterates the locked set; the guard (and thus the read lock) outlives the iterator.
    fn into_iter(self) -> Self::IntoIter {
        IndexedFilesIter {
            inner: self.guard.iter(),
        }
    }
}
|
|
||||||
|
|
||||||
/// Iterator over the indexed files.
///
/// # Locks
/// Holding on to the iterator locks the file index for reading.
pub struct IndexedFilesIter<'a> {
    inner: std::collections::hash_set::Iter<'a, File>,
}
|
|
||||||
|
|
||||||
impl<'a> Iterator for IndexedFilesIter<'a> {
|
|
||||||
type Item = File;
|
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
|
||||||
self.inner.next().copied()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
|
||||||
self.inner.size_hint()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// The underlying hash-set iterator is fused and knows its exact length,
// so both marker traits can be forwarded.
impl FusedIterator for IndexedFilesIter<'_> {}

impl ExactSizeIterator for IndexedFilesIter<'_> {}
|
|
||||||
|
|
||||||
/// A Mutable view of a package's indexed files.
///
/// Allows in-place mutation of the files without deep cloning the hash set.
/// The changes are written back when the mutable view is dropped or by calling [`Self::set`] manually.
pub struct IndexedFilesMut<'db> {
    // `None` once the changes have been written back; prevents a second write in `Drop`.
    db: Option<&'db mut dyn Db>,
    package: Package,
    indexed: IndexedFiles,
    // Revision to store on write-back; incremented for every successful mutation.
    new_revision: u64,
}
|
|
||||||
|
|
||||||
impl IndexedFilesMut<'_> {
    /// Adds `file` to the index.
    ///
    /// Returns `true` if the file was newly inserted.
    pub fn insert(&mut self, file: File) -> bool {
        if self.indexed.files.lock().unwrap().insert(file) {
            // Bump the pending revision so the write-back registers a change with Salsa.
            self.new_revision += 1;
            true
        } else {
            false
        }
    }

    /// Removes `file` from the index.
    ///
    /// Returns `true` if the file was present.
    pub fn remove(&mut self, file: File) -> bool {
        if self.indexed.files.lock().unwrap().remove(&file) {
            self.new_revision += 1;
            true
        } else {
            false
        }
    }

    /// Writes the changes back to the database.
    pub fn set(mut self) {
        // Delegates to `set_impl`; the subsequent `Drop` sees `db == None` and does nothing.
        self.set_impl();
    }

    fn set_impl(&mut self) {
        // Taking `db` ensures the write-back runs at most once (explicit `set` or `Drop`).
        let Some(db) = self.db.take() else {
            return;
        };

        // Only touch the Salsa input if something actually changed.
        if self.indexed.revision != self.new_revision {
            self.package
                .set_file_set(db)
                .to(PackageFiles::indexed(IndexedFiles {
                    revision: self.new_revision,
                    files: self.indexed.files.clone(),
                }));
        }
    }
}
|
|
||||||
|
|
||||||
impl Drop for IndexedFilesMut<'_> {
    // Ensures pending changes are written back even without an explicit `set` call.
    fn drop(&mut self) {
        self.set_impl();
    }
}
|
|
||||||
@@ -1,68 +0,0 @@
|
|||||||
use ruff_db::system::{System, SystemPath, SystemPathBuf};
|
|
||||||
use ruff_python_ast::name::Name;
|
|
||||||
|
|
||||||
/// Metadata describing a workspace discovered on the file system.
#[derive(Debug)]
pub struct WorkspaceMetadata {
    /// The workspace root directory.
    pub(super) root: SystemPathBuf,

    /// The (first-party) packages in this workspace.
    pub(super) packages: Vec<PackageMetadata>,
}
|
|
||||||
|
|
||||||
/// A first-party package in a workspace.
#[derive(Debug)]
pub struct PackageMetadata {
    /// The package name.
    pub(super) name: Name,

    /// The path to the root directory of the package.
    pub(super) root: SystemPathBuf,
    // TODO: Add the loaded package configuration (not the nested ruff settings)
}
|
|
||||||
|
|
||||||
impl WorkspaceMetadata {
|
|
||||||
/// Discovers the closest workspace at `path` and returns its metadata.
|
|
||||||
pub fn from_path(path: &SystemPath, system: &dyn System) -> anyhow::Result<WorkspaceMetadata> {
|
|
||||||
let root = if system.is_file(path) {
|
|
||||||
path.parent().unwrap().to_path_buf()
|
|
||||||
} else {
|
|
||||||
path.to_path_buf()
|
|
||||||
};
|
|
||||||
|
|
||||||
if !system.is_directory(&root) {
|
|
||||||
anyhow::bail!("no workspace found at {:?}", root);
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: Discover package name from `pyproject.toml`.
|
|
||||||
let package_name: Name = path.file_name().unwrap_or("<root>").into();
|
|
||||||
|
|
||||||
let package = PackageMetadata {
|
|
||||||
name: package_name,
|
|
||||||
root: root.clone(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let workspace = WorkspaceMetadata {
|
|
||||||
root,
|
|
||||||
packages: vec![package],
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(workspace)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn root(&self) -> &SystemPath {
|
|
||||||
&self.root
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn packages(&self) -> &[PackageMetadata] {
|
|
||||||
&self.packages
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PackageMetadata {
    /// Returns the package name.
    pub fn name(&self) -> &Name {
        &self.name
    }

    /// Returns the path to the package's root directory.
    pub fn root(&self) -> &SystemPath {
        &self.root
    }
}
|
|
||||||
@@ -1,726 +0,0 @@
|
|||||||
#![allow(clippy::disallowed_names)]
|
|
||||||
|
|
||||||
use std::time::Duration;
|
|
||||||
|
|
||||||
use anyhow::{anyhow, Context};
|
|
||||||
|
|
||||||
use red_knot::db::RootDatabase;
|
|
||||||
use red_knot::watch;
|
|
||||||
use red_knot::watch::{directory_watcher, WorkspaceWatcher};
|
|
||||||
use red_knot::workspace::WorkspaceMetadata;
|
|
||||||
use red_knot_module_resolver::{resolve_module, ModuleName};
|
|
||||||
use ruff_db::files::{system_path_to_file, File};
|
|
||||||
use ruff_db::program::{Program, ProgramSettings, SearchPathSettings, TargetVersion};
|
|
||||||
use ruff_db::source::source_text;
|
|
||||||
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
|
|
||||||
use ruff_db::Upcast;
|
|
||||||
|
|
||||||
/// Shared fixture for the file-watching tests: a database backed by a
/// temp-dir workspace plus a running directory watcher.
struct TestCase {
    db: RootDatabase,
    // `None` after `stop_watch` has taken and stopped the watcher.
    watcher: Option<WorkspaceWatcher>,
    // Receives the change-event batches emitted by the watcher.
    changes_receiver: crossbeam::channel::Receiver<Vec<watch::ChangeEvent>>,
    // Keeps the temporary directory alive for the duration of the test.
    temp_dir: tempfile::TempDir,
}
|
|
||||||
|
|
||||||
impl TestCase {
    /// Resolves `relative` against the workspace root.
    fn workspace_path(&self, relative: impl AsRef<SystemPath>) -> SystemPathBuf {
        SystemPath::absolute(relative, self.db.workspace().root(&self.db))
    }

    /// Returns the temp directory that contains the workspace.
    fn root_path(&self) -> &SystemPath {
        SystemPath::from_std_path(self.temp_dir.path()).unwrap()
    }

    fn db(&self) -> &RootDatabase {
        &self.db
    }

    fn db_mut(&mut self) -> &mut RootDatabase {
        &mut self.db
    }

    /// Stops the watcher (if still running) and returns every change event
    /// observed so far.
    fn stop_watch(&mut self) -> Vec<watch::ChangeEvent> {
        if let Some(watcher) = self.watcher.take() {
            // Give the watcher some time to catch up.
            std::thread::sleep(Duration::from_millis(10));
            watcher.flush();
            watcher.stop();
        }

        // Draining terminates once the sender (owned by the watcher) is dropped.
        let mut all_events = Vec::new();
        for events in &self.changes_receiver {
            all_events.extend(events);
        }

        all_events
    }

    /// Applies `f` to the current search-path settings, installs the result,
    /// and re-registers the watcher's watched paths.
    fn update_search_path_settings(
        &mut self,
        f: impl FnOnce(&SearchPathSettings) -> SearchPathSettings,
    ) {
        let program = Program::get(self.db());
        let search_path_settings = program.search_paths(self.db());

        let new_settings = f(search_path_settings);

        program.set_search_paths(&mut self.db).to(new_settings);

        if let Some(watcher) = &mut self.watcher {
            watcher.update(&self.db);
            assert!(!watcher.has_errored_paths());
        }
    }

    /// Returns the files of the package containing `path`, sorted by system path.
    fn collect_package_files(&self, path: &SystemPath) -> Vec<File> {
        let package = self.db().workspace().package(self.db(), path).unwrap();
        let files = package.files(self.db());
        let files = files.read();
        let mut collected: Vec<_> = files.into_iter().collect();
        collected.sort_unstable_by_key(|file| file.path(self.db()).as_system_path().unwrap());
        collected
    }

    /// Looks up `path` in the database's file table.
    fn system_file(&self, path: impl AsRef<SystemPath>) -> Option<File> {
        system_path_to_file(self.db(), path.as_ref())
    }
}
|
|
||||||
|
|
||||||
/// Creates a test case using the default search paths (workspace root only).
fn setup<I, P>(workspace_files: I) -> anyhow::Result<TestCase>
where
    I: IntoIterator<Item = (P, &'static str)>,
    P: AsRef<SystemPath>,
{
    setup_with_search_paths(workspace_files, |_root, workspace_path| {
        SearchPathSettings {
            extra_paths: vec![],
            workspace_root: workspace_path.to_path_buf(),
            custom_typeshed: None,
            site_packages: None,
        }
    })
}
|
|
||||||
|
|
||||||
/// Creates a test case: writes `workspace_files` (relative path → content)
/// into a fresh temp-dir workspace, builds the database with the search paths
/// produced by `create_search_paths`, and starts a directory watcher.
fn setup_with_search_paths<I, P>(
    workspace_files: I,
    create_search_paths: impl FnOnce(&SystemPath, &SystemPath) -> SearchPathSettings,
) -> anyhow::Result<TestCase>
where
    I: IntoIterator<Item = (P, &'static str)>,
    P: AsRef<SystemPath>,
{
    let temp_dir = tempfile::tempdir()?;

    let root_path = SystemPath::from_std_path(temp_dir.path()).ok_or_else(|| {
        anyhow!(
            "Temp directory '{}' is not a valid UTF-8 path.",
            temp_dir.path().display()
        )
    })?;

    // Canonicalize so paths reported by the watcher match the paths used here
    // (e.g. symlinked temp dirs on macOS) — presumably why this is needed; verify.
    let root_path = SystemPathBuf::from_utf8_path_buf(
        root_path
            .as_utf8_path()
            .canonicalize_utf8()
            .with_context(|| "Failed to canonicalize root path.")?,
    );

    let workspace_path = root_path.join("workspace");

    std::fs::create_dir_all(workspace_path.as_std_path())
        .with_context(|| format!("Failed to create workspace directory '{workspace_path}'",))?;

    // Materialize the requested workspace files on disk.
    for (relative_path, content) in workspace_files {
        let relative_path = relative_path.as_ref();
        let absolute_path = workspace_path.join(relative_path);
        if let Some(parent) = absolute_path.parent() {
            std::fs::create_dir_all(parent).with_context(|| {
                format!("Failed to create parent directory for file '{relative_path}'.",)
            })?;
        }

        std::fs::write(absolute_path.as_std_path(), content)
            .with_context(|| format!("Failed to write file '{relative_path}'"))?;
    }

    let system = OsSystem::new(&workspace_path);

    let workspace = WorkspaceMetadata::from_path(&workspace_path, &system)?;
    let search_paths = create_search_paths(&root_path, workspace.root());

    // Every configured search path must exist before the watcher starts.
    for path in search_paths
        .extra_paths
        .iter()
        .chain(search_paths.site_packages.iter())
        .chain(search_paths.custom_typeshed.iter())
    {
        std::fs::create_dir_all(path.as_std_path())
            .with_context(|| format!("Failed to create search path '{path}'"))?;
    }

    let settings = ProgramSettings {
        target_version: TargetVersion::default(),
        search_paths,
    };

    let db = RootDatabase::new(workspace, settings, system);

    // Forward watcher event batches into a channel the test can drain later.
    let (sender, receiver) = crossbeam::channel::unbounded();
    let watcher = directory_watcher(move |events| sender.send(events).unwrap())
        .with_context(|| "Failed to create directory watcher")?;

    let watcher = WorkspaceWatcher::new(watcher, &db);
    assert!(!watcher.has_errored_paths());

    let test_case = TestCase {
        db,
        changes_receiver: receiver,
        watcher: Some(watcher),
        temp_dir,
    };

    Ok(test_case)
}
|
|
||||||
|
|
||||||
// Creating a new file inside the workspace adds it to the package's file index.
#[test]
fn new_file() -> anyhow::Result<()> {
    let mut case = setup([("bar.py", "")])?;
    let bar_path = case.workspace_path("bar.py");
    let bar_file = case.system_file(&bar_path).unwrap();
    let foo_path = case.workspace_path("foo.py");

    // `foo.py` doesn't exist yet.
    assert_eq!(case.system_file(&foo_path), None);
    assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]);

    std::fs::write(foo_path.as_std_path(), "print('Hello')")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    // The new file is picked up and added to the package index.
    let foo = case.system_file(&foo_path).expect("foo.py to exist.");

    assert_eq!(&case.collect_package_files(&bar_path), &[bar_file, foo]);

    Ok(())
}
|
|
||||||
|
|
||||||
// A file matched by `.ignore` becomes known to the file table but is
// excluded from the package's file index.
#[test]
fn new_ignored_file() -> anyhow::Result<()> {
    let mut case = setup([("bar.py", ""), (".ignore", "foo.py")])?;
    let bar_path = case.workspace_path("bar.py");
    let bar_file = case.system_file(&bar_path).unwrap();
    let foo_path = case.workspace_path("foo.py");

    assert_eq!(case.system_file(&foo_path), None);
    assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]);

    std::fs::write(foo_path.as_std_path(), "print('Hello')")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    // The file exists on disk but is not part of the package's indexed files.
    assert!(case.system_file(&foo_path).is_some());
    assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]);

    Ok(())
}
|
|
||||||
|
|
||||||
// Editing a file's content is reflected in `source_text` after applying changes.
#[test]
fn changed_file() -> anyhow::Result<()> {
    let foo_source = "print('Hello, world!')";
    let mut case = setup([("foo.py", foo_source)])?;
    let foo_path = case.workspace_path("foo.py");

    let foo = case
        .system_file(&foo_path)
        .ok_or_else(|| anyhow!("Foo not found"))?;
    assert_eq!(source_text(case.db(), foo).as_str(), foo_source);
    assert_eq!(&case.collect_package_files(&foo_path), &[foo]);

    std::fs::write(foo_path.as_std_path(), "print('Version 2')")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    // New content is visible; the package's file set is unchanged.
    assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
    assert_eq!(&case.collect_package_files(&foo_path), &[foo]);

    Ok(())
}
|
|
||||||
|
|
||||||
// Changing only file metadata (permissions) is picked up by the watcher.
// Unix-only: relies on `PermissionsExt::mode`.
#[cfg(unix)]
#[test]
fn changed_metadata() -> anyhow::Result<()> {
    use std::os::unix::fs::PermissionsExt;

    let mut case = setup([("foo.py", "")])?;
    let foo_path = case.workspace_path("foo.py");

    let foo = case
        .system_file(&foo_path)
        .ok_or_else(|| anyhow!("Foo not found"))?;
    // Baseline: recorded permissions match the on-disk mode.
    assert_eq!(
        foo.permissions(case.db()),
        Some(
            std::fs::metadata(foo_path.as_std_path())
                .unwrap()
                .permissions()
                .mode()
        )
    );

    std::fs::set_permissions(
        foo_path.as_std_path(),
        std::fs::Permissions::from_mode(0o777),
    )
    .with_context(|| "Failed to set file permissions.")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    // After applying changes, the recorded permissions reflect the new mode.
    assert_eq!(
        foo.permissions(case.db()),
        Some(
            std::fs::metadata(foo_path.as_std_path())
                .unwrap()
                .permissions()
                .mode()
        )
    );

    Ok(())
}
|
|
||||||
|
|
||||||
// Deleting a file marks it non-existent and removes it from the package index.
#[test]
fn deleted_file() -> anyhow::Result<()> {
    let foo_source = "print('Hello, world!')";
    let mut case = setup([("foo.py", foo_source)])?;
    let foo_path = case.workspace_path("foo.py");

    let foo = case
        .system_file(&foo_path)
        .ok_or_else(|| anyhow!("Foo not found"))?;

    assert!(foo.exists(case.db()));
    assert_eq!(&case.collect_package_files(&foo_path), &[foo]);

    std::fs::remove_file(foo_path.as_std_path())?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    assert!(!foo.exists(case.db()));
    assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]);

    Ok(())
}
|
|
||||||
|
|
||||||
/// Tests the case where a file is moved from inside a watched directory to a directory that is not watched.
///
/// This matches the behavior of deleting a file in VS code.
#[test]
fn move_file_to_trash() -> anyhow::Result<()> {
    let foo_source = "print('Hello, world!')";
    let mut case = setup([("foo.py", foo_source)])?;
    let foo_path = case.workspace_path("foo.py");

    // `.trash` lives next to (not inside) the workspace, so it is not watched.
    let trash_path = case.root_path().join(".trash");
    std::fs::create_dir_all(trash_path.as_std_path())?;

    let foo = case
        .system_file(&foo_path)
        .ok_or_else(|| anyhow!("Foo not found"))?;

    assert!(foo.exists(case.db()));
    assert_eq!(&case.collect_package_files(&foo_path), &[foo]);

    std::fs::rename(
        foo_path.as_std_path(),
        trash_path.join("foo.py").as_std_path(),
    )?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    // The move out of the workspace behaves like a deletion.
    assert!(!foo.exists(case.db()));
    assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]);

    Ok(())
}
|
|
||||||
|
|
||||||
/// Move a file from a non-workspace (non-watched) location into the workspace.
#[test]
fn move_file_to_workspace() -> anyhow::Result<()> {
    let mut case = setup([("bar.py", "")])?;
    let bar_path = case.workspace_path("bar.py");
    let bar = case.system_file(&bar_path).unwrap();

    // Create the file outside the workspace (directly in the temp root).
    let foo_path = case.root_path().join("foo.py");
    std::fs::write(foo_path.as_std_path(), "")?;

    let foo_in_workspace_path = case.workspace_path("foo.py");

    assert!(case.system_file(&foo_path).is_some());
    assert_eq!(&case.collect_package_files(&bar_path), &[bar]);
    // The out-of-workspace file belongs to no package.
    assert!(case
        .db()
        .workspace()
        .package(case.db(), &foo_path)
        .is_none());

    std::fs::rename(foo_path.as_std_path(), foo_in_workspace_path.as_std_path())?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    let foo_in_workspace = case
        .system_file(&foo_in_workspace_path)
        .ok_or_else(|| anyhow!("Foo not found"))?;

    // The moved-in file now exists and joins the package's file index.
    assert!(foo_in_workspace.exists(case.db()));
    assert_eq!(
        &case.collect_package_files(&foo_in_workspace_path),
        &[bar, foo_in_workspace]
    );

    Ok(())
}
|
|
||||||
|
|
||||||
/// Rename a workspace file.
#[test]
fn rename_file() -> anyhow::Result<()> {
    let mut case = setup([("foo.py", "")])?;
    let foo_path = case.workspace_path("foo.py");
    let bar_path = case.workspace_path("bar.py");

    let foo = case
        .system_file(&foo_path)
        .ok_or_else(|| anyhow!("Foo not found"))?;

    assert_eq!(case.collect_package_files(&foo_path), [foo]);

    std::fs::rename(foo_path.as_std_path(), bar_path.as_std_path())?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    // A rename behaves like delete-old + create-new.
    assert!(!foo.exists(case.db()));

    let bar = case
        .system_file(&bar_path)
        .ok_or_else(|| anyhow!("Bar not found"))?;

    assert!(bar.exists(case.db()));
    assert_eq!(case.collect_package_files(&foo_path), [bar]);

    Ok(())
}
|
|
||||||
|
|
||||||
// Moving a directory from outside the workspace into it makes its modules
// resolvable and adds its files to the package index.
#[test]
fn directory_moved_to_workspace() -> anyhow::Result<()> {
    let mut case = setup([("bar.py", "import sub.a")])?;
    let bar = case.system_file(case.workspace_path("bar.py")).unwrap();

    // Build `sub/` outside the workspace first.
    let sub_original_path = case.root_path().join("sub");
    let init_original_path = sub_original_path.join("__init__.py");
    let a_original_path = sub_original_path.join("a.py");

    std::fs::create_dir(sub_original_path.as_std_path())
        .with_context(|| "Failed to create sub directory")?;
    std::fs::write(init_original_path.as_std_path(), "")
        .with_context(|| "Failed to create __init__.py")?;
    std::fs::write(a_original_path.as_std_path(), "").with_context(|| "Failed to create a.py")?;

    let sub_a_module = resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap());

    // Outside the workspace, the module doesn't resolve yet.
    assert_eq!(sub_a_module, None);
    assert_eq!(
        case.collect_package_files(&case.workspace_path("bar.py")),
        &[bar]
    );

    let sub_new_path = case.workspace_path("sub");
    std::fs::rename(sub_original_path.as_std_path(), sub_new_path.as_std_path())
        .with_context(|| "Failed to move sub directory")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    let init_file = case
        .system_file(sub_new_path.join("__init__.py"))
        .expect("__init__.py to exist");
    let a_file = case
        .system_file(sub_new_path.join("a.py"))
        .expect("a.py to exist");

    // `import sub.a` should now resolve
    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some());

    assert_eq!(
        case.collect_package_files(&case.workspace_path("bar.py")),
        &[bar, init_file, a_file]
    );

    Ok(())
}
|
|
||||||
|
|
||||||
// Moving a directory out of the workspace removes its modules and files.
#[test]
fn directory_moved_to_trash() -> anyhow::Result<()> {
    let mut case = setup([
        ("bar.py", "import sub.a"),
        ("sub/__init__.py", ""),
        ("sub/a.py", ""),
    ])?;
    let bar = case.system_file(case.workspace_path("bar.py")).unwrap();

    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some(),);

    let sub_path = case.workspace_path("sub");
    let init_file = case
        .system_file(sub_path.join("__init__.py"))
        .expect("__init__.py to exist");
    let a_file = case
        .system_file(sub_path.join("a.py"))
        .expect("a.py to exist");

    assert_eq!(
        case.collect_package_files(&case.workspace_path("bar.py")),
        &[bar, init_file, a_file]
    );

    // Move `sub/` into an unwatched sibling directory of the workspace.
    std::fs::create_dir(case.root_path().join(".trash").as_std_path())?;
    let trashed_sub = case.root_path().join(".trash/sub");
    std::fs::rename(sub_path.as_std_path(), trashed_sub.as_std_path())
        .with_context(|| "Failed to move the sub directory to the trash")?;

    let changes = case.stop_watch();

    case.db_mut().apply_changes(changes);

    // `import sub.a` should no longer resolve
    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());

    assert!(!init_file.exists(case.db()));
    assert!(!a_file.exists(case.db()));

    assert_eq!(
        case.collect_package_files(&case.workspace_path("bar.py")),
        &[bar]
    );

    Ok(())
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn directory_renamed() -> anyhow::Result<()> {
|
|
||||||
let mut case = setup([
|
|
||||||
("bar.py", "import sub.a"),
|
|
||||||
("sub/__init__.py", ""),
|
|
||||||
("sub/a.py", ""),
|
|
||||||
])?;
|
|
||||||
|
|
||||||
let bar = case.system_file(case.workspace_path("bar.py")).unwrap();
|
|
||||||
|
|
||||||
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some());
|
|
||||||
assert!(resolve_module(
|
|
||||||
case.db().upcast(),
|
|
||||||
ModuleName::new_static("foo.baz").unwrap()
|
|
||||||
)
|
|
||||||
.is_none());
|
|
||||||
|
|
||||||
let sub_path = case.workspace_path("sub");
|
|
||||||
let sub_init = case
|
|
||||||
.system_file(sub_path.join("__init__.py"))
|
|
||||||
.expect("__init__.py to exist");
|
|
||||||
let sub_a = case
|
|
||||||
.system_file(sub_path.join("a.py"))
|
|
||||||
.expect("a.py to exist");
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
case.collect_package_files(&sub_path),
|
|
||||||
&[bar, sub_init, sub_a]
|
|
||||||
);
|
|
||||||
|
|
||||||
let foo_baz = case.workspace_path("foo/baz");
|
|
||||||
|
|
||||||
std::fs::create_dir(case.workspace_path("foo").as_std_path())?;
|
|
||||||
std::fs::rename(sub_path.as_std_path(), foo_baz.as_std_path())
|
|
||||||
.with_context(|| "Failed to move the sub directory")?;
|
|
||||||
|
|
||||||
let changes = case.stop_watch();
|
|
||||||
|
|
||||||
case.db_mut().apply_changes(changes);
|
|
||||||
|
|
||||||
// `import sub.a` should no longer resolve
|
|
||||||
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
|
|
||||||
// `import foo.baz` should now resolve
|
|
||||||
assert!(resolve_module(
|
|
||||||
case.db().upcast(),
|
|
||||||
ModuleName::new_static("foo.baz").unwrap()
|
|
||||||
)
|
|
||||||
.is_some());
|
|
||||||
|
|
||||||
// The old paths are no longer tracked
|
|
||||||
assert!(!sub_init.exists(case.db()));
|
|
||||||
assert!(!sub_a.exists(case.db()));
|
|
||||||
|
|
||||||
let foo_baz_init = case
|
|
||||||
.system_file(foo_baz.join("__init__.py"))
|
|
||||||
.expect("__init__.py to exist");
|
|
||||||
let foo_baz_a = case
|
|
||||||
.system_file(foo_baz.join("a.py"))
|
|
||||||
.expect("a.py to exist");
|
|
||||||
|
|
||||||
// The new paths are synced
|
|
||||||
|
|
||||||
assert!(foo_baz_init.exists(case.db()));
|
|
||||||
assert!(foo_baz_a.exists(case.db()));
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
case.collect_package_files(&sub_path),
|
|
||||||
&[bar, foo_baz_init, foo_baz_a]
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn directory_deleted() -> anyhow::Result<()> {
|
|
||||||
let mut case = setup([
|
|
||||||
("bar.py", "import sub.a"),
|
|
||||||
("sub/__init__.py", ""),
|
|
||||||
("sub/a.py", ""),
|
|
||||||
])?;
|
|
||||||
|
|
||||||
let bar = case.system_file(case.workspace_path("bar.py")).unwrap();
|
|
||||||
|
|
||||||
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some(),);
|
|
||||||
|
|
||||||
let sub_path = case.workspace_path("sub");
|
|
||||||
|
|
||||||
let init_file = case
|
|
||||||
.system_file(sub_path.join("__init__.py"))
|
|
||||||
.expect("__init__.py to exist");
|
|
||||||
let a_file = case
|
|
||||||
.system_file(sub_path.join("a.py"))
|
|
||||||
.expect("a.py to exist");
|
|
||||||
assert_eq!(
|
|
||||||
case.collect_package_files(&sub_path),
|
|
||||||
&[bar, init_file, a_file]
|
|
||||||
);
|
|
||||||
|
|
||||||
std::fs::remove_dir_all(sub_path.as_std_path())
|
|
||||||
.with_context(|| "Failed to remove the sub directory")?;
|
|
||||||
|
|
||||||
let changes = case.stop_watch();
|
|
||||||
|
|
||||||
case.db_mut().apply_changes(changes);
|
|
||||||
|
|
||||||
// `import sub.a` should no longer resolve
|
|
||||||
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
|
|
||||||
|
|
||||||
assert!(!init_file.exists(case.db()));
|
|
||||||
assert!(!a_file.exists(case.db()));
|
|
||||||
assert_eq!(case.collect_package_files(&sub_path), &[bar]);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn search_path() -> anyhow::Result<()> {
|
|
||||||
let mut case =
|
|
||||||
setup_with_search_paths([("bar.py", "import sub.a")], |root_path, workspace_path| {
|
|
||||||
SearchPathSettings {
|
|
||||||
extra_paths: vec![],
|
|
||||||
workspace_root: workspace_path.to_path_buf(),
|
|
||||||
custom_typeshed: None,
|
|
||||||
site_packages: Some(root_path.join("site_packages")),
|
|
||||||
}
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let site_packages = case.root_path().join("site_packages");
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
resolve_module(case.db(), ModuleName::new("a").unwrap()),
|
|
||||||
None
|
|
||||||
);
|
|
||||||
|
|
||||||
std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
|
|
||||||
std::fs::write(site_packages.join("__init__.py").as_std_path(), "")?;
|
|
||||||
|
|
||||||
let changes = case.stop_watch();
|
|
||||||
|
|
||||||
case.db_mut().apply_changes(changes);
|
|
||||||
|
|
||||||
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_some());
|
|
||||||
assert_eq!(
|
|
||||||
case.collect_package_files(&case.workspace_path("bar.py")),
|
|
||||||
&[case.system_file(case.workspace_path("bar.py")).unwrap()]
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn add_search_path() -> anyhow::Result<()> {
|
|
||||||
let mut case = setup([("bar.py", "import sub.a")])?;
|
|
||||||
|
|
||||||
let site_packages = case.workspace_path("site_packages");
|
|
||||||
std::fs::create_dir_all(site_packages.as_std_path())?;
|
|
||||||
|
|
||||||
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_none());
|
|
||||||
|
|
||||||
// Register site-packages as a search path.
|
|
||||||
case.update_search_path_settings(|settings| SearchPathSettings {
|
|
||||||
site_packages: Some(site_packages.clone()),
|
|
||||||
..settings.clone()
|
|
||||||
});
|
|
||||||
|
|
||||||
std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
|
|
||||||
std::fs::write(site_packages.join("__init__.py").as_std_path(), "")?;
|
|
||||||
|
|
||||||
let changes = case.stop_watch();
|
|
||||||
|
|
||||||
case.db_mut().apply_changes(changes);
|
|
||||||
|
|
||||||
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_some());
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn remove_search_path() -> anyhow::Result<()> {
|
|
||||||
let mut case =
|
|
||||||
setup_with_search_paths([("bar.py", "import sub.a")], |root_path, workspace_path| {
|
|
||||||
SearchPathSettings {
|
|
||||||
extra_paths: vec![],
|
|
||||||
workspace_root: workspace_path.to_path_buf(),
|
|
||||||
custom_typeshed: None,
|
|
||||||
site_packages: Some(root_path.join("site_packages")),
|
|
||||||
}
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// Remove site packages from the search path settings.
|
|
||||||
let site_packages = case.root_path().join("site_packages");
|
|
||||||
case.update_search_path_settings(|settings| SearchPathSettings {
|
|
||||||
site_packages: None,
|
|
||||||
..settings.clone()
|
|
||||||
});
|
|
||||||
|
|
||||||
std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
|
|
||||||
|
|
||||||
let changes = case.stop_watch();
|
|
||||||
|
|
||||||
assert_eq!(changes, &[]);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "red_knot_module_resolver"
|
|
||||||
version = "0.0.0"
|
|
||||||
publish = false
|
|
||||||
authors = { workspace = true }
|
|
||||||
edition = { workspace = true }
|
|
||||||
rust-version = { workspace = true }
|
|
||||||
homepage = { workspace = true }
|
|
||||||
documentation = { workspace = true }
|
|
||||||
repository = { workspace = true }
|
|
||||||
license = { workspace = true }
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
ruff_db = { workspace = true }
|
|
||||||
ruff_python_stdlib = { workspace = true }
|
|
||||||
|
|
||||||
compact_str = { workspace = true }
|
|
||||||
camino = { workspace = true }
|
|
||||||
once_cell = { workspace = true }
|
|
||||||
rustc-hash = { workspace = true }
|
|
||||||
salsa = { workspace = true }
|
|
||||||
tracing = { workspace = true }
|
|
||||||
zip = { workspace = true }
|
|
||||||
|
|
||||||
[build-dependencies]
|
|
||||||
path-slash = { workspace = true }
|
|
||||||
walkdir = { workspace = true }
|
|
||||||
zip = { workspace = true }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
ruff_db = { workspace = true, features = ["os"] }
|
|
||||||
|
|
||||||
anyhow = { workspace = true }
|
|
||||||
insta = { workspace = true }
|
|
||||||
tempfile = { workspace = true }
|
|
||||||
walkdir = { workspace = true }
|
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
# Red Knot
|
|
||||||
|
|
||||||
A work-in-progress multifile module resolver for Ruff.
|
|
||||||
|
|
||||||
## Vendored types for the stdlib
|
|
||||||
|
|
||||||
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_module_resolver/vendor/typeshed`. The file `crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
|
|
||||||
|
|
||||||
The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
|
|
||||||
@@ -1,74 +0,0 @@
|
|||||||
//! Build script to package our vendored typeshed files
|
|
||||||
//! into a zip archive that can be included in the Ruff binary.
|
|
||||||
//!
|
|
||||||
//! This script should be automatically run at build time
|
|
||||||
//! whenever the script itself changes, or whenever any files
|
|
||||||
//! in `crates/red_knot_module_resolver/vendor/typeshed` change.
|
|
||||||
|
|
||||||
use std::fs::File;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use path_slash::PathExt;
|
|
||||||
use zip::result::ZipResult;
|
|
||||||
use zip::write::{FileOptions, ZipWriter};
|
|
||||||
use zip::CompressionMethod;
|
|
||||||
|
|
||||||
const TYPESHED_SOURCE_DIR: &str = "vendor/typeshed";
|
|
||||||
const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";
|
|
||||||
|
|
||||||
/// Recursively zip the contents of an entire directory.
|
|
||||||
///
|
|
||||||
/// This routine is adapted from a recipe at
|
|
||||||
/// <https://github.com/zip-rs/zip-old/blob/5d0f198124946b7be4e5969719a7f29f363118cd/examples/write_dir.rs>
|
|
||||||
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
|
|
||||||
let mut zip = ZipWriter::new(writer);
|
|
||||||
|
|
||||||
let options = FileOptions::default()
|
|
||||||
.compression_method(CompressionMethod::Zstd)
|
|
||||||
.unix_permissions(0o644);
|
|
||||||
|
|
||||||
for entry in walkdir::WalkDir::new(directory_path) {
|
|
||||||
let dir_entry = entry.unwrap();
|
|
||||||
let absolute_path = dir_entry.path();
|
|
||||||
let normalized_relative_path = absolute_path
|
|
||||||
.strip_prefix(Path::new(directory_path))
|
|
||||||
.unwrap()
|
|
||||||
.to_slash()
|
|
||||||
.expect("Unexpected non-utf8 typeshed path!");
|
|
||||||
|
|
||||||
// Write file or directory explicitly
|
|
||||||
// Some unzip tools unzip files with directory paths correctly, some do not!
|
|
||||||
if absolute_path.is_file() {
|
|
||||||
println!("adding file {absolute_path:?} as {normalized_relative_path:?} ...");
|
|
||||||
zip.start_file(normalized_relative_path, options)?;
|
|
||||||
let mut f = File::open(absolute_path)?;
|
|
||||||
std::io::copy(&mut f, &mut zip).unwrap();
|
|
||||||
} else if !normalized_relative_path.is_empty() {
|
|
||||||
// Only if not root! Avoids path spec / warning
|
|
||||||
// and mapname conversion failed error on unzip
|
|
||||||
println!("adding dir {absolute_path:?} as {normalized_relative_path:?} ...");
|
|
||||||
zip.add_directory(normalized_relative_path, options)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
zip.finish()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn main() {
|
|
||||||
println!("cargo:rerun-if-changed={TYPESHED_SOURCE_DIR}");
|
|
||||||
assert!(
|
|
||||||
Path::new(TYPESHED_SOURCE_DIR).is_dir(),
|
|
||||||
"Where is typeshed?"
|
|
||||||
);
|
|
||||||
let out_dir = std::env::var("OUT_DIR").unwrap();
|
|
||||||
|
|
||||||
// N.B. Deliberately using `format!()` instead of `Path::join()` here,
|
|
||||||
// so that we use `/` as a path separator on all platforms.
|
|
||||||
// That enables us to load the typeshed zip at compile time in `module.rs`
|
|
||||||
// (otherwise we'd have to dynamically determine the exact path to the typeshed zip
|
|
||||||
// based on the default path separator for the specific platform we're on,
|
|
||||||
// which can't be done at compile time.)
|
|
||||||
let zipped_typeshed_location = format!("{out_dir}{TYPESHED_ZIP_LOCATION}");
|
|
||||||
|
|
||||||
let zipped_typeshed = File::create(zipped_typeshed_location).unwrap();
|
|
||||||
zip_dir(TYPESHED_SOURCE_DIR, zipped_typeshed).unwrap();
|
|
||||||
}
|
|
||||||
@@ -1,129 +0,0 @@
|
|||||||
use ruff_db::Upcast;
|
|
||||||
|
|
||||||
use crate::resolver::{
|
|
||||||
editable_install_resolution_paths, file_to_module, internal::ModuleNameIngredient,
|
|
||||||
module_resolution_settings, resolve_module_query,
|
|
||||||
};
|
|
||||||
use crate::typeshed::parse_typeshed_versions;
|
|
||||||
|
|
||||||
#[salsa::jar(db=Db)]
|
|
||||||
pub struct Jar(
|
|
||||||
ModuleNameIngredient<'_>,
|
|
||||||
module_resolution_settings,
|
|
||||||
editable_install_resolution_paths,
|
|
||||||
resolve_module_query,
|
|
||||||
file_to_module,
|
|
||||||
parse_typeshed_versions,
|
|
||||||
);
|
|
||||||
|
|
||||||
pub trait Db: salsa::DbWithJar<Jar> + ruff_db::Db + Upcast<dyn ruff_db::Db> {}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
pub(crate) mod tests {
|
|
||||||
use std::sync;
|
|
||||||
|
|
||||||
use salsa::DebugWithDb;
|
|
||||||
|
|
||||||
use ruff_db::files::Files;
|
|
||||||
use ruff_db::system::{DbWithTestSystem, TestSystem};
|
|
||||||
use ruff_db::vendored::VendoredFileSystem;
|
|
||||||
|
|
||||||
use crate::vendored_typeshed_stubs;
|
|
||||||
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[salsa::db(Jar, ruff_db::Jar)]
|
|
||||||
pub(crate) struct TestDb {
|
|
||||||
storage: salsa::Storage<Self>,
|
|
||||||
system: TestSystem,
|
|
||||||
vendored: VendoredFileSystem,
|
|
||||||
files: Files,
|
|
||||||
events: sync::Arc<sync::Mutex<Vec<salsa::Event>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TestDb {
|
|
||||||
pub(crate) fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
storage: salsa::Storage::default(),
|
|
||||||
system: TestSystem::default(),
|
|
||||||
vendored: vendored_typeshed_stubs().snapshot(),
|
|
||||||
events: sync::Arc::default(),
|
|
||||||
files: Files::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Takes the salsa events.
|
|
||||||
///
|
|
||||||
/// ## Panics
|
|
||||||
/// If there are any pending salsa snapshots.
|
|
||||||
pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
|
|
||||||
let inner = sync::Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");
|
|
||||||
|
|
||||||
let events = inner.get_mut().unwrap();
|
|
||||||
std::mem::take(&mut *events)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Clears the salsa events.
|
|
||||||
///
|
|
||||||
/// ## Panics
|
|
||||||
/// If there are any pending salsa snapshots.
|
|
||||||
pub(crate) fn clear_salsa_events(&mut self) {
|
|
||||||
self.take_salsa_events();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Upcast<dyn ruff_db::Db> for TestDb {
|
|
||||||
fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
fn upcast_mut(&mut self) -> &mut (dyn ruff_db::Db + 'static) {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ruff_db::Db for TestDb {
|
|
||||||
fn vendored(&self) -> &VendoredFileSystem {
|
|
||||||
&self.vendored
|
|
||||||
}
|
|
||||||
|
|
||||||
fn system(&self) -> &dyn ruff_db::system::System {
|
|
||||||
&self.system
|
|
||||||
}
|
|
||||||
|
|
||||||
fn files(&self) -> &Files {
|
|
||||||
&self.files
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Db for TestDb {}
|
|
||||||
|
|
||||||
impl DbWithTestSystem for TestDb {
|
|
||||||
fn test_system(&self) -> &TestSystem {
|
|
||||||
&self.system
|
|
||||||
}
|
|
||||||
|
|
||||||
fn test_system_mut(&mut self) -> &mut TestSystem {
|
|
||||||
&mut self.system
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl salsa::Database for TestDb {
|
|
||||||
fn salsa_event(&self, event: salsa::Event) {
|
|
||||||
tracing::trace!("event: {:?}", event.debug(self));
|
|
||||||
let mut events = self.events.lock().unwrap();
|
|
||||||
events.push(event);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl salsa::ParallelDatabase for TestDb {
|
|
||||||
fn snapshot(&self) -> salsa::Snapshot<Self> {
|
|
||||||
salsa::Snapshot::new(Self {
|
|
||||||
storage: self.storage.snapshot(),
|
|
||||||
system: self.system.snapshot(),
|
|
||||||
vendored: self.vendored.snapshot(),
|
|
||||||
files: self.files.snapshot(),
|
|
||||||
events: self.events.clone(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
use std::iter::FusedIterator;
|
|
||||||
|
|
||||||
pub use db::{Db, Jar};
|
|
||||||
pub use module::{Module, ModuleKind};
|
|
||||||
pub use module_name::ModuleName;
|
|
||||||
pub use resolver::resolve_module;
|
|
||||||
use ruff_db::system::SystemPath;
|
|
||||||
pub use typeshed::{
|
|
||||||
vendored_typeshed_stubs, TypeshedVersionsParseError, TypeshedVersionsParseErrorKind,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::resolver::{module_resolution_settings, SearchPathIterator};
|
|
||||||
|
|
||||||
mod db;
|
|
||||||
mod module;
|
|
||||||
mod module_name;
|
|
||||||
mod path;
|
|
||||||
mod resolver;
|
|
||||||
mod state;
|
|
||||||
mod typeshed;
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod testing;
|
|
||||||
|
|
||||||
/// Returns an iterator over all search paths pointing to a system path
|
|
||||||
pub fn system_module_search_paths(db: &dyn Db) -> SystemModuleSearchPathsIter {
|
|
||||||
SystemModuleSearchPathsIter {
|
|
||||||
inner: module_resolution_settings(db).search_paths(db),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct SystemModuleSearchPathsIter<'db> {
|
|
||||||
inner: SearchPathIterator<'db>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'db> Iterator for SystemModuleSearchPathsIter<'db> {
|
|
||||||
type Item = &'db SystemPath;
|
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
|
||||||
loop {
|
|
||||||
let next = self.inner.next()?;
|
|
||||||
|
|
||||||
if let Some(system_path) = next.as_system_path() {
|
|
||||||
return Some(system_path);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FusedIterator for SystemModuleSearchPathsIter<'_> {}
|
|
||||||
@@ -1,91 +0,0 @@
|
|||||||
use std::fmt::Formatter;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use ruff_db::files::File;
|
|
||||||
|
|
||||||
use crate::db::Db;
|
|
||||||
use crate::module_name::ModuleName;
|
|
||||||
use crate::path::ModuleSearchPath;
|
|
||||||
|
|
||||||
/// Representation of a Python module.
|
|
||||||
#[derive(Clone, PartialEq, Eq)]
|
|
||||||
pub struct Module {
|
|
||||||
inner: Arc<ModuleInner>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Module {
|
|
||||||
pub(crate) fn new(
|
|
||||||
name: ModuleName,
|
|
||||||
kind: ModuleKind,
|
|
||||||
search_path: ModuleSearchPath,
|
|
||||||
file: File,
|
|
||||||
) -> Self {
|
|
||||||
Self {
|
|
||||||
inner: Arc::new(ModuleInner {
|
|
||||||
name,
|
|
||||||
kind,
|
|
||||||
search_path,
|
|
||||||
file,
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The absolute name of the module (e.g. `foo.bar`)
|
|
||||||
pub fn name(&self) -> &ModuleName {
|
|
||||||
&self.inner.name
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The file to the source code that defines this module
|
|
||||||
pub fn file(&self) -> File {
|
|
||||||
self.inner.file
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The search path from which the module was resolved.
|
|
||||||
pub(crate) fn search_path(&self) -> &ModuleSearchPath {
|
|
||||||
&self.inner.search_path
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Determine whether this module is a single-file module or a package
|
|
||||||
pub fn kind(&self) -> ModuleKind {
|
|
||||||
self.inner.kind
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Debug for Module {
|
|
||||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
|
||||||
f.debug_struct("Module")
|
|
||||||
.field("name", &self.name())
|
|
||||||
.field("kind", &self.kind())
|
|
||||||
.field("file", &self.file())
|
|
||||||
.field("search_path", &self.search_path())
|
|
||||||
.finish()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl salsa::DebugWithDb<dyn Db> for Module {
|
|
||||||
fn fmt(&self, f: &mut Formatter<'_>, db: &dyn Db) -> std::fmt::Result {
|
|
||||||
f.debug_struct("Module")
|
|
||||||
.field("name", &self.name())
|
|
||||||
.field("kind", &self.kind())
|
|
||||||
.field("file", &self.file().debug(db.upcast()))
|
|
||||||
.field("search_path", &self.search_path())
|
|
||||||
.finish()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(PartialEq, Eq)]
|
|
||||||
struct ModuleInner {
|
|
||||||
name: ModuleName,
|
|
||||||
kind: ModuleKind,
|
|
||||||
search_path: ModuleSearchPath,
|
|
||||||
file: File,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
|
||||||
pub enum ModuleKind {
|
|
||||||
/// A single-file module (e.g. `foo.py` or `foo.pyi`)
|
|
||||||
Module,
|
|
||||||
|
|
||||||
/// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
|
|
||||||
Package,
|
|
||||||
}
|
|
||||||
@@ -1,198 +0,0 @@
|
|||||||
use std::fmt;
|
|
||||||
use std::ops::Deref;
|
|
||||||
|
|
||||||
use compact_str::{CompactString, ToCompactString};
|
|
||||||
|
|
||||||
use ruff_python_stdlib::identifiers::is_identifier;
|
|
||||||
|
|
||||||
/// A module name, e.g. `foo.bar`.
|
|
||||||
///
|
|
||||||
/// Always normalized to the absolute form (never a relative module name, i.e., never `.foo`).
|
|
||||||
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
|
|
||||||
pub struct ModuleName(compact_str::CompactString);
|
|
||||||
|
|
||||||
impl ModuleName {
|
|
||||||
/// Creates a new module name for `name`. Returns `Some` if `name` is a valid, absolute
|
|
||||||
/// module name and `None` otherwise.
|
|
||||||
///
|
|
||||||
/// The module name is invalid if:
|
|
||||||
///
|
|
||||||
/// * The name is empty
|
|
||||||
/// * The name is relative
|
|
||||||
/// * The name ends with a `.`
|
|
||||||
/// * The name contains a sequence of multiple dots
|
|
||||||
/// * A component of a name (the part between two dots) isn't a valid python identifier.
|
|
||||||
#[inline]
|
|
||||||
#[must_use]
|
|
||||||
pub fn new(name: &str) -> Option<Self> {
|
|
||||||
Self::is_valid_name(name).then(|| Self(CompactString::from(name)))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a new module name for `name` where `name` is a static string.
|
|
||||||
/// Returns `Some` if `name` is a valid, absolute module name and `None` otherwise.
|
|
||||||
///
|
|
||||||
/// The module name is invalid if:
|
|
||||||
///
|
|
||||||
/// * The name is empty
|
|
||||||
/// * The name is relative
|
|
||||||
/// * The name ends with a `.`
|
|
||||||
/// * The name contains a sequence of multiple dots
|
|
||||||
/// * A component of a name (the part between two dots) isn't a valid python identifier.
|
|
||||||
///
|
|
||||||
/// ## Examples
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// use red_knot_module_resolver::ModuleName;
|
|
||||||
///
|
|
||||||
/// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
|
|
||||||
/// assert_eq!(ModuleName::new_static(""), None);
|
|
||||||
/// assert_eq!(ModuleName::new_static("..foo"), None);
|
|
||||||
/// assert_eq!(ModuleName::new_static(".foo"), None);
|
|
||||||
/// assert_eq!(ModuleName::new_static("foo."), None);
|
|
||||||
/// assert_eq!(ModuleName::new_static("foo..bar"), None);
|
|
||||||
/// assert_eq!(ModuleName::new_static("2000"), None);
|
|
||||||
/// ```
|
|
||||||
#[inline]
|
|
||||||
#[must_use]
|
|
||||||
pub fn new_static(name: &'static str) -> Option<Self> {
|
|
||||||
Self::is_valid_name(name).then(|| Self(CompactString::const_new(name)))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
fn is_valid_name(name: &str) -> bool {
|
|
||||||
!name.is_empty() && name.split('.').all(is_identifier)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An iterator over the components of the module name:
|
|
||||||
///
|
|
||||||
/// # Examples
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// use red_knot_module_resolver::ModuleName;
|
|
||||||
///
|
|
||||||
/// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
|
|
||||||
/// ```
|
|
||||||
#[must_use]
|
|
||||||
pub fn components(&self) -> impl DoubleEndedIterator<Item = &str> {
|
|
||||||
self.0.split('.')
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The name of this module's immediate parent, if it has a parent.
|
|
||||||
///
|
|
||||||
/// # Examples
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// use red_knot_module_resolver::ModuleName;
|
|
||||||
///
|
|
||||||
/// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
|
|
||||||
/// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
|
|
||||||
/// assert_eq!(ModuleName::new_static("root").unwrap().parent(), None);
|
|
||||||
/// ```
|
|
||||||
#[must_use]
|
|
||||||
pub fn parent(&self) -> Option<ModuleName> {
|
|
||||||
let (parent, _) = self.0.rsplit_once('.')?;
|
|
||||||
Some(Self(parent.to_compact_string()))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns `true` if the name starts with `other`.
|
|
||||||
///
|
|
||||||
/// This is equivalent to checking if `self` is a sub-module of `other`.
|
|
||||||
///
|
|
||||||
/// # Examples
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// use red_knot_module_resolver::ModuleName;
|
|
||||||
///
|
|
||||||
/// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
|
|
||||||
///
|
|
||||||
/// assert!(!ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("bar").unwrap()));
|
|
||||||
/// assert!(!ModuleName::new_static("foo_bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
|
|
||||||
/// ```
|
|
||||||
#[must_use]
|
|
||||||
pub fn starts_with(&self, other: &ModuleName) -> bool {
|
|
||||||
let mut self_components = self.components();
|
|
||||||
let other_components = other.components();
|
|
||||||
|
|
||||||
for other_component in other_components {
|
|
||||||
if self_components.next() != Some(other_component) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
#[inline]
|
|
||||||
pub fn as_str(&self) -> &str {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Construct a [`ModuleName`] from a sequence of parts.
|
|
||||||
///
|
|
||||||
/// # Examples
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// use red_knot_module_resolver::ModuleName;
|
|
||||||
///
|
|
||||||
/// assert_eq!(&*ModuleName::from_components(["a"]).unwrap(), "a");
|
|
||||||
/// assert_eq!(&*ModuleName::from_components(["a", "b"]).unwrap(), "a.b");
|
|
||||||
/// assert_eq!(&*ModuleName::from_components(["a", "b", "c"]).unwrap(), "a.b.c");
|
|
||||||
///
|
|
||||||
/// assert_eq!(ModuleName::from_components(["a-b"]), None);
|
|
||||||
/// assert_eq!(ModuleName::from_components(["a", "a-b"]), None);
|
|
||||||
/// assert_eq!(ModuleName::from_components(["a", "b", "a-b-c"]), None);
|
|
||||||
/// ```
|
|
||||||
#[must_use]
|
|
||||||
pub fn from_components<'a>(components: impl IntoIterator<Item = &'a str>) -> Option<Self> {
|
|
||||||
let mut components = components.into_iter();
|
|
||||||
let first_part = components.next()?;
|
|
||||||
if !is_identifier(first_part) {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
let name = if let Some(second_part) = components.next() {
|
|
||||||
if !is_identifier(second_part) {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
let mut name = format!("{first_part}.{second_part}");
|
|
||||||
for part in components {
|
|
||||||
if !is_identifier(part) {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
name.push('.');
|
|
||||||
name.push_str(part);
|
|
||||||
}
|
|
||||||
CompactString::from(&name)
|
|
||||||
} else {
|
|
||||||
CompactString::from(first_part)
|
|
||||||
};
|
|
||||||
Some(Self(name))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for ModuleName {
|
|
||||||
type Target = str;
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
self.as_str()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq<str> for ModuleName {
|
|
||||||
fn eq(&self, other: &str) -> bool {
|
|
||||||
self.as_str() == other
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq<ModuleName> for str {
|
|
||||||
fn eq(&self, other: &ModuleName) -> bool {
|
|
||||||
self == other.as_str()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for ModuleName {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
f.write_str(&self.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,30 +0,0 @@
|
|||||||
use ruff_db::program::TargetVersion;
|
|
||||||
use ruff_db::system::System;
|
|
||||||
use ruff_db::vendored::VendoredFileSystem;
|
|
||||||
|
|
||||||
use crate::db::Db;
|
|
||||||
use crate::typeshed::LazyTypeshedVersions;
|
|
||||||
|
|
||||||
pub(crate) struct ResolverState<'db> {
|
|
||||||
pub(crate) db: &'db dyn Db,
|
|
||||||
pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
|
|
||||||
pub(crate) target_version: TargetVersion,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'db> ResolverState<'db> {
|
|
||||||
pub(crate) fn new(db: &'db dyn Db, target_version: TargetVersion) -> Self {
|
|
||||||
Self {
|
|
||||||
db,
|
|
||||||
typeshed_versions: LazyTypeshedVersions::new(),
|
|
||||||
target_version,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn system(&self) -> &dyn System {
|
|
||||||
self.db.system()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn vendored(&self) -> &VendoredFileSystem {
|
|
||||||
self.db.vendored()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,291 +0,0 @@
|
|||||||
use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
|
|
||||||
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
|
|
||||||
use ruff_db::vendored::VendoredPathBuf;
|
|
||||||
|
|
||||||
use crate::db::tests::TestDb;
|
|
||||||
|
|
||||||
/// A test case for the module resolver.
|
|
||||||
///
|
|
||||||
/// You generally shouldn't construct instances of this struct directly;
|
|
||||||
/// instead, use the [`TestCaseBuilder`].
|
|
||||||
pub(crate) struct TestCase<T> {
|
|
||||||
pub(crate) db: TestDb,
|
|
||||||
pub(crate) src: SystemPathBuf,
|
|
||||||
pub(crate) stdlib: T,
|
|
||||||
pub(crate) site_packages: SystemPathBuf,
|
|
||||||
pub(crate) target_version: TargetVersion,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A `(file_name, file_contents)` tuple
|
|
||||||
pub(crate) type FileSpec = (&'static str, &'static str);
|
|
||||||
|
|
||||||
/// Specification for a typeshed mock to be created as part of a test
|
|
||||||
#[derive(Debug, Clone, Copy, Default)]
|
|
||||||
pub(crate) struct MockedTypeshed {
|
|
||||||
/// The stdlib files to be created in the typeshed mock
|
|
||||||
pub(crate) stdlib_files: &'static [FileSpec],
|
|
||||||
|
|
||||||
/// The contents of the `stdlib/VERSIONS` file
|
|
||||||
/// to be created in the typeshed mock
|
|
||||||
pub(crate) versions: &'static str,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct VendoredTypeshed;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct UnspecifiedTypeshed;
|
|
||||||
|
|
||||||
/// A builder for a module-resolver test case.
|
|
||||||
///
|
|
||||||
/// The builder takes care of creating a [`TestDb`]
|
|
||||||
/// instance, applying the module resolver settings,
|
|
||||||
/// and creating mock directories for the stdlib, `site-packages`,
|
|
||||||
/// first-party code, etc.
|
|
||||||
///
|
|
||||||
/// For simple tests that do not involve typeshed,
|
|
||||||
/// test cases can be created as follows:
|
|
||||||
///
|
|
||||||
/// ```rs
|
|
||||||
/// let test_case = TestCaseBuilder::new()
|
|
||||||
/// .with_src_files(...)
|
|
||||||
/// .build();
|
|
||||||
///
|
|
||||||
/// let test_case2 = TestCaseBuilder::new()
|
|
||||||
/// .with_site_packages_files(...)
|
|
||||||
/// .build();
|
|
||||||
/// ```
|
|
||||||
///
|
|
||||||
/// Any tests can specify the target Python version that should be used
|
|
||||||
/// in the module resolver settings:
|
|
||||||
///
|
|
||||||
/// ```rs
|
|
||||||
/// let test_case = TestCaseBuilder::new()
|
|
||||||
/// .with_src_files(...)
|
|
||||||
/// .with_target_version(...)
|
|
||||||
/// .build();
|
|
||||||
/// ```
|
|
||||||
///
|
|
||||||
/// For tests checking that standard-library module resolution is working
|
|
||||||
/// correctly, you should usually create a [`MockedTypeshed`] instance
|
|
||||||
/// and pass it to the [`TestCaseBuilder::with_custom_typeshed`] method.
|
|
||||||
/// If you need to check something that involves the vendored typeshed stubs
|
|
||||||
/// we include as part of the binary, you can instead use the
|
|
||||||
/// [`TestCaseBuilder::with_vendored_typeshed`] method.
|
|
||||||
/// For either of these, you should almost always try to be explicit
|
|
||||||
/// about the Python version you want to be specified in the module-resolver
|
|
||||||
/// settings for the test:
|
|
||||||
///
|
|
||||||
/// ```rs
|
|
||||||
/// const TYPESHED = MockedTypeshed { ... };
|
|
||||||
///
|
|
||||||
/// let test_case = resolver_test_case()
|
|
||||||
/// .with_custom_typeshed(TYPESHED)
|
|
||||||
/// .with_target_version(...)
|
|
||||||
/// .build();
|
|
||||||
///
|
|
||||||
/// let test_case2 = resolver_test_case()
|
|
||||||
/// .with_vendored_typeshed()
|
|
||||||
/// .with_target_version(...)
|
|
||||||
/// .build();
|
|
||||||
/// ```
|
|
||||||
///
|
|
||||||
/// If you have not called one of those options, the `stdlib` field
|
|
||||||
/// on the [`TestCase`] instance created from `.build()` will be set
|
|
||||||
/// to `()`.
|
|
||||||
pub(crate) struct TestCaseBuilder<T> {
|
|
||||||
typeshed_option: T,
|
|
||||||
target_version: TargetVersion,
|
|
||||||
first_party_files: Vec<FileSpec>,
|
|
||||||
site_packages_files: Vec<FileSpec>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> TestCaseBuilder<T> {
|
|
||||||
/// Specify files to be created in the `src` mock directory
|
|
||||||
pub(crate) fn with_src_files(mut self, files: &[FileSpec]) -> Self {
|
|
||||||
self.first_party_files.extend(files.iter().copied());
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Specify files to be created in the `site-packages` mock directory
|
|
||||||
pub(crate) fn with_site_packages_files(mut self, files: &[FileSpec]) -> Self {
|
|
||||||
self.site_packages_files.extend(files.iter().copied());
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Specify the target Python version the module resolver should assume
|
|
||||||
pub(crate) fn with_target_version(mut self, target_version: TargetVersion) -> Self {
|
|
||||||
self.target_version = target_version;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
fn write_mock_directory(
|
|
||||||
db: &mut TestDb,
|
|
||||||
location: impl AsRef<SystemPath>,
|
|
||||||
files: impl IntoIterator<Item = FileSpec>,
|
|
||||||
) -> SystemPathBuf {
|
|
||||||
let root = location.as_ref().to_path_buf();
|
|
||||||
// Make sure to create the directory even if the list of files is empty:
|
|
||||||
db.memory_file_system().create_directory_all(&root).unwrap();
|
|
||||||
db.write_files(
|
|
||||||
files
|
|
||||||
.into_iter()
|
|
||||||
.map(|(relative_path, contents)| (root.join(relative_path), contents)),
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
root
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TestCaseBuilder<UnspecifiedTypeshed> {
|
|
||||||
pub(crate) fn new() -> TestCaseBuilder<UnspecifiedTypeshed> {
|
|
||||||
Self {
|
|
||||||
typeshed_option: UnspecifiedTypeshed,
|
|
||||||
target_version: TargetVersion::default(),
|
|
||||||
first_party_files: vec![],
|
|
||||||
site_packages_files: vec![],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Use the vendored stdlib stubs included in the Ruff binary for this test case
|
|
||||||
pub(crate) fn with_vendored_typeshed(self) -> TestCaseBuilder<VendoredTypeshed> {
|
|
||||||
let TestCaseBuilder {
|
|
||||||
typeshed_option: _,
|
|
||||||
target_version,
|
|
||||||
first_party_files,
|
|
||||||
site_packages_files,
|
|
||||||
} = self;
|
|
||||||
TestCaseBuilder {
|
|
||||||
typeshed_option: VendoredTypeshed,
|
|
||||||
target_version,
|
|
||||||
first_party_files,
|
|
||||||
site_packages_files,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Use a mock typeshed directory for this test case
|
|
||||||
pub(crate) fn with_custom_typeshed(
|
|
||||||
self,
|
|
||||||
typeshed: MockedTypeshed,
|
|
||||||
) -> TestCaseBuilder<MockedTypeshed> {
|
|
||||||
let TestCaseBuilder {
|
|
||||||
typeshed_option: _,
|
|
||||||
target_version,
|
|
||||||
first_party_files,
|
|
||||||
site_packages_files,
|
|
||||||
} = self;
|
|
||||||
TestCaseBuilder {
|
|
||||||
typeshed_option: typeshed,
|
|
||||||
target_version,
|
|
||||||
first_party_files,
|
|
||||||
site_packages_files,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn build(self) -> TestCase<()> {
|
|
||||||
let TestCase {
|
|
||||||
db,
|
|
||||||
src,
|
|
||||||
stdlib: _,
|
|
||||||
site_packages,
|
|
||||||
target_version,
|
|
||||||
} = self.with_custom_typeshed(MockedTypeshed::default()).build();
|
|
||||||
TestCase {
|
|
||||||
db,
|
|
||||||
src,
|
|
||||||
stdlib: (),
|
|
||||||
site_packages,
|
|
||||||
target_version,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TestCaseBuilder<MockedTypeshed> {
|
|
||||||
pub(crate) fn build(self) -> TestCase<SystemPathBuf> {
|
|
||||||
let TestCaseBuilder {
|
|
||||||
typeshed_option,
|
|
||||||
target_version,
|
|
||||||
first_party_files,
|
|
||||||
site_packages_files,
|
|
||||||
} = self;
|
|
||||||
|
|
||||||
let mut db = TestDb::new();
|
|
||||||
|
|
||||||
let site_packages =
|
|
||||||
Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
|
|
||||||
let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
|
|
||||||
let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);
|
|
||||||
|
|
||||||
Program::new(
|
|
||||||
&db,
|
|
||||||
target_version,
|
|
||||||
SearchPathSettings {
|
|
||||||
extra_paths: vec![],
|
|
||||||
workspace_root: src.clone(),
|
|
||||||
custom_typeshed: Some(typeshed.clone()),
|
|
||||||
site_packages: Some(site_packages.clone()),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
TestCase {
|
|
||||||
db,
|
|
||||||
src,
|
|
||||||
stdlib: typeshed.join("stdlib"),
|
|
||||||
site_packages,
|
|
||||||
target_version,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn build_typeshed_mock(db: &mut TestDb, typeshed_to_build: &MockedTypeshed) -> SystemPathBuf {
|
|
||||||
let typeshed = SystemPathBuf::from("/typeshed");
|
|
||||||
let MockedTypeshed {
|
|
||||||
stdlib_files,
|
|
||||||
versions,
|
|
||||||
} = typeshed_to_build;
|
|
||||||
Self::write_mock_directory(
|
|
||||||
db,
|
|
||||||
typeshed.join("stdlib"),
|
|
||||||
stdlib_files
|
|
||||||
.iter()
|
|
||||||
.copied()
|
|
||||||
.chain(std::iter::once(("VERSIONS", *versions))),
|
|
||||||
);
|
|
||||||
typeshed
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TestCaseBuilder<VendoredTypeshed> {
|
|
||||||
pub(crate) fn build(self) -> TestCase<VendoredPathBuf> {
|
|
||||||
let TestCaseBuilder {
|
|
||||||
typeshed_option: VendoredTypeshed,
|
|
||||||
target_version,
|
|
||||||
first_party_files,
|
|
||||||
site_packages_files,
|
|
||||||
} = self;
|
|
||||||
|
|
||||||
let mut db = TestDb::new();
|
|
||||||
|
|
||||||
let site_packages =
|
|
||||||
Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
|
|
||||||
let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
|
|
||||||
|
|
||||||
Program::new(
|
|
||||||
&db,
|
|
||||||
target_version,
|
|
||||||
SearchPathSettings {
|
|
||||||
extra_paths: vec![],
|
|
||||||
workspace_root: src.clone(),
|
|
||||||
custom_typeshed: None,
|
|
||||||
site_packages: Some(site_packages.clone()),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
TestCase {
|
|
||||||
db,
|
|
||||||
src,
|
|
||||||
stdlib: VendoredPathBuf::from("stdlib"),
|
|
||||||
site_packages,
|
|
||||||
target_version,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
pub use self::vendored::vendored_typeshed_stubs;
|
|
||||||
pub(crate) use self::versions::{
|
|
||||||
parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsQueryResult,
|
|
||||||
};
|
|
||||||
pub use self::versions::{TypeshedVersionsParseError, TypeshedVersionsParseErrorKind};
|
|
||||||
|
|
||||||
mod vendored;
|
|
||||||
mod versions;
|
|
||||||
@@ -1,99 +0,0 @@
|
|||||||
use once_cell::sync::Lazy;
|
|
||||||
|
|
||||||
use ruff_db::vendored::VendoredFileSystem;
|
|
||||||
|
|
||||||
// The file path here is hardcoded in this crate's `build.rs` script.
|
|
||||||
// Luckily this crate will fail to build if this file isn't available at build time.
|
|
||||||
static TYPESHED_ZIP_BYTES: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
|
|
||||||
|
|
||||||
pub fn vendored_typeshed_stubs() -> &'static VendoredFileSystem {
|
|
||||||
static VENDORED_TYPESHED_STUBS: Lazy<VendoredFileSystem> =
|
|
||||||
Lazy::new(|| VendoredFileSystem::new_static(TYPESHED_ZIP_BYTES).unwrap());
|
|
||||||
&VENDORED_TYPESHED_STUBS
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use std::io::{self, Read};
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use ruff_db::vendored::VendoredPath;
|
|
||||||
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn typeshed_zip_created_at_build_time() {
|
|
||||||
let mut typeshed_zip_archive =
|
|
||||||
zip::ZipArchive::new(io::Cursor::new(TYPESHED_ZIP_BYTES)).unwrap();
|
|
||||||
|
|
||||||
let mut functools_module_stub = typeshed_zip_archive
|
|
||||||
.by_name("stdlib/functools.pyi")
|
|
||||||
.unwrap();
|
|
||||||
assert!(functools_module_stub.is_file());
|
|
||||||
|
|
||||||
let mut functools_module_stub_source = String::new();
|
|
||||||
functools_module_stub
|
|
||||||
.read_to_string(&mut functools_module_stub_source)
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert!(functools_module_stub_source.contains("def update_wrapper("));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn typeshed_vfs_consistent_with_vendored_stubs() {
|
|
||||||
let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
|
|
||||||
let vendored_typeshed_stubs = vendored_typeshed_stubs();
|
|
||||||
|
|
||||||
let mut empty_iterator = true;
|
|
||||||
for entry in walkdir::WalkDir::new(&vendored_typeshed_dir).min_depth(1) {
|
|
||||||
empty_iterator = false;
|
|
||||||
let entry = entry.unwrap();
|
|
||||||
let absolute_path = entry.path();
|
|
||||||
let file_type = entry.file_type();
|
|
||||||
|
|
||||||
let relative_path = absolute_path
|
|
||||||
.strip_prefix(&vendored_typeshed_dir)
|
|
||||||
.unwrap_or_else(|_| {
|
|
||||||
panic!("Expected {absolute_path:?} to be a child of {vendored_typeshed_dir:?}")
|
|
||||||
});
|
|
||||||
|
|
||||||
let vendored_path = <&VendoredPath>::try_from(relative_path)
|
|
||||||
.unwrap_or_else(|_| panic!("Expected {relative_path:?} to be valid UTF-8"));
|
|
||||||
|
|
||||||
assert!(
|
|
||||||
vendored_typeshed_stubs.exists(vendored_path),
|
|
||||||
"Expected {vendored_path:?} to exist in the `VendoredFileSystem`!
|
|
||||||
|
|
||||||
Vendored file system:
|
|
||||||
|
|
||||||
{vendored_typeshed_stubs:#?}
|
|
||||||
"
|
|
||||||
);
|
|
||||||
|
|
||||||
let vendored_path_kind = vendored_typeshed_stubs
|
|
||||||
.metadata(vendored_path)
|
|
||||||
.unwrap_or_else(|_| {
|
|
||||||
panic!(
|
|
||||||
"Expected metadata for {vendored_path:?} to be retrievable from the `VendoredFileSystem!
|
|
||||||
|
|
||||||
Vendored file system:
|
|
||||||
|
|
||||||
{vendored_typeshed_stubs:#?}
|
|
||||||
"
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.kind();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
vendored_path_kind.is_directory(),
|
|
||||||
file_type.is_dir(),
|
|
||||||
"{vendored_path:?} had type {vendored_path_kind:?}, inconsistent with fs path {relative_path:?}: {file_type:?}"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
assert!(
|
|
||||||
!empty_iterator,
|
|
||||||
"Expected there to be at least one file or directory in the vendored typeshed stubs!"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,817 +0,0 @@
|
|||||||
use std::cell::OnceCell;
|
|
||||||
use std::collections::BTreeMap;
|
|
||||||
use std::fmt;
|
|
||||||
use std::num::{NonZeroU16, NonZeroUsize};
|
|
||||||
use std::ops::{RangeFrom, RangeInclusive};
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use once_cell::sync::Lazy;
|
|
||||||
use ruff_db::program::TargetVersion;
|
|
||||||
use ruff_db::system::SystemPath;
|
|
||||||
use rustc_hash::FxHashMap;
|
|
||||||
|
|
||||||
use ruff_db::files::{system_path_to_file, File};
|
|
||||||
|
|
||||||
use crate::db::Db;
|
|
||||||
use crate::module_name::ModuleName;
|
|
||||||
|
|
||||||
use super::vendored::vendored_typeshed_stubs;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);
|
|
||||||
|
|
||||||
impl<'db> LazyTypeshedVersions<'db> {
|
|
||||||
#[must_use]
|
|
||||||
pub(crate) fn new() -> Self {
|
|
||||||
Self(OnceCell::new())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Query whether a module exists at runtime in the stdlib on a certain Python version.
|
|
||||||
///
|
|
||||||
/// Simply probing whether a file exists in typeshed is insufficient for this question,
|
|
||||||
/// as a module in the stdlib may have been added in Python 3.10, but the typeshed stub
|
|
||||||
/// will still be available (either in a custom typeshed dir or in our vendored copy)
|
|
||||||
/// even if the user specified Python 3.8 as the target version.
|
|
||||||
///
|
|
||||||
/// For top-level modules and packages, the VERSIONS file can always provide an unambiguous answer
|
|
||||||
/// as to whether the module exists on the specified target version. However, VERSIONS does not
|
|
||||||
/// provide comprehensive information on all submodules, meaning that this method sometimes
|
|
||||||
/// returns [`TypeshedVersionsQueryResult::MaybeExists`].
|
|
||||||
/// See [`TypeshedVersionsQueryResult`] for more details.
|
|
||||||
#[must_use]
|
|
||||||
pub(crate) fn query_module(
|
|
||||||
&self,
|
|
||||||
db: &'db dyn Db,
|
|
||||||
module: &ModuleName,
|
|
||||||
stdlib_root: Option<&SystemPath>,
|
|
||||||
target_version: TargetVersion,
|
|
||||||
) -> TypeshedVersionsQueryResult {
|
|
||||||
let versions = self.0.get_or_init(|| {
|
|
||||||
let versions_path = if let Some(system_path) = stdlib_root {
|
|
||||||
system_path.join("VERSIONS")
|
|
||||||
} else {
|
|
||||||
return &VENDORED_VERSIONS;
|
|
||||||
};
|
|
||||||
let Some(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
|
|
||||||
todo!(
|
|
||||||
"Still need to figure out how to handle VERSIONS files being deleted \
|
|
||||||
from custom typeshed directories! Expected a file to exist at {versions_path}"
|
|
||||||
)
|
|
||||||
};
|
|
||||||
// TODO(Alex/Micha): If VERSIONS is invalid,
|
|
||||||
// this should invalidate not just the specific module resolution we're currently attempting,
|
|
||||||
// but all type inference that depends on any standard-library types.
|
|
||||||
// Unwrapping here is not correct...
|
|
||||||
parse_typeshed_versions(db, versions_file).as_ref().unwrap()
|
|
||||||
});
|
|
||||||
versions.query_module(module, PyVersion::from(target_version))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[salsa::tracked(return_ref)]
|
|
||||||
pub(crate) fn parse_typeshed_versions(
|
|
||||||
db: &dyn Db,
|
|
||||||
versions_file: File,
|
|
||||||
) -> Result<TypeshedVersions, TypeshedVersionsParseError> {
|
|
||||||
// TODO: Handle IO errors
|
|
||||||
let file_content = versions_file
|
|
||||||
.read_to_string(db.upcast())
|
|
||||||
.unwrap_or_default();
|
|
||||||
file_content.parse()
|
|
||||||
}
|
|
||||||
|
|
||||||
static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
|
|
||||||
TypeshedVersions::from_str(
|
|
||||||
&vendored_typeshed_stubs()
|
|
||||||
.read_to_string("stdlib/VERSIONS")
|
|
||||||
.unwrap(),
|
|
||||||
)
|
|
||||||
.unwrap()
|
|
||||||
});
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
|
||||||
pub struct TypeshedVersionsParseError {
|
|
||||||
line_number: Option<NonZeroU16>,
|
|
||||||
reason: TypeshedVersionsParseErrorKind,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for TypeshedVersionsParseError {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
let TypeshedVersionsParseError {
|
|
||||||
line_number,
|
|
||||||
reason,
|
|
||||||
} = self;
|
|
||||||
if let Some(line_number) = line_number {
|
|
||||||
write!(
|
|
||||||
f,
|
|
||||||
"Error while parsing line {line_number} of typeshed's VERSIONS file: {reason}"
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
write!(f, "Error while parsing typeshed's VERSIONS file: {reason}")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::error::Error for TypeshedVersionsParseError {
|
|
||||||
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
|
|
||||||
if let TypeshedVersionsParseErrorKind::IntegerParsingFailure { err, .. } = &self.reason {
|
|
||||||
Some(err)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
|
||||||
pub enum TypeshedVersionsParseErrorKind {
|
|
||||||
TooManyLines(NonZeroUsize),
|
|
||||||
UnexpectedNumberOfColons,
|
|
||||||
InvalidModuleName(String),
|
|
||||||
UnexpectedNumberOfHyphens,
|
|
||||||
UnexpectedNumberOfPeriods(String),
|
|
||||||
IntegerParsingFailure {
|
|
||||||
version: String,
|
|
||||||
err: std::num::ParseIntError,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for TypeshedVersionsParseErrorKind {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
match self {
|
|
||||||
Self::TooManyLines(num_lines) => write!(
|
|
||||||
f,
|
|
||||||
"File has too many lines ({num_lines}); maximum allowed is {}",
|
|
||||||
NonZeroU16::MAX
|
|
||||||
),
|
|
||||||
Self::UnexpectedNumberOfColons => {
|
|
||||||
f.write_str("Expected every non-comment line to have exactly one colon")
|
|
||||||
}
|
|
||||||
Self::InvalidModuleName(name) => write!(
|
|
||||||
f,
|
|
||||||
"Expected all components of '{name}' to be valid Python identifiers"
|
|
||||||
),
|
|
||||||
Self::UnexpectedNumberOfHyphens => {
|
|
||||||
f.write_str("Expected every non-comment line to have exactly one '-' character")
|
|
||||||
}
|
|
||||||
Self::UnexpectedNumberOfPeriods(format) => write!(
|
|
||||||
f,
|
|
||||||
"Expected all versions to be in the form {{MAJOR}}.{{MINOR}}; got '{format}'"
|
|
||||||
),
|
|
||||||
Self::IntegerParsingFailure { version, err } => write!(
|
|
||||||
f,
|
|
||||||
"Failed to convert '{version}' to a pair of integers due to {err}",
|
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq)]
|
|
||||||
pub(crate) struct TypeshedVersions(FxHashMap<ModuleName, PyVersionRange>);
|
|
||||||
|
|
||||||
impl TypeshedVersions {
|
|
||||||
#[must_use]
|
|
||||||
fn exact(&self, module_name: &ModuleName) -> Option<&PyVersionRange> {
|
|
||||||
self.0.get(module_name)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
fn query_module(
|
|
||||||
&self,
|
|
||||||
module: &ModuleName,
|
|
||||||
target_version: PyVersion,
|
|
||||||
) -> TypeshedVersionsQueryResult {
|
|
||||||
if let Some(range) = self.exact(module) {
|
|
||||||
if range.contains(target_version) {
|
|
||||||
TypeshedVersionsQueryResult::Exists
|
|
||||||
} else {
|
|
||||||
TypeshedVersionsQueryResult::DoesNotExist
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
let mut module = module.parent();
|
|
||||||
while let Some(module_to_try) = module {
|
|
||||||
if let Some(range) = self.exact(&module_to_try) {
|
|
||||||
return {
|
|
||||||
if range.contains(target_version) {
|
|
||||||
TypeshedVersionsQueryResult::MaybeExists
|
|
||||||
} else {
|
|
||||||
TypeshedVersionsQueryResult::DoesNotExist
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
module = module_to_try.parent();
|
|
||||||
}
|
|
||||||
TypeshedVersionsQueryResult::DoesNotExist
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Possible answers [`LazyTypeshedVersions::query_module()`] could give to the question:
|
|
||||||
/// "Does this module exist in the stdlib at runtime on a certain target version?"
|
|
||||||
#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
|
|
||||||
pub(crate) enum TypeshedVersionsQueryResult {
|
|
||||||
/// The module definitely exists in the stdlib at runtime on the user-specified target version.
|
|
||||||
///
|
|
||||||
/// For example:
|
|
||||||
/// - The target version is Python 3.8
|
|
||||||
/// - We're querying whether the `asyncio.tasks` module exists in the stdlib
|
|
||||||
/// - The VERSIONS file contains the line `asyncio.tasks: 3.8-`
|
|
||||||
Exists,
|
|
||||||
|
|
||||||
/// The module definitely does not exist in the stdlib on the user-specified target version.
|
|
||||||
///
|
|
||||||
/// For example:
|
|
||||||
/// - We're querying whether the `foo` module exists in the stdlib
|
|
||||||
/// - There is no top-level `foo` module in VERSIONS
|
|
||||||
///
|
|
||||||
/// OR:
|
|
||||||
/// - The target version is Python 3.8
|
|
||||||
/// - We're querying whether the module `importlib.abc` exists in the stdlib
|
|
||||||
/// - The VERSIONS file contains the line `importlib.abc: 3.10-`,
|
|
||||||
/// indicating that the module was added in 3.10
|
|
||||||
///
|
|
||||||
/// OR:
|
|
||||||
/// - The target version is Python 3.8
|
|
||||||
/// - We're querying whether the module `collections.abc` exists in the stdlib
|
|
||||||
/// - The VERSIONS file does not contain any information about the `collections.abc` submodule,
|
|
||||||
/// but *does* contain the line `collections: 3.10-`,
|
|
||||||
/// indicating that the entire `collections` package was added in Python 3.10.
|
|
||||||
DoesNotExist,
|
|
||||||
|
|
||||||
/// The module potentially exists in the stdlib and, if it does,
|
|
||||||
/// it definitely exists on the user-specified target version.
|
|
||||||
///
|
|
||||||
/// This variant is only relevant for submodules,
|
|
||||||
/// for which the typeshed VERSIONS file does not provide comprehensive information.
|
|
||||||
/// (The VERSIONS file is guaranteed to provide information about all top-level stdlib modules and packages,
|
|
||||||
/// but not necessarily about all submodules within each top-level package.)
|
|
||||||
///
|
|
||||||
/// For example:
|
|
||||||
/// - The target version is Python 3.8
|
|
||||||
/// - We're querying whether the `asyncio.staggered` module exists in the stdlib
|
|
||||||
/// - The typeshed VERSIONS file contains the line `asyncio: 3.8`,
|
|
||||||
/// indicating that the `asyncio` package was added in Python 3.8,
|
|
||||||
/// but does not contain any explicit information about the `asyncio.staggered` submodule.
|
|
||||||
MaybeExists,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromStr for TypeshedVersions {
|
|
||||||
type Err = TypeshedVersionsParseError;
|
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
||||||
let mut map = FxHashMap::default();
|
|
||||||
|
|
||||||
for (line_index, line) in s.lines().enumerate() {
|
|
||||||
// humans expect line numbers to be 1-indexed
|
|
||||||
let line_number = NonZeroUsize::new(line_index.saturating_add(1)).unwrap();
|
|
||||||
|
|
||||||
let Ok(line_number) = NonZeroU16::try_from(line_number) else {
|
|
||||||
return Err(TypeshedVersionsParseError {
|
|
||||||
line_number: None,
|
|
||||||
reason: TypeshedVersionsParseErrorKind::TooManyLines(line_number),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
let Some(content) = line.split('#').map(str::trim).next() else {
|
|
||||||
continue;
|
|
||||||
};
|
|
||||||
if content.is_empty() {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut parts = content.split(':').map(str::trim);
|
|
||||||
let (Some(module_name), Some(rest), None) = (parts.next(), parts.next(), parts.next())
|
|
||||||
else {
|
|
||||||
return Err(TypeshedVersionsParseError {
|
|
||||||
line_number: Some(line_number),
|
|
||||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons,
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
let Some(module_name) = ModuleName::new(module_name) else {
|
|
||||||
return Err(TypeshedVersionsParseError {
|
|
||||||
line_number: Some(line_number),
|
|
||||||
reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
|
|
||||||
module_name.to_string(),
|
|
||||||
),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
match PyVersionRange::from_str(rest) {
|
|
||||||
Ok(version) => map.insert(module_name, version),
|
|
||||||
Err(reason) => {
|
|
||||||
return Err(TypeshedVersionsParseError {
|
|
||||||
line_number: Some(line_number),
|
|
||||||
reason,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Self(map))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for TypeshedVersions {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
let sorted_items: BTreeMap<&ModuleName, &PyVersionRange> = self.0.iter().collect();
|
|
||||||
for (module_name, range) in sorted_items {
|
|
||||||
writeln!(f, "{module_name}: {range}")?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
|
|
||||||
enum PyVersionRange {
|
|
||||||
AvailableFrom(RangeFrom<PyVersion>),
|
|
||||||
AvailableWithin(RangeInclusive<PyVersion>),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PyVersionRange {
|
|
||||||
#[must_use]
|
|
||||||
fn contains(&self, version: PyVersion) -> bool {
|
|
||||||
match self {
|
|
||||||
Self::AvailableFrom(inner) => inner.contains(&version),
|
|
||||||
Self::AvailableWithin(inner) => inner.contains(&version),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromStr for PyVersionRange {
|
|
||||||
type Err = TypeshedVersionsParseErrorKind;
|
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
||||||
let mut parts = s.split('-').map(str::trim);
|
|
||||||
match (parts.next(), parts.next(), parts.next()) {
|
|
||||||
(Some(lower), Some(""), None) => Ok(Self::AvailableFrom((lower.parse()?)..)),
|
|
||||||
(Some(lower), Some(upper), None) => {
|
|
||||||
Ok(Self::AvailableWithin((lower.parse()?)..=(upper.parse()?)))
|
|
||||||
}
|
|
||||||
_ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for PyVersionRange {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
match self {
|
|
||||||
Self::AvailableFrom(range_from) => write!(f, "{}-", range_from.start),
|
|
||||||
Self::AvailableWithin(range_inclusive) => {
|
|
||||||
write!(f, "{}-{}", range_inclusive.start(), range_inclusive.end())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
|
||||||
struct PyVersion {
|
|
||||||
major: u8,
|
|
||||||
minor: u8,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromStr for PyVersion {
|
|
||||||
type Err = TypeshedVersionsParseErrorKind;
|
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
||||||
let mut parts = s.split('.').map(str::trim);
|
|
||||||
let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
|
|
||||||
return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
|
|
||||||
s.to_string(),
|
|
||||||
));
|
|
||||||
};
|
|
||||||
let major = match u8::from_str(major) {
|
|
||||||
Ok(major) => major,
|
|
||||||
Err(err) => {
|
|
||||||
return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
|
|
||||||
version: s.to_string(),
|
|
||||||
err,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let minor = match u8::from_str(minor) {
|
|
||||||
Ok(minor) => minor,
|
|
||||||
Err(err) => {
|
|
||||||
return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
|
|
||||||
version: s.to_string(),
|
|
||||||
err,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
};
|
|
||||||
Ok(Self { major, minor })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for PyVersion {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
let PyVersion { major, minor } = self;
|
|
||||||
write!(f, "{major}.{minor}")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<TargetVersion> for PyVersion {
|
|
||||||
fn from(value: TargetVersion) -> Self {
|
|
||||||
match value {
|
|
||||||
TargetVersion::Py37 => PyVersion { major: 3, minor: 7 },
|
|
||||||
TargetVersion::Py38 => PyVersion { major: 3, minor: 8 },
|
|
||||||
TargetVersion::Py39 => PyVersion { major: 3, minor: 9 },
|
|
||||||
TargetVersion::Py310 => PyVersion {
|
|
||||||
major: 3,
|
|
||||||
minor: 10,
|
|
||||||
},
|
|
||||||
TargetVersion::Py311 => PyVersion {
|
|
||||||
major: 3,
|
|
||||||
minor: 11,
|
|
||||||
},
|
|
||||||
TargetVersion::Py312 => PyVersion {
|
|
||||||
major: 3,
|
|
||||||
minor: 12,
|
|
||||||
},
|
|
||||||
TargetVersion::Py313 => PyVersion {
|
|
||||||
major: 3,
|
|
||||||
minor: 13,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use std::num::{IntErrorKind, NonZeroU16};
    use std::path::Path;

    use insta::assert_snapshot;
    use ruff_db::program::TargetVersion;

    use super::*;

    // Directory inside the vendored typeshed that holds the stdlib stubs.
    const TYPESHED_STDLIB_DIR: &str = "stdlib";

    // Expected `line_number` for errors reported on line 1 of the input.
    // SAFETY: 1 is trivially non-zero, so `new_unchecked` is sound.
    #[allow(unsafe_code)]
    const ONE: Option<NonZeroU16> = Some(unsafe { NonZeroU16::new_unchecked(1) });

    // Test-only conveniences on `TypeshedVersions`.
    impl TypeshedVersions {
        /// Whether `module` has its own explicit entry in the parsed data
        /// (submodules covered only implicitly by a parent entry do not count).
        #[must_use]
        fn contains_exact(&self, module: &ModuleName) -> bool {
            self.exact(module).is_some()
        }

        /// Number of explicit entries parsed from the VERSIONS file.
        #[must_use]
        fn len(&self) -> usize {
            self.0.len()
        }
    }

    /// Smoke-test parsing of the real vendored typeshed VERSIONS file.
    #[test]
    fn can_parse_vendored_versions_file() {
        let versions_data = include_str!(concat!(
            env!("CARGO_MANIFEST_DIR"),
            "/vendor/typeshed/stdlib/VERSIONS"
        ));

        let versions = TypeshedVersions::from_str(versions_data).unwrap();
        // Loose sanity bounds rather than an exact count, so routine
        // typeshed syncs don't break the test.
        assert!(versions.len() > 100);
        assert!(versions.len() < 1000);

        let asyncio = ModuleName::new_static("asyncio").unwrap();
        let asyncio_staggered = ModuleName::new_static("asyncio.staggered").unwrap();
        let audioop = ModuleName::new_static("audioop").unwrap();

        assert!(versions.contains_exact(&asyncio));
        assert_eq!(
            versions.query_module(&asyncio, TargetVersion::Py310.into()),
            TypeshedVersionsQueryResult::Exists
        );

        // Per the vendored data, `asyncio.staggered` exists from 3.8 onward.
        assert!(versions.contains_exact(&asyncio_staggered));
        assert_eq!(
            versions.query_module(&asyncio_staggered, TargetVersion::Py38.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            versions.query_module(&asyncio_staggered, TargetVersion::Py37.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );

        // Per the vendored data, `audioop` is gone by 3.13.
        assert!(versions.contains_exact(&audioop));
        assert_eq!(
            versions.query_module(&audioop, TargetVersion::Py312.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            versions.query_module(&audioop, TargetVersion::Py313.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }

    /// Every top-level stub file/directory shipped in the vendored typeshed
    /// must have an explicit entry in its VERSIONS file.
    #[test]
    fn typeshed_versions_consistent_with_vendored_stubs() {
        const VERSIONS_DATA: &str = include_str!("../../vendor/typeshed/stdlib/VERSIONS");
        let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
        let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();

        // Guards against the directory walk silently iterating nothing
        // (which would make the loop's assertions vacuous).
        let mut empty_iterator = true;

        let stdlib_stubs_path = vendored_typeshed_dir.join(TYPESHED_STDLIB_DIR);

        for entry in std::fs::read_dir(&stdlib_stubs_path).unwrap() {
            empty_iterator = false;
            let entry = entry.unwrap();
            let absolute_path = entry.path();

            let relative_path = absolute_path
                .strip_prefix(&stdlib_stubs_path)
                .unwrap_or_else(|_| panic!("Expected path to be a child of {stdlib_stubs_path:?} but found {absolute_path:?}"));

            let relative_path_str = relative_path.as_os_str().to_str().unwrap_or_else(|| {
                panic!("Expected all typeshed paths to be valid UTF-8; got {relative_path:?}")
            });
            // The VERSIONS file itself is not a stub module; skip it.
            if relative_path_str == "VERSIONS" {
                continue;
            }

            let top_level_module = if let Some(extension) = relative_path.extension() {
                // It was a file; strip off the file extension to get the module name:
                let extension = extension
                    .to_str()
                    .unwrap_or_else(||panic!("Expected all file extensions to be UTF-8; was not true for {relative_path:?}"));

                relative_path_str
                    .strip_suffix(extension)
                    .and_then(|string| string.strip_suffix('.')).unwrap_or_else(|| {
                        panic!("Expected path {relative_path_str:?} to end with computed extension {extension:?}")
                    })
            } else {
                // It was a directory; no need to do anything to get the module name
                relative_path_str
            };

            let top_level_module = ModuleName::new(top_level_module)
                .unwrap_or_else(|| panic!("{top_level_module:?} was not a valid module name!"));

            assert!(vendored_typeshed_versions.contains_exact(&top_level_module));
        }

        assert!(
            !empty_iterator,
            "Expected there to be at least one file or directory in the vendored typeshed stubs"
        );
    }

    /// End-to-end parse of a small handwritten VERSIONS file covering
    /// comments, blank lines, trailing comments, and both range forms.
    /// NOTE(review): internal whitespace of this literal was reconstructed
    /// from a mangled extraction — confirm against the original file.
    #[test]
    fn can_parse_mock_versions_file() {
        const VERSIONS: &str = "\
# a comment
# some more comment
# yet more comment


# and some more comment

bar: 2.7-3.10

# more comment
bar.baz: 3.1-3.9
foo: 3.8- # trailing comment
";
        let parsed_versions = TypeshedVersions::from_str(VERSIONS).unwrap();
        assert_eq!(parsed_versions.len(), 3);
        // Display output is normalized: comments and blank lines are dropped.
        assert_snapshot!(parsed_versions.to_string(), @r###"
bar: 2.7-3.10
bar.baz: 3.1-3.9
foo: 3.8-
"###
        );
    }

    /// A bounded range is inclusive at both ends.
    #[test]
    fn version_within_range_parsed_correctly() {
        let parsed_versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
        let bar = ModuleName::new_static("bar").unwrap();

        assert!(parsed_versions.contains_exact(&bar));
        assert_eq!(
            parsed_versions.query_module(&bar, TargetVersion::Py37.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&bar, TargetVersion::Py310.into()),
            TypeshedVersionsQueryResult::Exists
        );
        // One past the upper bound: no longer available.
        assert_eq!(
            parsed_versions.query_module(&bar, TargetVersion::Py311.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }

    /// An open-ended range (`3.8-`) covers its start and everything after.
    #[test]
    fn version_from_range_parsed_correctly() {
        let parsed_versions = TypeshedVersions::from_str("foo: 3.8-").unwrap();
        let foo = ModuleName::new_static("foo").unwrap();

        assert!(parsed_versions.contains_exact(&foo));
        // Below the lower bound: not available.
        assert_eq!(
            parsed_versions.query_module(&foo, TargetVersion::Py37.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
        assert_eq!(
            parsed_versions.query_module(&foo, TargetVersion::Py38.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&foo, TargetVersion::Py311.into()),
            TypeshedVersionsQueryResult::Exists
        );
    }

    /// A dotted entry (`bar.baz`) is an exact entry for the submodule itself.
    #[test]
    fn explicit_submodule_parsed_correctly() {
        let parsed_versions = TypeshedVersions::from_str("bar.baz: 3.1-3.9").unwrap();
        let bar_baz = ModuleName::new_static("bar.baz").unwrap();

        assert!(parsed_versions.contains_exact(&bar_baz));
        assert_eq!(
            parsed_versions.query_module(&bar_baz, TargetVersion::Py37.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&bar_baz, TargetVersion::Py39.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&bar_baz, TargetVersion::Py310.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }

    /// A submodule with no entry of its own falls back to its parent's range,
    /// but only with `MaybeExists` confidence.
    #[test]
    fn implicit_submodule_queried_correctly() {
        let parsed_versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
        let bar_eggs = ModuleName::new_static("bar.eggs").unwrap();

        assert!(!parsed_versions.contains_exact(&bar_eggs));
        assert_eq!(
            parsed_versions.query_module(&bar_eggs, TargetVersion::Py37.into()),
            TypeshedVersionsQueryResult::MaybeExists
        );
        assert_eq!(
            parsed_versions.query_module(&bar_eggs, TargetVersion::Py310.into()),
            TypeshedVersionsQueryResult::MaybeExists
        );
        // Outside the parent's range even the implicit fallback is definite.
        assert_eq!(
            parsed_versions.query_module(&bar_eggs, TargetVersion::Py311.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }

    /// A module with no entry and no parent entry definitely does not exist.
    #[test]
    fn nonexistent_module_queried_correctly() {
        let parsed_versions = TypeshedVersions::from_str("eggs: 3.8-").unwrap();
        let spam = ModuleName::new_static("spam").unwrap();

        assert!(!parsed_versions.contains_exact(&spam));
        assert_eq!(
            parsed_versions.query_module(&spam, TargetVersion::Py37.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
        assert_eq!(
            parsed_versions.query_module(&spam, TargetVersion::Py313.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }

    /// Files with more than `u16::MAX` lines are rejected with `TooManyLines`
    /// reporting the first line number that overflowed (u16::MAX + 1 = 65536).
    #[test]
    fn invalid_huge_versions_file() {
        let offset = 100;
        let too_many = u16::MAX as usize + offset;

        let mut massive_versions_file = String::new();
        for i in 0..too_many {
            massive_versions_file.push_str(&format!("x{i}: 3.8-\n"));
        }

        assert_eq!(
            TypeshedVersions::from_str(&massive_versions_file),
            Err(TypeshedVersionsParseError {
                line_number: None,
                reason: TypeshedVersionsParseErrorKind::TooManyLines(
                    // too_many + 1 - offset == u16::MAX + 1: the first
                    // line number that no longer fits in a u16.
                    NonZeroUsize::new(too_many + 1 - offset).unwrap()
                )
            })
        );
    }

    /// Lines must contain exactly one `:` separating module from range.
    #[test]
    fn invalid_typeshed_versions_bad_colon_number() {
        assert_eq!(
            TypeshedVersions::from_str("foo 3.7"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons
            })
        );
        assert_eq!(
            TypeshedVersions::from_str("foo:: 3.7"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons
            })
        );
    }

    /// Module names must be valid (dotted) Python identifiers.
    #[test]
    fn invalid_typeshed_versions_non_identifier_modules() {
        assert_eq!(
            TypeshedVersions::from_str("not!an!identifier!: 3.7"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
                    "not!an!identifier!".to_string()
                )
            })
        );
        assert_eq!(
            TypeshedVersions::from_str("(also_not).(an_identifier): 3.7"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
                    "(also_not).(an_identifier)".to_string()
                )
            })
        );
    }

    /// The version range must contain exactly one `-`.
    #[test]
    fn invalid_typeshed_versions_bad_hyphen_number() {
        assert_eq!(
            TypeshedVersions::from_str("foo: 3.8"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
            })
        );
        assert_eq!(
            TypeshedVersions::from_str("foo: 3.8--"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
            })
        );
        assert_eq!(
            TypeshedVersions::from_str("foo: 3.8--3.9"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
            })
        );
    }

    /// Each version must contain exactly one `.`.
    #[test]
    fn invalid_typeshed_versions_bad_period_number() {
        assert_eq!(
            TypeshedVersions::from_str("foo: 38-"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods("38".to_string())
            })
        );
        assert_eq!(
            TypeshedVersions::from_str("foo: 3..8-"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
                    "3..8".to_string()
                )
            })
        );
        assert_eq!(
            TypeshedVersions::from_str("foo: 3.8-3..11"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
                    "3..11".to_string()
                )
            })
        );
    }

    /// Non-numeric components surface the underlying `u8` parse error
    /// together with the full offending version string.
    #[test]
    fn invalid_typeshed_versions_non_digits() {
        let err = TypeshedVersions::from_str("foo: 1.two-").unwrap_err();
        assert_eq!(err.line_number, ONE);
        let TypeshedVersionsParseErrorKind::IntegerParsingFailure { version, err } = err.reason
        else {
            panic!()
        };
        assert_eq!(version, "1.two".to_string());
        assert_eq!(*err.kind(), IntErrorKind::InvalidDigit);

        let err = TypeshedVersions::from_str("foo: 3.8-four.9").unwrap_err();
        assert_eq!(err.line_number, ONE);
        let TypeshedVersionsParseErrorKind::IntegerParsingFailure { version, err } = err.reason
        else {
            panic!()
        };
        assert_eq!(version, "four.9".to_string());
        assert_eq!(*err.kind(), IntErrorKind::InvalidDigit);
    }
}
|
|
||||||
@@ -1,237 +0,0 @@
|
|||||||
The "typeshed" project is licensed under the terms of the Apache license, as
|
|
||||||
reproduced below.
|
|
||||||
|
|
||||||
= = = = =
|
|
||||||
|
|
||||||
Apache License
|
|
||||||
Version 2.0, January 2004
|
|
||||||
http://www.apache.org/licenses/
|
|
||||||
|
|
||||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
|
||||||
|
|
||||||
1. Definitions.
|
|
||||||
|
|
||||||
"License" shall mean the terms and conditions for use, reproduction,
|
|
||||||
and distribution as defined by Sections 1 through 9 of this document.
|
|
||||||
|
|
||||||
"Licensor" shall mean the copyright owner or entity authorized by
|
|
||||||
the copyright owner that is granting the License.
|
|
||||||
|
|
||||||
"Legal Entity" shall mean the union of the acting entity and all
|
|
||||||
other entities that control, are controlled by, or are under common
|
|
||||||
control with that entity. For the purposes of this definition,
|
|
||||||
"control" means (i) the power, direct or indirect, to cause the
|
|
||||||
direction or management of such entity, whether by contract or
|
|
||||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
|
||||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
|
||||||
|
|
||||||
"You" (or "Your") shall mean an individual or Legal Entity
|
|
||||||
exercising permissions granted by this License.
|
|
||||||
|
|
||||||
"Source" form shall mean the preferred form for making modifications,
|
|
||||||
including but not limited to software source code, documentation
|
|
||||||
source, and configuration files.
|
|
||||||
|
|
||||||
"Object" form shall mean any form resulting from mechanical
|
|
||||||
transformation or translation of a Source form, including but
|
|
||||||
not limited to compiled object code, generated documentation,
|
|
||||||
and conversions to other media types.
|
|
||||||
|
|
||||||
"Work" shall mean the work of authorship, whether in Source or
|
|
||||||
Object form, made available under the License, as indicated by a
|
|
||||||
copyright notice that is included in or attached to the work
|
|
||||||
(an example is provided in the Appendix below).
|
|
||||||
|
|
||||||
"Derivative Works" shall mean any work, whether in Source or Object
|
|
||||||
form, that is based on (or derived from) the Work and for which the
|
|
||||||
editorial revisions, annotations, elaborations, or other modifications
|
|
||||||
represent, as a whole, an original work of authorship. For the purposes
|
|
||||||
of this License, Derivative Works shall not include works that remain
|
|
||||||
separable from, or merely link (or bind by name) to the interfaces of,
|
|
||||||
the Work and Derivative Works thereof.
|
|
||||||
|
|
||||||
"Contribution" shall mean any work of authorship, including
|
|
||||||
the original version of the Work and any modifications or additions
|
|
||||||
to that Work or Derivative Works thereof, that is intentionally
|
|
||||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
|
||||||
or by an individual or Legal Entity authorized to submit on behalf of
|
|
||||||
the copyright owner. For the purposes of this definition, "submitted"
|
|
||||||
means any form of electronic, verbal, or written communication sent
|
|
||||||
to the Licensor or its representatives, including but not limited to
|
|
||||||
communication on electronic mailing lists, source code control systems,
|
|
||||||
and issue tracking systems that are managed by, or on behalf of, the
|
|
||||||
Licensor for the purpose of discussing and improving the Work, but
|
|
||||||
excluding communication that is conspicuously marked or otherwise
|
|
||||||
designated in writing by the copyright owner as "Not a Contribution."
|
|
||||||
|
|
||||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
|
||||||
on behalf of whom a Contribution has been received by Licensor and
|
|
||||||
subsequently incorporated within the Work.
|
|
||||||
|
|
||||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
|
||||||
this License, each Contributor hereby grants to You a perpetual,
|
|
||||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
|
||||||
copyright license to reproduce, prepare Derivative Works of,
|
|
||||||
publicly display, publicly perform, sublicense, and distribute the
|
|
||||||
Work and such Derivative Works in Source or Object form.
|
|
||||||
|
|
||||||
3. Grant of Patent License. Subject to the terms and conditions of
|
|
||||||
this License, each Contributor hereby grants to You a perpetual,
|
|
||||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
|
||||||
(except as stated in this section) patent license to make, have made,
|
|
||||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
|
||||||
where such license applies only to those patent claims licensable
|
|
||||||
by such Contributor that are necessarily infringed by their
|
|
||||||
Contribution(s) alone or by combination of their Contribution(s)
|
|
||||||
with the Work to which such Contribution(s) was submitted. If You
|
|
||||||
institute patent litigation against any entity (including a
|
|
||||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
|
||||||
or a Contribution incorporated within the Work constitutes direct
|
|
||||||
or contributory patent infringement, then any patent licenses
|
|
||||||
granted to You under this License for that Work shall terminate
|
|
||||||
as of the date such litigation is filed.
|
|
||||||
|
|
||||||
4. Redistribution. You may reproduce and distribute copies of the
|
|
||||||
Work or Derivative Works thereof in any medium, with or without
|
|
||||||
modifications, and in Source or Object form, provided that You
|
|
||||||
meet the following conditions:
|
|
||||||
|
|
||||||
(a) You must give any other recipients of the Work or
|
|
||||||
Derivative Works a copy of this License; and
|
|
||||||
|
|
||||||
(b) You must cause any modified files to carry prominent notices
|
|
||||||
stating that You changed the files; and
|
|
||||||
|
|
||||||
(c) You must retain, in the Source form of any Derivative Works
|
|
||||||
that You distribute, all copyright, patent, trademark, and
|
|
||||||
attribution notices from the Source form of the Work,
|
|
||||||
excluding those notices that do not pertain to any part of
|
|
||||||
the Derivative Works; and
|
|
||||||
|
|
||||||
(d) If the Work includes a "NOTICE" text file as part of its
|
|
||||||
distribution, then any Derivative Works that You distribute must
|
|
||||||
include a readable copy of the attribution notices contained
|
|
||||||
within such NOTICE file, excluding those notices that do not
|
|
||||||
pertain to any part of the Derivative Works, in at least one
|
|
||||||
of the following places: within a NOTICE text file distributed
|
|
||||||
as part of the Derivative Works; within the Source form or
|
|
||||||
documentation, if provided along with the Derivative Works; or,
|
|
||||||
within a display generated by the Derivative Works, if and
|
|
||||||
wherever such third-party notices normally appear. The contents
|
|
||||||
of the NOTICE file are for informational purposes only and
|
|
||||||
do not modify the License. You may add Your own attribution
|
|
||||||
notices within Derivative Works that You distribute, alongside
|
|
||||||
or as an addendum to the NOTICE text from the Work, provided
|
|
||||||
that such additional attribution notices cannot be construed
|
|
||||||
as modifying the License.
|
|
||||||
|
|
||||||
You may add Your own copyright statement to Your modifications and
|
|
||||||
may provide additional or different license terms and conditions
|
|
||||||
for use, reproduction, or distribution of Your modifications, or
|
|
||||||
for any such Derivative Works as a whole, provided Your use,
|
|
||||||
reproduction, and distribution of the Work otherwise complies with
|
|
||||||
the conditions stated in this License.
|
|
||||||
|
|
||||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
|
||||||
any Contribution intentionally submitted for inclusion in the Work
|
|
||||||
by You to the Licensor shall be under the terms and conditions of
|
|
||||||
this License, without any additional terms or conditions.
|
|
||||||
Notwithstanding the above, nothing herein shall supersede or modify
|
|
||||||
the terms of any separate license agreement you may have executed
|
|
||||||
with Licensor regarding such Contributions.
|
|
||||||
|
|
||||||
6. Trademarks. This License does not grant permission to use the trade
|
|
||||||
names, trademarks, service marks, or product names of the Licensor,
|
|
||||||
except as required for reasonable and customary use in describing the
|
|
||||||
origin of the Work and reproducing the content of the NOTICE file.
|
|
||||||
|
|
||||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
|
||||||
agreed to in writing, Licensor provides the Work (and each
|
|
||||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
||||||
implied, including, without limitation, any warranties or conditions
|
|
||||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
|
||||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
|
||||||
appropriateness of using or redistributing the Work and assume any
|
|
||||||
risks associated with Your exercise of permissions under this License.
|
|
||||||
|
|
||||||
8. Limitation of Liability. In no event and under no legal theory,
|
|
||||||
whether in tort (including negligence), contract, or otherwise,
|
|
||||||
unless required by applicable law (such as deliberate and grossly
|
|
||||||
negligent acts) or agreed to in writing, shall any Contributor be
|
|
||||||
liable to You for damages, including any direct, indirect, special,
|
|
||||||
incidental, or consequential damages of any character arising as a
|
|
||||||
result of this License or out of the use or inability to use the
|
|
||||||
Work (including but not limited to damages for loss of goodwill,
|
|
||||||
work stoppage, computer failure or malfunction, or any and all
|
|
||||||
other commercial damages or losses), even if such Contributor
|
|
||||||
has been advised of the possibility of such damages.
|
|
||||||
|
|
||||||
9. Accepting Warranty or Additional Liability. While redistributing
|
|
||||||
the Work or Derivative Works thereof, You may choose to offer,
|
|
||||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
|
||||||
or other liability obligations and/or rights consistent with this
|
|
||||||
License. However, in accepting such obligations, You may act only
|
|
||||||
on Your own behalf and on Your sole responsibility, not on behalf
|
|
||||||
of any other Contributor, and only if You agree to indemnify,
|
|
||||||
defend, and hold each Contributor harmless for any liability
|
|
||||||
incurred by, or claims asserted against, such Contributor by reason
|
|
||||||
of your accepting any such warranty or additional liability.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
APPENDIX: How to apply the Apache License to your work.
|
|
||||||
|
|
||||||
To apply the Apache License to your work, attach the following
|
|
||||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
|
||||||
replaced with your own identifying information. (Don't include
|
|
||||||
the brackets!) The text should be enclosed in the appropriate
|
|
||||||
comment syntax for the file format. We also recommend that a
|
|
||||||
file or class name and description of purpose be included on the
|
|
||||||
same "printed page" as the copyright notice for easier
|
|
||||||
identification within third-party archives.
|
|
||||||
|
|
||||||
Copyright {yyyy} {name of copyright owner}
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
|
|
||||||
= = = = =
|
|
||||||
|
|
||||||
Parts of typeshed are licensed under different licenses (like the MIT
|
|
||||||
license), reproduced below.
|
|
||||||
|
|
||||||
= = = = =
|
|
||||||
|
|
||||||
The MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2015 Jukka Lehtosalo and contributors
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a
|
|
||||||
copy of this software and associated documentation files (the "Software"),
|
|
||||||
to deal in the Software without restriction, including without limitation
|
|
||||||
the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
|
||||||
and/or sell copies of the Software, and to permit persons to whom the
|
|
||||||
Software is furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
|
||||||
DEALINGS IN THE SOFTWARE.
|
|
||||||
|
|
||||||
= = = = =
|
|
||||||
@@ -1,124 +0,0 @@
|
|||||||
# typeshed
|
|
||||||
|
|
||||||
[](https://github.com/python/typeshed/actions/workflows/tests.yml)
|
|
||||||
[](https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
|
||||||
[](https://github.com/python/typeshed/blob/main/CONTRIBUTING.md)
|
|
||||||
|
|
||||||
## About
|
|
||||||
|
|
||||||
Typeshed contains external type annotations for the Python standard library
|
|
||||||
and Python builtins, as well as third party packages as contributed by
|
|
||||||
people external to those projects.
|
|
||||||
|
|
||||||
This data can e.g. be used for static analysis, type checking, type inference,
|
|
||||||
and autocompletion.
|
|
||||||
|
|
||||||
For information on how to use typeshed, read below. Information for
|
|
||||||
contributors can be found in [CONTRIBUTING.md](CONTRIBUTING.md). **Please read
|
|
||||||
it before submitting pull requests; do not report issues with annotations to
|
|
||||||
the project the stubs are for, but instead report them here to typeshed.**
|
|
||||||
|
|
||||||
Further documentation on stub files, typeshed, and Python's typing system in
|
|
||||||
general, can also be found at https://typing.readthedocs.io/en/latest/.
|
|
||||||
|
|
||||||
Typeshed supports Python versions 3.8 and up.
|
|
||||||
|
|
||||||
## Using
|
|
||||||
|
|
||||||
If you're just using a type checker ([mypy](https://github.com/python/mypy/),
|
|
||||||
[pyright](https://github.com/microsoft/pyright),
|
|
||||||
[pytype](https://github.com/google/pytype/), PyCharm, ...), as opposed to
|
|
||||||
developing it, you don't need to interact with the typeshed repo at
|
|
||||||
all: a copy of the standard library part of typeshed is bundled with type checkers.
|
|
||||||
And type stubs for third party packages and modules you are using can
|
|
||||||
be installed from PyPI. For example, if you are using `html5lib` and `requests`,
|
|
||||||
you can install the type stubs using
|
|
||||||
|
|
||||||
```bash
|
|
||||||
$ pip install types-html5lib types-requests
|
|
||||||
```
|
|
||||||
|
|
||||||
These PyPI packages follow [PEP 561](http://www.python.org/dev/peps/pep-0561/)
|
|
||||||
and are automatically released (up to once a day) by
|
|
||||||
[typeshed internal machinery](https://github.com/typeshed-internal/stub_uploader).
|
|
||||||
|
|
||||||
Type checkers should be able to use these stub packages when installed. For more
|
|
||||||
details, see the documentation for your type checker.
|
|
||||||
|
|
||||||
### Package versioning for third-party stubs
|
|
||||||
|
|
||||||
Version numbers of third-party stub packages consist of at least four parts.
|
|
||||||
All parts of the stub version, except for the last part, correspond to the
|
|
||||||
version of the runtime package being stubbed. For example, if the `types-foo`
|
|
||||||
package has version `1.2.0.20240309`, this guarantees that the `types-foo` package
|
|
||||||
contains stubs targeted against `foo==1.2.*` and tested against the latest
|
|
||||||
version of `foo` matching that specifier. In this example, the final element
|
|
||||||
of the version number (20240309) indicates that the stub package was pushed on
|
|
||||||
March 9, 2024.
|
|
||||||
|
|
||||||
At typeshed, we try to keep breaking changes to a minimum. However, due to the
|
|
||||||
nature of stubs, any version bump can introduce changes that might make your
|
|
||||||
code fail to type check.
|
|
||||||
|
|
||||||
There are several strategies available for specifying the version of a stubs
|
|
||||||
package you're using, each with its own tradeoffs:
|
|
||||||
|
|
||||||
1. Use the same bounds that you use for the package being stubbed. For example,
|
|
||||||
if you use `requests>=2.30.0,<2.32`, you can use
|
|
||||||
`types-requests>=2.30.0,<2.32`. This ensures that the stubs are compatible
|
|
||||||
with the package you are using, but it carries a small risk of breaking
|
|
||||||
type checking due to changes in the stubs.
|
|
||||||
|
|
||||||
Another risk of this strategy is that stubs often lag behind
|
|
||||||
the package being stubbed. You might want to force the package being stubbed
|
|
||||||
to a certain minimum version because it fixes a critical bug, but if
|
|
||||||
correspondingly updated stubs have not been released, your type
|
|
||||||
checking results may not be fully accurate.
|
|
||||||
2. Pin the stubs to a known good version and update the pin from time to time
|
|
||||||
(either manually, or using a tool such as dependabot or renovate).
|
|
||||||
|
|
||||||
For example, if you use `types-requests==2.31.0.1`, you can have confidence
|
|
||||||
that upgrading dependencies will not break type checking. However, you will
|
|
||||||
miss out on improvements in the stubs that could potentially improve type
|
|
||||||
checking until you update the pin. This strategy also has the risk that the
|
|
||||||
stubs you are using might become incompatible with the package being stubbed.
|
|
||||||
3. Don't pin the stubs. This is the option that demands the least work from
|
|
||||||
you when it comes to updating version pins, and has the advantage that you
|
|
||||||
will automatically benefit from improved stubs whenever a new version of the
|
|
||||||
stubs package is released. However, it carries the risk that the stubs
|
|
||||||
become incompatible with the package being stubbed.
|
|
||||||
|
|
||||||
For example, if a new major version of the package is released, there's a
|
|
||||||
chance the stubs might be updated to reflect the new version of the runtime
|
|
||||||
package before you update the package being stubbed.
|
|
||||||
|
|
||||||
You can also switch between the different strategies as needed. For example,
|
|
||||||
you could default to strategy (1), but fall back to strategy (2) when
|
|
||||||
a problem arises that can't easily be fixed.
|
|
||||||
|
|
||||||
### The `_typeshed` package
|
|
||||||
|
|
||||||
typeshed includes a package `_typeshed` as part of the standard library.
|
|
||||||
This package and its submodules contain utility types, but are not
|
|
||||||
available at runtime. For more information about how to use this package,
|
|
||||||
[see the `stdlib/_typeshed` directory](https://github.com/python/typeshed/tree/main/stdlib/_typeshed).
|
|
||||||
|
|
||||||
## Discussion
|
|
||||||
|
|
||||||
If you've run into behavior in the type checker that suggests the type
|
|
||||||
stubs for a given library are incorrect or incomplete,
|
|
||||||
we want to hear from you!
|
|
||||||
|
|
||||||
Our main forum for discussion is the project's [GitHub issue
|
|
||||||
tracker](https://github.com/python/typeshed/issues). This is the right
|
|
||||||
place to start a discussion of any of the above or most any other
|
|
||||||
topic concerning the project.
|
|
||||||
|
|
||||||
If you have general questions about typing with Python, or you need
|
|
||||||
a review of your type annotations or stubs outside of typeshed, head over to
|
|
||||||
[our discussion forum](https://github.com/python/typing/discussions).
|
|
||||||
For less formal discussion, try the typing chat room on
|
|
||||||
[gitter.im](https://gitter.im/python/typing). Some typeshed maintainers
|
|
||||||
are almost always present; feel free to find us there and we're happy
|
|
||||||
to chat. Substantive technical discussion will be directed to the
|
|
||||||
issue tracker.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
f863db6bc5242348ceaa6a3bca4e59aa9e62faaa
|
|
||||||
@@ -1,315 +0,0 @@
|
|||||||
# The structure of this file is as follows:
|
|
||||||
# - Blank lines and comments starting with `#` are ignored.
|
|
||||||
# - Lines contain the name of a module, followed by a colon,
|
|
||||||
# a space, and a version range (for example: `symbol: 3.0-3.9`).
|
|
||||||
#
|
|
||||||
# Version ranges may be of the form "X.Y-A.B" or "X.Y-". The
|
|
||||||
# first form means that a module was introduced in version X.Y and last
|
|
||||||
# available in version A.B. The second form means that the module was
|
|
||||||
# introduced in version X.Y and is still available in the latest
|
|
||||||
# version of Python.
|
|
||||||
#
|
|
||||||
# If a submodule is not listed separately, it has the same lifetime as
|
|
||||||
# its parent module.
|
|
||||||
#
|
|
||||||
# Python versions before 3.0 are ignored, so any module that was already
|
|
||||||
# present in 3.0 will have "3.0" as its minimum version. Version ranges
|
|
||||||
# for unsupported versions of Python 3 are generally accurate but we do
|
|
||||||
# not guarantee their correctness.
|
|
||||||
|
|
||||||
__future__: 3.0-
|
|
||||||
__main__: 3.0-
|
|
||||||
_ast: 3.0-
|
|
||||||
_bisect: 3.0-
|
|
||||||
_bootlocale: 3.4-3.9
|
|
||||||
_codecs: 3.0-
|
|
||||||
_collections_abc: 3.3-
|
|
||||||
_compat_pickle: 3.1-
|
|
||||||
_compression: 3.5-
|
|
||||||
_csv: 3.0-
|
|
||||||
_ctypes: 3.0-
|
|
||||||
_curses: 3.0-
|
|
||||||
_decimal: 3.3-
|
|
||||||
_dummy_thread: 3.0-3.8
|
|
||||||
_dummy_threading: 3.0-3.8
|
|
||||||
_heapq: 3.0-
|
|
||||||
_imp: 3.0-
|
|
||||||
_interpchannels: 3.13-
|
|
||||||
_interpqueues: 3.13-
|
|
||||||
_interpreters: 3.13-
|
|
||||||
_json: 3.0-
|
|
||||||
_locale: 3.0-
|
|
||||||
_lsprof: 3.0-
|
|
||||||
_markupbase: 3.0-
|
|
||||||
_msi: 3.0-
|
|
||||||
_operator: 3.4-
|
|
||||||
_osx_support: 3.0-
|
|
||||||
_posixsubprocess: 3.2-
|
|
||||||
_py_abc: 3.7-
|
|
||||||
_pydecimal: 3.5-
|
|
||||||
_random: 3.0-
|
|
||||||
_sitebuiltins: 3.4-
|
|
||||||
_socket: 3.0- # present in 3.0 at runtime, but not in typeshed
|
|
||||||
_stat: 3.4-
|
|
||||||
_thread: 3.0-
|
|
||||||
_threading_local: 3.0-
|
|
||||||
_tkinter: 3.0-
|
|
||||||
_tracemalloc: 3.4-
|
|
||||||
_typeshed: 3.0- # not present at runtime, only for type checking
|
|
||||||
_warnings: 3.0-
|
|
||||||
_weakref: 3.0-
|
|
||||||
_weakrefset: 3.0-
|
|
||||||
_winapi: 3.3-
|
|
||||||
abc: 3.0-
|
|
||||||
aifc: 3.0-3.12
|
|
||||||
antigravity: 3.0-
|
|
||||||
argparse: 3.0-
|
|
||||||
array: 3.0-
|
|
||||||
ast: 3.0-
|
|
||||||
asynchat: 3.0-3.11
|
|
||||||
asyncio: 3.4-
|
|
||||||
asyncio.exceptions: 3.8-
|
|
||||||
asyncio.format_helpers: 3.7-
|
|
||||||
asyncio.mixins: 3.10-
|
|
||||||
asyncio.runners: 3.7-
|
|
||||||
asyncio.staggered: 3.8-
|
|
||||||
asyncio.taskgroups: 3.11-
|
|
||||||
asyncio.threads: 3.9-
|
|
||||||
asyncio.timeouts: 3.11-
|
|
||||||
asyncio.trsock: 3.8-
|
|
||||||
asyncore: 3.0-3.11
|
|
||||||
atexit: 3.0-
|
|
||||||
audioop: 3.0-3.12
|
|
||||||
base64: 3.0-
|
|
||||||
bdb: 3.0-
|
|
||||||
binascii: 3.0-
|
|
||||||
binhex: 3.0-3.10
|
|
||||||
bisect: 3.0-
|
|
||||||
builtins: 3.0-
|
|
||||||
bz2: 3.0-
|
|
||||||
cProfile: 3.0-
|
|
||||||
calendar: 3.0-
|
|
||||||
cgi: 3.0-3.12
|
|
||||||
cgitb: 3.0-3.12
|
|
||||||
chunk: 3.0-3.12
|
|
||||||
cmath: 3.0-
|
|
||||||
cmd: 3.0-
|
|
||||||
code: 3.0-
|
|
||||||
codecs: 3.0-
|
|
||||||
codeop: 3.0-
|
|
||||||
collections: 3.0-
|
|
||||||
collections.abc: 3.3-
|
|
||||||
colorsys: 3.0-
|
|
||||||
compileall: 3.0-
|
|
||||||
concurrent: 3.2-
|
|
||||||
configparser: 3.0-
|
|
||||||
contextlib: 3.0-
|
|
||||||
contextvars: 3.7-
|
|
||||||
copy: 3.0-
|
|
||||||
copyreg: 3.0-
|
|
||||||
crypt: 3.0-3.12
|
|
||||||
csv: 3.0-
|
|
||||||
ctypes: 3.0-
|
|
||||||
curses: 3.0-
|
|
||||||
dataclasses: 3.7-
|
|
||||||
datetime: 3.0-
|
|
||||||
dbm: 3.0-
|
|
||||||
dbm.sqlite3: 3.13-
|
|
||||||
decimal: 3.0-
|
|
||||||
difflib: 3.0-
|
|
||||||
dis: 3.0-
|
|
||||||
distutils: 3.0-3.11
|
|
||||||
distutils.command.bdist_msi: 3.0-3.10
|
|
||||||
distutils.command.bdist_wininst: 3.0-3.9
|
|
||||||
doctest: 3.0-
|
|
||||||
dummy_threading: 3.0-3.8
|
|
||||||
email: 3.0-
|
|
||||||
encodings: 3.0-
|
|
||||||
ensurepip: 3.0-
|
|
||||||
enum: 3.4-
|
|
||||||
errno: 3.0-
|
|
||||||
faulthandler: 3.3-
|
|
||||||
fcntl: 3.0-
|
|
||||||
filecmp: 3.0-
|
|
||||||
fileinput: 3.0-
|
|
||||||
fnmatch: 3.0-
|
|
||||||
formatter: 3.0-3.9
|
|
||||||
fractions: 3.0-
|
|
||||||
ftplib: 3.0-
|
|
||||||
functools: 3.0-
|
|
||||||
gc: 3.0-
|
|
||||||
genericpath: 3.0-
|
|
||||||
getopt: 3.0-
|
|
||||||
getpass: 3.0-
|
|
||||||
gettext: 3.0-
|
|
||||||
glob: 3.0-
|
|
||||||
graphlib: 3.9-
|
|
||||||
grp: 3.0-
|
|
||||||
gzip: 3.0-
|
|
||||||
hashlib: 3.0-
|
|
||||||
heapq: 3.0-
|
|
||||||
hmac: 3.0-
|
|
||||||
html: 3.0-
|
|
||||||
http: 3.0-
|
|
||||||
imaplib: 3.0-
|
|
||||||
imghdr: 3.0-3.12
|
|
||||||
imp: 3.0-3.11
|
|
||||||
importlib: 3.0-
|
|
||||||
importlib._abc: 3.10-
|
|
||||||
importlib.metadata: 3.8-
|
|
||||||
importlib.metadata._meta: 3.10-
|
|
||||||
importlib.metadata.diagnose: 3.13-
|
|
||||||
importlib.readers: 3.10-
|
|
||||||
importlib.resources: 3.7-
|
|
||||||
importlib.resources.abc: 3.11-
|
|
||||||
importlib.resources.readers: 3.11-
|
|
||||||
importlib.resources.simple: 3.11-
|
|
||||||
importlib.simple: 3.11-
|
|
||||||
inspect: 3.0-
|
|
||||||
io: 3.0-
|
|
||||||
ipaddress: 3.3-
|
|
||||||
itertools: 3.0-
|
|
||||||
json: 3.0-
|
|
||||||
keyword: 3.0-
|
|
||||||
lib2to3: 3.0-3.12
|
|
||||||
linecache: 3.0-
|
|
||||||
locale: 3.0-
|
|
||||||
logging: 3.0-
|
|
||||||
lzma: 3.3-
|
|
||||||
mailbox: 3.0-
|
|
||||||
mailcap: 3.0-3.12
|
|
||||||
marshal: 3.0-
|
|
||||||
math: 3.0-
|
|
||||||
mimetypes: 3.0-
|
|
||||||
mmap: 3.0-
|
|
||||||
modulefinder: 3.0-
|
|
||||||
msilib: 3.0-3.12
|
|
||||||
msvcrt: 3.0-
|
|
||||||
multiprocessing: 3.0-
|
|
||||||
multiprocessing.resource_tracker: 3.8-
|
|
||||||
multiprocessing.shared_memory: 3.8-
|
|
||||||
netrc: 3.0-
|
|
||||||
nis: 3.0-3.12
|
|
||||||
nntplib: 3.0-3.12
|
|
||||||
nt: 3.0-
|
|
||||||
ntpath: 3.0-
|
|
||||||
nturl2path: 3.0-
|
|
||||||
numbers: 3.0-
|
|
||||||
opcode: 3.0-
|
|
||||||
operator: 3.0-
|
|
||||||
optparse: 3.0-
|
|
||||||
os: 3.0-
|
|
||||||
ossaudiodev: 3.0-3.12
|
|
||||||
parser: 3.0-3.9
|
|
||||||
pathlib: 3.4-
|
|
||||||
pdb: 3.0-
|
|
||||||
pickle: 3.0-
|
|
||||||
pickletools: 3.0-
|
|
||||||
pipes: 3.0-3.12
|
|
||||||
pkgutil: 3.0-
|
|
||||||
platform: 3.0-
|
|
||||||
plistlib: 3.0-
|
|
||||||
poplib: 3.0-
|
|
||||||
posix: 3.0-
|
|
||||||
posixpath: 3.0-
|
|
||||||
pprint: 3.0-
|
|
||||||
profile: 3.0-
|
|
||||||
pstats: 3.0-
|
|
||||||
pty: 3.0-
|
|
||||||
pwd: 3.0-
|
|
||||||
py_compile: 3.0-
|
|
||||||
pyclbr: 3.0-
|
|
||||||
pydoc: 3.0-
|
|
||||||
pydoc_data: 3.0-
|
|
||||||
pyexpat: 3.0-
|
|
||||||
queue: 3.0-
|
|
||||||
quopri: 3.0-
|
|
||||||
random: 3.0-
|
|
||||||
re: 3.0-
|
|
||||||
readline: 3.0-
|
|
||||||
reprlib: 3.0-
|
|
||||||
resource: 3.0-
|
|
||||||
rlcompleter: 3.0-
|
|
||||||
runpy: 3.0-
|
|
||||||
sched: 3.0-
|
|
||||||
secrets: 3.6-
|
|
||||||
select: 3.0-
|
|
||||||
selectors: 3.4-
|
|
||||||
shelve: 3.0-
|
|
||||||
shlex: 3.0-
|
|
||||||
shutil: 3.0-
|
|
||||||
signal: 3.0-
|
|
||||||
site: 3.0-
|
|
||||||
smtpd: 3.0-3.11
|
|
||||||
smtplib: 3.0-
|
|
||||||
sndhdr: 3.0-3.12
|
|
||||||
socket: 3.0-
|
|
||||||
socketserver: 3.0-
|
|
||||||
spwd: 3.0-3.12
|
|
||||||
sqlite3: 3.0-
|
|
||||||
sre_compile: 3.0-
|
|
||||||
sre_constants: 3.0-
|
|
||||||
sre_parse: 3.0-
|
|
||||||
ssl: 3.0-
|
|
||||||
stat: 3.0-
|
|
||||||
statistics: 3.4-
|
|
||||||
string: 3.0-
|
|
||||||
stringprep: 3.0-
|
|
||||||
struct: 3.0-
|
|
||||||
subprocess: 3.0-
|
|
||||||
sunau: 3.0-3.12
|
|
||||||
symbol: 3.0-3.9
|
|
||||||
symtable: 3.0-
|
|
||||||
sys: 3.0-
|
|
||||||
sys._monitoring: 3.12- # Doesn't actually exist. See comments in the stub.
|
|
||||||
sysconfig: 3.0-
|
|
||||||
syslog: 3.0-
|
|
||||||
tabnanny: 3.0-
|
|
||||||
tarfile: 3.0-
|
|
||||||
telnetlib: 3.0-3.12
|
|
||||||
tempfile: 3.0-
|
|
||||||
termios: 3.0-
|
|
||||||
textwrap: 3.0-
|
|
||||||
this: 3.0-
|
|
||||||
threading: 3.0-
|
|
||||||
time: 3.0-
|
|
||||||
timeit: 3.0-
|
|
||||||
tkinter: 3.0-
|
|
||||||
tkinter.tix: 3.0-3.12
|
|
||||||
token: 3.0-
|
|
||||||
tokenize: 3.0-
|
|
||||||
tomllib: 3.11-
|
|
||||||
trace: 3.0-
|
|
||||||
traceback: 3.0-
|
|
||||||
tracemalloc: 3.4-
|
|
||||||
tty: 3.0-
|
|
||||||
turtle: 3.0-
|
|
||||||
types: 3.0-
|
|
||||||
typing: 3.5-
|
|
||||||
typing_extensions: 3.0-
|
|
||||||
unicodedata: 3.0-
|
|
||||||
unittest: 3.0-
|
|
||||||
unittest._log: 3.9-
|
|
||||||
unittest.async_case: 3.8-
|
|
||||||
urllib: 3.0-
|
|
||||||
uu: 3.0-3.12
|
|
||||||
uuid: 3.0-
|
|
||||||
venv: 3.3-
|
|
||||||
warnings: 3.0-
|
|
||||||
wave: 3.0-
|
|
||||||
weakref: 3.0-
|
|
||||||
webbrowser: 3.0-
|
|
||||||
winreg: 3.0-
|
|
||||||
winsound: 3.0-
|
|
||||||
wsgiref: 3.0-
|
|
||||||
wsgiref.types: 3.11-
|
|
||||||
xdrlib: 3.0-3.12
|
|
||||||
xml: 3.0-
|
|
||||||
xmlrpc: 3.0-
|
|
||||||
xxlimited: 3.2-
|
|
||||||
zipapp: 3.5-
|
|
||||||
zipfile: 3.0-
|
|
||||||
zipfile._path: 3.12-
|
|
||||||
zipimport: 3.0-
|
|
||||||
zlib: 3.0-
|
|
||||||
zoneinfo: 3.9-
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
from typing_extensions import TypeAlias
|
|
||||||
|
|
||||||
_VersionInfo: TypeAlias = tuple[int, int, int, str, int]
|
|
||||||
|
|
||||||
class _Feature:
|
|
||||||
def __init__(self, optionalRelease: _VersionInfo, mandatoryRelease: _VersionInfo | None, compiler_flag: int) -> None: ...
|
|
||||||
def getOptionalRelease(self) -> _VersionInfo: ...
|
|
||||||
def getMandatoryRelease(self) -> _VersionInfo | None: ...
|
|
||||||
compiler_flag: int
|
|
||||||
|
|
||||||
absolute_import: _Feature
|
|
||||||
division: _Feature
|
|
||||||
generators: _Feature
|
|
||||||
nested_scopes: _Feature
|
|
||||||
print_function: _Feature
|
|
||||||
unicode_literals: _Feature
|
|
||||||
with_statement: _Feature
|
|
||||||
barry_as_FLUFL: _Feature
|
|
||||||
generator_stop: _Feature
|
|
||||||
annotations: _Feature
|
|
||||||
|
|
||||||
all_feature_names: list[str] # undocumented
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"all_feature_names",
|
|
||||||
"absolute_import",
|
|
||||||
"division",
|
|
||||||
"generators",
|
|
||||||
"nested_scopes",
|
|
||||||
"print_function",
|
|
||||||
"unicode_literals",
|
|
||||||
"with_statement",
|
|
||||||
"barry_as_FLUFL",
|
|
||||||
"generator_stop",
|
|
||||||
"annotations",
|
|
||||||
]
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
from typing import Any
|
|
||||||
|
|
||||||
def __getattr__(name: str) -> Any: ...
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,84 +0,0 @@
|
|||||||
import sys
|
|
||||||
from _typeshed import SupportsLenAndGetItem, SupportsRichComparisonT
|
|
||||||
from collections.abc import Callable, MutableSequence
|
|
||||||
from typing import TypeVar, overload
|
|
||||||
|
|
||||||
_T = TypeVar("_T")
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
@overload
|
|
||||||
def bisect_left(
|
|
||||||
a: SupportsLenAndGetItem[SupportsRichComparisonT],
|
|
||||||
x: SupportsRichComparisonT,
|
|
||||||
lo: int = 0,
|
|
||||||
hi: int | None = None,
|
|
||||||
*,
|
|
||||||
key: None = None,
|
|
||||||
) -> int: ...
|
|
||||||
@overload
|
|
||||||
def bisect_left(
|
|
||||||
a: SupportsLenAndGetItem[_T],
|
|
||||||
x: SupportsRichComparisonT,
|
|
||||||
lo: int = 0,
|
|
||||||
hi: int | None = None,
|
|
||||||
*,
|
|
||||||
key: Callable[[_T], SupportsRichComparisonT],
|
|
||||||
) -> int: ...
|
|
||||||
@overload
|
|
||||||
def bisect_right(
|
|
||||||
a: SupportsLenAndGetItem[SupportsRichComparisonT],
|
|
||||||
x: SupportsRichComparisonT,
|
|
||||||
lo: int = 0,
|
|
||||||
hi: int | None = None,
|
|
||||||
*,
|
|
||||||
key: None = None,
|
|
||||||
) -> int: ...
|
|
||||||
@overload
|
|
||||||
def bisect_right(
|
|
||||||
a: SupportsLenAndGetItem[_T],
|
|
||||||
x: SupportsRichComparisonT,
|
|
||||||
lo: int = 0,
|
|
||||||
hi: int | None = None,
|
|
||||||
*,
|
|
||||||
key: Callable[[_T], SupportsRichComparisonT],
|
|
||||||
) -> int: ...
|
|
||||||
@overload
|
|
||||||
def insort_left(
|
|
||||||
a: MutableSequence[SupportsRichComparisonT],
|
|
||||||
x: SupportsRichComparisonT,
|
|
||||||
lo: int = 0,
|
|
||||||
hi: int | None = None,
|
|
||||||
*,
|
|
||||||
key: None = None,
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def insort_left(
|
|
||||||
a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT]
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def insort_right(
|
|
||||||
a: MutableSequence[SupportsRichComparisonT],
|
|
||||||
x: SupportsRichComparisonT,
|
|
||||||
lo: int = 0,
|
|
||||||
hi: int | None = None,
|
|
||||||
*,
|
|
||||||
key: None = None,
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def insort_right(
|
|
||||||
a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT]
|
|
||||||
) -> None: ...
|
|
||||||
|
|
||||||
else:
|
|
||||||
def bisect_left(
|
|
||||||
a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None
|
|
||||||
) -> int: ...
|
|
||||||
def bisect_right(
|
|
||||||
a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None
|
|
||||||
) -> int: ...
|
|
||||||
def insort_left(
|
|
||||||
a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None
|
|
||||||
) -> None: ...
|
|
||||||
def insort_right(
|
|
||||||
a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None
|
|
||||||
) -> None: ...
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
def getpreferredencoding(do_setlocale: bool = True) -> str: ...
|
|
||||||
@@ -1,133 +0,0 @@
|
|||||||
import codecs
|
|
||||||
import sys
|
|
||||||
from _typeshed import ReadableBuffer
|
|
||||||
from collections.abc import Callable
|
|
||||||
from typing import Literal, overload
|
|
||||||
from typing_extensions import TypeAlias
|
|
||||||
|
|
||||||
# This type is not exposed; it is defined in unicodeobject.c
|
|
||||||
class _EncodingMap:
|
|
||||||
def size(self) -> int: ...
|
|
||||||
|
|
||||||
_CharMap: TypeAlias = dict[int, int] | _EncodingMap
|
|
||||||
_Handler: TypeAlias = Callable[[UnicodeError], tuple[str | bytes, int]]
|
|
||||||
_SearchFunction: TypeAlias = Callable[[str], codecs.CodecInfo | None]
|
|
||||||
|
|
||||||
def register(search_function: _SearchFunction, /) -> None: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
def unregister(search_function: _SearchFunction, /) -> None: ...
|
|
||||||
|
|
||||||
def register_error(errors: str, handler: _Handler, /) -> None: ...
|
|
||||||
def lookup_error(name: str, /) -> _Handler: ...
|
|
||||||
|
|
||||||
# The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300
|
|
||||||
# https://docs.python.org/3/library/codecs.html#binary-transforms
|
|
||||||
_BytesToBytesEncoding: TypeAlias = Literal[
|
|
||||||
"base64",
|
|
||||||
"base_64",
|
|
||||||
"base64_codec",
|
|
||||||
"bz2",
|
|
||||||
"bz2_codec",
|
|
||||||
"hex",
|
|
||||||
"hex_codec",
|
|
||||||
"quopri",
|
|
||||||
"quotedprintable",
|
|
||||||
"quoted_printable",
|
|
||||||
"quopri_codec",
|
|
||||||
"uu",
|
|
||||||
"uu_codec",
|
|
||||||
"zip",
|
|
||||||
"zlib",
|
|
||||||
"zlib_codec",
|
|
||||||
]
|
|
||||||
# https://docs.python.org/3/library/codecs.html#text-transforms
|
|
||||||
_StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"]
|
|
||||||
|
|
||||||
@overload
|
|
||||||
def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ...
|
|
||||||
@overload
|
|
||||||
def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... # type: ignore[overload-overlap]
|
|
||||||
@overload
|
|
||||||
def encode(obj: str, encoding: str = "utf-8", errors: str = "strict") -> bytes: ...
|
|
||||||
@overload
|
|
||||||
def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... # type: ignore[overload-overlap]
|
|
||||||
@overload
|
|
||||||
def decode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ...
|
|
||||||
|
|
||||||
# these are documented as text encodings but in practice they also accept str as input
|
|
||||||
@overload
|
|
||||||
def decode(
|
|
||||||
obj: str,
|
|
||||||
encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"],
|
|
||||||
errors: str = "strict",
|
|
||||||
) -> str: ...
|
|
||||||
|
|
||||||
# hex is officially documented as a bytes to bytes encoding, but it appears to also work with str
|
|
||||||
@overload
|
|
||||||
def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = "strict") -> bytes: ...
|
|
||||||
@overload
|
|
||||||
def decode(obj: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> str: ...
|
|
||||||
def lookup(encoding: str, /) -> codecs.CodecInfo: ...
|
|
||||||
def charmap_build(map: str, /) -> _CharMap: ...
|
|
||||||
def ascii_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
|
|
||||||
def ascii_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def charmap_decode(data: ReadableBuffer, errors: str | None = None, mapping: _CharMap | None = None, /) -> tuple[str, int]: ...
|
|
||||||
def charmap_encode(str: str, errors: str | None = None, mapping: _CharMap | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
|
|
||||||
def escape_encode(data: bytes, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def latin_1_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
|
|
||||||
def latin_1_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 9):
|
|
||||||
def raw_unicode_escape_decode(
|
|
||||||
data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /
|
|
||||||
) -> tuple[str, int]: ...
|
|
||||||
|
|
||||||
else:
|
|
||||||
def raw_unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
|
|
||||||
|
|
||||||
def raw_unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def readbuffer_encode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 9):
|
|
||||||
def unicode_escape_decode(
|
|
||||||
data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /
|
|
||||||
) -> tuple[str, int]: ...
|
|
||||||
|
|
||||||
else:
|
|
||||||
def unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
|
|
||||||
|
|
||||||
def unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_16_be_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_16_be_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_16_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_16_encode(str: str, errors: str | None = None, byteorder: int = 0, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_16_ex_decode(
|
|
||||||
data: ReadableBuffer, errors: str | None = None, byteorder: int = 0, final: bool = False, /
|
|
||||||
) -> tuple[str, int, int]: ...
|
|
||||||
def utf_16_le_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_16_le_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_32_be_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_32_be_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_32_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_32_encode(str: str, errors: str | None = None, byteorder: int = 0, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_32_ex_decode(
|
|
||||||
data: ReadableBuffer, errors: str | None = None, byteorder: int = 0, final: bool = False, /
|
|
||||||
) -> tuple[str, int, int]: ...
|
|
||||||
def utf_32_le_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_32_le_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_7_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_7_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def utf_8_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def utf_8_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
|
|
||||||
if sys.platform == "win32":
|
|
||||||
def mbcs_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def mbcs_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def code_page_decode(
|
|
||||||
codepage: int, data: ReadableBuffer, errors: str | None = None, final: bool = False, /
|
|
||||||
) -> tuple[str, int]: ...
|
|
||||||
def code_page_encode(code_page: int, str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
def oem_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
|
|
||||||
def oem_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
|
|
||||||
@@ -1,98 +0,0 @@
|
|||||||
import sys
|
|
||||||
from abc import abstractmethod
|
|
||||||
from types import MappingProxyType
|
|
||||||
from typing import ( # noqa: Y022,Y038,Y057
|
|
||||||
AbstractSet as Set,
|
|
||||||
AsyncGenerator as AsyncGenerator,
|
|
||||||
AsyncIterable as AsyncIterable,
|
|
||||||
AsyncIterator as AsyncIterator,
|
|
||||||
Awaitable as Awaitable,
|
|
||||||
ByteString as ByteString,
|
|
||||||
Callable as Callable,
|
|
||||||
Collection as Collection,
|
|
||||||
Container as Container,
|
|
||||||
Coroutine as Coroutine,
|
|
||||||
Generator as Generator,
|
|
||||||
Generic,
|
|
||||||
Hashable as Hashable,
|
|
||||||
ItemsView as ItemsView,
|
|
||||||
Iterable as Iterable,
|
|
||||||
Iterator as Iterator,
|
|
||||||
KeysView as KeysView,
|
|
||||||
Mapping as Mapping,
|
|
||||||
MappingView as MappingView,
|
|
||||||
MutableMapping as MutableMapping,
|
|
||||||
MutableSequence as MutableSequence,
|
|
||||||
MutableSet as MutableSet,
|
|
||||||
Protocol,
|
|
||||||
Reversible as Reversible,
|
|
||||||
Sequence as Sequence,
|
|
||||||
Sized as Sized,
|
|
||||||
TypeVar,
|
|
||||||
ValuesView as ValuesView,
|
|
||||||
final,
|
|
||||||
runtime_checkable,
|
|
||||||
)
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"Awaitable",
|
|
||||||
"Coroutine",
|
|
||||||
"AsyncIterable",
|
|
||||||
"AsyncIterator",
|
|
||||||
"AsyncGenerator",
|
|
||||||
"Hashable",
|
|
||||||
"Iterable",
|
|
||||||
"Iterator",
|
|
||||||
"Generator",
|
|
||||||
"Reversible",
|
|
||||||
"Sized",
|
|
||||||
"Container",
|
|
||||||
"Callable",
|
|
||||||
"Collection",
|
|
||||||
"Set",
|
|
||||||
"MutableSet",
|
|
||||||
"Mapping",
|
|
||||||
"MutableMapping",
|
|
||||||
"MappingView",
|
|
||||||
"KeysView",
|
|
||||||
"ItemsView",
|
|
||||||
"ValuesView",
|
|
||||||
"Sequence",
|
|
||||||
"MutableSequence",
|
|
||||||
"ByteString",
|
|
||||||
]
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
__all__ += ["Buffer"]
|
|
||||||
|
|
||||||
_KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers.
|
|
||||||
_VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers.
|
|
||||||
|
|
||||||
@final
|
|
||||||
class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented
|
|
||||||
def __eq__(self, value: object, /) -> bool: ...
|
|
||||||
if sys.version_info >= (3, 13):
|
|
||||||
def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: ...
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
@property
|
|
||||||
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
|
|
||||||
|
|
||||||
@final
|
|
||||||
class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
@property
|
|
||||||
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
|
|
||||||
|
|
||||||
@final
|
|
||||||
class dict_items(ItemsView[_KT_co, _VT_co]): # undocumented
|
|
||||||
def __eq__(self, value: object, /) -> bool: ...
|
|
||||||
if sys.version_info >= (3, 13):
|
|
||||||
def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: ...
|
|
||||||
if sys.version_info >= (3, 10):
|
|
||||||
@property
|
|
||||||
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 12):
|
|
||||||
@runtime_checkable
|
|
||||||
class Buffer(Protocol):
|
|
||||||
@abstractmethod
|
|
||||||
def __buffer__(self, flags: int, /) -> memoryview: ...
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
IMPORT_MAPPING: dict[str, str]
|
|
||||||
NAME_MAPPING: dict[tuple[str, str], tuple[str, str]]
|
|
||||||
PYTHON2_EXCEPTIONS: tuple[str, ...]
|
|
||||||
MULTIPROCESSING_EXCEPTIONS: tuple[str, ...]
|
|
||||||
REVERSE_IMPORT_MAPPING: dict[str, str]
|
|
||||||
REVERSE_NAME_MAPPING: dict[tuple[str, str], tuple[str, str]]
|
|
||||||
PYTHON3_OSERROR_EXCEPTIONS: tuple[str, ...]
|
|
||||||
PYTHON3_IMPORTERROR_EXCEPTIONS: tuple[str, ...]
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
from _typeshed import WriteableBuffer
|
|
||||||
from collections.abc import Callable
|
|
||||||
from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase
|
|
||||||
from typing import Any, Protocol
|
|
||||||
|
|
||||||
BUFFER_SIZE = DEFAULT_BUFFER_SIZE
|
|
||||||
|
|
||||||
class _Reader(Protocol):
|
|
||||||
def read(self, n: int, /) -> bytes: ...
|
|
||||||
def seekable(self) -> bool: ...
|
|
||||||
def seek(self, n: int, /) -> Any: ...
|
|
||||||
|
|
||||||
class BaseStream(BufferedIOBase): ...
|
|
||||||
|
|
||||||
class DecompressReader(RawIOBase):
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
fp: _Reader,
|
|
||||||
decomp_factory: Callable[..., object],
|
|
||||||
trailing_error: type[Exception] | tuple[type[Exception], ...] = (),
|
|
||||||
**decomp_args: Any,
|
|
||||||
) -> None: ...
|
|
||||||
def readinto(self, b: WriteableBuffer) -> int: ...
|
|
||||||
def read(self, size: int = -1) -> bytes: ...
|
|
||||||
def seek(self, offset: int, whence: int = 0) -> int: ...
|
|
||||||
@@ -1,90 +0,0 @@
|
|||||||
import sys
|
|
||||||
from _typeshed import SupportsWrite
|
|
||||||
from collections.abc import Iterable, Iterator
|
|
||||||
from typing import Any, Final, Literal
|
|
||||||
from typing_extensions import TypeAlias
|
|
||||||
|
|
||||||
# Version string of the _csv C extension.
__version__: Final[str]

# Quoting modes accepted by reader()/writer(); the Literal values mirror
# the integer constants defined by the C module.
QUOTE_ALL: Literal[1]
QUOTE_MINIMAL: Literal[0]
QUOTE_NONE: Literal[3]
QUOTE_NONNUMERIC: Literal[2]
if sys.version_info >= (3, 12):
    QUOTE_STRINGS: Literal[4]
    QUOTE_NOTNULL: Literal[5]
|
|
||||||
|
|
||||||
# Type accepted wherever a QUOTE_* constant is expected.
# Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC`
# However, using literals in situations like these can cause false-positives (see #7258)
_QuotingType: TypeAlias = int
|
|
||||||
|
|
||||||
# Raised by the _csv module for malformed input or bad dialect parameters.
class Error(Exception): ...
|
|
||||||
|
|
||||||
class Dialect:
    """Describes how a CSV file is formatted (separator, quoting, etc.)."""

    delimiter: str  # field separator
    quotechar: str | None  # quoting character, or None to disable quoting
    escapechar: str | None  # escape character, or None
    doublequote: bool  # represent quotechar inside a field by doubling it
    skipinitialspace: bool  # ignore whitespace immediately after the delimiter
    lineterminator: str  # row terminator used when writing
    quoting: _QuotingType  # one of the QUOTE_* constants
    strict: bool  # raise Error on malformed input instead of best-effort parsing
    def __init__(self) -> None: ...
|
|
||||||
|
|
||||||
# Anything accepted where a dialect is expected: a registered dialect
# name, a Dialect instance, or a Dialect subclass.
_DialectLike: TypeAlias = str | Dialect | type[Dialect]
|
|
||||||
|
|
||||||
class _reader(Iterator[list[str]]):
    """Iterator over parsed CSV rows, as returned by reader()."""

    @property
    def dialect(self) -> Dialect: ...
    # Number of source lines consumed so far; can exceed the number of
    # rows when records span multiple physical lines.
    line_num: int
    def __next__(self) -> list[str]: ...
|
|
||||||
|
|
||||||
class _writer:
    """CSV writer object, as returned by writer()."""

    @property
    def dialect(self) -> Dialect: ...
    # Write a single row; the return value is unspecified here.
    def writerow(self, row: Iterable[Any]) -> Any: ...
    def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ...
|
|
||||||
|
|
||||||
# Build a _writer that formats rows onto *csvfile* using *dialect*,
# with the keyword parameters overriding individual dialect attributes.
def writer(
    csvfile: SupportsWrite[str],
    dialect: _DialectLike = "excel",
    *,
    delimiter: str = ",",
    quotechar: str | None = '"',
    escapechar: str | None = None,
    doublequote: bool = True,
    skipinitialspace: bool = False,
    lineterminator: str = "\r\n",
    quoting: _QuotingType = 0,  # QUOTE_MINIMAL
    strict: bool = False,
) -> _writer: ...
|
|
||||||
# Build a _reader that parses rows from the line iterable *csvfile*
# using *dialect*, with keyword parameters overriding individual
# dialect attributes.
def reader(
    csvfile: Iterable[str],
    dialect: _DialectLike = "excel",
    *,
    delimiter: str = ",",
    quotechar: str | None = '"',
    escapechar: str | None = None,
    doublequote: bool = True,
    skipinitialspace: bool = False,
    lineterminator: str = "\r\n",
    quoting: _QuotingType = 0,  # QUOTE_MINIMAL
    strict: bool = False,
) -> _reader: ...
|
|
||||||
# Associate *name* with a dialect (a Dialect subclass and/or the given
# keyword overrides) for later lookup by reader()/writer().
def register_dialect(
    name: str,
    dialect: type[Dialect] = ...,
    *,
    delimiter: str = ",",
    quotechar: str | None = '"',
    escapechar: str | None = None,
    doublequote: bool = True,
    skipinitialspace: bool = False,
    lineterminator: str = "\r\n",
    quoting: _QuotingType = 0,  # QUOTE_MINIMAL
    strict: bool = False,
) -> None: ...
|
|
||||||
# Remove the dialect registered under *name*.
def unregister_dialect(name: str) -> None: ...

# Look up a registered dialect by name.
def get_dialect(name: str) -> Dialect: ...

# Names of all currently registered dialects.
def list_dialects() -> list[str]: ...

# Return the current maximum field size; if *new_limit* is given, set
# it and return the previous value.
def field_size_limit(new_limit: int = ...) -> int: ...
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user