Compare commits

1 commit: 0.5.7...editables-

| Author | SHA1 | Date |
|---|---|---|
|  | 09e8599e91 |  |

.github/renovate.json5 (vendored) — 13 changes
@@ -8,7 +8,7 @@
   semanticCommits: "disabled",
   separateMajorMinor: false,
   prHourlyLimit: 10,
-  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
+  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
   cargo: {
     // See https://docs.renovatebot.com/configuration-options/#rangestrategy
     rangeStrategy: "update-lockfile",
@@ -16,9 +16,6 @@
   pep621: {
     fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
   },
-  pip_requirements: {
-    fileMatch: ["^docs/requirements.*\\.txt$"],
-  },
   npm: {
     fileMatch: ["^playground/.*package\\.json$"],
   },
@@ -51,14 +48,6 @@
       matchManagers: ["cargo"],
       enabled: false,
     },
-    {
-      // `mkdocs-material` requires a manual update to keep the version in sync
-      // with `mkdocs-material-insider`.
-      // See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
-      matchManagers: ["pip_requirements"],
-      matchPackagePatterns: ["mkdocs-material"],
-      enabled: false,
-    },
     {
       groupName: "pre-commit dependencies",
       matchManagers: ["pre-commit"],
.github/workflows/ci.yaml (vendored) — 10 changes

@@ -111,7 +111,7 @@ jobs:
       - name: "Clippy"
         run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
       - name: "Clippy (wasm)"
-        run: cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
+        run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings

   cargo-test-linux:
     name: "cargo test (linux)"
@@ -191,14 +191,10 @@ jobs:
           cache-dependency-path: playground/package-lock.json
       - uses: jetli/wasm-pack-action@v0.4.0
       - uses: Swatinem/rust-cache@v2
-      - name: "Test ruff_wasm"
+      - name: "Run wasm-pack"
        run: |
          cd crates/ruff_wasm
          wasm-pack test --node
-      - name: "Test red_knot_wasm"
-        run: |
-          cd crates/red_knot_wasm
-          wasm-pack test --node

   cargo-build-release:
     name: "cargo build (release)"
@@ -623,7 +619,7 @@ jobs:
         run: cargo codspeed build --features codspeed -p ruff_benchmark

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@v3
+        uses: CodSpeedHQ/action@v2
        with:
          run: cargo codspeed run
          token: ${{ secrets.CODSPEED_TOKEN }}
.github/workflows/pr-comment.yaml (vendored) — 2 changes

@@ -23,7 +23,6 @@ jobs:
          name: pr-number
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore
-          allow_forks: true

      - name: Parse pull request number
        id: pr-number
@@ -44,7 +43,6 @@ jobs:
          path: pr/ecosystem
          workflow_conclusion: completed
          if_no_artifact_found: ignore
-          allow_forks: true

      - name: Generate comment content
        id: generate-comment
.github/workflows/publish-docs.yml (vendored) — 4 changes

@@ -104,8 +104,8 @@ jobs:
        run: |
          branch_name="${{ env.branch_name }}"

-          git config user.name "astral-docs-bot"
-          git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
+          git config user.name "$GITHUB_ACTOR"
+          git config user.email "$GITHUB_ACTOR@users.noreply.github.com"

          git checkout -b $branch_name
          git add site/ruff
.github/workflows/sync_typeshed.yaml (vendored) — 14 changes

@@ -37,13 +37,13 @@ jobs:
      - name: Sync typeshed
        id: sync
        run: |
-          rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed
-          mkdir ruff/crates/red_knot_python_semantic/vendor/typeshed
-          cp typeshed/README.md ruff/crates/red_knot_python_semantic/vendor/typeshed
-          cp typeshed/LICENSE ruff/crates/red_knot_python_semantic/vendor/typeshed
-          cp -r typeshed/stdlib ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib
-          rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib/@tests
-          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt
+          rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed
+          mkdir ruff/crates/red_knot_module_resolver/vendor/typeshed
+          cp typeshed/README.md ruff/crates/red_knot_module_resolver/vendor/typeshed
+          cp typeshed/LICENSE ruff/crates/red_knot_module_resolver/vendor/typeshed
+          cp -r typeshed/stdlib ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib
+          rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib/@tests
+          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
      - name: Commit the changes
        id: commit
        if: ${{ steps.sync.outcome == 'success' }}
.gitignore (vendored) — 8 changes

@@ -21,14 +21,6 @@ flamegraph.svg
 # `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
 /target*

-# samply profiles
-profile.json
-
-# tracing-flame traces
-tracing.folded
-tracing-flamechart.svg
-tracing-flamegraph.svg
-
 ###
 # Rust.gitignore
 ###
.pre-commit-config.yaml

@@ -2,8 +2,7 @@ fail_fast: true

 exclude: |
   (?x)^(
-    crates/red_knot_python_semantic/vendor/.*|
-    crates/red_knot_workspace/resources/.*|
+    crates/red_knot_module_resolver/vendor/.*|
     crates/ruff_linter/resources/.*|
     crates/ruff_linter/src/rules/.*/snapshots/.*|
     crates/ruff/resources/.*|
@@ -43,7 +42,7 @@ repos:
   )$

   - repo: https://github.com/crate-ci/typos
-    rev: v1.23.6
+    rev: v1.23.2
     hooks:
       - id: typos

@@ -57,13 +56,18 @@ repos:
        pass_filenames: false  # This makes it a lot faster

  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.6
+    rev: v0.5.2
    hooks:
      - id: ruff-format
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix]
        types_or: [python, pyi]
        require_serial: true
+        exclude: |
+          (?x)^(
+            crates/ruff_linter/resources/.*|
+            crates/ruff_python_formatter/resources/.*
+          )$

  # Prettier
  - repo: https://github.com/pre-commit/mirrors-prettier
CHANGELOG.md — 140 changes

@@ -1,145 +1,5 @@
 # Changelog

-## 0.5.7
-
-### Preview features
-
-- \[`flake8-comprehensions`\] Account for list and set comprehensions in `unnecessary-literal-within-tuple-call` (`C409`) ([#12657](https://github.com/astral-sh/ruff/pull/12657))
-- \[`flake8-pyi`\] Add autofix for `future-annotations-in-stub` (`PYI044`) ([#12676](https://github.com/astral-sh/ruff/pull/12676))
-- \[`flake8-return`\] Avoid syntax error when auto-fixing `RET505` with mixed indentation (space and tabs) ([#12740](https://github.com/astral-sh/ruff/pull/12740))
-- \[`pydoclint`\] Add `docstring-missing-yields` (`DOC402`) and `docstring-extraneous-yields` (`DOC403`) ([#12538](https://github.com/astral-sh/ruff/pull/12538))
-- \[`pydoclint`\] Avoid `DOC201` if docstring begins with "Return", "Returns", "Yield", or "Yields" ([#12675](https://github.com/astral-sh/ruff/pull/12675))
-- \[`pydoclint`\] Deduplicate collected exceptions after traversing function bodies (`DOC501`) ([#12642](https://github.com/astral-sh/ruff/pull/12642))
-- \[`pydoclint`\] Ignore `DOC` errors for stub functions ([#12651](https://github.com/astral-sh/ruff/pull/12651))
-- \[`pydoclint`\] Teach rules to understand reraised exceptions as being explicitly raised (`DOC501`, `DOC502`) ([#12639](https://github.com/astral-sh/ruff/pull/12639))
-- \[`ruff`\] Implement `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#12480](https://github.com/astral-sh/ruff/pull/12480))
-- \[`ruff`\] Mark `RUF023` fix as unsafe if `__slots__` is not a set and the binding is used elsewhere ([#12692](https://github.com/astral-sh/ruff/pull/12692))
-
-### Rule changes
-
-- \[`refurb`\] Add autofix for `implicit-cwd` (`FURB177`) ([#12708](https://github.com/astral-sh/ruff/pull/12708))
-- \[`ruff`\] Add autofix for `zip-instead-of-pairwise` (`RUF007`) ([#12663](https://github.com/astral-sh/ruff/pull/12663))
-- \[`tryceratops`\] Add `BaseException` to `raise-vanilla-class` rule (`TRY002`) ([#12620](https://github.com/astral-sh/ruff/pull/12620))
-
-### Server
-
-- Ignore non-file workspace URL; Ruff will display a warning notification in this case ([#12725](https://github.com/astral-sh/ruff/pull/12725))
-
-### CLI
-
-- Fix cache invalidation for nested `pyproject.toml` files ([#12727](https://github.com/astral-sh/ruff/pull/12727))
-
-### Bug fixes
-
-- \[`flake8-async`\] Fix false positives with multiple `async with` items (`ASYNC100`) ([#12643](https://github.com/astral-sh/ruff/pull/12643))
-- \[`flake8-bandit`\] Avoid false-positives for list concatenations in SQL construction (`S608`) ([#12720](https://github.com/astral-sh/ruff/pull/12720))
-- \[`flake8-bugbear`\] Treat `return` as equivalent to `break` (`B909`) ([#12646](https://github.com/astral-sh/ruff/pull/12646))
-- \[`flake8-comprehensions`\] Set comprehensions not a violation for `sum` in `unnecessary-comprehension-in-call` (`C419`) ([#12691](https://github.com/astral-sh/ruff/pull/12691))
-- \[`flake8-simplify`\] Parenthesize conditions based on precedence when merging if arms (`SIM114`) ([#12737](https://github.com/astral-sh/ruff/pull/12737))
-- \[`pydoclint`\] Try both 'Raises' section styles when convention is unspecified (`DOC501`) ([#12649](https://github.com/astral-sh/ruff/pull/12649))
-
-## 0.5.6
-
-Ruff 0.5.6 automatically enables linting and formatting of notebooks in *preview mode*.
-You can opt-out of this behavior by adding `*.ipynb` to the `extend-exclude` setting.
-
-```toml
-[tool.ruff]
-extend-exclude = ["*.ipynb"]
-```
-
-### Preview features
-
-- Enable notebooks by default in preview mode ([#12621](https://github.com/astral-sh/ruff/pull/12621))
-- \[`flake8-builtins`\] Implement import, lambda, and module shadowing ([#12546](https://github.com/astral-sh/ruff/pull/12546))
-- \[`pydoclint`\] Add `docstring-missing-returns` (`DOC201`) and `docstring-extraneous-returns` (`DOC202`) ([#12485](https://github.com/astral-sh/ruff/pull/12485))
-
-### Rule changes
-
-- \[`flake8-return`\] Exempt cached properties and other property-like decorators from explicit return rule (`RET501`) ([#12563](https://github.com/astral-sh/ruff/pull/12563))
-
-### Server
-
-- Make server panic hook more error resilient ([#12610](https://github.com/astral-sh/ruff/pull/12610))
-- Use `$/logTrace` for server trace logs in Zed and VS Code ([#12564](https://github.com/astral-sh/ruff/pull/12564))
-- Keep track of deleted cells for reorder change request ([#12575](https://github.com/astral-sh/ruff/pull/12575))
-
-### Configuration
-
-- \[`flake8-implicit-str-concat`\] Always allow explicit multi-line concatenations when implicit concatenations are banned ([#12532](https://github.com/astral-sh/ruff/pull/12532))
-
-### Bug fixes
-
-- \[`flake8-async`\] Avoid flagging `asyncio.timeout`s as unused when the context manager includes `asyncio.TaskGroup` ([#12605](https://github.com/astral-sh/ruff/pull/12605))
-- \[`flake8-slots`\] Avoid recommending `__slots__` for classes that inherit from more than `namedtuple` ([#12531](https://github.com/astral-sh/ruff/pull/12531))
-- \[`isort`\] Avoid marking required imports as unused ([#12537](https://github.com/astral-sh/ruff/pull/12537))
-- \[`isort`\] Preserve trailing inline comments on import-from statements ([#12498](https://github.com/astral-sh/ruff/pull/12498))
-- \[`pycodestyle`\] Add newlines before comments (`E305`) ([#12606](https://github.com/astral-sh/ruff/pull/12606))
-- \[`pycodestyle`\] Don't attach comments with mismatched indents ([#12604](https://github.com/astral-sh/ruff/pull/12604))
-- \[`pyflakes`\] Fix preview-mode bugs in `F401` when attempting to autofix unused first-party submodule imports in an `__init__.py` file ([#12569](https://github.com/astral-sh/ruff/pull/12569))
-- \[`pylint`\] Respect start index in `unnecessary-list-index-lookup` ([#12603](https://github.com/astral-sh/ruff/pull/12603))
-- \[`pyupgrade`\] Avoid recommending no-argument super in `slots=True` dataclasses ([#12530](https://github.com/astral-sh/ruff/pull/12530))
-- \[`pyupgrade`\] Use colon rather than dot formatting for integer-only types ([#12534](https://github.com/astral-sh/ruff/pull/12534))
-- Fix NFKC normalization bug when removing unused imports ([#12571](https://github.com/astral-sh/ruff/pull/12571))
-
-### Other changes
-
-- Consider more stdlib decorators to be property-like ([#12583](https://github.com/astral-sh/ruff/pull/12583))
-- Improve handling of metaclasses in various linter rules ([#12579](https://github.com/astral-sh/ruff/pull/12579))
-- Improve consistency between linter rules in determining whether a function is property ([#12581](https://github.com/astral-sh/ruff/pull/12581))
-
-## 0.5.5
-
-### Preview features
-
-- \[`fastapi`\] Implement `fastapi-redundant-response-model` (`FAST001`) and `fastapi-non-annotated-dependency`(`FAST002`) ([#11579](https://github.com/astral-sh/ruff/pull/11579))
-- \[`pydoclint`\] Implement `docstring-missing-exception` (`DOC501`) and `docstring-extraneous-exception` (`DOC502`) ([#11471](https://github.com/astral-sh/ruff/pull/11471))
-
-### Rule changes
-
-- \[`numpy`\] Fix NumPy 2.0 rule for `np.alltrue` and `np.sometrue` ([#12473](https://github.com/astral-sh/ruff/pull/12473))
-- \[`numpy`\] Ignore `NPY201` inside `except` blocks for compatibility with older numpy versions ([#12490](https://github.com/astral-sh/ruff/pull/12490))
-- \[`pep8-naming`\] Avoid applying `ignore-names` to `self` and `cls` function names (`N804`, `N805`) ([#12497](https://github.com/astral-sh/ruff/pull/12497))
-
-### Formatter
-
-- Fix incorrect placement of leading function comment with type params ([#12447](https://github.com/astral-sh/ruff/pull/12447))
-
-### Server
-
-- Do not bail code action resolution when a quick fix is requested ([#12462](https://github.com/astral-sh/ruff/pull/12462))
-
-### Bug fixes
-
-- Fix `Ord` implementation of `cmp_fix` ([#12471](https://github.com/astral-sh/ruff/pull/12471))
-- Raise syntax error for unparenthesized generator expression in multi-argument call ([#12445](https://github.com/astral-sh/ruff/pull/12445))
-- \[`pydoclint`\] Fix panic in `DOC501` reported in [#12428](https://github.com/astral-sh/ruff/pull/12428) ([#12435](https://github.com/astral-sh/ruff/pull/12435))
-- \[`flake8-bugbear`\] Allow singleton tuples with starred expressions in `B013` ([#12484](https://github.com/astral-sh/ruff/pull/12484))
-
-### Documentation
-
-- Add Eglot setup guide for Emacs editor ([#12426](https://github.com/astral-sh/ruff/pull/12426))
-- Add note about the breaking change in `nvim-lspconfig` ([#12507](https://github.com/astral-sh/ruff/pull/12507))
-- Add note to include notebook files for native server ([#12449](https://github.com/astral-sh/ruff/pull/12449))
-- Add setup docs for Zed editor ([#12501](https://github.com/astral-sh/ruff/pull/12501))
-
-## 0.5.4
-
-### Rule changes
-
-- \[`ruff`\] Rename `RUF007` to `zip-instead-of-pairwise` ([#12399](https://github.com/astral-sh/ruff/pull/12399))
-
-### Bug fixes
-
-- \[`flake8-builtins`\] Avoid shadowing diagnostics for `@override` methods ([#12415](https://github.com/astral-sh/ruff/pull/12415))
-- \[`flake8-comprehensions`\] Insert parentheses for multi-argument generators ([#12422](https://github.com/astral-sh/ruff/pull/12422))
-- \[`pydocstyle`\] Handle escaped docstrings within docstring (`D301`) ([#12192](https://github.com/astral-sh/ruff/pull/12192))
-
-### Documentation
-
-- Fix GitHub link to Neovim setup ([#12410](https://github.com/astral-sh/ruff/pull/12410))
-- Fix `output-format` default in settings reference ([#12409](https://github.com/astral-sh/ruff/pull/12409))
-
-## 0.5.3
-
 **Ruff 0.5.3 marks the stable release of the Ruff language server and introduces revamped

@@ -905,7 +905,7 @@ There are three ways in which an import can be categorized as "first-party":
    package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
    automatically. This check is as simple as comparing the first segment of the current file's
    module path to the first segment of the import.
-1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
+1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
    sets the directories to scan when identifying first-party imports. The algorithm is
    straightforward: given an import, like `import foo`, iterate over the directories enumerated in
    the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
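The source-root lookup described in the hunk above is easy to picture in code. A minimal sketch, illustrative only — the function name and signature are invented, not Ruff's actual implementation:

```rust
use std::path::Path;

// An import `foo` counts as first-party if any configured `src` root
// contains a package directory `foo/` or a module file `foo.py`.
fn is_first_party(src_roots: &[&Path], module: &str) -> bool {
    src_roots
        .iter()
        .any(|root| root.join(module).is_dir() || root.join(format!("{module}.py")).is_file())
}
```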
Cargo.lock (generated) — 265 changes

@@ -133,12 +133,6 @@ version = "1.0.86"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"

-[[package]]
-name = "append-only-vec"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "74d9f7083455f1a474276ccd32374958d2cb591024aac45101c7623b10271347"
-
 [[package]]
 name = "arc-swap"
 version = "1.7.1"
@@ -147,9 +141,9 @@ checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"

 [[package]]
 name = "argfile"
-version = "0.2.1"
+version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0a1cc0ba69de57db40674c66f7cf2caee3981ddef084388482c95c0e2133e5e8"
+checksum = "b7c5c8e418080ef8aa932039d12eda7b6f5043baf48f1523c166fbc32d004534"
 dependencies = [
  "fs-err",
  "os_str_bytes",
@@ -196,9 +190,9 @@ checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"

 [[package]]
 name = "bstr"
-version = "1.10.0"
+version = "1.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c"
+checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706"
 dependencies = [
  "memchr",
  "regex-automata 0.4.6",
@@ -320,9 +314,9 @@ dependencies = [

 [[package]]
 name = "clap"
-version = "4.5.13"
+version = "4.5.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fbb260a053428790f3de475e304ff84cdbc4face759ea7a3e64c1edd938a7fc"
+checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462"
 dependencies = [
  "clap_builder",
  "clap_derive",
@@ -330,9 +324,9 @@ dependencies = [

 [[package]]
 name = "clap_builder"
-version = "4.5.13"
+version = "4.5.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "64b17d7ea74e9f833c7dbf2cbe4fb12ff26783eda4782a8975b72f895c9b4d99"
+checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942"
 dependencies = [
  "anstream",
  "anstyle",
@@ -373,9 +367,9 @@ dependencies = [

 [[package]]
 name = "clap_derive"
-version = "4.5.13"
+version = "4.5.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0"
+checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085"
 dependencies = [
  "heck",
  "proc-macro2",
@@ -765,9 +759,9 @@ dependencies = [

 [[package]]
 name = "env_logger"
-version = "0.11.5"
+version = "0.11.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d"
+checksum = "38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9"
 dependencies = [
  "anstream",
  "anstyle",
@@ -936,9 +930,9 @@ dependencies = [

 [[package]]
 name = "hashlink"
-version = "0.9.1"
+version = "0.8.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af"
+checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
 dependencies = [
  "hashbrown",
 ]
@@ -1027,9 +1021,9 @@ dependencies = [

 [[package]]
 name = "imara-diff"
-version = "0.1.7"
+version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc9da1a252bd44cd341657203722352efc9bc0c847d06ea6d2dc1cd1135e0a01"
+checksum = "af13c8ceb376860ff0c6a66d83a8cdd4ecd9e464da24621bbffcd02b49619434"
 dependencies = [
  "ahash",
  "hashbrown",
@@ -1037,9 +1031,9 @@ dependencies = [

 [[package]]
 name = "imperative"
-version = "1.0.6"
+version = "1.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "29a1f6526af721f9aec9ceed7ab8ebfca47f3399d08b80056c2acca3fcb694a9"
+checksum = "8b70798296d538cdaa6d652941fcc795963f8b9878b9e300c9fab7a522bd2fc0"
 dependencies = [
  "phf",
  "rust-stemmers",
@@ -1047,9 +1041,9 @@ dependencies = [

 [[package]]
 name = "indexmap"
-version = "2.3.0"
+version = "2.2.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0"
+checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
 dependencies = [
  "equivalent",
  "hashbrown",
@@ -1200,9 +1194,9 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"

 [[package]]
 name = "jobserver"
-version = "0.1.32"
+version = "0.1.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0"
+checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e"
 dependencies = [
  "libc",
 ]
@@ -1482,11 +1476,11 @@ dependencies = [

 [[package]]
 name = "nu-ansi-term"
-version = "0.50.1"
+version = "0.50.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399"
+checksum = "dd2800e1520bdc966782168a627aa5d1ad92e33b984bf7c7615d31280c83ff14"
 dependencies = [
- "windows-sys 0.52.0",
+ "windows-sys 0.48.0",
 ]

 [[package]]
@@ -1524,18 +1518,18 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"

 [[package]]
 name = "ordermap"
-version = "0.5.1"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c81974681ab4f0cc9fe49cad56f821d1cc67a08cd2caa9b5d58b0adaa5dd36d"
+checksum = "ab5a8e22be64dfa1123429350872e7be33594dbf5ae5212c90c5890e71966d1d"
 dependencies = [
  "indexmap",
 ]

 [[package]]
 name = "os_str_bytes"
-version = "7.0.0"
+version = "6.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ac44c994af577c799b1b4bd80dc214701e349873ad894d6cdf96f4f7526e0b9"
+checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1"
 dependencies = [
  "memchr",
 ]
@@ -1646,9 +1640,9 @@ dependencies = [

 [[package]]
 name = "pep440_rs"
-version = "0.6.6"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "466eada3179c2e069ca897b99006cbb33f816290eaeec62464eea907e22ae385"
+checksum = "ca0a570e7ec9171250cac57614e901f62408094b54b3798bb920d3cf0d4a0e09"
 dependencies = [
  "once_cell",
  "serde",
@@ -1860,46 +1854,35 @@ name = "red_knot"
 version = "0.0.0"
 dependencies = [
  "anyhow",
  "chrono",
  "clap",
  "colored",
  "countme",
  "crossbeam",
  "ctrlc",
  "filetime",
  "notify",
  "rayon",
+ "red_knot_module_resolver",
  "red_knot_python_semantic",
- "red_knot_server",
- "red_knot_workspace",
  "ruff_db",
  "ruff_python_ast",
  "rustc-hash 2.0.0",
  "salsa",
  "tempfile",
  "tracing",
- "tracing-flame",
  "tracing-subscriber",
  "tracing-tree",
 ]

 [[package]]
-name = "red_knot_python_semantic"
+name = "red_knot_module_resolver"
 version = "0.0.0"
 dependencies = [
  "anyhow",
  "bitflags 2.6.0",
  "camino",
  "compact_str",
  "countme",
  "hashbrown",
  "insta",
  "once_cell",
  "ordermap",
  "path-slash",
  "ruff_db",
  "ruff_index",
  "ruff_python_ast",
  "ruff_python_parser",
  "ruff_python_stdlib",
  "ruff_text_size",
  "rustc-hash 2.0.0",
  "salsa",
  "tempfile",
@@ -1909,59 +1892,22 @@ dependencies = [
 ]

 [[package]]
-name = "red_knot_server"
+name = "red_knot_python_semantic"
 version = "0.0.0"
 dependencies = [
  "anyhow",
  "crossbeam",
  "jod-thread",
  "libc",
  "lsp-server",
  "lsp-types",
  "red_knot_workspace",
  "bitflags 2.6.0",
  "hashbrown",
  "ordermap",
  "red_knot_module_resolver",
  "ruff_db",
  "ruff_linter",
  "ruff_notebook",
  "ruff_index",
  "ruff_python_ast",
  "ruff_source_file",
  "ruff_python_parser",
  "ruff_python_trivia",
  "ruff_text_size",
  "rustc-hash 2.0.0",
  "serde",
  "serde_json",
  "shellexpand",
  "tracing",
  "tracing-subscriber",
 ]

-[[package]]
-name = "red_knot_wasm"
-version = "0.0.0"
-dependencies = [
- "console_error_panic_hook",
- "console_log",
- "js-sys",
- "log",
- "red_knot_workspace",
- "ruff_db",
- "ruff_notebook",
- "wasm-bindgen",
- "wasm-bindgen-test",
-]
-
-[[package]]
-name = "red_knot_workspace"
-version = "0.0.0"
-dependencies = [
- "anyhow",
- "crossbeam",
- "notify",
- "red_knot_python_semantic",
- "ruff_cache",
- "ruff_db",
- "ruff_python_ast",
- "rustc-hash 2.0.0",
- "salsa",
- "thiserror",
- "tracing",
-]
@@ -1987,9 +1933,9 @@ dependencies = [

 [[package]]
 name = "regex"
-version = "1.10.6"
+version = "1.10.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619"
+checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -2046,7 +1992,7 @@ dependencies = [

 [[package]]
 name = "ruff"
-version = "0.5.7"
+version = "0.5.3"
 dependencies = [
  "anyhow",
  "argfile",
@@ -2104,7 +2050,7 @@ dependencies = [
  "criterion",
  "mimalloc",
  "once_cell",
- "red_knot_workspace",
+ "red_knot",
  "ruff_db",
  "ruff_linter",
  "ruff_python_ast",
@@ -2141,20 +2087,16 @@ dependencies = [
  "filetime",
  "ignore",
  "insta",
  "matchit",
  "path-slash",
  "ruff_cache",
  "ruff_notebook",
  "ruff_python_ast",
  "ruff_python_parser",
  "ruff_python_trivia",
  "ruff_source_file",
  "ruff_text_size",
  "rustc-hash 2.0.0",
  "salsa",
  "tempfile",
  "tracing",
- "web-time",
  "zip",
 ]

@@ -2234,7 +2176,7 @@ dependencies = [

 [[package]]
 name = "ruff_linter"
-version = "0.5.7"
+version = "0.5.3"
 dependencies = [
  "aho-corasick",
  "annotate-snippets 0.9.2",
@@ -2258,7 +2200,7 @@ dependencies = [
  "once_cell",
  "path-absolutize",
  "pathdiff",
- "pep440_rs 0.6.6",
+ "pep440_rs 0.6.0",
  "pyproject-toml",
  "quick-junit",
  "regex",
@@ -2288,7 +2230,6 @@ dependencies = [
  "thiserror",
  "toml",
  "typed-arena",
- "unicode-normalization",
  "unicode-width",
  "unicode_names2",
  "url",
@@ -2458,17 +2399,13 @@ version = "0.0.0"
 dependencies = [
  "bitflags 2.6.0",
  "is-macro",
  "ruff_cache",
  "ruff_index",
  "ruff_macros",
  "ruff_python_ast",
  "ruff_python_parser",
  "ruff_python_stdlib",
  "ruff_source_file",
  "ruff_text_size",
  "rustc-hash 2.0.0",
  "schemars",
  "serde",
 ]

 [[package]]
@@ -2554,7 +2491,7 @@ dependencies = [

 [[package]]
 name = "ruff_wasm"
-version = "0.5.7"
+version = "0.5.3"
 dependencies = [
  "console_error_panic_hook",
  "console_log",
@@ -2593,7 +2530,7 @@ dependencies = [
  "matchit",
  "path-absolutize",
  "path-slash",
- "pep440_rs 0.6.6",
+ "pep440_rs 0.6.0",
  "regex",
  "ruff_cache",
  "ruff_formatter",
@@ -2601,7 +2538,6 @@ dependencies = [
  "ruff_macros",
  "ruff_python_ast",
  "ruff_python_formatter",
- "ruff_python_semantic",
  "ruff_source_file",
  "rustc-hash 2.0.0",
  "schemars",
@@ -2694,34 +2630,25 @@ checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
 [[package]]
 name = "salsa"
 version = "0.18.0"
-source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=a1bf3a613f451af7fc0a59411c56abc47fe8e8e1#a1bf3a613f451af7fc0a59411c56abc47fe8e8e1"
 dependencies = [
- "append-only-vec",
  "arc-swap",
  "crossbeam",
- "dashmap 6.0.1",
+ "dashmap 5.5.3",
  "hashlink",
  "indexmap",
  "lazy_static",
  "log",
  "parking_lot",
- "rustc-hash 2.0.0",
- "salsa-macro-rules",
+ "rustc-hash 1.1.0",
  "salsa-macros",
  "smallvec",
  "tracing",
 ]

-[[package]]
-name = "salsa-macro-rules"
-version = "0.1.0"
-source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
-
 [[package]]
 name = "salsa-macros"
 version = "0.18.0"
-source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=a1bf3a613f451af7fc0a59411c56abc47fe8e8e1#a1bf3a613f451af7fc0a59411c56abc47fe8e8e1"
 dependencies = [
  "heck",
  "proc-macro2",
  "quote",
  "syn",
@@ -2823,12 +2750,11 @@ dependencies = [

 [[package]]
 name = "serde_json"
-version = "1.0.122"
+version = "1.0.120"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da"
+checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5"
 dependencies = [
  "itoa",
- "memchr",
  "ryu",
  "serde",
 ]
@@ -2846,9 +2772,9 @@ dependencies = [

 [[package]]
 name = "serde_spanned"
-version = "0.6.7"
+version = "0.6.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d"
+checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0"
 dependencies = [
  "serde",
 ]
@@ -2984,9 +2910,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"

 [[package]]
 name = "syn"
-version = "2.0.72"
+version = "2.0.71"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af"
+checksum = "b146dcf730474b4bcd16c311627b31ede9ab149045db4d6088b3becaea046462"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3006,13 +2932,12 @@ dependencies = [

 [[package]]
 name = "tempfile"
-version = "3.11.0"
+version = "3.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8fcd239983515c23a32fb82099f97d0b11b8c72f654ed659363a95c3dad7a53"
+checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1"
 dependencies = [
  "cfg-if",
  "fastrand",
- "once_cell",
  "rustix",
  "windows-sys 0.52.0",
 ]
@@ -3075,18 +3000,18 @@ dependencies = [

 [[package]]
 name = "thiserror"
-version = "1.0.63"
+version = "1.0.62"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724"
+checksum = "f2675633b1499176c2dff06b0856a27976a8f9d436737b4cf4f312d4d91d8bbb"
 dependencies = [
  "thiserror-impl",
 ]

 [[package]]
 name = "thiserror-impl"
-version = "1.0.63"
+version = "1.0.62"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
+checksum = "d20468752b09f49e909e55a5d338caa8bedf615594e9d80bc4c565d30faf798c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3150,9 +3075,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"

 [[package]]
 name = "toml"
-version = "0.8.19"
+version = "0.8.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e"
+checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335"
 dependencies = [
  "serde",
  "serde_spanned",
@@ -3162,18 +3087,18 @@ dependencies = [

 [[package]]
 name = "toml_datetime"
-version = "0.6.8"
+version = "0.6.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41"
+checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf"
 dependencies = [
  "serde",
 ]

 [[package]]
 name = "toml_edit"
-version = "0.22.20"
+version = "0.22.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d"
+checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38"
 dependencies = [
  "indexmap",
  "serde",
@@ -3215,17 +3140,6 @@ dependencies = [
  "valuable",
 ]

-[[package]]
-name = "tracing-flame"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bae117ee14789185e129aaee5d93750abe67fdc5a9a62650452bfe4e122a3a9"
-dependencies = [
- "lazy_static",
- "tracing",
- "tracing-subscriber",
-]
-
 [[package]]
 name = "tracing-indicatif"
 version = "0.3.6"
@@ -3269,11 +3183,11 @@ dependencies = [

 [[package]]
 name = "tracing-tree"
-version = "0.4.0"
+version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f459ca79f1b0d5f71c54ddfde6debfc59c8b6eeb46808ae492077f739dc7b49c"
+checksum = "b56c62d2c80033cb36fae448730a2f2ef99410fe3ecbffc916681a32f6807dbe"
 dependencies = [
- "nu-ansi-term 0.50.1",
+ "nu-ansi-term 0.50.0",
  "tracing-core",
  "tracing-log",
  "tracing-subscriber",
@@ -3424,9 +3338,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"

 [[package]]
 name = "uuid"
-version = "1.10.0"
+version = "1.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314"
+checksum = "5de17fd2f7da591098415cff336e12965a28061ddace43b59cb3c430179c9439"
 dependencies = [
  "getrandom",
  "rand",
@@ -3436,9 +3350,9 @@ dependencies = [

 [[package]]
 name = "uuid-macro-internal"
-version = "1.10.0"
+version = "1.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee1cd046f83ea2c4e920d6ee9f7c3537ef928d75dce5d84a87c2c5d6b3999a3a"
+checksum = "a3ff64d5cde1e2cb5268bdb497235b6bd255ba8244f910dbc3574e59593de68c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3607,16 +3521,6 @@ dependencies = [
  "wasm-bindgen",
 ]

-[[package]]
-name = "web-time"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
-dependencies = [
- "js-sys",
- "wasm-bindgen",
-]
-
 [[package]]
 name = "webpki-roots"
 version = "0.26.1"
@@ -3828,9 +3732,9 @@ checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"

 [[package]]
 name = "winnow"
-version = "0.6.18"
+version = "0.6.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f"
+checksum = "f0c976aaaa0e1f90dbb21e9587cdaf1d9679a1cde8875c0d6bd83ab96a208352"
 dependencies = [
  "memchr",
 ]
@@ -3891,7 +3795,6 @@ dependencies = [
 "byteorder",
 "crc32fast",
 "crossbeam-utils",
 "flate2",
 "zstd",
 ]
Cargo.toml — 16 changes

@@ -4,7 +4,7 @@ resolver = "2"

 [workspace.package]
 edition = "2021"
-rust-version = "1.76"
+rust-version = "1.75"
 homepage = "https://docs.astral.sh/ruff"
 documentation = "https://docs.astral.sh/ruff"
 repository = "https://github.com/astral-sh/ruff"
@@ -35,9 +35,9 @@ ruff_source_file = { path = "crates/ruff_source_file" }
 ruff_text_size = { path = "crates/ruff_text_size" }
 ruff_workspace = { path = "crates/ruff_workspace" }

 red_knot = { path = "crates/red_knot" }
+red_knot_module_resolver = { path = "crates/red_knot_module_resolver" }
 red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
-red_knot_server = { path = "crates/red_knot_server" }
-red_knot_workspace = { path = "crates/red_knot_workspace" }

 aho-corasick = { version = "1.1.3" }
 annotate-snippets = { version = "0.9.2", features = ["color"] }
@@ -108,7 +108,7 @@ rand = { version = "0.8.5" }
 rayon = { version = "1.10.0" }
 regex = { version = "1.10.2" }
 rustc-hash = { version = "2.0.0" }
-salsa = { git = "https://github.com/MichaReiser/salsa.git", tag = "red-knot-0.0.1" }
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "a1bf3a613f451af7fc0a59411c56abc47fe8e8e1" }
 schemars = { version = "0.8.16" }
 seahash = { version = "4.1.0" }
 serde = { version = "1.0.197", features = ["derive"] }
@@ -131,10 +131,9 @@ thiserror = { version = "1.0.58" }
 tikv-jemallocator = { version = "0.6.0" }
 toml = { version = "0.8.11" }
 tracing = { version = "0.1.40" }
-tracing-flame = { version = "0.2.0" }
 tracing-indicatif = { version = "0.3.6" }
-tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt"] }
-tracing-tree = { version = "0.4.0" }
+tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
+tracing-tree = { version = "0.3.0" }
 typed-arena = { version = "2.0.2" }
 unic-ucd-category = { version = "0.9" }
 unicode-ident = { version = "1.0.12" }
@@ -153,12 +152,11 @@ walkdir = { version = "2.3.2" }
 wasm-bindgen = { version = "0.2.92" }
 wasm-bindgen-test = { version = "0.3.42" }
 wild = { version = "2" }
-zip = { version = "0.6.6", default-features = false }
+zip = { version = "0.6.6", default-features = false, features = ["zstd"] }

 [workspace.lints.rust]
 unsafe_code = "warn"
 unreachable_pub = "warn"
-unexpected_cfgs = { level = "warn", check-cfg = ["cfg(fuzzing)", "cfg(codspeed)"] }

 [workspace.lints.clippy]
 pedantic = { level = "warn", priority = -2 }
LICENSE — 25 changes

@@ -1371,28 +1371,3 @@ are:
   OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
   SOFTWARE.
   """
-
-- pydoclint, licensed as follows:
-  """
-  MIT License
-
-  Copyright (c) 2023 jsh9
-
-  Permission is hereby granted, free of charge, to any person obtaining a copy
-  of this software and associated documentation files (the "Software"), to deal
-  in the Software without restriction, including without limitation the rights
-  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-  copies of the Software, and to permit persons to whom the Software is
-  furnished to do so, subject to the following conditions:
-
-  The above copyright notice and this permission notice shall be included in all
-  copies or substantial portions of the Software.
-
-  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-  SOFTWARE.
-  """
README.md — 17 changes

@@ -29,14 +29,14 @@ An extremely fast Python linter and code formatter, written in Rust.
 - 🐍 Installable via `pip`
 - 🛠️ `pyproject.toml` support
 - 🤝 Python 3.13 compatibility
-- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
+- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
 - 📦 Built-in caching, to avoid re-analyzing unchanged files
 - 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
 - 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
   of popular Flake8 plugins, like flake8-bugbear
 - ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
-  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
-- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)
+  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
+- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)

 Ruff aims to be orders of magnitude faster than alternative tools while integrating more
 functionality behind a single, common interface.
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.5.7/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.5.7/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.5.3/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.5.3/install.ps1 | iex"
 ```

 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.5.7
+  rev: v0.5.3
   hooks:
     # Run the linter.
     - id: ruff
@@ -179,7 +179,8 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
     - id: ruff-format
 ```

-Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).
+Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
+alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).

 Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
 [`ruff-action`](https://github.com/chartboost/ruff-action):
@@ -423,7 +424,6 @@ Ruff is used by a number of major open-source projects and companies, including:
 - [Dagger](https://github.com/dagger/dagger)
 - [Dagster](https://github.com/dagster-io/dagster)
 - Databricks ([MLflow](https://github.com/mlflow/mlflow))
-- [Dify](https://github.com/langgenius/dify)
 - [FastAPI](https://github.com/tiangolo/fastapi)
 - [Godot](https://github.com/godotengine/godot)
 - [Gradio](https://github.com/gradio-app/gradio)
@@ -434,7 +434,6 @@ Ruff is used by a number of major open-source projects and companies, including:
 - Hugging Face ([Transformers](https://github.com/huggingface/transformers),
   [Datasets](https://github.com/huggingface/datasets),
   [Diffusers](https://github.com/huggingface/diffusers))
-- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
 - ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
 - [Ibis](https://github.com/ibis-project/ibis)
 - [ivy](https://github.com/unifyai/ivy)
_typos.toml

@@ -1,6 +1,6 @@
 [files]
 # https://github.com/crate-ci/typos/issues/868
-extend-exclude = ["crates/red_knot_python_semantic/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
+extend-exclude = ["crates/red_knot_module_resolver/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]

 [default.extend-words]
 "arange" = "arange"  # e.g. `numpy.arange`
clippy.toml

@@ -10,12 +10,4 @@ doc-valid-idents = [
  "SCREAMING_SNAKE_CASE",
  "SQLAlchemy",
  "StackOverflow",
- "PyCharm",
 ]
-
-ignore-interior-mutability = [
- # Interned is read-only. The wrapped `Rc` never gets updated.
- "ruff_formatter::format_element::Interned",
- # The expression is read-only.
- "ruff_python_ast::hashable::HashableExpr",
-]
crates/red_knot/Cargo.toml

@@ -12,29 +12,25 @@ license.workspace = true
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
+red_knot_module_resolver = { workspace = true }
 red_knot_python_semantic = { workspace = true }
-red_knot_workspace = { workspace = true }
-red_knot_server = { workspace = true }

 ruff_db = { workspace = true, features = ["os", "cache"] }
 ruff_python_ast = { workspace = true }

 anyhow = { workspace = true }
 chrono = { workspace = true }
 clap = { workspace = true, features = ["wrap_help"] }
 colored = { workspace = true }
 countme = { workspace = true, features = ["enable"] }
 crossbeam = { workspace = true }
 ctrlc = { version = "3.4.4" }
 notify = { workspace = true }
 rayon = { workspace = true }
 rustc-hash = { workspace = true }
 salsa = { workspace = true }
-tracing = { workspace = true, features = ["release_max_level_debug"] }
-tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
-tracing-flame = { workspace = true }
+tracing = { workspace = true }
+tracing-subscriber = { workspace = true }
 tracing-tree = { workspace = true }

 [dev-dependencies]
 filetime = { workspace = true }
 tempfile = { workspace = true }

 [lints]
 workspace = true
Binary file not shown. (Before: 40 KiB)

@@ -1,103 +0,0 @@
# Tracing

Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
Note: tracing messages with severity `debug` or greater are user-facing. They should be phrased accordingly.
Tracing spans are only shown when using `-vvv`.

## Verbosity levels

The CLI supports different verbosity levels.

- default: Only show errors and warnings.
- `-v` activates `info!`: Show generally useful information, such as paths of configuration files and the detected platform. It's not a lot of messages; it's something you'd activate in CI by default (`cargo build`, for example, shows which packages are fresh at this level).
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, you're logging everything; most of it is wasted and it's really slow (we dump, e.g., the entire resolution graph). It's only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating. (A sketch of the level mapping follows this list.)
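As a rough sketch, the flag-to-level mapping described above might look as follows. This is illustrative only; the actual implementation lives in `crates/red_knot/src/cli/verbosity.rs`, added later in this diff, and the function name here is invented:

```rust
use tracing::level_filters::LevelFilter;

// Illustrative mapping from repeated `-v` flags to a maximum tracing level.
fn max_level_for(verbose_flags: u8) -> LevelFilter {
    match verbose_flags {
        0 => LevelFilter::WARN,  // default: errors and warnings only
        1 => LevelFilter::INFO,  // -v
        2 => LevelFilter::DEBUG, // -vv
        _ => LevelFilter::TRACE, // -vvv (trace! is compiled out of release builds)
    }
}
```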
## `RED_KNOT_LOG`

By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

### Examples

#### Show all debug messages

Shows debug messages from all crates.

```bash
RED_KNOT_LOG=debug
```

#### Show salsa query execution messages

Show the salsa `execute: my_query` messages in addition to all red knot messages.

```bash
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
```

#### Show typing traces

Only show traces for the `red_knot_python_semantic::types` module.

```bash
RED_KNOT_LOG="red_knot_python_semantic::types"
```

Note: Ensure that you use `-vvv` to see tracing spans.

#### Show messages for a single file

Shows all messages that are inside of a span for a specific file.

```bash
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
```

**Note**: Tracing still shows all spans, because tracing can't know, at the time of entering a span,
whether one of its children has the file `x.py`.

**Note**: Salsa currently logs entire memoized values — in our case, the source text and parsed AST.
This very quickly leads to extremely long outputs.

## Tracing and Salsa

Be mindful about using `tracing` in Salsa queries, especially when using `warn` or `error`, because it isn't guaranteed
that the query will execute after restoring from a persistent cache, in which case the user won't see the message.

For example, don't use `tracing` to show the user a message when generating a lint violation failed,
because the message would only be shown when linting the file the first time, but not on subsequent analysis
runs or when restoring from a persistent cache. This can be confusing for users, because they
won't understand why a specific lint violation isn't raised. Instead, change your
query to return the failure as part of the query's result, or use a Salsa accumulator.
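A minimal sketch of that shape — the type and field names here are hypothetical, not Red Knot's actual API:

```rust
// Failures travel with the memoized query result instead of being logged,
// so they are still available on later runs that hit the cache.
#[derive(Debug, Clone, PartialEq, Eq)]
struct LintResult {
    violations: Vec<String>,
    /// Errors encountered while linting, surfaced alongside the result
    /// rather than via a one-shot `tracing::warn!` during first execution.
    errors: Vec<String>,
}
```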
## Release builds

`trace!` events are removed in release builds (the `tracing` dependency in `crates/red_knot/Cargo.toml` enables tracing's `release_max_level_debug` feature, as seen in the Cargo.toml diff above).

## Profiling

Red Knot generates a folded stack trace named `tracing.folded` in the current directory when the environment variable `RED_KNOT_LOG_PROFILE` is set to `1` or `true`.

```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
```

You can convert the textual representation into a visual one using `inferno`.

```shell
cargo install inferno
```

```shell
# flamegraph
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg

# flamechart
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
```



See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.
crates/red_knot/src/cli/mod.rs (new file) — 2 lines

@@ -0,0 +1,2 @@
pub(crate) mod target_version;
pub(crate) mod verbosity;
34
crates/red_knot/src/cli/verbosity.rs
Normal file
34
crates/red_knot/src/cli/verbosity.rs
Normal file
@@ -0,0 +1,34 @@
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
    Info,
    Debug,
    Trace,
}

/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
    #[arg(
        long,
        short = 'v',
        help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
        action = clap::ArgAction::Count,
        global = true,
    )]
    verbose: u8,
}

impl Verbosity {
    /// Returns the verbosity level based on the number of `-v` flags.
    ///
    /// Returns `None` if the user did not specify any verbosity flags.
    pub(crate) fn level(&self) -> Option<VerbosityLevel> {
        match self.verbose {
            0 => None,
            1 => Some(VerbosityLevel::Info),
            2 => Some(VerbosityLevel::Debug),
            _ => Some(VerbosityLevel::Trace),
        }
    }
}
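
For illustration, a hypothetical CLI that flattens `Verbosity` in and reads the level back (the `Cli` struct is invented; only `Verbosity` and `VerbosityLevel` above are from the diff):

```rust
#[derive(clap::Parser, Debug)]
struct Cli {
    #[clap(flatten)]
    verbosity: Verbosity,
}

fn main() {
    let cli = <Cli as clap::Parser>::parse();
    match cli.verbosity.level() {
        // No `-v` flags at all: the caller picks the default behavior.
        None => eprintln!("default verbosity"),
        Some(level) => eprintln!("verbosity: {level:?}"),
    }
}
```
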
200
crates/red_knot/src/db.rs
Normal file
200
crates/red_knot/src/db.rs
Normal file
@@ -0,0 +1,200 @@
use std::panic::{AssertUnwindSafe, RefUnwindSafe};
use std::sync::Arc;

use salsa::{Cancelled, Database, DbWithJar};

use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::program::{Program, ProgramSettings};
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};

use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled, Diagnostics};
use crate::watch::{FileChangeKind, FileWatcherChange};
use crate::workspace::{check_file, Package, Workspace, WorkspaceMetadata};

pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}

#[salsa::jar(db=Db)]
pub struct Jar(
    Workspace,
    Package,
    lint_syntax,
    lint_semantic,
    unwind_if_cancelled,
);

#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
pub struct RootDatabase {
    workspace: Option<Workspace>,
    storage: salsa::Storage<RootDatabase>,
    files: Files,
    system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
}

impl RootDatabase {
    pub fn new<S>(workspace: WorkspaceMetadata, settings: ProgramSettings, system: S) -> Self
    where
        S: System + 'static + Send + Sync + RefUnwindSafe,
    {
        let mut db = Self {
            workspace: None,
            storage: salsa::Storage::default(),
            files: Files::default(),
            system: Arc::new(system),
        };

        let workspace = Workspace::from_metadata(&db, workspace);
        // Initialize the `Program` singleton
        Program::from_settings(&db, settings);

        db.workspace = Some(workspace);
        db
    }

    pub fn workspace(&self) -> Workspace {
        // SAFETY: The workspace is always initialized in `new`.
        self.workspace.unwrap()
    }

    #[tracing::instrument(level = "debug", skip(self, changes))]
    pub fn apply_changes(&mut self, changes: Vec<FileWatcherChange>) {
        let workspace = self.workspace();
        let workspace_path = workspace.root(self).to_path_buf();

        // TODO: Optimize change tracking by only reloading a package if a file that is part of the package was changed.
        let mut structural_change = false;
        for change in changes {
            if matches!(
                change.path.file_name(),
                Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
            ) {
                // Changes to ignore files or settings can change the workspace structure or add/remove files
                // from packages.
                structural_change = true;
            } else {
                match change.kind {
                    FileChangeKind::Created => {
                        // Reload the package when a new file was added. This is necessary because the file might be excluded
                        // by a gitignore.
                        if workspace.package(self, &change.path).is_some() {
                            structural_change = true;
                        }
                    }
                    FileChangeKind::Modified => {}
                    FileChangeKind::Deleted => {
                        if let Some(package) = workspace.package(self, &change.path) {
                            if let Some(file) = system_path_to_file(self, &change.path) {
                                package.remove_file(self, file);
                            }
                        }
                    }
                }
            }

            File::touch_path(self, &change.path);
        }

        if structural_change {
            match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
                Ok(metadata) => {
                    tracing::debug!("Reload workspace after structural change.");
                    // TODO: Handle changes in the program settings.
                    workspace.reload(self, metadata);
                }
                Err(error) => {
                    tracing::error!("Failed to load workspace, keep old workspace: {error}");
                }
            }
        }
    }

    /// Checks all open files in the workspace and its dependencies.
    pub fn check(&self) -> Result<Vec<String>, Cancelled> {
        self.with_db(|db| db.workspace().check(db))
    }

    pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
        self.with_db(|db| check_file(db, file))
    }

    pub(crate) fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
    where
        F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
    {
        // The `AssertUnwindSafe` here looks scary, but is a consequence of Salsa's design.
        // Salsa uses panics to implement cancellation and to recover from cycles. However, the Salsa
        // storage isn't `UnwindSafe` or `RefUnwindSafe` because its dependencies `DashMap` and `parking_lot::*` aren't
        // unwind safe.
        //
        // Having to use `AssertUnwindSafe` isn't as big a deal as it might seem because
        // the `UnwindSafe` and `RefUnwindSafe` traits are designed to catch logical bugs.
        // They don't protect against [UB](https://internals.rust-lang.org/t/pre-rfc-deprecating-unwindsafe/15974).
        // On top of that, `Cancelled` only catches specific Salsa-panics and propagates all other panics.
        //
        // That still leaves us with possible logical bugs in two sources:
        // * In Salsa itself: This must be considered a bug in Salsa and needs fixing upstream.
        //   Reviewing Salsa code specifically around unwind safety seems doable.
        // * Our code: This is the main concern. Luckily, it only involves code that uses internal mutability
        //   and calls into Salsa queries when mutating the internal state. Using `AssertUnwindSafe`
        //   certainly makes it harder to catch these issues in our user code.
        //
        // For now, this is the only solution at hand unless Salsa decides to change its design.
        // [Zulip support thread](https://salsa.zulipchat.com/#narrow/stream/145099-general/topic/How.20to.20use.20.60Cancelled.3A.3Acatch.60)
        let db = &AssertUnwindSafe(self);
        Cancelled::catch(|| f(db))
    }
}

impl Upcast<dyn SemanticDb> for RootDatabase {
    fn upcast(&self) -> &(dyn SemanticDb + 'static) {
        self
    }
}

impl Upcast<dyn SourceDb> for RootDatabase {
    fn upcast(&self) -> &(dyn SourceDb + 'static) {
        self
    }
}

impl Upcast<dyn ResolverDb> for RootDatabase {
    fn upcast(&self) -> &(dyn ResolverDb + 'static) {
        self
    }
}

impl ResolverDb for RootDatabase {}

impl SemanticDb for RootDatabase {}

impl SourceDb for RootDatabase {
    fn vendored(&self) -> &VendoredFileSystem {
        vendored_typeshed_stubs()
    }

    fn system(&self) -> &dyn System {
        &*self.system
    }

    fn files(&self) -> &Files {
        &self.files
    }
}

impl Database for RootDatabase {}

impl Db for RootDatabase {}

impl salsa::ParallelDatabase for RootDatabase {
    fn snapshot(&self) -> salsa::Snapshot<Self> {
        salsa::Snapshot::new(Self {
            workspace: self.workspace,
            storage: self.storage.snapshot(),
            files: self.files.snapshot(),
            system: self.system.clone(),
        })
    }
}
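
For context, one plausible way a caller treats the `Cancelled` result (the retry loop is illustrative only, not the actual main-loop policy, which appears further below):

```rust
fn check_until_current(db: &RootDatabase) -> Vec<String> {
    loop {
        match db.check() {
            Ok(diagnostics) => return diagnostics,
            // A concurrent write cancelled the query mid-run; the next
            // iteration re-checks against the new revision.
            Err(_cancelled) => continue,
        }
    }
}
```
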
@@ -1,5 +1,6 @@
use crate::db::Jar;

pub mod db;
pub mod lint;
pub mod site_packages;
pub mod watch;
pub mod workspace;
@@ -2,15 +2,16 @@ use std::cell::RefCell;
use std::ops::Deref;
use std::time::Duration;

use tracing::debug_span;
use tracing::trace_span;

use red_knot_module_resolver::ModuleName;
use red_knot_python_semantic::types::Type;
use red_knot_python_semantic::{HasTy, ModuleName, SemanticModel};
use red_knot_python_semantic::{HasTy, SemanticModel};
use ruff_db::files::File;
use ruff_db::parsed::{parsed_module, ParsedModule};
use ruff_db::source::{source_text, SourceText};
use ruff_python_ast as ast;
use ruff_python_ast::visitor::{walk_expr, walk_stmt, Visitor};
use ruff_python_ast::visitor::{walk_stmt, Visitor};

use crate::db::Db;

@@ -72,10 +73,9 @@ fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
    }
}

#[allow(unreachable_pub)]
#[salsa::tracked(return_ref)]
pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
    let _span = debug_span!("lint_semantic", file=%file_id.path(db)).entered();
pub(crate) fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
    let _span = trace_span!("lint_semantic", ?file_id).entered();

    let source = source_text(db.upcast(), file_id);
    let parsed = parsed_module(db.upcast(), file_id);
@@ -120,25 +120,6 @@ fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef)
    }
}

fn lint_maybe_undefined(context: &SemanticLintContext, name: &ast::ExprName) {
    if !matches!(name.ctx, ast::ExprContext::Load) {
        return;
    }
    let semantic = &context.semantic;
    match name.ty(semantic) {
        Type::Unbound => {
            context.push_diagnostic(format!("Name '{}' used when not defined.", &name.id));
        }
        Type::Union(union) if union.contains(semantic.db(), Type::Unbound) => {
            context.push_diagnostic(format!(
                "Name '{}' used when possibly not defined.",
                &name.id
            ));
        }
        _ => {}
    }
}

fn lint_bad_override(context: &SemanticLintContext, class: &ast::StmtClassDef) {
    let semantic = &context.semantic;

@@ -252,17 +233,6 @@ impl Visitor<'_> for SemanticVisitor<'_> {

        walk_stmt(self, stmt);
    }

    fn visit_expr(&mut self, expr: &ast::Expr) {
        match expr {
            ast::Expr::Name(name) if matches!(name.ctx, ast::ExprContext::Load) => {
                lint_maybe_undefined(self.context, name);
            }
            _ => {}
        }

        walk_expr(self, expr);
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
@@ -302,63 +272,3 @@ enum AnyImportRef<'a> {
    Import(&'a ast::StmtImport),
    ImportFrom(&'a ast::StmtImportFrom),
}

#[cfg(test)]
mod tests {
    use ruff_db::files::system_path_to_file;
    use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

    use super::{lint_semantic, Diagnostics};
    use crate::db::tests::TestDb;

    fn setup_db() -> TestDb {
        setup_db_with_root(SystemPathBuf::from("/src"))
    }

    fn setup_db_with_root(src_root: SystemPathBuf) -> TestDb {
        let db = TestDb::new();

        Program::new(
            &db,
            TargetVersion::Py38,
            SearchPathSettings {
                extra_paths: Vec::new(),
                src_root,
                site_packages: vec![],
                custom_typeshed: None,
            },
        );

        db
    }

    #[test]
    fn undefined_variable() {
        let mut db = setup_db();

        db.write_dedented(
            "/src/a.py",
            "
            x = int
            if flag:
                y = x
            y
            ",
        )
        .unwrap();

        let file = system_path_to_file(&db, "/src/a.py").expect("file to exist");
        let Diagnostics::List(messages) = lint_semantic(&db, file) else {
            panic!("expected some diagnostics");
        };

        assert_eq!(
            *messages,
            vec![
                "Name 'flag' used when not defined.",
                "Name 'y' used when possibly not defined."
            ]
        );
    }
}
@@ -1,254 +0,0 @@
//! Sets up logging for Red Knot

use anyhow::Context;
use colored::Colorize;
use std::fmt;
use std::fs::File;
use std::io::BufWriter;
use tracing::log::LevelFilter;
use tracing::{Event, Subscriber};
use tracing_subscriber::fmt::format::Writer;
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::registry::LookupSpan;
use tracing_subscriber::EnvFilter;

/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
    #[arg(
        long,
        short = 'v',
        help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
        action = clap::ArgAction::Count,
        global = true,
    )]
    verbose: u8,
}

impl Verbosity {
    /// Returns the verbosity level based on the number of `-v` flags.
    ///
    /// Returns `None` if the user did not specify any verbosity flags.
    pub(crate) fn level(&self) -> VerbosityLevel {
        match self.verbose {
            0 => VerbosityLevel::Default,
            1 => VerbosityLevel::Verbose,
            2 => VerbosityLevel::ExtraVerbose,
            _ => VerbosityLevel::Trace,
        }
    }
}

#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
    /// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN).
    Default,

    /// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO).
    /// Corresponds to `-v`.
    Verbose,

    /// Enables a more verbose tracing format and emits Ruff and Red Knot events up to [`DEBUG`](tracing::Level::DEBUG).
    /// Corresponds to `-vv`
    ExtraVerbose,

    /// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
    Trace,
}

impl VerbosityLevel {
    const fn level_filter(self) -> LevelFilter {
        match self {
            VerbosityLevel::Default => LevelFilter::Warn,
            VerbosityLevel::Verbose => LevelFilter::Info,
            VerbosityLevel::ExtraVerbose => LevelFilter::Debug,
            VerbosityLevel::Trace => LevelFilter::Trace,
        }
    }

    pub(crate) const fn is_trace(self) -> bool {
        matches!(self, VerbosityLevel::Trace)
    }

    pub(crate) const fn is_extra_verbose(self) -> bool {
        matches!(self, VerbosityLevel::ExtraVerbose)
    }
}

pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
    use tracing_subscriber::prelude::*;

    // The `RED_KNOT_LOG` environment variable overrides the default log level.
    let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
        EnvFilter::builder()
            .parse(log_env_variable)
            .context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
    } else {
        match level {
            VerbosityLevel::Default => {
                // Show warning traces
                EnvFilter::default().add_directive(tracing::level_filters::LevelFilter::WARN.into())
            }
            level => {
                let level_filter = level.level_filter();

                // Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
                let filter = EnvFilter::default().add_directive(
                    format!("red_knot={level_filter}")
                        .parse()
                        .expect("Hardcoded directive to be valid"),
                );

                filter.add_directive(
                    format!("ruff={level_filter}")
                        .parse()
                        .expect("Hardcoded directive to be valid"),
                )
            }
        }
    };

    let (profiling_layer, guard) = setup_profile();

    let registry = tracing_subscriber::registry()
        .with(filter)
        .with(profiling_layer);

    if level.is_trace() {
        let subscriber = registry.with(
            tracing_tree::HierarchicalLayer::default()
                .with_indent_lines(true)
                .with_indent_amount(2)
                .with_bracketed_fields(true)
                .with_thread_ids(true)
                .with_targets(true)
                .with_writer(std::io::stderr)
                .with_timer(tracing_tree::time::Uptime::default()),
        );

        subscriber.init();
    } else {
        let subscriber = registry.with(
            tracing_subscriber::fmt::layer()
                .event_format(RedKnotFormat {
                    display_level: true,
                    display_timestamp: level.is_extra_verbose(),
                    show_spans: false,
                })
                .with_writer(std::io::stderr),
        );

        subscriber.init();
    }

    Ok(TracingGuard {
        _flame_guard: guard,
    })
}

#[allow(clippy::type_complexity)]
fn setup_profile<S>() -> (
    Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
    Option<tracing_flame::FlushGuard<BufWriter<File>>>,
)
where
    S: Subscriber + for<'span> LookupSpan<'span>,
{
    if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
        let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
            .expect("Flame layer to be created");
        (Some(layer), Some(guard))
    } else {
        (None, None)
    }
}

pub(crate) struct TracingGuard {
    _flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
}

struct RedKnotFormat {
    display_timestamp: bool,
    display_level: bool,
    show_spans: bool,
}

/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
impl<S, N> FormatEvent<S, N> for RedKnotFormat
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    fn format_event(
        &self,
        ctx: &FmtContext<'_, S, N>,
        mut writer: Writer<'_>,
        event: &Event<'_>,
    ) -> fmt::Result {
        let meta = event.metadata();
        let ansi = writer.has_ansi_escapes();

        if self.display_timestamp {
            let timestamp = chrono::Local::now()
                .format("%Y-%m-%d %H:%M:%S.%f")
                .to_string();
            if ansi {
                write!(writer, "{} ", timestamp.dimmed())?;
            } else {
                write!(
                    writer,
                    "{} ",
                    chrono::Local::now().format("%Y-%m-%d %H:%M:%S.%f")
                )?;
            }
        }

        if self.display_level {
            let level = meta.level();
            // Same colors as tracing
            if ansi {
                let formatted_level = level.to_string();
                match *level {
                    tracing::Level::TRACE => {
                        write!(writer, "{} ", formatted_level.purple().bold())?;
                    }
                    tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
                    tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
                    tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
                    tracing::Level::ERROR => write!(writer, "{} ", level.to_string().red().bold())?,
                }
            } else {
                write!(writer, "{level} ")?;
            }
        }

        if self.show_spans {
            let span = event.parent();
            let mut seen = false;

            let span = span
                .and_then(|id| ctx.span(id))
                .or_else(|| ctx.lookup_current());

            let scope = span.into_iter().flat_map(|span| span.scope().from_root());

            for span in scope {
                seen = true;
                if ansi {
                    write!(writer, "{}:", span.metadata().name().bold())?;
                } else {
                    write!(writer, "{}:", span.metadata().name())?;
                }
            }

            if seen {
                writer.write_char(' ')?;
            }
        }

        ctx.field_format().format_fields(writer.by_ref(), event)?;

        writeln!(writer)
    }
}
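
Reading `setup_tracing` above: when `RED_KNOT_LOG` is set it replaces the `-v`-derived filter entirely; otherwise the flag count decides. A quick illustration (flags as documented earlier in this document):

```bash
# `-vv` alone: red_knot and ruff events up to DEBUG.
red_knot --current-directory=../test -vv

# RED_KNOT_LOG takes precedence over the `-vv`-derived directives.
RED_KNOT_LOG=salsa=info red_knot --current-directory=../test -vv
```
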
@@ -1,39 +1,35 @@
use std::process::{ExitCode, Termination};
use std::sync::Mutex;

use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use salsa::plumbing::ZalsaDatabase;
use salsa::ParallelDatabase;
use tracing::subscriber::Interest;
use tracing::{Level, Metadata};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
use tracing_subscriber::{Layer, Registry};
use tracing_tree::time::Uptime;

use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::site_packages::site_packages_dirs_of_venv;
use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher;
use red_knot_workspace::workspace::WorkspaceMetadata;
use red_knot::db::RootDatabase;
use red_knot::watch::FileWatcher;
use red_knot::watch::FileWatcherChange;
use red_knot::workspace::WorkspaceMetadata;
use ruff_db::program::{ProgramSettings, SearchPathSettings};
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use target_version::TargetVersion;
use ruff_db::system::{OsSystem, System, SystemPathBuf};

use crate::logging::{setup_tracing, Verbosity};
use cli::target_version::TargetVersion;
use cli::verbosity::{Verbosity, VerbosityLevel};

mod logging;
mod target_version;
mod verbosity;
mod cli;

#[derive(Debug, Parser)]
#[command(
    author,
    name = "red-knot",
    about = "An extremely fast Python type checker."
    about = "An experimental multifile analysis backend for Ruff"
)]
#[command(version)]
struct Args {
    #[command(subcommand)]
    pub(crate) command: Option<Command>,

    #[arg(
        long,
        help = "Changes the current working directory.",
@@ -42,17 +38,6 @@ struct Args {
    )]
    current_directory: Option<SystemPathBuf>,

    #[arg(
        long,
        help = "Path to the virtual environment the project uses",
        long_help = "\
Path to the virtual environment the project uses. \
If provided, red-knot will use the `site-packages` directory of this virtual environment \
to resolve type information for the project's third-party dependencies.",
        value_name = "PATH"
    )]
    venv_path: Option<SystemPathBuf>,

    #[arg(
        long,
        value_name = "DIRECTORY",
@@ -67,125 +52,52 @@ to resolve type information for the project's third-party dependencies.",
    )]
    extra_search_path: Vec<SystemPathBuf>,

    #[arg(
        long,
        help = "Python version to assume when resolving types",
        default_value_t = TargetVersion::default(),
        value_name="VERSION")
    ]
    #[arg(long, help = "Python version to assume when resolving types", default_value_t = TargetVersion::default(), value_name="VERSION")]
    target_version: TargetVersion,

    #[clap(flatten)]
    verbosity: Verbosity,

    #[arg(
        long,
        help = "Run in watch mode by re-running whenever files change",
        short = 'W'
    )]
    watch: bool,
}

#[derive(Debug, clap::Subcommand)]
pub enum Command {
    /// Start the language server
    Server,
}

#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
pub fn main() -> ExitStatus {
    run().unwrap_or_else(|error| {
        use std::io::Write;

        // Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
        let mut stderr = std::io::stderr().lock();

        // This communicates that this isn't a linter error but Red Knot itself hard-errored for
        // some reason (e.g. failed to resolve the configuration)
        writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
        // Currently we generally only see one error, but e.g. with io errors when resolving
        // the configuration it is helpful to chain errors ("resolving configuration failed" ->
        // "failed to read file: subdir/pyproject.toml")
        for cause in error.chain() {
            writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
        }

        ExitStatus::Error
    })
}

fn run() -> anyhow::Result<ExitStatus> {
#[allow(
    clippy::print_stdout,
    clippy::unnecessary_wraps,
    clippy::print_stderr,
    clippy::dbg_macro
)]
pub fn main() -> anyhow::Result<()> {
    let Args {
        command,
        current_directory,
        custom_typeshed_dir,
        extra_search_path: extra_paths,
        venv_path,
        target_version,
        verbosity,
        watch,
    } = Args::parse_from(std::env::args().collect::<Vec<_>>());

    if matches!(command, Some(Command::Server)) {
        return run_server().map(|()| ExitStatus::Success);
    }

    let verbosity = verbosity.level();
    countme::enable(verbosity.is_trace());
    let _guard = setup_tracing(verbosity)?;
    countme::enable(verbosity == Some(VerbosityLevel::Trace));
    setup_tracing(verbosity);

    // The base path to which all CLI arguments are relative to.
    let cli_base_path = {
        let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
        SystemPathBuf::from_path_buf(cwd)
            .map_err(|path| {
                anyhow!(
                    "The current working directory '{}' contains non-unicode characters. Red Knot only supports unicode paths.",
                    path.display()
                )
            })?
    let cwd = if let Some(cwd) = current_directory {
        let canonicalized = cwd.as_utf8_path().canonicalize_utf8().unwrap();
        SystemPathBuf::from_utf8_path_buf(canonicalized)
    } else {
        let cwd = std::env::current_dir().unwrap();
        SystemPathBuf::from_path_buf(cwd).unwrap()
    };

    let cwd = current_directory
        .map(|cwd| {
            if cwd.as_std_path().is_dir() {
                Ok(SystemPath::absolute(&cwd, &cli_base_path))
            } else {
                Err(anyhow!(
                    "Provided current-directory path '{cwd}' is not a directory."
                ))
            }
        })
        .transpose()?
        .unwrap_or_else(|| cli_base_path.clone());

    let system = OsSystem::new(cwd.clone());
    let workspace_metadata = WorkspaceMetadata::from_path(system.current_directory(), &system)?;

    // TODO: Verify the remaining search path settings eagerly.
    let site_packages = venv_path
        .map(|venv_path| {
            let venv_path = SystemPath::absolute(venv_path, &cli_base_path);

            if system.is_directory(&venv_path) {
                Ok(site_packages_dirs_of_venv(&venv_path, &system)?)
            } else {
                Err(anyhow!(
                    "Provided venv-path {venv_path} is not a directory!"
                ))
            }
        })
        .transpose()?
        .unwrap_or_default();
    let workspace_metadata =
        WorkspaceMetadata::from_path(system.current_directory(), &system).unwrap();

    // TODO: Respect the settings from the workspace metadata when resolving the program settings.
    let program_settings = ProgramSettings {
        target_version: target_version.into(),
        search_paths: SearchPathSettings {
            extra_paths,
            src_root: workspace_metadata.root().to_path_buf(),
            workspace_root: workspace_metadata.root().to_path_buf(),
            custom_typeshed: custom_typeshed_dir,
            site_packages,
            site_packages: None,
        },
    };

@@ -193,7 +105,7 @@ fn run() -> anyhow::Result<ExitStatus> {
    // cache and load the cache if it exists.
    let mut db = RootDatabase::new(workspace_metadata, program_settings, system);

    let (main_loop, main_loop_cancellation_token) = MainLoop::new();
    let (main_loop, main_loop_cancellation_token) = MainLoop::new(verbosity);

    // Listen to Ctrl+C and abort the watch mode.
    let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -205,158 +117,125 @@ fn run() -> anyhow::Result<ExitStatus> {
        }
    })?;

    let exit_status = if watch {
        main_loop.watch(&mut db)?
    } else {
        main_loop.run(&mut db)
    };
    let file_changes_notifier = main_loop.file_changes_notifier();

    tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
    // Watch for file changes and re-trigger the analysis.
    let mut file_watcher = FileWatcher::new(move |changes| {
        file_changes_notifier.notify(changes);
    })?;

    std::mem::forget(db);
    file_watcher.watch_folder(db.workspace().root(&db).as_std_path())?;

    Ok(exit_status)
}
    main_loop.run(&mut db);

#[derive(Copy, Clone)]
pub enum ExitStatus {
    /// Checking was successful and there were no errors.
    Success = 0,
    println!("{}", countme::get_all());

    /// Checking was successful but there were errors.
    Failure = 1,

    /// Checking failed.
    Error = 2,
}

impl Termination for ExitStatus {
    fn report(self) -> ExitCode {
        ExitCode::from(self as u8)
    }
    Ok(())
}

struct MainLoop {
    /// Sender that can be used to send messages to the main loop.
    sender: crossbeam_channel::Sender<MainLoopMessage>,

    /// Receiver for the messages sent **to** the main loop.
    verbosity: Option<VerbosityLevel>,
    orchestrator: crossbeam_channel::Sender<OrchestratorMessage>,
    receiver: crossbeam_channel::Receiver<MainLoopMessage>,

    /// The file system watcher, if running in watch mode.
    watcher: Option<WorkspaceWatcher>,
}

impl MainLoop {
    fn new() -> (Self, MainLoopCancellationToken) {
        let (sender, receiver) = crossbeam_channel::bounded(10);
    fn new(verbosity: Option<VerbosityLevel>) -> (Self, MainLoopCancellationToken) {
        let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
        let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);

        let mut orchestrator = Orchestrator {
            receiver: orchestrator_receiver,
            main_loop: main_loop_sender.clone(),
            revision: 0,
        };

        std::thread::spawn(move || {
            orchestrator.run();
        });

        (
            Self {
                sender: sender.clone(),
                receiver,
                watcher: None,
                verbosity,
                orchestrator: orchestrator_sender,
                receiver: main_loop_receiver,
            },
            MainLoopCancellationToken {
                sender: main_loop_sender,
            },
            MainLoopCancellationToken { sender },
        )
    }

    fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> {
        tracing::debug!("Starting watch mode");
        let sender = self.sender.clone();
        let watcher = watch::directory_watcher(move |event| {
            sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
        })?;

        self.watcher = Some(WorkspaceWatcher::new(watcher, db));

        self.run(db);

        Ok(ExitStatus::Success)
    fn file_changes_notifier(&self) -> FileChangesNotifier {
        FileChangesNotifier {
            sender: self.orchestrator.clone(),
        }
    }

    fn run(mut self, db: &mut RootDatabase) -> ExitStatus {
        self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
    #[allow(clippy::print_stderr)]
    fn run(self, db: &mut RootDatabase) {
        self.orchestrator.send(OrchestratorMessage::Run).unwrap();

        let result = self.main_loop(db);
        for message in &self.receiver {
            tracing::trace!("Main Loop: Tick");

        tracing::debug!("Exiting main loop");

        result
    }

    fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus {
        // Schedule the first check.
        tracing::debug!("Starting main loop");

        let mut revision = 0u64;

        while let Ok(message) = self.receiver.recv() {
            match message {
                MainLoopMessage::CheckWorkspace => {
                MainLoopMessage::CheckWorkspace { revision } => {
                    let db = db.snapshot();
                    let sender = self.sender.clone();
                    let orchestrator = self.orchestrator.clone();

                    // Spawn a new task that checks the workspace. This needs to be done in a separate thread
                    // to prevent blocking the main loop here.
                    rayon::spawn(move || {
                        if let Ok(result) = db.check() {
                            // Send the result back to the main loop for printing.
                            sender
                                .send(MainLoopMessage::CheckCompleted { result, revision })
                            orchestrator
                                .send(OrchestratorMessage::CheckCompleted {
                                    diagnostics: result,
                                    revision,
                                })
                                .unwrap();
                        }
                    });
                }

                MainLoopMessage::CheckCompleted {
                    result,
                    revision: check_revision,
                } => {
                    let has_diagnostics = !result.is_empty();
                    if check_revision == revision {
                        for diagnostic in result {
                            tracing::error!("{}", diagnostic);
                        }
                    } else {
                        tracing::debug!(
                            "Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
                        );
                    }

                    if self.watcher.is_none() {
                        return if has_diagnostics {
                            ExitStatus::Failure
                        } else {
                            ExitStatus::Success
                        };
                    }

                    tracing::trace!("Counts after last check:\n{}", countme::get_all());
                }

                MainLoopMessage::ApplyChanges(changes) => {
                    revision += 1;
                    // Automatically cancels any pending queries and waits for them to complete.
                    db.apply_changes(changes);
                    if let Some(watcher) = self.watcher.as_mut() {
                        watcher.update(db);
                    }
                MainLoopMessage::CheckCompleted(diagnostics) => {
                    eprintln!("{}", diagnostics.join("\n"));
                    if self.verbosity == Some(VerbosityLevel::Trace) {
                        eprintln!("{}", countme::get_all());
                    }
                    self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
                }
                MainLoopMessage::Exit => {
                    // Cancel any pending queries and wait for them to complete.
                    // TODO: Don't use Salsa internal APIs
                    // [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
                    let _ = db.zalsa_mut();
                    return ExitStatus::Success;
                    if self.verbosity == Some(VerbosityLevel::Trace) {
                        eprintln!("{}", countme::get_all());
                    }
                    return;
                }
            }

            tracing::debug!("Waiting for next main loop message.");
        }
    }
}

    ExitStatus::Success
impl Drop for MainLoop {
    fn drop(&mut self) {
        self.orchestrator
            .send(OrchestratorMessage::Shutdown)
            .unwrap();
    }
}

#[derive(Debug, Clone)]
struct FileChangesNotifier {
    sender: crossbeam_channel::Sender<OrchestratorMessage>,
}

impl FileChangesNotifier {
    fn notify(&self, changes: Vec<FileWatcherChange>) {
        self.sender
            .send(OrchestratorMessage::FileChanges(changes))
            .unwrap();
    }
}

@@ -371,11 +250,170 @@ impl MainLoopCancellationToken {
    }
}

struct Orchestrator {
    /// Sends messages to the main loop.
    main_loop: crossbeam_channel::Sender<MainLoopMessage>,
    /// Receives messages from the main loop.
    receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
    revision: usize,
}

impl Orchestrator {
    #[allow(clippy::print_stderr)]
    fn run(&mut self) {
        while let Ok(message) = self.receiver.recv() {
            match message {
                OrchestratorMessage::Run => {
                    self.main_loop
                        .send(MainLoopMessage::CheckWorkspace {
                            revision: self.revision,
                        })
                        .unwrap();
                }

                OrchestratorMessage::CheckCompleted {
                    diagnostics,
                    revision,
                } => {
                    // Only take the diagnostics if they are for the latest revision.
                    if self.revision == revision {
                        self.main_loop
                            .send(MainLoopMessage::CheckCompleted(diagnostics))
                            .unwrap();
                    } else {
                        tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
                    }
                }

                OrchestratorMessage::FileChanges(changes) => {
                    // Request cancellation, but wait until all analysis tasks have completed to
                    // avoid stale messages in the next main loop.

                    self.revision += 1;
                    self.debounce_changes(changes);
                }
                OrchestratorMessage::Shutdown => {
                    return self.shutdown();
                }
            }
        }
    }

    fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
        loop {
            // Consume possibly incoming file change messages before running a new analysis, but don't wait for more than 100ms.
            crossbeam_channel::select! {
                recv(self.receiver) -> message => {
                    match message {
                        Ok(OrchestratorMessage::Shutdown) => {
                            return self.shutdown();
                        }
                        Ok(OrchestratorMessage::FileChanges(file_changes)) => {
                            changes.extend(file_changes);
                        }

                        Ok(OrchestratorMessage::CheckCompleted { .. }) => {
                            // Disregard any outdated completion message.
                        }
                        Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),

                        Err(_) => {
                            // There are no more senders, no point in waiting for more messages
                            return;
                        }
                    }
                },
                default(std::time::Duration::from_millis(10)) => {
                    // No more file changes after 10 ms, send the changes and schedule a new analysis
                    self.main_loop.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
                    self.main_loop.send(MainLoopMessage::CheckWorkspace { revision: self.revision }).unwrap();
                    return;
                }
            }
        }
    }

    #[allow(clippy::unused_self)]
    fn shutdown(&self) {
        tracing::trace!("Shutting down orchestrator.");
    }
}

/// Message sent from the orchestrator to the main loop.
#[derive(Debug)]
enum MainLoopMessage {
    CheckWorkspace,
    CheckCompleted { result: Vec<String>, revision: u64 },
    ApplyChanges(Vec<watch::ChangeEvent>),
    CheckWorkspace { revision: usize },
    CheckCompleted(Vec<String>),
    ApplyChanges(Vec<FileWatcherChange>),
    Exit,
}

#[derive(Debug)]
enum OrchestratorMessage {
    Run,
    Shutdown,

    CheckCompleted {
        diagnostics: Vec<String>,
        revision: usize,
    },

    FileChanges(Vec<FileWatcherChange>),
}

fn setup_tracing(verbosity: Option<VerbosityLevel>) {
    let trace_level = match verbosity {
        None => Level::WARN,
        Some(VerbosityLevel::Info) => Level::INFO,
        Some(VerbosityLevel::Debug) => Level::DEBUG,
        Some(VerbosityLevel::Trace) => Level::TRACE,
    };

    let subscriber = Registry::default().with(
        tracing_tree::HierarchicalLayer::default()
            .with_indent_lines(true)
            .with_indent_amount(2)
            .with_bracketed_fields(true)
            .with_thread_ids(true)
            .with_targets(true)
            .with_writer(|| Box::new(std::io::stderr()))
            .with_timer(Uptime::default())
            .with_filter(LoggingFilter { trace_level }),
    );

    tracing::subscriber::set_global_default(subscriber).unwrap();
}

struct LoggingFilter {
    trace_level: Level,
}

impl LoggingFilter {
    fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
        let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
            self.trace_level
        } else {
            Level::INFO
        };

        meta.level() <= &filter
    }
}

impl<S> Filter<S> for LoggingFilter {
    fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
        self.is_enabled(meta)
    }

    fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
        if self.is_enabled(meta) {
            Interest::always()
        } else {
            Interest::never()
        }
    }

    fn max_level_hint(&self) -> Option<LevelFilter> {
        Some(LevelFilter::from_level(self.trace_level))
    }
}

@@ -1 +0,0 @@

111
crates/red_knot/src/watch.rs
Normal file
111
crates/red_knot/src/watch.rs
Normal file
@@ -0,0 +1,111 @@
use std::path::Path;

use anyhow::Context;
use notify::event::{CreateKind, ModifyKind, RemoveKind};
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};

use ruff_db::system::{SystemPath, SystemPathBuf};

pub struct FileWatcher {
    watcher: RecommendedWatcher,
}

pub trait EventHandler: Send + 'static {
    fn handle(&self, changes: Vec<FileWatcherChange>);
}

impl<F> EventHandler for F
where
    F: Fn(Vec<FileWatcherChange>) + Send + 'static,
{
    fn handle(&self, changes: Vec<FileWatcherChange>) {
        let f = self;
        f(changes);
    }
}

impl FileWatcher {
    pub fn new<E>(handler: E) -> anyhow::Result<Self>
    where
        E: EventHandler,
    {
        Self::from_handler(Box::new(handler))
    }

    fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
        let watcher = recommended_watcher(move |event: notify::Result<Event>| {
            match event {
                Ok(event) => {
                    // TODO verify that this handles all events correctly
                    let change_kind = match event.kind {
                        EventKind::Create(CreateKind::File) => FileChangeKind::Created,
                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::From)) => {
                            FileChangeKind::Deleted
                        }
                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::To)) => {
                            FileChangeKind::Created
                        }
                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Any)) => {
                            // TODO Introduce a better catch all event for cases that we don't understand.
                            FileChangeKind::Created
                        }
                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Both)) => {
                            todo!("Handle both create and delete event.");
                        }
                        EventKind::Modify(_) => FileChangeKind::Modified,
                        EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
                        _ => {
                            return;
                        }
                    };

                    let mut changes = Vec::new();

                    for path in event.paths {
                        if let Some(fs_path) = SystemPath::from_std_path(&path) {
                            changes
                                .push(FileWatcherChange::new(fs_path.to_path_buf(), change_kind));
                        }
                    }

                    if !changes.is_empty() {
                        handler.handle(changes);
                    }
                }
                // TODO proper error handling
                Err(err) => {
                    panic!("Error: {err}");
                }
            }
        })
        .context("Failed to create file watcher.")?;

        Ok(Self { watcher })
    }

    pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
        self.watcher.watch(path, RecursiveMode::Recursive)?;

        Ok(())
    }
}

#[derive(Clone, Debug)]
pub struct FileWatcherChange {
    pub path: SystemPathBuf,
    #[allow(unused)]
    pub kind: FileChangeKind,
}

impl FileWatcherChange {
    pub fn new(path: SystemPathBuf, kind: FileChangeKind) -> Self {
        Self { path, kind }
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileChangeKind {
    Created,
    Modified,
    Deleted,
}
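
A minimal usage sketch (the closure satisfies the blanket `EventHandler` impl above; the watched path is illustrative, and printing via `Debug` avoids assuming `SystemPathBuf: Display`):

```rust
fn watch_example() -> anyhow::Result<()> {
    let mut watcher = FileWatcher::new(|changes: Vec<FileWatcherChange>| {
        for change in &changes {
            println!("{:?}: {:?}", change.kind, change.path);
        }
    })?;

    watcher.watch_folder(std::path::Path::new("/path/to/workspace"))?;

    // Keep the watcher alive for as long as events should be delivered.
    std::thread::sleep(std::time::Duration::from_secs(60));
    Ok(())
}
```
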
@@ -1,4 +1,6 @@
use salsa::{Durability, Setter as _};
// TODO: Fix clippy warnings created by salsa macros
#![allow(clippy::used_underscore_binding)]

use std::{collections::BTreeMap, sync::Arc};

use rustc_hash::{FxBuildHasher, FxHashSet};
@@ -10,13 +12,11 @@ use ruff_db::{
};
use ruff_python_ast::{name::Name, PySourceType};

use crate::workspace::files::{Index, IndexedFiles, PackageFiles};
use crate::{
    db::Db,
    lint::{lint_semantic, lint_syntax, Diagnostics},
};

mod files;
mod metadata;

/// The project workspace as a Salsa ingredient.
@@ -64,6 +64,7 @@ mod metadata;
/// holding on to the most fundamental settings required for checking.
#[salsa::input]
pub struct Workspace {
    #[id]
    #[return_ref]
    root_buf: SystemPathBuf,

@@ -72,8 +73,7 @@ pub struct Workspace {
    /// Setting the open files to a non-`None` value changes `check` to only check the
    /// open files rather than all files in the workspace.
    #[return_ref]
    #[default]
    open_fileset: Option<Arc<FxHashSet<File>>>,
    open_file_set: Option<Arc<FxHashSet<File>>>,

    /// The (first-party) packages in this workspace.
    #[return_ref]
@@ -87,13 +87,13 @@ pub struct Package {
    pub name: Name,

    /// The path to the root directory of the package.
    #[id]
    #[return_ref]
    root_buf: SystemPathBuf,

    /// The files that are part of this package.
    #[return_ref]
    #[default]
    file_set: PackageFiles,
    file_set: Arc<FxHashSet<File>>,
    // TODO: Add the loaded settings.
}

@@ -106,10 +106,7 @@ impl Workspace {
            packages.insert(package.root.clone(), Package::from_metadata(db, package));
        }

        Workspace::builder(metadata.root, packages)
            .durability(Durability::MEDIUM)
            .open_fileset_durability(Durability::LOW)
            .new(db)
        Workspace::new(db, metadata.root, None, packages)
    }

    pub fn root(self, db: &dyn Db) -> &SystemPath {
@@ -121,7 +118,6 @@ impl Workspace {
    }

    pub fn reload(self, db: &mut dyn Db, metadata: WorkspaceMetadata) {
        tracing::debug!("Reloading workspace");
        assert_eq!(self.root(db), metadata.root());

        let mut old_packages = self.package_tree(db).clone();
@@ -140,9 +136,7 @@ impl Workspace {
            new_packages.insert(path, package);
        }

        self.set_package_tree(db)
            .with_durability(Durability::MEDIUM)
            .to(new_packages);
        self.set_package_tree(db).to(new_packages);
    }

    pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {
@@ -163,7 +157,7 @@ impl Workspace {
    pub fn package(self, db: &dyn Db, path: &SystemPath) -> Option<Package> {
        let packages = self.package_tree(db);

        let (package_path, package) = packages.range(..=path.to_path_buf()).next_back()?;
        let (package_path, package) = packages.range(..path.to_path_buf()).next_back()?;

        if path.starts_with(package_path) {
            Some(*package)
@@ -175,8 +169,6 @@ impl Workspace {
    /// Checks all open files in the workspace and its dependencies.
    #[tracing::instrument(level = "debug", skip_all)]
    pub fn check(self, db: &dyn Db) -> Vec<String> {
        tracing::debug!("Checking workspace");

        let mut result = Vec::new();

        if let Some(open_files) = self.open_files(db) {
@@ -195,18 +187,16 @@ impl Workspace {
    /// Opens a file in the workspace.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn open_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!("Opening file {}", file.path(db));

        let mut open_files = self.take_open_files(db);
        open_files.insert(file);
        self.set_open_files(db, open_files);
    }

    /// Closes a file in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
        tracing::debug!("Closing file {}", file.path(db));

        let mut open_files = self.take_open_files(db);
        let removed = open_files.remove(&file);

@@ -219,7 +209,7 @@ impl Workspace {

    /// Returns the open files in the workspace or `None` if the entire workspace should be checked.
    pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
        self.open_fileset(db).as_deref()
        self.open_file_set(db).as_deref()
    }

    /// Sets the open files in the workspace.
@@ -227,22 +217,20 @@ impl Workspace {
    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
        tracing::debug!("Set open workspace files (count: {})", open_files.len());

        self.set_open_fileset(db).to(Some(Arc::new(open_files)));
        self.set_open_file_set(db).to(Some(Arc::new(open_files)));
    }

    /// This takes the open files from the workspace and returns them.
    ///
    /// This changes the behavior of `check` to check all files in the workspace instead of just the open files.
    pub fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
        tracing::debug!("Take open workspace files");

        // Salsa will cancel any pending queries and remove its own reference to `open_files`
        // so that the reference counter to `open_files` now drops to 1.
        let open_files = self.set_open_fileset(db).to(None);
        let open_files = self.open_file_set(db).clone();

        if let Some(open_files) = open_files {
            // Salsa will cancel any pending queries and remove its own reference to `open_files`
            // so that the reference counter to `open_files` now drops to 1.
            self.set_open_file_set(db).to(None);

            Arc::try_unwrap(open_files).unwrap()
        } else {
            FxHashSet::default()
@@ -250,7 +238,6 @@ impl Workspace {
        }
    }

#[salsa::tracked]
impl Package {
    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.root_buf(db)
@@ -258,98 +245,55 @@ impl Package {

    /// Returns `true` if `file` is a first-party file part of this package.
    pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
        self.files(db).read().contains(&file)
        self.files(db).contains(&file)
    }

    #[tracing::instrument(level = "debug", skip(db))]
    pub fn remove_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!(
            "Remove file {} from package {}",
            file.path(db),
            self.name(db)
        );

        let Some(mut index) = PackageFiles::indexed_mut(db, self) else {
            return;
        };

        index.remove(file);
    pub fn files(self, db: &dyn Db) -> &FxHashSet<File> {
        self.file_set(db)
    }

    pub fn add_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!("Add file {} to package {}", file.path(db), self.name(db));
    pub fn remove_file(self, db: &mut dyn Db, file: File) -> bool {
        let mut files_arc = self.file_set(db).clone();

        let Some(mut index) = PackageFiles::indexed_mut(db, self) else {
            return;
        };
        // Set a dummy value. Salsa will cancel any pending queries and remove its own reference to `files`
        // so that the reference counter to `files` now drops to 1.
        self.set_file_set(db).to(Arc::new(FxHashSet::default()));

        index.insert(file);
        let files = Arc::get_mut(&mut files_arc).unwrap();
        let removed = files.remove(&file);
        self.set_file_set(db).to(files_arc);

        removed
    }

    #[tracing::instrument(level = "debug", skip(db))]
    pub(crate) fn check(self, db: &dyn Db) -> Vec<String> {
        tracing::debug!("Checking package {}", self.root(db));

        let mut result = Vec::new();
        for file in &self.files(db).read() {
            let diagnostics = check_file(db, file);
        for file in self.files(db) {
            let diagnostics = check_file(db, *file);
            result.extend_from_slice(&diagnostics);
        }

        result
    }

    /// Returns the files belonging to this package.
    #[salsa::tracked]
    pub fn files(self, db: &dyn Db) -> IndexedFiles {
        let _entered = tracing::debug_span!("files").entered();
        let files = self.file_set(db);

        let indexed = match files.get() {
            Index::Lazy(vacant) => {
                tracing::debug!("Indexing files for package {}", self.name(db));
                let files = discover_package_files(db, self.root(db));
                vacant.set(files)
            }
            Index::Indexed(indexed) => indexed,
        };

        indexed
    }

    fn from_metadata(db: &dyn Db, metadata: PackageMetadata) -> Self {
        Self::builder(metadata.name, metadata.root)
            .durability(Durability::MEDIUM)
            .file_set_durability(Durability::LOW)
            .new(db)
        let files = discover_package_files(db, metadata.root());

        Self::new(db, metadata.name, metadata.root, Arc::new(files))
    }

    fn update(self, db: &mut dyn Db, metadata: PackageMetadata) {
        let root = self.root(db);
        assert_eq!(root, metadata.root());

        if self.name(db) != metadata.name() {
            self.set_name(db)
                .with_durability(Durability::MEDIUM)
                .to(metadata.name);
        }
    }
        let files = discover_package_files(db, root);

    pub fn reload_files(self, db: &mut dyn Db) {
        tracing::debug!("Reload files for package {}", self.name(db));

        if !self.file_set(db).is_lazy() {
            // Force a re-index of the files in the next revision.
            self.set_file_set(db).to(PackageFiles::lazy());
        }
        self.set_name(db).to(metadata.name);
        self.set_file_set(db).to(Arc::new(files));
    }
}

pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
    let path = file.path(db);
    let _span = tracing::debug_span!("check_file", file=%path).entered();
    tracing::debug!("Checking file {path}");

    let mut diagnostics = Vec::new();
    diagnostics.extend_from_slice(lint_syntax(db, file));
    diagnostics.extend_from_slice(lint_semantic(db, file));
@@ -391,7 +335,7 @@ fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
    for path in paths {
        // If this returns `None`, then the file was deleted between the `walk_directory` call and now.
        // We can ignore this.
        if let Ok(file) = system_path_to_file(db.upcast(), &path) {
        if let Some(file) = system_path_to_file(db.upcast(), &path) {
            files.insert(file);
        }
    }
@@ -22,13 +22,15 @@
impl WorkspaceMetadata {
    /// Discovers the closest workspace at `path` and returns its metadata.
    pub fn from_path(path: &SystemPath, system: &dyn System) -> anyhow::Result<WorkspaceMetadata> {
        assert!(
            system.is_directory(path),
            "Workspace root path must be a directory"
        );
        tracing::debug!("Searching for workspace in '{path}'");
        let root = if system.is_file(path) {
            path.parent().unwrap().to_path_buf()
        } else {
            path.to_path_buf()
        };

        let root = path.to_path_buf();
        if !system.is_directory(&root) {
            anyhow::bail!("no workspace found at {:?}", root);
        }

        // TODO: Discover package name from `pyproject.toml`.
        let package_name: Name = path.file_name().unwrap_or("<root>").into();
File diff suppressed because it is too large
39  crates/red_knot_module_resolver/Cargo.toml  (new file)
@@ -0,0 +1,39 @@
[package]
name = "red_knot_module_resolver"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff_db = { workspace = true }
ruff_python_stdlib = { workspace = true }

compact_str = { workspace = true }
camino = { workspace = true }
once_cell = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true }
zip = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }

[dev-dependencies]
ruff_db = { workspace = true, features = ["os"] }

anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }

[lints]
workspace = true
@@ -1,9 +1,9 @@
# Red Knot

Semantic analysis for the red-knot project.
A work-in-progress multifile module resolver for Ruff.

## Vendored types for the stdlib

This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_python_semantic/vendor/typeshed`. The file `crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_module_resolver/vendor/typeshed`. The file `crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.

The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
@@ -3,7 +3,7 @@
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_python_semantic/vendor/typeshed` change.
//! in `crates/red_knot_module_resolver/vendor/typeshed` change.

use std::fs::File;
use std::path::Path;
@@ -23,21 +23,8 @@ const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
    let mut zip = ZipWriter::new(writer);

    // Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
    // [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM) which complicates the build
    // by a lot. Deflated compression is slower but it shouldn't matter much for the WASM use case
    // (WASM itself is already slower than a native build for a specific platform).
    // We can't use `#[cfg(...)]` here because the target-arch in a build script is the
    // architecture of the system running the build script and not the architecture of the build-target.
    // That's why we use the `TARGET` environment variable here.
    let method = if std::env::var("TARGET").unwrap().contains("wasm32") {
        CompressionMethod::Deflated
    } else {
        CompressionMethod::Zstd
    };

    let options = FileOptions::default()
        .compression_method(method)
        .compression_method(CompressionMethod::Zstd)
        .unix_permissions(0o644);

    for entry in walkdir::WalkDir::new(directory_path) {
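The removed comment above captures a real build-script subtlety: `#[cfg(target_arch = ...)]` inside `build.rs` reflects the *host* architecture, so the script must consult Cargo's `TARGET` environment variable to learn the build target. A standalone sketch of just that branch (the zip-writing itself is omitted; the printed string is illustrative only):

// build.rs sketch: choose a compression method from the build *target*,
// not the host. Cargo sets `TARGET` for build scripts.
fn main() {
    let target = std::env::var("TARGET").unwrap_or_default();
    let method = if target.contains("wasm32") {
        // zstd-sys needs clang to cross-compile for wasm, so fall back to deflate.
        "deflated"
    } else {
        "zstd"
    };
    println!("cargo:warning=zipping typeshed with {method} compression");
}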
126  crates/red_knot_module_resolver/src/db.rs  (new file)
@@ -0,0 +1,126 @@
use ruff_db::Upcast;

use crate::resolver::{
    editable_install_resolution_paths, file_to_module, internal::ModuleNameIngredient,
    module_resolution_settings, resolve_module_query,
};
use crate::typeshed::parse_typeshed_versions;

#[salsa::jar(db=Db)]
pub struct Jar(
    ModuleNameIngredient<'_>,
    module_resolution_settings,
    editable_install_resolution_paths,
    resolve_module_query,
    file_to_module,
    parse_typeshed_versions,
);

pub trait Db: salsa::DbWithJar<Jar> + ruff_db::Db + Upcast<dyn ruff_db::Db> {}

#[cfg(test)]
pub(crate) mod tests {
    use std::sync;

    use salsa::DebugWithDb;

    use ruff_db::files::Files;
    use ruff_db::system::{DbWithTestSystem, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;

    use crate::vendored_typeshed_stubs;

    use super::*;

    #[salsa::db(Jar, ruff_db::Jar)]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        system: TestSystem,
        vendored: VendoredFileSystem,
        files: Files,
        events: sync::Arc<sync::Mutex<Vec<salsa::Event>>>,
    }

    impl TestDb {
        pub(crate) fn new() -> Self {
            Self {
                storage: salsa::Storage::default(),
                system: TestSystem::default(),
                vendored: vendored_typeshed_stubs().snapshot(),
                events: sync::Arc::default(),
                files: Files::default(),
            }
        }

        /// Takes the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
            let inner = sync::Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");

            let events = inner.get_mut().unwrap();
            std::mem::take(&mut *events)
        }

        /// Clears the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        pub(crate) fn clear_salsa_events(&mut self) {
            self.take_salsa_events();
        }
    }

    impl Upcast<dyn ruff_db::Db> for TestDb {
        fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
            self
        }
    }

    impl ruff_db::Db for TestDb {
        fn vendored(&self) -> &VendoredFileSystem {
            &self.vendored
        }

        fn system(&self) -> &dyn ruff_db::system::System {
            &self.system
        }

        fn files(&self) -> &Files {
            &self.files
        }
    }

    impl Db for TestDb {}

    impl DbWithTestSystem for TestDb {
        fn test_system(&self) -> &TestSystem {
            &self.system
        }

        fn test_system_mut(&mut self) -> &mut TestSystem {
            &mut self.system
        }
    }

    impl salsa::Database for TestDb {
        fn salsa_event(&self, event: salsa::Event) {
            tracing::trace!("event: {:?}", event.debug(self));
            let mut events = self.events.lock().unwrap();
            events.push(event);
        }
    }

    impl salsa::ParallelDatabase for TestDb {
        fn snapshot(&self) -> salsa::Snapshot<Self> {
            salsa::Snapshot::new(Self {
                storage: self.storage.snapshot(),
                system: self.system.snapshot(),
                vendored: self.vendored.snapshot(),
                files: self.files.snapshot(),
                events: self.events.clone(),
            })
        }
    }
}
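Throughout this diff, resolver queries call base-database APIs via `db.upcast()` (e.g. `file.path(db.upcast())`), which is what the `Upcast<dyn ruff_db::Db>` supertrait above enables: converting this crate's `dyn Db` into the base crate's trait object. A minimal sketch of the pattern, with hypothetical `BaseDb`/`ResolverDb` traits standing in for `ruff_db::Db` and this crate's `Db`:

// Hypothetical two-crate database layering, reduced to plain traits.
trait BaseDb {
    fn file_text(&self, id: u32) -> String;
}

trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

trait ResolverDb: BaseDb + Upcast<dyn BaseDb> {}

fn resolve(db: &dyn ResolverDb, id: u32) -> usize {
    // A query in the downstream crate hands the *base* trait object to
    // base-crate APIs via `upcast()`, just like `file.path(db.upcast())`.
    db.upcast().file_text(id).len()
}

struct Database;

impl BaseDb for Database {
    fn file_text(&self, _id: u32) -> String {
        "print('hello')".to_string()
    }
}

impl Upcast<dyn BaseDb> for Database {
    fn upcast(&self) -> &(dyn BaseDb + 'static) {
        self
    }
}

impl ResolverDb for Database {}

fn main() {
    assert_eq!(resolve(&Database, 0), 14);
}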
18  crates/red_knot_module_resolver/src/lib.rs  (new file)
@@ -0,0 +1,18 @@
mod db;
mod module;
mod module_name;
mod path;
mod resolver;
mod state;
mod typeshed;

#[cfg(test)]
mod testing;

pub use db::{Db, Jar};
pub use module::{Module, ModuleKind};
pub use module_name::ModuleName;
pub use resolver::resolve_module;
pub use typeshed::{
    vendored_typeshed_stubs, TypeshedVersionsParseError, TypeshedVersionsParseErrorKind,
};
@@ -3,8 +3,9 @@ use std::sync::Arc;

use ruff_db::files::File;

use super::path::SearchPath;
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::path::{ModuleResolutionPathBuf, ModuleResolutionPathRef};

/// Representation of a Python module.
#[derive(Clone, PartialEq, Eq)]
@@ -16,7 +17,7 @@ impl Module {
    pub(crate) fn new(
        name: ModuleName,
        kind: ModuleKind,
        search_path: SearchPath,
        search_path: Arc<ModuleResolutionPathBuf>,
        file: File,
    ) -> Self {
        Self {
@@ -40,8 +41,8 @@ impl Module {
    }

    /// The search path from which the module was resolved.
    pub(crate) fn search_path(&self) -> &SearchPath {
        &self.inner.search_path
    pub(crate) fn search_path(&self) -> ModuleResolutionPathRef {
        ModuleResolutionPathRef::from(&*self.inner.search_path)
    }

    /// Determine whether this module is a single-file module or a package
@@ -61,11 +62,22 @@ impl std::fmt::Debug for Module {
    }
}

impl salsa::DebugWithDb<dyn Db> for Module {
    fn fmt(&self, f: &mut Formatter<'_>, db: &dyn Db) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("name", &self.name())
            .field("kind", &self.kind())
            .field("file", &self.file().debug(db.upcast()))
            .field("search_path", &self.search_path())
            .finish()
    }
}

#[derive(PartialEq, Eq)]
struct ModuleInner {
    name: ModuleName,
    kind: ModuleKind,
    search_path: SearchPath,
    search_path: Arc<ModuleResolutionPathBuf>,
    file: File,
}

@@ -42,7 +42,7 @@ impl ModuleName {
    /// ## Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
    /// assert_eq!(ModuleName::new_static(""), None);
@@ -68,7 +68,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
    /// ```
@@ -82,7 +82,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
@@ -101,7 +101,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    ///
@@ -133,7 +133,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(&*ModuleName::from_components(["a"]).unwrap(), "a");
    /// assert_eq!(&*ModuleName::from_components(["a", "b"]).unwrap(), "a.b");
1286  crates/red_knot_module_resolver/src/path.rs  (new file)
File diff suppressed because it is too large
@@ -1,22 +1,24 @@
use std::borrow::Cow;
use std::iter::FusedIterator;
use std::sync::Arc;

use ruff_db::files::{File, FilePath, FileRootKind};
use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
use ruff_db::system::{DirectoryEntry, System, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredPath;
use rustc_hash::{FxBuildHasher, FxHashSet};

use crate::db::Db;
use crate::module_name::ModuleName;
use ruff_db::files::{File, FilePath};
use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
use ruff_db::system::{DirectoryEntry, System, SystemPath, SystemPathBuf};

use super::module::{Module, ModuleKind};
use super::path::{ModulePath, SearchPath, SearchPathValidationError};
use super::state::ResolverState;
use crate::db::Db;
use crate::module::{Module, ModuleKind};
use crate::module_name::ModuleName;
use crate::path::ModuleResolutionPathBuf;
use crate::state::ResolverState;

type SearchPathRoot = Arc<ModuleResolutionPathBuf>;

/// Resolves a module name to a module.
pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
    let interned_name = ModuleNameIngredient::new(db, module_name);
    let interned_name = internal::ModuleNameIngredient::new(db, module_name);

    resolve_module_query(db, interned_name)
}
@@ -28,23 +30,16 @@ pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
#[salsa::tracked]
pub(crate) fn resolve_module_query<'db>(
    db: &'db dyn Db,
    module_name: ModuleNameIngredient<'db>,
    module_name: internal::ModuleNameIngredient<'db>,
) -> Option<Module> {
    let name = module_name.name(db);
    let _span = tracing::trace_span!("resolve_module", %name).entered();
    let _span = tracing::trace_span!("resolve_module", ?module_name).entered();

    let Some((search_path, module_file, kind)) = resolve_name(db, name) else {
        tracing::debug!("Module '{name}' not found in the search paths.");
        return None;
    };
    let name = module_name.name(db);

    let (search_path, module_file, kind) = resolve_name(db, name)?;

    let module = Module::new(name.clone(), kind, search_path, module_file);

    tracing::debug!(
        "Resolved module '{name}' to '{path}'.",
        path = module_file.path(db)
    );

    Some(module)
}

@@ -65,12 +60,6 @@ pub(crate) fn path_to_module(db: &dyn Db, path: &FilePath) -> Option<Module> {
    file_to_module(db, file)
}

#[derive(Debug, Clone, Copy)]
enum SystemOrVendoredPathRef<'a> {
    System(&'a SystemPath),
    Vendored(&'a VendoredPath),
}

/// Resolves the module for the file with the given id.
///
/// Returns `None` if the file is not a module locatable via any of the known search paths.
@@ -78,11 +67,7 @@ enum SystemOrVendoredPathRef<'a> {
pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
    let _span = tracing::trace_span!("file_to_module", ?file).entered();

    let path = match file.path(db.upcast()) {
        FilePath::System(system) => SystemOrVendoredPathRef::System(system),
        FilePath::Vendored(vendored) => SystemOrVendoredPathRef::Vendored(vendored),
        FilePath::SystemVirtual(_) => return None,
    };
    let path = file.path(db.upcast());

    let settings = module_resolution_settings(db);

@@ -90,11 +75,7 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {

    let module_name = loop {
        let candidate = search_paths.next()?;
        let relative_path = match path {
            SystemOrVendoredPathRef::System(path) => candidate.relativize_system_path(path),
            SystemOrVendoredPathRef::Vendored(path) => candidate.relativize_vendored_path(path),
        };
        if let Some(relative_path) = relative_path {
        if let Some(relative_path) = candidate.relativize_path(path) {
            break relative_path.to_module_name()?;
        }
    };
@@ -124,53 +105,71 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
///
/// This method also implements the typing spec's [module resolution order].
///
/// TODO(Alex): this method does multiple `.unwrap()` calls when it should really return an error.
/// Each `.unwrap()` call is a point where we're validating a setting that the user would pass
/// and transforming it into an internal representation for a validated path.
/// Rather than panicking if a path fails to validate, we should display an error message to the user
/// and exit the process with a nonzero exit code.
/// This validation should probably be done outside of Salsa?
///
/// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
fn try_resolve_module_resolution_settings(
    db: &dyn Db,
) -> Result<ModuleResolutionSettings, SearchPathValidationError> {
#[salsa::tracked(return_ref)]
pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSettings {
    let program = Program::get(db.upcast());

    let SearchPathSettings {
        extra_paths,
        src_root,
        workspace_root,
        custom_typeshed,
        site_packages,
    } = program.search_paths(db.upcast());

    if !extra_paths.is_empty() {
        tracing::info!("Extra search paths: {extra_paths:?}");
    }

    if let Some(custom_typeshed) = custom_typeshed {
        tracing::info!("Custom typeshed directory: {custom_typeshed}");
    }

    if !site_packages.is_empty() {
        tracing::info!("Site-packages directories: {site_packages:?}");
    if !extra_paths.is_empty() {
        tracing::info!("extra search paths: {extra_paths:?}");
    }

    let system = db.system();
    let files = db.files();
    let current_directory = db.system().current_directory();

    let mut static_search_paths = vec![];
    let mut static_search_paths: Vec<_> = extra_paths
        .iter()
        .map(|fs_path| {
            Arc::new(
                ModuleResolutionPathBuf::extra(SystemPath::absolute(fs_path, current_directory))
                    .unwrap(),
            )
        })
        .collect();

    for path in extra_paths {
        files.try_add_root(db.upcast(), path, FileRootKind::LibrarySearchPath);
        static_search_paths.push(SearchPath::extra(system, path.clone())?);
    }
    static_search_paths.push(Arc::new(
        ModuleResolutionPathBuf::first_party(SystemPath::absolute(
            workspace_root,
            current_directory,
        ))
        .unwrap(),
    ));

    static_search_paths.push(SearchPath::first_party(system, src_root.clone())?);
    static_search_paths.push(Arc::new(custom_typeshed.as_ref().map_or_else(
        ModuleResolutionPathBuf::vendored_stdlib,
        |custom| {
            ModuleResolutionPathBuf::stdlib_from_custom_typeshed_root(&SystemPath::absolute(
                custom,
                current_directory,
            ))
            .unwrap()
        },
    )));

    static_search_paths.push(if let Some(custom_typeshed) = custom_typeshed.as_ref() {
        files.try_add_root(
            db.upcast(),
            custom_typeshed,
            FileRootKind::LibrarySearchPath,
    if let Some(path) = site_packages {
        let site_packages_root = Arc::new(
            ModuleResolutionPathBuf::site_packages(SystemPath::absolute(path, current_directory))
                .unwrap(),
        );
        SearchPath::custom_stdlib(db, custom_typeshed.clone())?
    } else {
        SearchPath::vendored_stdlib()
    });
        static_search_paths.push(site_packages_root);
    }

    // TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step

@@ -194,68 +193,41 @@ fn try_resolve_module_resolution_settings(
        }
    });

    Ok(ModuleResolutionSettings {
    ModuleResolutionSettings {
        target_version,
        static_search_paths,
        site_packages_paths: site_packages.to_owned(),
    })
}

#[salsa::tracked(return_ref)]
pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSettings {
    // TODO proper error handling if this returns an error:
    try_resolve_module_resolution_settings(db).unwrap()
}

/// Collect all dynamic search paths. For each `site-packages` path:
/// - Collect that `site-packages` path
/// - Collect any search paths listed in `.pth` files in that `site-packages` directory
/// due to editable installations of third-party packages.
///
/// The editable-install search paths for the first `site-packages` directory
/// should come between the two `site-packages` directories when it comes to
/// module-resolution priority.
#[salsa::tracked(return_ref)]
pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
    let ModuleResolutionSettings {
        target_version: _,
        static_search_paths,
        site_packages_paths,
    } = module_resolution_settings(db);

    let mut dynamic_paths = Vec::new();

    if site_packages_paths.is_empty() {
        return dynamic_paths;
    }
}

    let mut existing_paths: FxHashSet<_> = static_search_paths
/// Collect all dynamic search paths:
/// search paths listed in `.pth` files in the `site-packages` directory
/// due to editable installations of third-party packages.
#[salsa::tracked(return_ref)]
pub(crate) fn editable_install_resolution_paths(db: &dyn Db) -> Vec<Arc<ModuleResolutionPathBuf>> {
    // This query needs to be re-executed each time a `.pth` file
    // is added, modified or removed from the `site-packages` directory.
    // However, we don't use Salsa queries to read the source text of `.pth` files;
    // we use the APIs on the `System` trait directly. As such, for now we simply ask
    // Salsa to recompute this query on each new revision.
    //
    // TODO: add some kind of watcher for the `site-packages` directory that looks
    // for `site-packages/*.pth` files being added/modified/removed; get rid of this.
    // When doing so, also make the test
    // `deleting_pth_file_on_which_module_resolution_depends_invalidates_cache()`
    // more principled!
    db.report_untracked_read();

    let static_search_paths = &module_resolution_settings(db).static_search_paths;
    let site_packages = static_search_paths
        .iter()
        .filter_map(|path| path.as_system_path())
        .map(Cow::Borrowed)
        .collect();
        .find(|path| path.is_site_packages());

    let files = db.files();
    let system = db.system();
    let mut dynamic_paths = Vec::default();

    for site_packages_dir in site_packages_paths {
        if !existing_paths.insert(Cow::Borrowed(site_packages_dir)) {
            continue;
        }
        let site_packages_root = files.try_add_root(
            db.upcast(),
            site_packages_dir,
            FileRootKind::LibrarySearchPath,
        );
        // This query needs to be re-executed each time a `.pth` file
        // is added, modified or removed from the `site-packages` directory.
        // However, we don't use Salsa queries to read the source text of `.pth` files;
        // we use the APIs on the `System` trait directly. As such, add a dependency on the
        // site-package directory's revision.
        site_packages_root.revision(db.upcast());

        dynamic_paths
            .push(SearchPath::site_packages(system, site_packages_dir.to_owned()).unwrap());
    if let Some(site_packages) = site_packages {
        let site_packages = site_packages
            .as_system_path()
            .expect("Expected site-packages never to be a VendoredPath!");

        // As well as modules installed directly into `site-packages`,
        // the directory may also contain `.pth` files.
@@ -263,8 +235,8 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
        // containing a (relative or absolute) path.
        // Each of these paths may point to an editable install of a package,
        // so should be considered an additional search path.
        let Ok(pth_file_iterator) = PthFileIterator::new(db, site_packages_dir) else {
            continue;
        let Ok(pth_file_iterator) = PthFileIterator::new(db, site_packages) else {
            return dynamic_paths;
        };

        // The Python documentation specifies that `.pth` files in `site-packages`
@@ -273,12 +245,20 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
        let mut all_pth_files: Vec<PthFile> = pth_file_iterator.collect();
        all_pth_files.sort_by(|a, b| a.path.cmp(&b.path));

        let mut existing_paths: FxHashSet<_> = static_search_paths
            .iter()
            .filter_map(|path| path.as_system_path())
            .map(Cow::Borrowed)
            .collect();

        dynamic_paths.reserve(all_pth_files.len());

        for pth_file in &all_pth_files {
            for installation in pth_file.editable_installations() {
                if existing_paths.insert(Cow::Owned(
                    installation.as_system_path().unwrap().to_path_buf(),
                )) {
                    dynamic_paths.push(installation);
                    dynamic_paths.push(Arc::new(installation));
                }
            }
        }
@@ -294,14 +274,14 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
/// are only calculated lazily.
///
/// [`sys.path` at runtime]: https://docs.python.org/3/library/site.html#module-site
pub(crate) struct SearchPathIterator<'db> {
struct SearchPathIterator<'db> {
    db: &'db dyn Db,
    static_paths: std::slice::Iter<'db, SearchPath>,
    dynamic_paths: Option<std::slice::Iter<'db, SearchPath>>,
    static_paths: std::slice::Iter<'db, SearchPathRoot>,
    dynamic_paths: Option<std::slice::Iter<'db, SearchPathRoot>>,
}

impl<'db> Iterator for SearchPathIterator<'db> {
    type Item = &'db SearchPath;
    type Item = &'db SearchPathRoot;

    fn next(&mut self) -> Option<Self::Item> {
        let SearchPathIterator {
@@ -312,7 +292,7 @@ impl<'db> Iterator for SearchPathIterator<'db> {

        static_paths.next().or_else(|| {
            dynamic_paths
                .get_or_insert_with(|| dynamic_resolution_paths(*db).iter())
                .get_or_insert_with(|| editable_install_resolution_paths(*db).iter())
                .next()
        })
    }
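`SearchPathIterator::next` above defers the (potentially expensive) editable-install discovery until the static paths are exhausted, and `get_or_insert_with` guarantees the discovery runs at most once per iterator. A self-contained sketch of the same chaining trick, with the Salsa query replaced by a hypothetical in-place discovery:

struct SearchPaths<'a> {
    static_paths: std::slice::Iter<'a, String>,
    dynamic_paths: Option<std::vec::IntoIter<String>>,
}

impl<'a> Iterator for SearchPaths<'a> {
    type Item = String;

    fn next(&mut self) -> Option<String> {
        self.static_paths.next().cloned().or_else(|| {
            // First call past the static paths materializes the dynamic ones;
            // later calls reuse the stored iterator.
            self.dynamic_paths
                .get_or_insert_with(|| {
                    println!("discovering editable installs...");
                    vec!["/site-packages/editable-pkg".to_string()].into_iter()
                })
                .next()
        })
    }
}

fn main() {
    let static_paths = vec!["/src".to_string(), "/stdlib".to_string()];
    let paths = SearchPaths {
        static_paths: static_paths.iter(),
        dynamic_paths: None,
    };
    // Discovery only runs once iteration reaches the dynamic tail.
    for path in paths {
        println!("{path}");
    }
}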
@@ -333,7 +313,7 @@ struct PthFile<'db> {
impl<'db> PthFile<'db> {
    /// Yield paths in this `.pth` file that appear to represent editable installations,
    /// and should therefore be added as module-resolution search paths.
    fn editable_installations(&'db self) -> impl Iterator<Item = SearchPath> + 'db {
    fn editable_installations(&'db self) -> impl Iterator<Item = ModuleResolutionPathBuf> + 'db {
        let PthFile {
            system,
            path: _,
@@ -355,7 +335,7 @@ impl<'db> PthFile<'db> {
                return None;
            }
            let possible_editable_install = SystemPath::absolute(line, site_packages);
            SearchPath::editable(*system, possible_editable_install).ok()
            ModuleResolutionPathBuf::editable_installation_root(*system, possible_editable_install)
        })
    }
}
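As the comments in this file describe, a `.pth` file is plain text in `site-packages`: blank lines and `#` comments are ignored, lines starting with `import` are executed by CPython at startup, and every other line is a path (resolved against `site-packages`) that may point at an editable install. A std-only sketch of that filter, simplified from the `PthFile` logic above (the helper name is hypothetical):

use std::path::{Path, PathBuf};

// Keep only the lines of a `.pth` file that name candidate search paths.
fn editable_candidates(pth_contents: &str, site_packages: &Path) -> Vec<PathBuf> {
    pth_contents
        .lines()
        .map(str::trim_end)
        .filter(|line| {
            !line.is_empty()
                && !line.starts_with('#')
                && !line.starts_with("import ")
                && !line.starts_with("import\t")
        })
        // `Path::join` with an absolute right-hand side replaces the base,
        // mirroring `SystemPath::absolute(line, site_packages)`.
        .map(|line| site_packages.join(line))
        .collect()
}

fn main() {
    let pth = "# created by setuptools\nimport _virtualenv\n/workspace/my-editable-pkg/src\n";
    let candidates = editable_candidates(pth, Path::new("/venv/lib/python3.12/site-packages"));
    assert_eq!(candidates, vec![PathBuf::from("/workspace/my-editable-pkg/src")]);
}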
@@ -422,18 +402,12 @@
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct ModuleResolutionSettings {
    target_version: TargetVersion,

    /// Search paths that have been statically determined purely from reading Ruff's configuration settings.
    /// These shouldn't ever change unless the config settings themselves change.
    static_search_paths: Vec<SearchPath>,

    /// site-packages paths are not included in the above field:
    /// if there are multiple site-packages paths, editable installations can appear
    /// *between* the site-packages paths on `sys.path` at runtime.
    /// That means we can't know where a second or third `site-packages` path should sit
    /// in terms of module-resolution priority until we've discovered the editable installs
    /// for the first `site-packages` path
    site_packages_paths: Vec<SystemPathBuf>,
    ///
    /// Note that `site-packages` *is included* as a search path in this sequence,
    /// but it is also stored separately so that we're able to find editable installs later.
    static_search_paths: Vec<SearchPathRoot>,
}
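The removed `site_packages_paths` doc comment encodes a real ordering constraint: on `sys.path`, the editable installs found in one `site-packages` directory's `.pth` files rank above any *later* `site-packages` directory. A small illustration with a hypothetical helper (not part of the diff):

// Build the final priority order the doc comment above describes: each
// `site-packages` directory is followed by its own editable installs,
// before the next `site-packages` directory is consulted.
fn ordered_search_paths(
    static_paths: &[&str],
    site_packages: &[(&str, &[&str])], // (directory, editables from its .pth files)
) -> Vec<String> {
    let mut paths: Vec<String> = static_paths.iter().map(|p| p.to_string()).collect();
    for (dir, editables) in site_packages {
        paths.push(dir.to_string());
        paths.extend(editables.iter().map(|p| p.to_string()));
    }
    paths
}

fn main() {
    let first: (&str, &[&str]) = ("/venv/site-packages", &["/workspace/editable-pkg/src"]);
    let second: (&str, &[&str]) = ("/usr/lib/python3/site-packages", &[]);
    let order = ordered_search_paths(&["/src", "/stdlib"], &[first, second]);
    // The editable install outranks the *second* site-packages directory.
    assert_eq!(order[2], "/venv/site-packages");
    assert_eq!(order[3], "/workspace/editable-pkg/src");
    assert_eq!(order[4], "/usr/lib/python3/site-packages");
}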
impl ModuleResolutionSettings {
@@ -441,7 +415,7 @@ impl ModuleResolutionSettings {
        self.target_version
    }

    pub(crate) fn search_paths<'db>(&'db self, db: &'db dyn Db) -> SearchPathIterator<'db> {
    fn search_paths<'db>(&'db self, db: &'db dyn Db) -> SearchPathIterator<'db> {
        SearchPathIterator {
            db,
            static_paths: self.static_search_paths.iter(),
@@ -450,34 +424,34 @@
    }
}

/// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
///
/// This is needed because Salsa requires that all query arguments are salsa ingredients.
#[salsa::interned]
struct ModuleNameIngredient<'db> {
    #[return_ref]
    pub(super) name: ModuleName,
// The singleton methods generated by salsa are all `pub` instead of `pub(crate)` which triggers
// `unreachable_pub`. Work around this by creating a module and allow `unreachable_pub` for it.
// Salsa also generates uses to `_db` variables for `interned` which triggers `clippy::used_underscore_binding`. Suppress that too
// TODO(micha): Contribute a fix for this upstream where the singleton methods have the same visibility as the struct.
#[allow(unreachable_pub, clippy::used_underscore_binding)]
pub(crate) mod internal {
    use crate::module_name::ModuleName;

    /// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
    ///
    /// This is needed because Salsa requires that all query arguments are salsa ingredients.
    #[salsa::interned]
    pub(crate) struct ModuleNameIngredient<'db> {
        #[return_ref]
        pub(super) name: ModuleName,
    }
}
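The doc comment above is the whole reason this wrapper exists: Salsa query arguments must themselves be Salsa ingredients, so the plain `ModuleName` is interned first. Stripped of Salsa, interning just maps equal values to one cheap, copyable ID; a minimal std-only sketch (Salsa adds revision tracking and garbage collection on top of this idea):

use std::collections::HashMap;

#[derive(Default)]
struct Interner {
    ids: HashMap<String, u32>,
    names: Vec<String>,
}

impl Interner {
    // Return the existing ID for `name`, or allocate a new one.
    fn intern(&mut self, name: &str) -> u32 {
        if let Some(&id) = self.ids.get(name) {
            return id;
        }
        let id = self.names.len() as u32;
        self.names.push(name.to_string());
        self.ids.insert(name.to_string(), id);
        id
    }

    fn name(&self, id: u32) -> &str {
        &self.names[id as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("foo.bar");
    let b = interner.intern("foo.bar");
    assert_eq!(a, b); // same name, same ingredient
    assert_eq!(interner.name(a), "foo.bar");
}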
/// Given a module name and a list of search paths in which to lookup modules,
/// attempt to resolve the module name
fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, ModuleKind)> {
fn resolve_name(
    db: &dyn Db,
    name: &ModuleName,
) -> Option<(Arc<ModuleResolutionPathBuf>, File, ModuleKind)> {
    let resolver_settings = module_resolution_settings(db);
    let target_version = resolver_settings.target_version();
    let resolver_state = ResolverState::new(db, target_version);
    let (_, minor_version) = target_version.as_tuple();
    let is_builtin_module =
        ruff_python_stdlib::sys::is_builtin_module(minor_version, name.as_str());
    let resolver_state = ResolverState::new(db, resolver_settings.target_version());

    for search_path in resolver_settings.search_paths(db) {
        // When a builtin module is imported, standard module resolution is bypassed:
        // the module name always resolves to the stdlib module,
        // even if there's a module of the same name in the first-party root
        // (which would normally result in the stdlib module being overridden).
        if is_builtin_module && !search_path.is_standard_library() {
            continue;
        }

        let mut components = name.components();
        let module_name = components.next_back()?;

@@ -488,7 +462,7 @@ fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, Mod
        package_path.push(module_name);

        // Must be a `__init__.pyi` or `__init__.py` or it isn't a package.
        let kind = if package_path.is_directory(&resolver_state) {
        let kind = if package_path.is_directory(search_path, &resolver_state) {
            package_path.push("__init__");
            ModuleKind::Package
        } else {
@@ -496,13 +470,16 @@ fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, Mod
        };

        // TODO Implement full https://peps.python.org/pep-0561/#type-checker-module-resolution-order resolution
        if let Some(stub) = package_path.with_pyi_extension().to_file(&resolver_state) {
        if let Some(stub) = package_path
            .with_pyi_extension()
            .to_file(search_path, &resolver_state)
        {
            return Some((search_path.clone(), stub, kind));
        }

        if let Some(module) = package_path
            .with_py_extension()
            .and_then(|path| path.to_file(&resolver_state))
            .and_then(|path| path.to_file(search_path, &resolver_state))
        {
            return Some((search_path.clone(), module, kind));
        }
@@ -526,14 +503,14 @@ fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, Mod
}

fn resolve_package<'a, 'db, I>(
    module_search_path: &SearchPath,
    module_search_path: &ModuleResolutionPathBuf,
    components: I,
    resolver_state: &ResolverState<'db>,
) -> Result<ResolvedPackage, PackageKind>
where
    I: Iterator<Item = &'a str>,
{
    let mut package_path = module_search_path.to_module_path();
    let mut package_path = module_search_path.clone();

    // `true` if inside a folder that is a namespace package (has no `__init__.py`).
    // Namespace packages are special because they can be spread across multiple search paths.
@@ -547,11 +524,12 @@ where
    for folder in components {
        package_path.push(folder);

        let is_regular_package = package_path.is_regular_package(resolver_state);
        let is_regular_package =
            package_path.is_regular_package(module_search_path, resolver_state);

        if is_regular_package {
            in_namespace_package = false;
        } else if package_path.is_directory(resolver_state) {
        } else if package_path.is_directory(module_search_path, resolver_state) {
            // A directory without an `__init__.py` is a namespace package, continue with the next folder.
            in_namespace_package = true;
        } else if in_namespace_package {
@@ -584,7 +562,7 @@ where

#[derive(Debug)]
struct ResolvedPackage {
    path: ModulePath,
    path: ModuleResolutionPathBuf,
    kind: PackageKind,
}

@@ -612,17 +590,16 @@ impl PackageKind {

#[cfg(test)]
mod tests {
    use internal::ModuleNameIngredient;
    use ruff_db::files::{system_path_to_file, File, FilePath};
    use ruff_db::system::DbWithTestSystem;
    use ruff_db::testing::{
        assert_const_function_query_was_not_run, assert_function_query_was_not_run,
    };
    use ruff_db::system::{DbWithTestSystem, OsSystem, SystemPath};
    use ruff_db::testing::assert_function_query_was_not_run;
    use ruff_db::Db;

    use crate::db::tests::TestDb;
    use crate::module::ModuleKind;
    use crate::module_name::ModuleName;
    use crate::module_resolver::module::ModuleKind;
    use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};
    use crate::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};

    use super::*;

@@ -641,7 +618,7 @@ mod tests {
        );

        assert_eq!("foo", foo_module.name());
        assert_eq!(&src, foo_module.search_path());
        assert_eq!(&src, &foo_module.search_path());
        assert_eq!(ModuleKind::Module, foo_module.kind());

        let expected_foo_path = src.join("foo.py");
@@ -652,40 +629,6 @@
        );
    }

    #[test]
    fn builtins_vendored() {
        let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
            .with_vendored_typeshed()
            .with_src_files(&[("builtins.py", "FOOOO = 42")])
            .build();

        let builtins_module_name = ModuleName::new_static("builtins").unwrap();
        let builtins = resolve_module(&db, builtins_module_name).expect("builtins to resolve");

        assert_eq!(builtins.file().path(&db), &stdlib.join("builtins.pyi"));
    }

    #[test]
    fn builtins_custom() {
        const TYPESHED: MockedTypeshed = MockedTypeshed {
            stdlib_files: &[("builtins.pyi", "def min(a, b): ...")],
            versions: "builtins: 3.8-",
        };

        const SRC: &[FileSpec] = &[("builtins.py", "FOOOO = 42")];

        let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
            .with_src_files(SRC)
            .with_custom_typeshed(TYPESHED)
            .with_target_version(TargetVersion::Py38)
            .build();

        let builtins_module_name = ModuleName::new_static("builtins").unwrap();
        let builtins = resolve_module(&db, builtins_module_name).expect("builtins to resolve");

        assert_eq!(builtins.file().path(&db), &stdlib.join("builtins.pyi"));
    }

    #[test]
    fn stdlib() {
        const TYPESHED: MockedTypeshed = MockedTypeshed {
@@ -706,7 +649,7 @@ mod tests {
            resolve_module(&db, functools_module_name).as_ref()
        );

        assert_eq!(&stdlib, functools_module.search_path());
        assert_eq!(&stdlib, &functools_module.search_path().to_path_buf());
        assert_eq!(ModuleKind::Module, functools_module.kind());

        let expected_functools_path = stdlib.join("functools.pyi");
@@ -758,7 +701,7 @@ mod tests {
            });
            let search_path = resolved_module.search_path();
            assert_eq!(
                &stdlib, search_path,
                &stdlib, &search_path,
                "Search path for {module_name} was unexpectedly {search_path:?}"
            );
            assert!(
@@ -854,7 +797,7 @@ mod tests {
            });
            let search_path = resolved_module.search_path();
            assert_eq!(
                &stdlib, search_path,
                &stdlib, &search_path,
                "Search path for {module_name} was unexpectedly {search_path:?}"
            );
            assert!(
@@ -913,7 +856,7 @@ mod tests {
            Some(&functools_module),
            resolve_module(&db, functools_module_name).as_ref()
        );
        assert_eq!(&src, functools_module.search_path());
        assert_eq!(&src, &functools_module.search_path());
        assert_eq!(ModuleKind::Module, functools_module.kind());
        assert_eq!(&src.join("functools.py"), functools_module.file().path(&db));

@@ -934,7 +877,7 @@ mod tests {
        let pydoc_data_topics = resolve_module(&db, pydoc_data_topics_name).unwrap();

        assert_eq!("pydoc_data.topics", pydoc_data_topics.name());
        assert_eq!(pydoc_data_topics.search_path(), &stdlib);
        assert_eq!(pydoc_data_topics.search_path(), stdlib);
        assert_eq!(
            pydoc_data_topics.file().path(&db),
            &stdlib.join("pydoc_data/topics.pyi")
@@ -951,7 +894,7 @@ mod tests {
        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();

        assert_eq!("foo", foo_module.name());
        assert_eq!(&src, foo_module.search_path());
        assert_eq!(&src, &foo_module.search_path());
        assert_eq!(&foo_path, foo_module.file().path(&db));

        assert_eq!(
@@ -978,7 +921,7 @@ mod tests {
        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
        let foo_init_path = src.join("foo/__init__.py");

        assert_eq!(&src, foo_module.search_path());
        assert_eq!(&src, &foo_module.search_path());
        assert_eq!(&foo_init_path, foo_module.file().path(&db));
        assert_eq!(ModuleKind::Package, foo_module.kind());

@@ -1001,7 +944,7 @@ mod tests {
        let foo = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
        let foo_stub = src.join("foo.pyi");

        assert_eq!(&src, foo.search_path());
        assert_eq!(&src, &foo.search_path());
        assert_eq!(&foo_stub, foo.file().path(&db));

        assert_eq!(Some(foo), path_to_module(&db, &FilePath::System(foo_stub)));
@@ -1025,7 +968,7 @@ mod tests {
            resolve_module(&db, ModuleName::new_static("foo.bar.baz").unwrap()).unwrap();
        let baz_path = src.join("foo/bar/baz.py");

        assert_eq!(&src, baz_module.search_path());
        assert_eq!(&src, &baz_module.search_path());
        assert_eq!(&baz_path, baz_module.file().path(&db));

        assert_eq!(
@@ -1125,7 +1068,7 @@ mod tests {
        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
        let foo_src_path = src.join("foo.py");

        assert_eq!(&src, foo_module.search_path());
        assert_eq!(&src, &foo_module.search_path());
        assert_eq!(&foo_src_path, foo_module.file().path(&db));
        assert_eq!(
            Some(foo_module),
@@ -1141,21 +1084,12 @@ mod tests {
    #[test]
    #[cfg(target_family = "unix")]
    fn symlink() -> anyhow::Result<()> {
        use anyhow::Context;

        use ruff_db::program::Program;
        use ruff_db::system::{OsSystem, SystemPath};

        use crate::db::tests::TestDb;

        let mut db = TestDb::new();

        let temp_dir = tempfile::tempdir()?;
        let root = temp_dir
            .path()
            .canonicalize()
            .context("Failed to canonicalize temp dir")?;
        let root = SystemPath::from_std_path(&root).unwrap();
        let root = SystemPath::from_std_path(temp_dir.path()).unwrap();
        db.use_system(OsSystem::new(root));

        let src = root.join("src");
@@ -1167,17 +1101,16 @@ mod tests {

        std::fs::create_dir_all(src.as_std_path())?;
        std::fs::create_dir_all(site_packages.as_std_path())?;
        std::fs::create_dir_all(custom_typeshed.join("stdlib").as_std_path())?;
        std::fs::File::create(custom_typeshed.join("stdlib/VERSIONS").as_std_path())?;
        std::fs::create_dir_all(custom_typeshed.as_std_path())?;

        std::fs::write(foo.as_std_path(), "")?;
        std::os::unix::fs::symlink(foo.as_std_path(), bar.as_std_path())?;

        let search_paths = SearchPathSettings {
            extra_paths: vec![],
            src_root: src.clone(),
            workspace_root: src.clone(),
            custom_typeshed: Some(custom_typeshed.clone()),
            site_packages: vec![site_packages],
            site_packages: Some(site_packages.clone()),
        };

        Program::new(&db, TargetVersion::Py38, search_paths);
@@ -1187,12 +1120,12 @@ mod tests {

        assert_ne!(foo_module, bar_module);

        assert_eq!(&src, foo_module.search_path());
        assert_eq!(&src, &foo_module.search_path());
        assert_eq!(&foo, foo_module.file().path(&db));

        // `foo` and `bar` shouldn't resolve to the same file

        assert_eq!(&src, bar_module.search_path());
        assert_eq!(&src, &bar_module.search_path());
        assert_eq!(&bar, bar_module.file().path(&db));
        assert_eq!(&foo, foo_module.file().path(&db));

@@ -1227,7 +1160,7 @@ mod tests {

        // Delete `bar.py`
        db.memory_file_system().remove_file(&bar_path).unwrap();
        bar.sync(&mut db);
        bar.touch(&mut db);

        // Re-query the foo module. The foo module should still be cached because `bar.py` isn't relevant
        // for resolving `foo`.
@@ -1279,8 +1212,7 @@ mod tests {
        db.memory_file_system().remove_file(&foo_init_path)?;
        db.memory_file_system()
            .remove_directory(foo_init_path.parent().unwrap())?;
        File::sync_path(&mut db, &foo_init_path);
        File::sync_path(&mut db, foo_init_path.parent().unwrap());
        File::touch_path(&mut db, &foo_init_path);

        let foo_module = resolve_module(&db, foo_module_name).expect("Foo module to resolve");
        assert_eq!(&src.join("foo.py"), foo_module.file().path(&db));
@@ -1309,9 +1241,9 @@ mod tests {
        let stdlib_functools_path = stdlib.join("functools.pyi");

        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
        assert_eq!(functools_module.search_path(), &stdlib);
        assert_eq!(functools_module.search_path(), stdlib);
        assert_eq!(
            Ok(functools_module.file()),
            Some(functools_module.file()),
            system_path_to_file(&db, &stdlib_functools_path)
        );

@@ -1323,15 +1255,15 @@ mod tests {
            .unwrap();
        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
        let events = db.take_salsa_events();
        assert_function_query_was_not_run(
        assert_function_query_was_not_run::<resolve_module_query, _, _>(
            &db,
            resolve_module_query,
            ModuleNameIngredient::new(&db, functools_module_name.clone()),
            |res| &res.function,
            &ModuleNameIngredient::new(&db, functools_module_name.clone()),
            &events,
        );
        assert_eq!(functools_module.search_path(), &stdlib);
        assert_eq!(functools_module.search_path(), stdlib);
        assert_eq!(
            Ok(functools_module.file()),
            Some(functools_module.file()),
            system_path_to_file(&db, &stdlib_functools_path)
        );
    }
@@ -1355,9 +1287,9 @@ mod tests {

        let functools_module_name = ModuleName::new_static("functools").unwrap();
        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
        assert_eq!(functools_module.search_path(), &stdlib);
        assert_eq!(functools_module.search_path(), stdlib);
        assert_eq!(
            Ok(functools_module.file()),
            Some(functools_module.file()),
            system_path_to_file(&db, stdlib.join("functools.pyi"))
        );

@@ -1366,9 +1298,9 @@ mod tests {
        let src_functools_path = src.join("functools.py");
        db.write_file(&src_functools_path, "FOO: int").unwrap();
        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
        assert_eq!(functools_module.search_path(), &src);
        assert_eq!(functools_module.search_path(), src);
        assert_eq!(
            Ok(functools_module.file()),
            Some(functools_module.file()),
            system_path_to_file(&db, &src_functools_path)
        );
    }
@@ -1397,9 +1329,9 @@ mod tests {
        let src_functools_path = src.join("functools.py");

        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
        assert_eq!(functools_module.search_path(), &src);
        assert_eq!(functools_module.search_path(), src);
        assert_eq!(
            Ok(functools_module.file()),
            Some(functools_module.file()),
            system_path_to_file(&db, &src_functools_path)
        );

@@ -1408,11 +1340,11 @@ mod tests {
        db.memory_file_system()
            .remove_file(&src_functools_path)
            .unwrap();
        File::sync_path(&mut db, &src_functools_path);
        File::touch_path(&mut db, &src_functools_path);
        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
        assert_eq!(functools_module.search_path(), &stdlib);
        assert_eq!(functools_module.search_path(), stdlib);
        assert_eq!(
            Ok(functools_module.file()),
            Some(functools_module.file()),
            system_path_to_file(&db, stdlib.join("functools.pyi"))
        );
    }
@@ -1575,7 +1507,12 @@ not_a_directory
            &FilePath::system("/y/src/bar.py")
        );
        let events = db.take_salsa_events();
        assert_const_function_query_was_not_run(&db, dynamic_resolution_paths, &events);
        assert_function_query_was_not_run::<editable_install_resolution_paths, _, _>(
            &db,
            |res| &res.function,
            &(),
            &events,
        );
    }

    #[test]
@@ -1604,7 +1541,18 @@ not_a_directory
            .remove_file(site_packages.join("_foo.pth"))
            .unwrap();

        File::sync_path(&mut db, &site_packages.join("_foo.pth"));
        // Why are we touching a random file in the path that's been editably installed,
        // rather than the `.pth` file, when the `.pth` file is the one that has been deleted?
        // It's because the `.pth` file isn't directly tracked as a dependency by Salsa
        // currently (we don't use `system_path_to_file()` to get the file, and we don't use
        // `source_text()` to read the source of the file). Instead of using these APIs which
        // would automatically add the existence and contents of the file as a Salsa-tracked
        // dependency, we use `.report_untracked_read()` to force Salsa to re-parse all
        // `.pth` files on each new "revision". Making a random modification to a tracked
        // Salsa file forces a new revision.
        //
        // TODO: get rid of the `.report_untracked_read()` call...
        File::touch_path(&mut db, SystemPath::new("/x/src/foo.py"));

        assert_eq!(resolve_module(&db, foo_module_name.clone()), None);
    }
@@ -1632,8 +1580,8 @@ not_a_directory
            .remove_file(src_path.join("foo.py"))
            .unwrap();
        db.memory_file_system().remove_directory(&src_path).unwrap();
        File::sync_path(&mut db, &src_path.join("foo.py"));
        File::sync_path(&mut db, &src_path);
        File::touch_path(&mut db, &src_path.join("foo.py"));
        File::touch_path(&mut db, &src_path);
        assert_eq!(resolve_module(&db, foo_module_name.clone()), None);
    }

@@ -1644,62 +1592,39 @@ not_a_directory
            .with_site_packages_files(&[("_foo.pth", "/src")])
            .build();

        let search_paths: Vec<&SearchPath> =
        let search_paths: Vec<&SearchPathRoot> =
            module_resolution_settings(&db).search_paths(&db).collect();

        assert!(search_paths.contains(
            &&SearchPath::first_party(db.system(), SystemPathBuf::from("/src")).unwrap()
        ));
        assert!(!search_paths
            .contains(&&SearchPath::editable(db.system(), SystemPathBuf::from("/src")).unwrap()));
        assert!(search_paths.contains(&&Arc::new(
            ModuleResolutionPathBuf::first_party("/src").unwrap()
        )));

        assert!(!search_paths.contains(&&Arc::new(
            ModuleResolutionPathBuf::editable_installation_root(db.system(), "/src").unwrap()
        )));
    }

    #[test]
    fn multiple_site_packages_with_editables() {
        let mut db = TestDb::new();
    fn no_duplicate_editable_search_paths_added() {
        let TestCase { mut db, .. } = TestCaseBuilder::new()
            .with_site_packages_files(&[("_foo.pth", "/x"), ("_bar.pth", "/x")])
            .build();

        let venv_site_packages = SystemPathBuf::from("/venv-site-packages");
        let site_packages_pth = venv_site_packages.join("foo.pth");
        let system_site_packages = SystemPathBuf::from("/system-site-packages");
        let editable_install_location = SystemPathBuf::from("/x/y/a.py");
        let system_site_packages_location = system_site_packages.join("a.py");
        db.write_file("/x/foo.py", "").unwrap();

        db.memory_file_system()
            .create_directory_all("/src")
            .unwrap();
        db.write_files([
            (&site_packages_pth, "/x/y"),
            (&editable_install_location, ""),
            (&system_site_packages_location, ""),
        ])
        .unwrap();
        let search_paths: Vec<&SearchPathRoot> =
            module_resolution_settings(&db).search_paths(&db).collect();

        Program::new(
            &db,
            TargetVersion::default(),
            SearchPathSettings {
                extra_paths: vec![],
                src_root: SystemPathBuf::from("/src"),
                custom_typeshed: None,
                site_packages: vec![venv_site_packages, system_site_packages],
            },
        let editable_install =
            ModuleResolutionPathBuf::editable_installation_root(db.system(), "/x").unwrap();

        assert_eq!(
            search_paths
                .iter()
                .filter(|path| ****path == editable_install)
                .count(),
            1,
            "Unexpected search paths: {search_paths:?}"
        );

        // The editable installs discovered from the `.pth` file in the first `site-packages` directory
        // take precedence over the second `site-packages` directory...
        let a_module_name = ModuleName::new_static("a").unwrap();
        let a_module = resolve_module(&db, a_module_name.clone()).unwrap();
        assert_eq!(a_module.file().path(&db), &editable_install_location);

        db.memory_file_system()
            .remove_file(&site_packages_pth)
            .unwrap();
        File::sync_path(&mut db, &site_packages_pth);

        // ...But now that the `.pth` file in the first `site-packages` directory has been deleted,
        // the editable install no longer exists, so the module now resolves to the file in the
        // second `site-packages` directory
        let a_module = resolve_module(&db, a_module_name).unwrap();
        assert_eq!(a_module.file().path(&db), &system_site_packages_location);
    }
}
@@ -1,8 +1,9 @@
use ruff_db::program::TargetVersion;
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;

use super::typeshed::LazyTypeshedVersions;
use crate::db::Db;
use crate::typeshed::LazyTypeshedVersions;

pub(crate) struct ResolverState<'db> {
    pub(crate) db: &'db dyn Db,
@@ -19,6 +20,10 @@ impl<'db> ResolverState<'db> {
    }
}

    pub(crate) fn system(&self) -> &dyn System {
        self.db.system()
    }

    pub(crate) fn vendored(&self) -> &VendoredFileSystem {
        self.db.vendored()
    }
@@ -12,9 +12,6 @@ pub(crate) struct TestCase<T> {
    pub(crate) db: TestDb,
    pub(crate) src: SystemPathBuf,
    pub(crate) stdlib: T,
    // Most test cases only ever need a single `site-packages` directory,
    // so this is a single directory instead of a `Vec` of directories,
    // like it is in `ruff_db::Program`.
    pub(crate) site_packages: SystemPathBuf,
    pub(crate) target_version: TargetVersion,
}
@@ -128,8 +125,6 @@ impl<T> TestCaseBuilder<T> {
        files: impl IntoIterator<Item = FileSpec>,
    ) -> SystemPathBuf {
        let root = location.as_ref().to_path_buf();
        // Make sure to create the directory even if the list of files is empty:
        db.memory_file_system().create_directory_all(&root).unwrap();
        db.write_files(
            files
                .into_iter()
@@ -224,9 +219,9 @@ impl TestCaseBuilder<MockedTypeshed> {
            target_version,
            SearchPathSettings {
                extra_paths: vec![],
                src_root: src.clone(),
                workspace_root: src.clone(),
                custom_typeshed: Some(typeshed.clone()),
                site_packages: vec![site_packages.clone()],
                site_packages: Some(site_packages.clone()),
            },
        );

@@ -277,9 +272,9 @@ impl TestCaseBuilder<VendoredTypeshed> {
            target_version,
            SearchPathSettings {
                extra_paths: vec![],
                src_root: src.clone(),
                workspace_root: src.clone(),
                custom_typeshed: None,
                site_packages: vec![site_packages.clone()],
                site_packages: Some(site_packages.clone()),
            },
        );

@@ -1,8 +1,8 @@
pub use self::vendored::vendored_typeshed_stubs;
pub(super) use self::versions::{
parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsParseError,
TypeshedVersionsQueryResult,
pub(crate) use self::versions::{
parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsQueryResult,
};
pub use self::versions::{TypeshedVersionsParseError, TypeshedVersionsParseErrorKind};

mod vendored;
mod versions;
@@ -52,7 +52,7 @@ impl<'db> LazyTypeshedVersions<'db> {
} else {
return &VENDORED_VERSIONS;
};
let Ok(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
let Some(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
todo!(
"Still need to figure out how to handle VERSIONS files being deleted \
from custom typeshed directories! Expected a file to exist at {versions_path}"
@@ -90,7 +90,7 @@ static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
});

#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct TypeshedVersionsParseError {
pub struct TypeshedVersionsParseError {
line_number: Option<NonZeroU16>,
reason: TypeshedVersionsParseErrorKind,
}
@@ -123,7 +123,7 @@ impl std::error::Error for TypeshedVersionsParseError {
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub(super) enum TypeshedVersionsParseErrorKind {
pub enum TypeshedVersionsParseErrorKind {
TooManyLines(NonZeroUsize),
UnexpectedNumberOfColons,
InvalidModuleName(String),
@@ -505,7 +505,7 @@ mod tests {

#[test]
fn typeshed_versions_consistent_with_vendored_stubs() {
const VERSIONS_DATA: &str = include_str!("../../../vendor/typeshed/stdlib/VERSIONS");
const VERSIONS_DATA: &str = include_str!("../../vendor/typeshed/stdlib/VERSIONS");
let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();
1
crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
vendored
Normal file
@@ -0,0 +1 @@
f863db6bc5242348ceaa6a3bca4e59aa9e62faaa
@@ -1,18 +1,18 @@
import sys
from _typeshed import SupportsWrite
from collections.abc import Iterable, Iterator
from typing import Any, Final
from typing import Any, Final, Literal
from typing_extensions import TypeAlias

__version__: Final[str]

QUOTE_ALL: Final = 1
QUOTE_MINIMAL: Final = 0
QUOTE_NONE: Final = 3
QUOTE_NONNUMERIC: Final = 2
QUOTE_ALL: Literal[1]
QUOTE_MINIMAL: Literal[0]
QUOTE_NONE: Literal[3]
QUOTE_NONNUMERIC: Literal[2]
if sys.version_info >= (3, 12):
QUOTE_STRINGS: Final = 4
QUOTE_NOTNULL: Final = 5
QUOTE_STRINGS: Literal[4]
QUOTE_NOTNULL: Literal[5]

# Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC`
# However, using literals in situations like these can cause false-positives (see #7258)
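The paired lines above show the two annotation styles this change in the vendored csv stub toggles between: `X: Final = 1` lets a checker infer `Literal[1]`, while `X: Literal[1]` declares the literal type outright. A small self-contained illustration of why either form is more useful to a checker than a plain `int` annotation (the names are hypothetical, not part of the stub):

from typing import Final, Literal

QUOTE_DEMO: Final = 1  # inferred as Literal[1] because of Final
QUOTE_PLAIN: int = 1   # only known to be some int

def quoting_mode(mode: Literal[0, 1, 2, 3]) -> None: ...

quoting_mode(QUOTE_DEMO)   # accepted: Literal[1] matches the parameter type
quoting_mode(QUOTE_PLAIN)  # flagged by a type checker: int is too wide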
@@ -71,7 +71,7 @@ class _CData(metaclass=_CDataMeta):
@classmethod
def from_address(cls, address: int) -> Self: ...
@classmethod
def from_param(cls, value: Any, /) -> Self | _CArgObject: ...
def from_param(cls, obj: Any) -> Self | _CArgObject: ...
@classmethod
def in_dll(cls, library: CDLL, name: str) -> Self: ...
def __buffer__(self, flags: int, /) -> memoryview: ...
@@ -368,7 +368,11 @@ def tparm(
) -> bytes: ...
def typeahead(fd: int, /) -> None: ...
def unctrl(ch: _ChType, /) -> bytes: ...
def unget_wch(ch: int | str, /) -> None: ...

if sys.version_info < (3, 12) or sys.platform != "darwin":
# The support for macos was dropped in 3.12
def unget_wch(ch: int | str, /) -> None: ...

def ungetch(ch: _ChType, /) -> None: ...
def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: ...
def update_lines_cols() -> None: ...
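The version/platform gate in the curses hunk above is the standard way stubs model APIs that exist only on some combinations of interpreter version and platform: type checkers evaluate `sys.version_info` and `sys.platform` tests statically against the configured target, so `unget_wch` simply disappears from the stub when checking macOS on Python 3.12+. The same idiom in a minimal, self-contained form (the function name is hypothetical):

import sys

if sys.version_info < (3, 12) or sys.platform != "darwin":
    # Visible when checking Linux/Windows on any version, or macOS on <= 3.11;
    # hidden when checking macOS on 3.12+, where the API was dropped.
    def demo_unget(ch: int | str, /) -> None: ...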
@@ -443,10 +447,13 @@ class _CursesWindow:
def getch(self) -> int: ...
@overload
def getch(self, y: int, x: int) -> int: ...
@overload
def get_wch(self) -> int | str: ...
@overload
def get_wch(self, y: int, x: int) -> int | str: ...
if sys.version_info < (3, 12) or sys.platform != "darwin":
# The support for macos was dropped in 3.12
@overload
def get_wch(self) -> int | str: ...
@overload
def get_wch(self, y: int, x: int) -> int | str: ...

@overload
def getkey(self) -> str: ...
@overload
@@ -17,20 +17,20 @@ class DecimalTuple(NamedTuple):
digits: tuple[int, ...]
exponent: int | Literal["n", "N", "F"]

ROUND_DOWN: Final[str]
ROUND_HALF_UP: Final[str]
ROUND_HALF_EVEN: Final[str]
ROUND_CEILING: Final[str]
ROUND_FLOOR: Final[str]
ROUND_UP: Final[str]
ROUND_HALF_DOWN: Final[str]
ROUND_05UP: Final[str]
HAVE_CONTEXTVAR: Final[bool]
HAVE_THREADS: Final[bool]
MAX_EMAX: Final[int]
MAX_PREC: Final[int]
MIN_EMIN: Final[int]
MIN_ETINY: Final[int]
ROUND_DOWN: str
ROUND_HALF_UP: str
ROUND_HALF_EVEN: str
ROUND_CEILING: str
ROUND_FLOOR: str
ROUND_UP: str
ROUND_HALF_DOWN: str
ROUND_05UP: str
HAVE_CONTEXTVAR: bool
HAVE_THREADS: bool
MAX_EMAX: int
MAX_PREC: int
MIN_EMIN: int
MIN_ETINY: int

class DecimalException(ArithmeticError): ...
class Clamped(DecimalException): ...
@@ -1,5 +1,5 @@
from _typeshed import structseq
from typing import Any, Final, Literal, SupportsIndex, final
from typing import Final, Literal, SupportsIndex, final
from typing_extensions import Buffer, Self

class ChannelError(RuntimeError): ...
@@ -72,15 +72,13 @@ class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, in
@property
def send_released(self) -> bool: ...

def create(unboundop: Literal[1, 2, 3]) -> ChannelID: ...
def create() -> ChannelID: ...
def destroy(cid: SupportsIndex) -> None: ...
def list_all() -> list[ChannelID]: ...
def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: ...
def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: ...
def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: ...
def recv(cid: SupportsIndex, default: object = ...) -> tuple[Any, Literal[1, 2, 3]]: ...
def recv(cid: SupportsIndex, default: object = ...) -> object: ...
def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: ...
def get_count(cid: SupportsIndex) -> int: ...
def get_info(cid: SupportsIndex) -> ChannelInfo: ...
def get_channel_defaults(cid: SupportsIndex) -> Literal[1, 2, 3]: ...
def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: ...
@@ -1,5 +1,5 @@
from collections.abc import Iterable, Sequence
from typing import Final, TypeVar
from typing import TypeVar

_T = TypeVar("_T")
_K = TypeVar("_K")
@@ -7,15 +7,15 @@ _V = TypeVar("_V")

__all__ = ["compiler_fixup", "customize_config_vars", "customize_compiler", "get_platform_osx"]

_UNIVERSAL_CONFIG_VARS: Final[tuple[str, ...]] # undocumented
_COMPILER_CONFIG_VARS: Final[tuple[str, ...]] # undocumented
_INITPRE: Final[str] # undocumented
_UNIVERSAL_CONFIG_VARS: tuple[str, ...] # undocumented
_COMPILER_CONFIG_VARS: tuple[str, ...] # undocumented
_INITPRE: str # undocumented

def _find_executable(executable: str, path: str | None = None) -> str | None: ... # undocumented
def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: ... # undocumented
def _find_build_tool(toolname: str) -> str: ... # undocumented

_SYSTEM_VERSION: Final[str | None] # undocumented
_SYSTEM_VERSION: str | None # undocumented

def _get_system_version() -> str: ... # undocumented
def _remove_original_values(_config_vars: dict[str, str]) -> None: ... # undocumented
117
crates/red_knot_module_resolver/vendor/typeshed/stdlib/_stat.pyi
vendored
Normal file
@@ -0,0 +1,117 @@
import sys
from typing import Literal

SF_APPEND: Literal[0x00040000]
SF_ARCHIVED: Literal[0x00010000]
SF_IMMUTABLE: Literal[0x00020000]
SF_NOUNLINK: Literal[0x00100000]
SF_SNAPSHOT: Literal[0x00200000]

ST_MODE: Literal[0]
ST_INO: Literal[1]
ST_DEV: Literal[2]
ST_NLINK: Literal[3]
ST_UID: Literal[4]
ST_GID: Literal[5]
ST_SIZE: Literal[6]
ST_ATIME: Literal[7]
ST_MTIME: Literal[8]
ST_CTIME: Literal[9]

S_IFIFO: Literal[0o010000]
S_IFLNK: Literal[0o120000]
S_IFREG: Literal[0o100000]
S_IFSOCK: Literal[0o140000]
S_IFBLK: Literal[0o060000]
S_IFCHR: Literal[0o020000]
S_IFDIR: Literal[0o040000]

# These are 0 on systems that don't support the specific kind of file.
# Example: Linux doesn't support door files, so S_IFDOOR is 0 on linux.
S_IFDOOR: int
S_IFPORT: int
S_IFWHT: int

S_ISUID: Literal[0o4000]
S_ISGID: Literal[0o2000]
S_ISVTX: Literal[0o1000]

S_IRWXU: Literal[0o0700]
S_IRUSR: Literal[0o0400]
S_IWUSR: Literal[0o0200]
S_IXUSR: Literal[0o0100]

S_IRWXG: Literal[0o0070]
S_IRGRP: Literal[0o0040]
S_IWGRP: Literal[0o0020]
S_IXGRP: Literal[0o0010]

S_IRWXO: Literal[0o0007]
S_IROTH: Literal[0o0004]
S_IWOTH: Literal[0o0002]
S_IXOTH: Literal[0o0001]

S_ENFMT: Literal[0o2000]
S_IREAD: Literal[0o0400]
S_IWRITE: Literal[0o0200]
S_IEXEC: Literal[0o0100]

UF_APPEND: Literal[0x00000004]
UF_COMPRESSED: Literal[0x00000020] # OS X 10.6+ only
UF_HIDDEN: Literal[0x00008000] # OX X 10.5+ only
UF_IMMUTABLE: Literal[0x00000002]
UF_NODUMP: Literal[0x00000001]
UF_NOUNLINK: Literal[0x00000010]
UF_OPAQUE: Literal[0x00000008]

def S_IMODE(mode: int, /) -> int: ...
def S_IFMT(mode: int, /) -> int: ...
def S_ISBLK(mode: int, /) -> bool: ...
def S_ISCHR(mode: int, /) -> bool: ...
def S_ISDIR(mode: int, /) -> bool: ...
def S_ISDOOR(mode: int, /) -> bool: ...
def S_ISFIFO(mode: int, /) -> bool: ...
def S_ISLNK(mode: int, /) -> bool: ...
def S_ISPORT(mode: int, /) -> bool: ...
def S_ISREG(mode: int, /) -> bool: ...
def S_ISSOCK(mode: int, /) -> bool: ...
def S_ISWHT(mode: int, /) -> bool: ...
def filemode(mode: int, /) -> str: ...

if sys.platform == "win32":
IO_REPARSE_TAG_SYMLINK: int
IO_REPARSE_TAG_MOUNT_POINT: int
IO_REPARSE_TAG_APPEXECLINK: int

if sys.platform == "win32":
FILE_ATTRIBUTE_ARCHIVE: Literal[32]
FILE_ATTRIBUTE_COMPRESSED: Literal[2048]
FILE_ATTRIBUTE_DEVICE: Literal[64]
FILE_ATTRIBUTE_DIRECTORY: Literal[16]
FILE_ATTRIBUTE_ENCRYPTED: Literal[16384]
FILE_ATTRIBUTE_HIDDEN: Literal[2]
FILE_ATTRIBUTE_INTEGRITY_STREAM: Literal[32768]
FILE_ATTRIBUTE_NORMAL: Literal[128]
FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Literal[8192]
FILE_ATTRIBUTE_NO_SCRUB_DATA: Literal[131072]
FILE_ATTRIBUTE_OFFLINE: Literal[4096]
FILE_ATTRIBUTE_READONLY: Literal[1]
FILE_ATTRIBUTE_REPARSE_POINT: Literal[1024]
FILE_ATTRIBUTE_SPARSE_FILE: Literal[512]
FILE_ATTRIBUTE_SYSTEM: Literal[4]
FILE_ATTRIBUTE_TEMPORARY: Literal[256]
FILE_ATTRIBUTE_VIRTUAL: Literal[65536]

if sys.version_info >= (3, 13):
SF_SETTABLE: Literal[0x3FFF0000]
# https://github.com/python/cpython/issues/114081#issuecomment-2119017790
# SF_RESTRICTED: Literal[0x00080000]
SF_FIRMLINK: Literal[0x00800000]
SF_DATALESS: Literal[0x40000000]

SF_SUPPORTED: Literal[0x9F0000]
SF_SYNTHETIC: Literal[0xC0000000]

UF_TRACKED: Literal[0x00000040]
UF_DATAVAULT: Literal[0x00000080]
UF_SETTABLE: Literal[0x0000FFFF]
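The comment in the new `_stat` stub above ("These are 0 on systems that don't support the specific kind of file") explains why `S_IFDOOR`, `S_IFPORT`, and `S_IFWHT` are typed as plain `int` rather than `Literal[...]`: their values vary by platform and collapse to 0 where unsupported, so the corresponding `S_IS*` predicate can never match there. A quick illustration with the real `stat` module:

import stat

print(stat.S_IFDOOR)  # 0 on Linux, macOS, and Windows (Solaris-style door files)

# S_ISDOOR extracts the file-type bits with S_IFMT and compares them to S_IFDOOR,
# so with a zero flag it is False for every mode:
regular_file_mode = stat.S_IFREG | 0o644
print(stat.S_ISDOOR(regular_file_mode))  # False
print(stat.S_ISREG(regular_file_mode))   # True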
@@ -1,6 +1,6 @@
import sys
from collections.abc import Callable
from typing import Any, ClassVar, Final, final
from typing import Any, ClassVar, Literal, final
from typing_extensions import TypeAlias

# _tkinter is meant to be only used internally by tkinter, but some tkinter
@@ -95,16 +95,16 @@ class TkappType:
def settrace(self, func: _TkinterTraceFunc | None, /) -> None: ...

# These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS
ALL_EVENTS: Final = -3
FILE_EVENTS: Final = 8
IDLE_EVENTS: Final = 32
TIMER_EVENTS: Final = 16
WINDOW_EVENTS: Final = 4
ALL_EVENTS: Literal[-3]
FILE_EVENTS: Literal[8]
IDLE_EVENTS: Literal[32]
TIMER_EVENTS: Literal[16]
WINDOW_EVENTS: Literal[4]

DONT_WAIT: Final = 2
EXCEPTION: Final = 8
READABLE: Final = 2
WRITABLE: Final = 4
DONT_WAIT: Literal[2]
EXCEPTION: Literal[8]
READABLE: Literal[2]
WRITABLE: Literal[4]

TCL_VERSION: str
TK_VERSION: str
@@ -1,117 +1,117 @@
import sys
from _typeshed import ReadableBuffer
from collections.abc import Sequence
from typing import Any, Final, Literal, NoReturn, final, overload
from typing import Any, Literal, NoReturn, final, overload

if sys.platform == "win32":
ABOVE_NORMAL_PRIORITY_CLASS: Final = 0x8000
BELOW_NORMAL_PRIORITY_CLASS: Final = 0x4000
ABOVE_NORMAL_PRIORITY_CLASS: Literal[0x8000]
BELOW_NORMAL_PRIORITY_CLASS: Literal[0x4000]

CREATE_BREAKAWAY_FROM_JOB: Final = 0x1000000
CREATE_DEFAULT_ERROR_MODE: Final = 0x4000000
CREATE_NO_WINDOW: Final = 0x8000000
CREATE_NEW_CONSOLE: Final = 0x10
CREATE_NEW_PROCESS_GROUP: Final = 0x200
CREATE_BREAKAWAY_FROM_JOB: Literal[0x1000000]
CREATE_DEFAULT_ERROR_MODE: Literal[0x4000000]
CREATE_NO_WINDOW: Literal[0x8000000]
CREATE_NEW_CONSOLE: Literal[0x10]
CREATE_NEW_PROCESS_GROUP: Literal[0x200]

DETACHED_PROCESS: Final = 8
DUPLICATE_CLOSE_SOURCE: Final = 1
DUPLICATE_SAME_ACCESS: Final = 2
DETACHED_PROCESS: Literal[8]
DUPLICATE_CLOSE_SOURCE: Literal[1]
DUPLICATE_SAME_ACCESS: Literal[2]

ERROR_ALREADY_EXISTS: Final = 183
ERROR_BROKEN_PIPE: Final = 109
ERROR_IO_PENDING: Final = 997
ERROR_MORE_DATA: Final = 234
ERROR_NETNAME_DELETED: Final = 64
ERROR_NO_DATA: Final = 232
ERROR_NO_SYSTEM_RESOURCES: Final = 1450
ERROR_OPERATION_ABORTED: Final = 995
ERROR_PIPE_BUSY: Final = 231
ERROR_PIPE_CONNECTED: Final = 535
ERROR_SEM_TIMEOUT: Final = 121
ERROR_ALREADY_EXISTS: Literal[183]
ERROR_BROKEN_PIPE: Literal[109]
ERROR_IO_PENDING: Literal[997]
ERROR_MORE_DATA: Literal[234]
ERROR_NETNAME_DELETED: Literal[64]
ERROR_NO_DATA: Literal[232]
ERROR_NO_SYSTEM_RESOURCES: Literal[1450]
ERROR_OPERATION_ABORTED: Literal[995]
ERROR_PIPE_BUSY: Literal[231]
ERROR_PIPE_CONNECTED: Literal[535]
ERROR_SEM_TIMEOUT: Literal[121]

FILE_FLAG_FIRST_PIPE_INSTANCE: Final = 0x80000
FILE_FLAG_OVERLAPPED: Final = 0x40000000
FILE_FLAG_FIRST_PIPE_INSTANCE: Literal[0x80000]
FILE_FLAG_OVERLAPPED: Literal[0x40000000]

FILE_GENERIC_READ: Final = 1179785
FILE_GENERIC_WRITE: Final = 1179926
FILE_GENERIC_READ: Literal[1179785]
FILE_GENERIC_WRITE: Literal[1179926]

FILE_MAP_ALL_ACCESS: Final = 983071
FILE_MAP_COPY: Final = 1
FILE_MAP_EXECUTE: Final = 32
FILE_MAP_READ: Final = 4
FILE_MAP_WRITE: Final = 2
FILE_MAP_ALL_ACCESS: Literal[983071]
FILE_MAP_COPY: Literal[1]
FILE_MAP_EXECUTE: Literal[32]
FILE_MAP_READ: Literal[4]
FILE_MAP_WRITE: Literal[2]

FILE_TYPE_CHAR: Final = 2
FILE_TYPE_DISK: Final = 1
FILE_TYPE_PIPE: Final = 3
FILE_TYPE_REMOTE: Final = 32768
FILE_TYPE_UNKNOWN: Final = 0
FILE_TYPE_CHAR: Literal[2]
FILE_TYPE_DISK: Literal[1]
FILE_TYPE_PIPE: Literal[3]
FILE_TYPE_REMOTE: Literal[32768]
FILE_TYPE_UNKNOWN: Literal[0]

GENERIC_READ: Final = 0x80000000
GENERIC_WRITE: Final = 0x40000000
HIGH_PRIORITY_CLASS: Final = 0x80
INFINITE: Final = 0xFFFFFFFF
GENERIC_READ: Literal[0x80000000]
GENERIC_WRITE: Literal[0x40000000]
HIGH_PRIORITY_CLASS: Literal[0x80]
INFINITE: Literal[0xFFFFFFFF]
# Ignore the Flake8 error -- flake8-pyi assumes
# most numbers this long will be implementation details,
# but here we can see that it's a power of 2
INVALID_HANDLE_VALUE: Final = 0xFFFFFFFFFFFFFFFF # noqa: Y054
IDLE_PRIORITY_CLASS: Final = 0x40
NORMAL_PRIORITY_CLASS: Final = 0x20
REALTIME_PRIORITY_CLASS: Final = 0x100
NMPWAIT_WAIT_FOREVER: Final = 0xFFFFFFFF
INVALID_HANDLE_VALUE: Literal[0xFFFFFFFFFFFFFFFF] # noqa: Y054
IDLE_PRIORITY_CLASS: Literal[0x40]
NORMAL_PRIORITY_CLASS: Literal[0x20]
REALTIME_PRIORITY_CLASS: Literal[0x100]
NMPWAIT_WAIT_FOREVER: Literal[0xFFFFFFFF]
MEM_COMMIT: Final = 0x1000
MEM_FREE: Final = 0x10000
MEM_IMAGE: Final = 0x1000000
MEM_MAPPED: Final = 0x40000
MEM_PRIVATE: Final = 0x20000
MEM_RESERVE: Final = 0x2000
MEM_COMMIT: Literal[0x1000]
MEM_FREE: Literal[0x10000]
MEM_IMAGE: Literal[0x1000000]
MEM_MAPPED: Literal[0x40000]
MEM_PRIVATE: Literal[0x20000]
MEM_RESERVE: Literal[0x2000]

NULL: Final = 0
OPEN_EXISTING: Final = 3
NULL: Literal[0]
OPEN_EXISTING: Literal[3]

PIPE_ACCESS_DUPLEX: Final = 3
PIPE_ACCESS_INBOUND: Final = 1
PIPE_READMODE_MESSAGE: Final = 2
PIPE_TYPE_MESSAGE: Final = 4
PIPE_UNLIMITED_INSTANCES: Final = 255
PIPE_WAIT: Final = 0
PIPE_ACCESS_DUPLEX: Literal[3]
PIPE_ACCESS_INBOUND: Literal[1]
PIPE_READMODE_MESSAGE: Literal[2]
PIPE_TYPE_MESSAGE: Literal[4]
PIPE_UNLIMITED_INSTANCES: Literal[255]
PIPE_WAIT: Literal[0]

PAGE_EXECUTE: Final = 0x10
PAGE_EXECUTE_READ: Final = 0x20
PAGE_EXECUTE_READWRITE: Final = 0x40
PAGE_EXECUTE_WRITECOPY: Final = 0x80
PAGE_GUARD: Final = 0x100
PAGE_NOACCESS: Final = 0x1
PAGE_NOCACHE: Final = 0x200
PAGE_READONLY: Final = 0x2
PAGE_READWRITE: Final = 0x4
PAGE_WRITECOMBINE: Final = 0x400
PAGE_WRITECOPY: Final = 0x8
PAGE_EXECUTE: Literal[0x10]
PAGE_EXECUTE_READ: Literal[0x20]
PAGE_EXECUTE_READWRITE: Literal[0x40]
PAGE_EXECUTE_WRITECOPY: Literal[0x80]
PAGE_GUARD: Literal[0x100]
PAGE_NOACCESS: Literal[0x1]
PAGE_NOCACHE: Literal[0x200]
PAGE_READONLY: Literal[0x2]
PAGE_READWRITE: Literal[0x4]
PAGE_WRITECOMBINE: Literal[0x400]
PAGE_WRITECOPY: Literal[0x8]

PROCESS_ALL_ACCESS: Final = 0x1FFFFF
PROCESS_DUP_HANDLE: Final = 0x40
PROCESS_ALL_ACCESS: Literal[0x1FFFFF]
PROCESS_DUP_HANDLE: Literal[0x40]

SEC_COMMIT: Final = 0x8000000
SEC_IMAGE: Final = 0x1000000
SEC_LARGE_PAGES: Final = 0x80000000
SEC_NOCACHE: Final = 0x10000000
SEC_RESERVE: Final = 0x4000000
SEC_WRITECOMBINE: Final = 0x40000000
SEC_COMMIT: Literal[0x8000000]
SEC_IMAGE: Literal[0x1000000]
SEC_LARGE_PAGES: Literal[0x80000000]
SEC_NOCACHE: Literal[0x10000000]
SEC_RESERVE: Literal[0x4000000]
SEC_WRITECOMBINE: Literal[0x40000000]

STARTF_USESHOWWINDOW: Final = 0x1
STARTF_USESTDHANDLES: Final = 0x100
STARTF_USESHOWWINDOW: Literal[0x1]
STARTF_USESTDHANDLES: Literal[0x100]

STD_ERROR_HANDLE: Final = 0xFFFFFFF4
STD_OUTPUT_HANDLE: Final = 0xFFFFFFF5
STD_INPUT_HANDLE: Final = 0xFFFFFFF6
STD_ERROR_HANDLE: Literal[0xFFFFFFF4]
STD_OUTPUT_HANDLE: Literal[0xFFFFFFF5]
STD_INPUT_HANDLE: Literal[0xFFFFFFF6]

STILL_ACTIVE: Final = 259
SW_HIDE: Final = 0
SYNCHRONIZE: Final = 0x100000
WAIT_ABANDONED_0: Final = 128
WAIT_OBJECT_0: Final = 0
WAIT_TIMEOUT: Final = 258
STILL_ACTIVE: Literal[259]
SW_HIDE: Literal[0]
SYNCHRONIZE: Literal[0x100000]
WAIT_ABANDONED_0: Literal[128]
WAIT_OBJECT_0: Literal[0]
WAIT_TIMEOUT: Literal[258]

if sys.version_info >= (3, 10):
LOCALE_NAME_INVARIANT: str
@@ -131,32 +131,32 @@ if sys.platform == "win32":
LCMAP_UPPERCASE: int

if sys.version_info >= (3, 12):
COPYFILE2_CALLBACK_CHUNK_STARTED: Final = 1
COPYFILE2_CALLBACK_CHUNK_FINISHED: Final = 2
COPYFILE2_CALLBACK_STREAM_STARTED: Final = 3
COPYFILE2_CALLBACK_STREAM_FINISHED: Final = 4
COPYFILE2_CALLBACK_POLL_CONTINUE: Final = 5
COPYFILE2_CALLBACK_ERROR: Final = 6
COPYFILE2_CALLBACK_CHUNK_STARTED: Literal[1]
COPYFILE2_CALLBACK_CHUNK_FINISHED: Literal[2]
COPYFILE2_CALLBACK_STREAM_STARTED: Literal[3]
COPYFILE2_CALLBACK_STREAM_FINISHED: Literal[4]
COPYFILE2_CALLBACK_POLL_CONTINUE: Literal[5]
COPYFILE2_CALLBACK_ERROR: Literal[6]

COPYFILE2_PROGRESS_CONTINUE: Final = 0
COPYFILE2_PROGRESS_CANCEL: Final = 1
COPYFILE2_PROGRESS_STOP: Final = 2
COPYFILE2_PROGRESS_QUIET: Final = 3
COPYFILE2_PROGRESS_PAUSE: Final = 4
COPYFILE2_PROGRESS_CONTINUE: Literal[0]
COPYFILE2_PROGRESS_CANCEL: Literal[1]
COPYFILE2_PROGRESS_STOP: Literal[2]
COPYFILE2_PROGRESS_QUIET: Literal[3]
COPYFILE2_PROGRESS_PAUSE: Literal[4]

COPY_FILE_FAIL_IF_EXISTS: Final = 0x1
COPY_FILE_RESTARTABLE: Final = 0x2
COPY_FILE_OPEN_SOURCE_FOR_WRITE: Final = 0x4
COPY_FILE_ALLOW_DECRYPTED_DESTINATION: Final = 0x8
COPY_FILE_COPY_SYMLINK: Final = 0x800
COPY_FILE_NO_BUFFERING: Final = 0x1000
COPY_FILE_REQUEST_SECURITY_PRIVILEGES: Final = 0x2000
COPY_FILE_RESUME_FROM_PAUSE: Final = 0x4000
COPY_FILE_NO_OFFLOAD: Final = 0x40000
COPY_FILE_REQUEST_COMPRESSED_TRAFFIC: Final = 0x10000000
COPY_FILE_FAIL_IF_EXISTS: Literal[0x1]
COPY_FILE_RESTARTABLE: Literal[0x2]
COPY_FILE_OPEN_SOURCE_FOR_WRITE: Literal[0x4]
COPY_FILE_ALLOW_DECRYPTED_DESTINATION: Literal[0x8]
COPY_FILE_COPY_SYMLINK: Literal[0x800]
COPY_FILE_NO_BUFFERING: Literal[0x1000]
COPY_FILE_REQUEST_SECURITY_PRIVILEGES: Literal[0x2000]
COPY_FILE_RESUME_FROM_PAUSE: Literal[0x4000]
COPY_FILE_NO_OFFLOAD: Literal[0x40000]
COPY_FILE_REQUEST_COMPRESSED_TRAFFIC: Literal[0x10000000]

ERROR_ACCESS_DENIED: Final = 5
ERROR_PRIVILEGE_NOT_HELD: Final = 1314
ERROR_ACCESS_DENIED: Literal[5]
ERROR_PRIVILEGE_NOT_HELD: Literal[1314]

def CloseHandle(handle: int, /) -> None: ...
@overload
Some files were not shown because too many files have changed in this diff