Compare commits

55 Commits: 0.5.2...editables-
| SHA1 |
|---|
| 09e8599e91 |
| 5f96f69151 |
| ad19b3fd0e |
| a62e2d2000 |
| d61747093c |
| 0ba7fc63d0 |
| fa5b19d4b6 |
| 181e7b3c0d |
| 519eca9fe7 |
| f0d589d7a3 |
| 512c8b2cc5 |
| 811f78d94d |
| 8f1be31289 |
| 8cfbac71a4 |
| 9460857932 |
| a028ca22f0 |
| 7953f6aa79 |
| 764d9ab4ee |
| 9b9d701500 |
| 648cca199b |
| 2e77b775b0 |
| ebe5b06c95 |
| b2a49d8140 |
| 985a999234 |
| 1df51b1fbf |
| 1435b0f022 |
| e39298dcbc |
| 1de8ff3308 |
| 72e02206d6 |
| 80f0116641 |
| 79b535587b |
| 6e0cbe0f35 |
| 91338ae902 |
| 0c72577b5d |
| fe04f2b09d |
| 073588b48e |
| 9a2dafb43d |
| 595b1aa4a1 |
| 30cef67b45 |
| d0c5925672 |
| b1487b6b4f |
| 85ae02d62e |
| 9a817a2922 |
| ecd4b4d943 |
| b9a8cd390f |
| 2348714081 |
| 3817b207cf |
| b1cf9ea663 |
| 8ad10b9307 |
| 9c5524a9a2 |
| 1530223311 |
| b9671522c4 |
| 9918202422 |
| 42e7147860 |
| 25feab93f8 |
107  .github/workflows/publish-docs.yml (vendored)
@@ -21,42 +21,131 @@ jobs:

```yaml
  mkdocs:
    runs-on: ubuntu-latest
    env:
      CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
      MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.ref }}

      - uses: actions/setup-python@v5
        with:
          python-version: 3.12

      - name: "Set docs version"
        run: |
          version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
          # if version is missing, exit with error
          if [[ -z "$version" ]]; then
            echo "Can't build docs without a version."
            exit 1
          fi

          # Use version as display name for now
          display_name="$version"

          echo "version=$version" >> $GITHUB_ENV
          echo "display_name=$display_name" >> $GITHUB_ENV

      - name: "Set branch name"
        run: |
          version="${{ env.version }}"
          display_name="${{ env.display_name }}"
          timestamp="$(date +%s)"

          # create branch_display_name from display_name by replacing all
          # characters disallowed in git branch names with hyphens
          branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"

          echo "branch_name=update-docs-$branch_display_name-$timestamp" >> $GITHUB_ENV
          echo "timestamp=$timestamp" >> $GITHUB_ENV

      - name: "Add SSH key"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        uses: webfactory/ssh-agent@v0.9.0
        with:
          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}

      - name: "Install Rust toolchain"
        run: rustup show

      - uses: Swatinem/rust-cache@v2

      - name: "Install Insiders dependencies"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        run: pip install -r docs/requirements-insiders.txt

      - name: "Install dependencies"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
        run: pip install -r docs/requirements.txt

      - name: "Copy README File"
        run: |
          python scripts/transform_readme.py --target mkdocs
          python scripts/generate_mkdocs.py

      - name: "Build Insiders docs"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        run: mkdocs build --strict -f mkdocs.insiders.yml

      - name: "Build docs"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
        run: mkdocs build --strict -f mkdocs.public.yml
      - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
        uses: cloudflare/wrangler-action@v3.7.0
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
          # `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
          command: pages deploy site --project-name=astral-docs --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}

      - name: "Clone docs repo"
        run: |
          version="${{ env.version }}"
          git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs

      - name: "Copy docs"
        run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/

      - name: "Commit docs"
        working-directory: astral-docs
        run: |
          branch_name="${{ env.branch_name }}"

          git config user.name "$GITHUB_ACTOR"
          git config user.email "$GITHUB_ACTOR@users.noreply.github.com"

          git checkout -b $branch_name
          git add site/ruff
          git commit -m "Update ruff documentation for $version"

      - name: "Create Pull Request"
        working-directory: astral-docs
        env:
          GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
        run: |
          version="${{ env.version }}"
          display_name="${{ env.display_name }}"
          branch_name="${{ env.branch_name }}"

          # set the PR title
          pull_request_title="Update ruff documentation for $display_name"

          # Delete any existing pull requests that are open for this version
          # by checking against pull_request_title because the new PR will
          # supersede the old one.
          gh pr list --state open --json title --jq '.[] | select(.title == "$pull_request_title") | .number' | \
            xargs -I {} gh pr close {}

          # push the branch to GitHub
          git push origin $branch_name

          # create the PR
          gh pr create --base main --head $branch_name \
            --title "$pull_request_title" \
            --body "Automated documentation update for $display_name" \
            --label "documentation"

      - name: "Merge Pull Request"
        if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
        working-directory: astral-docs
        env:
          GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
        run: |
          branch_name="${{ env.branch_name }}"
          # auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
          # give the PR a few seconds to be created before trying to auto-merge it
          sleep 10
          gh pr merge --squash $branch_name
```
55  .github/workflows/publish-wasm.yml (vendored, new file)
```diff
@@ -0,0 +1,55 @@
+# Build and publish ruff-api for wasm.
+#
+# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish
+# job within `cargo-dist`.
+name: "Build and publish wasm"
+
+on:
+  workflow_dispatch:
+  workflow_call:
+    inputs:
+      plan:
+        required: true
+        type: string
+
+env:
+  CARGO_INCREMENTAL: 0
+  CARGO_NET_RETRY: 10
+  CARGO_TERM_COLOR: always
+  RUSTUP_MAX_RETRIES: 10
+
+jobs:
+  ruff_wasm:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write
+    strategy:
+      matrix:
+        target: [web, bundler, nodejs]
+      fail-fast: false
+    steps:
+      - uses: actions/checkout@v4
+      - name: "Install Rust toolchain"
+        run: rustup target add wasm32-unknown-unknown
+      - uses: jetli/wasm-pack-action@v0.4.0
+      - uses: jetli/wasm-bindgen-action@v0.2.0
+      - name: "Run wasm-pack build"
+        run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
+      - name: "Rename generated package"
+        run: | # Replace the package name w/ jq
+          jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
+          mv /tmp/package.json crates/ruff_wasm/pkg
+      - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 18
+          registry-url: "https://registry.npmjs.org"
+      - name: "Publish (dry-run)"
+        if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
+        run: npm publish --dry-run crates/ruff_wasm/pkg
+      - name: "Publish"
+        if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
+        run: npm publish --provenance --access public crates/ruff_wasm/pkg
+        env:
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
```
18  .github/workflows/release.yml (vendored)
```diff
@@ -214,16 +214,32 @@ jobs:
       "id-token": "write"
       "packages": "write"
 
+  custom-publish-wasm:
+    needs:
+      - plan
+      - host
+    if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
+    uses: ./.github/workflows/publish-wasm.yml
+    with:
+      plan: ${{ needs.plan.outputs.val }}
+    secrets: inherit
+    # publish jobs get escalated permissions
+    permissions:
+      "contents": "read"
+      "id-token": "write"
+      "packages": "write"
+
   # Create a GitHub Release while uploading all files to it
   announce:
     needs:
       - plan
       - host
       - custom-publish-pypi
+      - custom-publish-wasm
     # use "always() && ..." to allow us to wait for all publish jobs while
     # still allowing individual publish jobs to skip themselves (for prereleases).
     # "host" however must run to completion, no skipping allowed!
-    if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') }}
+    if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
     runs-on: "ubuntu-20.04"
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
.pre-commit-config.yaml

```diff
@@ -42,7 +42,7 @@ repos:
         )$
 
   - repo: https://github.com/crate-ci/typos
-    rev: v1.23.1
+    rev: v1.23.2
     hooks:
       - id: typos
 
@@ -56,7 +56,7 @@ repos:
       pass_filenames: false # This makes it a lot faster
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.1
+    rev: v0.5.2
     hooks:
       - id: ruff-format
       - id: ruff
```
42  CHANGELOG.md
```diff
@@ -1,5 +1,47 @@
 # Changelog
 
+## 0.5.3
+
+**Ruff 0.5.3 marks the stable release of the Ruff language server and introduces revamped
+[documentation](https://docs.astral.sh/ruff/editors), including [setup guides for your editor of
+choice](https://docs.astral.sh/ruff/editors/setup) and [the language server
+itself](https://docs.astral.sh/ruff/editors/settings)**.
+
+### Preview features
+
+- Formatter: Insert empty line between suite and alternative branch after function/class definition ([#12294](https://github.com/astral-sh/ruff/pull/12294))
+- \[`pyupgrade`\] Implement `unnecessary-default-type-args` (`UP043`) ([#12371](https://github.com/astral-sh/ruff/pull/12371))
+
+### Rule changes
+
+- \[`flake8-bugbear`\] Detect enumerate iterations in `loop-iterator-mutation` (`B909`) ([#12366](https://github.com/astral-sh/ruff/pull/12366))
+- \[`flake8-bugbear`\] Remove `discard`, `remove`, and `pop` allowance for `loop-iterator-mutation` (`B909`) ([#12365](https://github.com/astral-sh/ruff/pull/12365))
+- \[`pylint`\] Allow `repeated-equality-comparison` for mixed operations (`PLR1714`) ([#12369](https://github.com/astral-sh/ruff/pull/12369))
+- \[`pylint`\] Ignore `self` and `cls` when counting arguments (`PLR0913`) ([#12367](https://github.com/astral-sh/ruff/pull/12367))
+- \[`pylint`\] Use UTF-8 as default encoding in `unspecified-encoding` fix (`PLW1514`) ([#12370](https://github.com/astral-sh/ruff/pull/12370))
+
+### Server
+
+- Build settings index in parallel for the native server ([#12299](https://github.com/astral-sh/ruff/pull/12299))
+- Use fallback settings when indexing the project ([#12362](https://github.com/astral-sh/ruff/pull/12362))
+- Consider `--preview` flag for `server` subcommand for the linter and formatter ([#12208](https://github.com/astral-sh/ruff/pull/12208))
+
+### Bug fixes
+
+- \[`flake8-comprehensions`\] Allow additional arguments for `sum` and `max` comprehensions (`C419`) ([#12364](https://github.com/astral-sh/ruff/pull/12364))
+- \[`pylint`\] Avoid dropping extra boolean operations in `repeated-equality-comparison` (`PLR1714`) ([#12368](https://github.com/astral-sh/ruff/pull/12368))
+- \[`pylint`\] Consider expression before statement when determining binding kind (`PLR1704`) ([#12346](https://github.com/astral-sh/ruff/pull/12346))
+
+### Documentation
+
+- Add docs for Ruff language server ([#12344](https://github.com/astral-sh/ruff/pull/12344))
+- Migrate to standalone docs repo ([#12341](https://github.com/astral-sh/ruff/pull/12341))
+- Update versioning policy for editor integration ([#12375](https://github.com/astral-sh/ruff/pull/12375))
+
+### Other changes
+
+- Publish Wasm API to npm ([#12317](https://github.com/astral-sh/ruff/pull/12317))
+
 ## 0.5.2
 
 ### Preview features
```
81  Cargo.lock (generated)
```diff
@@ -234,9 +234,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
 
 [[package]]
 name = "castaway"
-version = "0.2.2"
+version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a17ed5635fc8536268e5d4de1e22e81ac34419e5f052d4d51f4e01dcc263fcc"
+checksum = "0abae9be0aaf9ea96a3b1b8b1b55c602ca751eba1b1500220cea4ecbafe7c0d5"
 dependencies = [
  "rustversion",
 ]
@@ -314,9 +314,9 @@ dependencies = [
 
 [[package]]
 name = "clap"
-version = "4.5.8"
+version = "4.5.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84b3edb18336f4df585bc9aa31dd99c036dfa5dc5e9a2939a722a188f3a8970d"
+checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462"
 dependencies = [
  "clap_builder",
  "clap_derive",
@@ -324,9 +324,9 @@ dependencies = [
 
 [[package]]
 name = "clap_builder"
-version = "4.5.8"
+version = "4.5.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1c09dd5ada6c6c78075d6fd0da3f90d8080651e2d6cc8eb2f1aaa4034ced708"
+checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942"
 dependencies = [
  "anstream",
  "anstyle",
@@ -346,31 +346,20 @@ dependencies = [
 
 [[package]]
 name = "clap_complete_command"
-version = "0.5.1"
+version = "0.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "183495371ea78d4c9ff638bfc6497d46fed2396e4f9c50aebc1278a4a9919a3d"
+checksum = "da8e198c052315686d36371e8a3c5778b7852fc75cc313e4e11eeb7a644a1b62"
 dependencies = [
  "clap",
  "clap_complete",
- "clap_complete_fig",
  "clap_complete_nushell",
 ]
 
-[[package]]
-name = "clap_complete_fig"
-version = "4.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54b3e65f91fabdd23cac3d57d39d5d938b4daabd070c335c006dccb866a61110"
-dependencies = [
- "clap",
- "clap_complete",
-]
-
 [[package]]
 name = "clap_complete_nushell"
-version = "0.1.11"
+version = "4.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d02bc8b1a18ee47c4d2eec3fb5ac034dc68ebea6125b1509e9ccdffcddce66e"
+checksum = "1accf1b463dee0d3ab2be72591dccdab8bef314958340447c882c4c72acfe2a3"
 dependencies = [
  "clap",
  "clap_complete",
@@ -447,13 +436,14 @@ dependencies = [
 
 [[package]]
 name = "compact_str"
-version = "0.7.1"
+version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f86b9c4c00838774a6d902ef931eff7470720c51d90c2e32cfe15dc304737b3f"
+checksum = "6050c3a16ddab2e412160b31f2c871015704239bca62f72f6e5f0be631d3f644"
 dependencies = [
  "castaway",
  "cfg-if",
  "itoa",
+ "rustversion",
  "ryu",
  "serde",
  "static_assertions",
@@ -1372,9 +1362,9 @@ checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5"
 
 [[package]]
 name = "matchit"
-version = "0.8.3"
+version = "0.8.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d3c2fcf089c060eb333302d80c5f3ffa8297abecf220f788e4a09ef85f59420"
+checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
 
 [[package]]
 name = "memchr"
@@ -1914,6 +1904,7 @@ dependencies = [
  "ruff_index",
  "ruff_python_ast",
  "ruff_python_parser",
  "ruff_python_trivia",
  "ruff_text_size",
  "rustc-hash 2.0.0",
  "salsa",
@@ -2001,7 +1992,7 @@ dependencies = [
 
 [[package]]
 name = "ruff"
-version = "0.5.2"
+version = "0.5.3"
 dependencies = [
  "anyhow",
  "argfile",
@@ -2060,7 +2051,6 @@ dependencies = [
  "mimalloc",
  "once_cell",
  "red_knot",
  "red_knot_module_resolver",
  "ruff_db",
  "ruff_linter",
  "ruff_python_ast",
@@ -2095,7 +2085,10 @@ dependencies = [
  "countme",
  "dashmap 6.0.1",
  "filetime",
  "ignore",
  "insta",
  "ruff_cache",
  "ruff_notebook",
  "ruff_python_ast",
  "ruff_python_parser",
  "ruff_source_file",
@@ -2183,7 +2176,7 @@ dependencies = [
 
 [[package]]
 name = "ruff_linter"
-version = "0.5.2"
+version = "0.5.3"
 dependencies = [
  "aho-corasick",
  "annotate-snippets 0.9.2",
@@ -2449,6 +2442,7 @@ version = "0.2.2"
 dependencies = [
  "anyhow",
  "crossbeam",
  "ignore",
  "insta",
  "jod-thread",
  "libc",
@@ -2473,7 +2467,6 @@ dependencies = [
  "shellexpand",
  "tracing",
  "tracing-subscriber",
  "walkdir",
 ]
 
 [[package]]
@@ -2498,7 +2491,7 @@ dependencies = [
 
 [[package]]
 name = "ruff_wasm"
-version = "0.0.0"
+version = "0.5.3"
 dependencies = [
  "console_error_panic_hook",
  "console_log",
@@ -2797,9 +2790,9 @@ dependencies = [
 
 [[package]]
 name = "serde_with"
-version = "3.8.3"
+version = "3.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e73139bc5ec2d45e6c5fd85be5a46949c1c39a4c18e56915f5eb4c12f975e377"
+checksum = "69cecfa94848272156ea67b2b1a53f20fc7bc638c4a46d2f8abde08f05f4b857"
 dependencies = [
  "serde",
  "serde_derive",
@@ -2808,9 +2801,9 @@ dependencies = [
 
 [[package]]
 name = "serde_with_macros"
-version = "3.8.3"
+version = "3.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b80d3d6b56b64335c0180e5ffde23b3c5e08c14c585b51a15bd0e95393f46703"
+checksum = "a8fee4991ef4f274617a51ad4af30519438dacb2f56ac773b08a1922ff743350"
 dependencies = [
  "darling",
  "proc-macro2",
@@ -2917,9 +2910,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
 
 [[package]]
 name = "syn"
-version = "2.0.69"
+version = "2.0.71"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "201fcda3845c23e8212cd466bfebf0bd20694490fc0356ae8e428e0824a915a6"
+checksum = "b146dcf730474b4bcd16c311627b31ede9ab149045db4d6088b3becaea046462"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3007,18 +3000,18 @@ dependencies = [
 
 [[package]]
 name = "thiserror"
-version = "1.0.61"
+version = "1.0.62"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709"
+checksum = "f2675633b1499176c2dff06b0856a27976a8f9d436737b4cf4f312d4d91d8bbb"
 dependencies = [
  "thiserror-impl",
 ]
 
 [[package]]
 name = "thiserror-impl"
-version = "1.0.61"
+version = "1.0.62"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533"
+checksum = "d20468752b09f49e909e55a5d338caa8bedf615594e9d80bc4c565d30faf798c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3037,9 +3030,9 @@ dependencies = [
 
 [[package]]
 name = "tikv-jemalloc-sys"
-version = "0.5.4+5.3.0-patched"
+version = "0.6.0+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9402443cb8fd499b6f327e40565234ff34dbda27460c5b47db0db77443dd85d1"
+checksum = "cd3c60906412afa9c2b5b5a48ca6a5abe5736aec9eb48ad05037a677e52e4e2d"
 dependencies = [
  "cc",
  "libc",
@@ -3047,9 +3040,9 @@ dependencies = [
 
 [[package]]
 name = "tikv-jemallocator"
-version = "0.5.4"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "965fe0c26be5c56c94e38ba547249074803efd52adfb66de62107d95aab3eaca"
+checksum = "4cec5ff18518d81584f477e9bfdf957f5bb0979b0bac3af4ca30b5b3ae2d2865"
 dependencies = [
  "libc",
  "tikv-jemalloc-sys",
```
10  Cargo.toml
```diff
@@ -50,14 +50,14 @@ cachedir = { version = "0.3.1" }
 camino = { version = "1.1.7" }
 chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
 clap = { version = "4.5.3", features = ["derive"] }
-clap_complete_command = { version = "0.5.1" }
+clap_complete_command = { version = "0.6.0" }
 clearscreen = { version = "3.0.0" }
 codspeed-criterion-compat = { version = "2.6.0", default-features = false }
 colored = { version = "2.1.0" }
 console_error_panic_hook = { version = "0.1.7" }
 console_log = { version = "1.0.0" }
 countme = { version = "3.0.1" }
-compact_str = "0.7.1"
+compact_str = "0.8.0"
 criterion = { version = "0.5.1", default-features = false }
 crossbeam = { version = "0.8.4" }
 dashmap = { version = "6.0.1" }
@@ -128,7 +128,7 @@ syn = { version = "2.0.55" }
 tempfile = { version = "3.9.0" }
 test-case = { version = "3.3.1" }
 thiserror = { version = "1.0.58" }
-tikv-jemallocator = { version = "0.5.0" }
+tikv-jemallocator = { version = "0.6.0" }
 toml = { version = "0.8.11" }
 tracing = { version = "0.1.40" }
 tracing-indicatif = { version = "0.3.6" }
@@ -272,10 +272,10 @@ build-local-artifacts = false
 # Local artifacts jobs to run in CI
 local-artifacts-jobs = ["./build-binaries", "./build-docker"]
 # Publish jobs to run in CI
-publish-jobs = ["./publish-pypi"]
+publish-jobs = ["./publish-pypi", "./publish-wasm"]
 # Announcement jobs to run in CI
 post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
 # Custom permissions for GitHub Jobs
-github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" } }
+github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
 # Whether to install an updater program
 install-updater = false
```
README.md

````diff
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
 
 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.5.2/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.5.2/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.5.3/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.5.3/install.ps1 | iex"
 ```
 
 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.5.2
+  rev: v0.5.3
   hooks:
     # Run the linter.
     - id: ruff
````
crates/red_knot/Cargo.toml

```diff
@@ -15,7 +15,7 @@ license.workspace = true
 red_knot_module_resolver = { workspace = true }
 red_knot_python_semantic = { workspace = true }
 
-ruff_db = { workspace = true }
+ruff_db = { workspace = true, features = ["os", "cache"] }
 ruff_python_ast = { workspace = true }
 
 anyhow = { workspace = true }
```
2  crates/red_knot/src/cli/mod.rs (new file)
```diff
@@ -0,0 +1,2 @@
+pub(crate) mod target_version;
+pub(crate) mod verbosity;
```
34  crates/red_knot/src/cli/target_version.rs (new file)
```diff
@@ -0,0 +1,34 @@
+/// Enumeration of all supported Python versions
+///
+/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
+#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
+pub enum TargetVersion {
+    Py37,
+    #[default]
+    Py38,
+    Py39,
+    Py310,
+    Py311,
+    Py312,
+    Py313,
+}
+
+impl std::fmt::Display for TargetVersion {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        ruff_db::program::TargetVersion::from(*self).fmt(f)
+    }
+}
+
+impl From<TargetVersion> for ruff_db::program::TargetVersion {
+    fn from(value: TargetVersion) -> Self {
+        match value {
+            TargetVersion::Py37 => Self::Py37,
+            TargetVersion::Py38 => Self::Py38,
+            TargetVersion::Py39 => Self::Py39,
+            TargetVersion::Py310 => Self::Py310,
+            TargetVersion::Py311 => Self::Py311,
+            TargetVersion::Py312 => Self::Py312,
+            TargetVersion::Py313 => Self::Py313,
+        }
+    }
+}
```
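The exhaustive `match` in the `From` impl above is what keeps the CLI-facing enum and `ruff_db`'s internal enum in lockstep. A minimal standalone sketch of the same idiom (the `CliVersion`/`DbVersion` names are illustrative stand-ins, not types from this diff):

```rust
// Sketch of the exhaustive-conversion idiom used in cli/target_version.rs.
// `CliVersion` and `DbVersion` are hypothetical stand-ins for the two enums.
#[derive(Copy, Clone, Debug)]
enum CliVersion {
    Py38,
    Py39,
}

#[derive(Copy, Clone, Debug)]
enum DbVersion {
    Py38,
    Py39,
}

impl From<CliVersion> for DbVersion {
    fn from(value: CliVersion) -> Self {
        // No `_` fallback arm: adding a variant to `CliVersion` is a compile
        // error until this conversion handles it, so the enums cannot drift.
        match value {
            CliVersion::Py38 => DbVersion::Py38,
            CliVersion::Py39 => DbVersion::Py39,
        }
    }
}

fn main() {
    println!("{:?}", DbVersion::from(CliVersion::Py39));
}
```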
34  crates/red_knot/src/cli/verbosity.rs (new file)
```diff
@@ -0,0 +1,34 @@
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
+pub(crate) enum VerbosityLevel {
+    Info,
+    Debug,
+    Trace,
+}
+
+/// Logging flags to `#[command(flatten)]` into your CLI
+#[derive(clap::Args, Debug, Clone, Default)]
+#[command(about = None, long_about = None)]
+pub(crate) struct Verbosity {
+    #[arg(
+        long,
+        short = 'v',
+        help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
+        action = clap::ArgAction::Count,
+        global = true,
+    )]
+    verbose: u8,
+}
+
+impl Verbosity {
+    /// Returns the verbosity level based on the number of `-v` flags.
+    ///
+    /// Returns `None` if the user did not specify any verbosity flags.
+    pub(crate) fn level(&self) -> Option<VerbosityLevel> {
+        match self.verbose {
+            0 => None,
+            1 => Some(VerbosityLevel::Info),
+            2 => Some(VerbosityLevel::Debug),
+            _ => Some(VerbosityLevel::Trace),
+        }
+    }
+}
```
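The `Verbosity::level()` mapping above saturates: zero `-v` flags means "no explicit verbosity", and anything past `-vvv` stays at `Trace`. A std-only sketch of that count-to-level mapping (the naive flag counting in `main` is an assumption standing in for `clap::ArgAction::Count`; it only recognizes separate `-v` arguments, unlike clap):

```rust
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
enum VerbosityLevel {
    Info,
    Debug,
    Trace,
}

// Same mapping as `Verbosity::level()` above: 0 flags -> None, then
// Info/Debug, and three or more `-v`s saturate at Trace.
fn level_from_count(verbose: u8) -> Option<VerbosityLevel> {
    match verbose {
        0 => None,
        1 => Some(VerbosityLevel::Info),
        2 => Some(VerbosityLevel::Debug),
        _ => Some(VerbosityLevel::Trace),
    }
}

fn main() {
    // Count `-v` occurrences the way `clap::ArgAction::Count` would.
    let count = std::env::args().filter(|arg| arg == "-v").count() as u8;
    println!("{:?}", level_from_count(count));
}
```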
crates/red_knot/src/db.rs

```diff
@@ -1,10 +1,200 @@
-use red_knot_python_semantic::Db as SemanticDb;
-use ruff_db::Upcast;
-use salsa::DbWithJar;
+use std::panic::{AssertUnwindSafe, RefUnwindSafe};
+use std::sync::Arc;
 
-use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled};
+use salsa::{Cancelled, Database, DbWithJar};
+
+use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
+use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
+use ruff_db::files::{system_path_to_file, File, Files};
+use ruff_db::program::{Program, ProgramSettings};
+use ruff_db::system::System;
+use ruff_db::vendored::VendoredFileSystem;
+use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
+
+use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled, Diagnostics};
+use crate::watch::{FileChangeKind, FileWatcherChange};
+use crate::workspace::{check_file, Package, Workspace, WorkspaceMetadata};
 
 pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}
 
 #[salsa::jar(db=Db)]
-pub struct Jar(lint_syntax, lint_semantic, unwind_if_cancelled);
+pub struct Jar(
+    Workspace,
+    Package,
+    lint_syntax,
+    lint_semantic,
+    unwind_if_cancelled,
+);
+
+#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
+pub struct RootDatabase {
+    workspace: Option<Workspace>,
+    storage: salsa::Storage<RootDatabase>,
+    files: Files,
+    system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
+}
+
+impl RootDatabase {
+    pub fn new<S>(workspace: WorkspaceMetadata, settings: ProgramSettings, system: S) -> Self
+    where
+        S: System + 'static + Send + Sync + RefUnwindSafe,
+    {
+        let mut db = Self {
+            workspace: None,
+            storage: salsa::Storage::default(),
+            files: Files::default(),
+            system: Arc::new(system),
+        };
+
+        let workspace = Workspace::from_metadata(&db, workspace);
+        // Initialize the `Program` singleton
+        Program::from_settings(&db, settings);
+
+        db.workspace = Some(workspace);
+        db
+    }
+
+    pub fn workspace(&self) -> Workspace {
+        // SAFETY: The workspace is always initialized in `new`.
+        self.workspace.unwrap()
+    }
+
+    #[tracing::instrument(level = "debug", skip(self, changes))]
+    pub fn apply_changes(&mut self, changes: Vec<FileWatcherChange>) {
+        let workspace = self.workspace();
+        let workspace_path = workspace.root(self).to_path_buf();
+
+        // TODO: Optimize change tracking by only reloading a package if a file that is part of the package was changed.
+        let mut structural_change = false;
+        for change in changes {
+            if matches!(
+                change.path.file_name(),
+                Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
+            ) {
+                // Changes to ignore files or settings can change the workspace structure or add/remove files
+                // from packages.
+                structural_change = true;
+            } else {
+                match change.kind {
+                    FileChangeKind::Created => {
+                        // Reload the package when a new file was added. This is necessary because the file might be excluded
+                        // by a gitignore.
+                        if workspace.package(self, &change.path).is_some() {
+                            structural_change = true;
+                        }
+                    }
+                    FileChangeKind::Modified => {}
+                    FileChangeKind::Deleted => {
+                        if let Some(package) = workspace.package(self, &change.path) {
+                            if let Some(file) = system_path_to_file(self, &change.path) {
+                                package.remove_file(self, file);
+                            }
+                        }
+                    }
+                }
+            }
+
+            File::touch_path(self, &change.path);
+        }
+
+        if structural_change {
+            match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
+                Ok(metadata) => {
+                    tracing::debug!("Reload workspace after structural change.");
+                    // TODO: Handle changes in the program settings.
+                    workspace.reload(self, metadata);
+                }
+                Err(error) => {
+                    tracing::error!("Failed to load workspace, keep old workspace: {error}");
+                }
+            }
+        }
+    }
+
+    /// Checks all open files in the workspace and its dependencies.
+    pub fn check(&self) -> Result<Vec<String>, Cancelled> {
+        self.with_db(|db| db.workspace().check(db))
+    }
+
+    pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
+        self.with_db(|db| check_file(db, file))
+    }
+
+    pub(crate) fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
+    where
+        F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
+    {
+        // The `AssertUnwindSafe` here looks scary, but is a consequence of Salsa's design.
+        // Salsa uses panics to implement cancellation and to recover from cycles. However, the Salsa
+        // storage isn't `UnwindSafe` or `RefUnwindSafe` because its dependencies `DashMap` and `parking_lot::*` aren't
+        // unwind safe.
+        //
+        // Having to use `AssertUnwindSafe` isn't as big as a deal as it might seem because
+        // the `UnwindSafe` and `RefUnwindSafe` traits are designed to catch logical bugs.
+        // They don't protect against [UB](https://internals.rust-lang.org/t/pre-rfc-deprecating-unwindsafe/15974).
+        // On top of that, `Cancelled` only catches specific Salsa-panics and propagates all other panics.
+        //
+        // That still leaves us with possible logical bugs in two sources:
+        // * In Salsa itself: This must be considered a bug in Salsa and needs fixing upstream.
+        //   Reviewing Salsa code specifically around unwind safety seems doable.
+        // * Our code: This is the main concern. Luckily, it only involves code that uses internal mutability
+        //   and calls into Salsa queries when mutating the internal state. Using `AssertUnwindSafe`
+        //   certainly makes it harder to catch these issues in our user code.
+        //
+        // For now, this is the only solution at hand unless Salsa decides to change its design.
+        // [Zulip support thread](https://salsa.zulipchat.com/#narrow/stream/145099-general/topic/How.20to.20use.20.60Cancelled.3A.3Acatch.60)
+        let db = &AssertUnwindSafe(self);
+        Cancelled::catch(|| f(db))
+    }
+}
+
+impl Upcast<dyn SemanticDb> for RootDatabase {
+    fn upcast(&self) -> &(dyn SemanticDb + 'static) {
+        self
+    }
+}
+
+impl Upcast<dyn SourceDb> for RootDatabase {
+    fn upcast(&self) -> &(dyn SourceDb + 'static) {
+        self
+    }
+}
+
+impl Upcast<dyn ResolverDb> for RootDatabase {
+    fn upcast(&self) -> &(dyn ResolverDb + 'static) {
+        self
+    }
+}
+
+impl ResolverDb for RootDatabase {}
+
+impl SemanticDb for RootDatabase {}
+
+impl SourceDb for RootDatabase {
+    fn vendored(&self) -> &VendoredFileSystem {
+        vendored_typeshed_stubs()
+    }
+
+    fn system(&self) -> &dyn System {
+        &*self.system
+    }
+
+    fn files(&self) -> &Files {
+        &self.files
+    }
+}
+
+impl Database for RootDatabase {}
+
+impl Db for RootDatabase {}
+
+impl salsa::ParallelDatabase for RootDatabase {
+    fn snapshot(&self) -> salsa::Snapshot<Self> {
+        salsa::Snapshot::new(Self {
+            workspace: self.workspace,
+            storage: self.storage.snapshot(),
+            files: self.files.snapshot(),
+            system: self.system.clone(),
+        })
+    }
+}
```
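The long comment in `with_db` above is the crux of the new database: Salsa signals cancellation by panicking, so the panic must be caught at the API boundary even though the storage is not statically unwind-safe. A std-only sketch of that pattern (no Salsa involved; the `Vec` is a stand-in for the database, and the assumptions are noted in the comments):

```rust
use std::panic::{self, AssertUnwindSafe};

fn main() {
    let state = vec![1, 2, 3]; // stand-in for the Salsa database
    // `AssertUnwindSafe` is the programmer's promise that `state` is left in a
    // consistent state if the closure panics -- the same promise `with_db` makes.
    let result = panic::catch_unwind(AssertUnwindSafe(|| {
        // A "query" that may panic; Salsa uses panics to implement cancellation.
        state.iter().sum::<i32>()
    }));
    match result {
        Ok(sum) => println!("query completed: {sum}"),
        Err(_) => println!("query panicked (in Salsa: possibly a cancellation)"),
    }
}
```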
crates/red_knot/src/lib.rs

```diff
@@ -1,53 +1,6 @@
-use rustc_hash::FxHashSet;
-
-use ruff_db::files::File;
-use ruff_db::system::{SystemPath, SystemPathBuf};
-
 use crate::db::Jar;
 
 pub mod db;
 pub mod lint;
-pub mod program;
-pub mod target_version;
 pub mod watch;
-
-#[derive(Debug, Clone)]
-pub struct Workspace {
-    root: SystemPathBuf,
-    /// The files that are open in the workspace.
-    ///
-    /// * Editor: The files that are actively being edited in the editor (the user has a tab open with the file).
-    /// * CLI: The resolved files passed as arguments to the CLI.
-    open_files: FxHashSet<File>,
-}
-
-impl Workspace {
-    pub fn new(root: SystemPathBuf) -> Self {
-        Self {
-            root,
-            open_files: FxHashSet::default(),
-        }
-    }
-
-    pub fn root(&self) -> &SystemPath {
-        self.root.as_path()
-    }
-
-    // TODO having the content in workspace feels wrong.
-    pub fn open_file(&mut self, file_id: File) {
-        self.open_files.insert(file_id);
-    }
-
-    pub fn close_file(&mut self, file_id: File) {
-        self.open_files.remove(&file_id);
-    }
-
-    // TODO introduce an `OpenFile` type instead of using an anonymous tuple.
-    pub fn open_files(&self) -> impl Iterator<Item = File> + '_ {
-        self.open_files.iter().copied()
-    }
-
-    pub fn is_file_open(&self, file_id: File) -> bool {
-        self.open_files.contains(&file_id)
-    }
-}
+pub mod workspace;
```
crates/red_knot/src/lint.rs

```diff
@@ -103,7 +103,7 @@ fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef)
     for alias in &import.names {
         let ty = alias.ty(&context.semantic);
 
-        if ty.is_unknown() {
+        if ty.is_unbound() {
             context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
         }
     }
@@ -112,7 +112,7 @@ fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef)
     for alias in &import.names {
         let ty = alias.ty(&context.semantic);
 
-        if ty.is_unknown() {
+        if ty.is_unbound() {
             context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
         }
     }
@@ -130,11 +130,7 @@ fn lint_bad_override(context: &SemanticLintContext, class: &ast::StmtClassDef) {
         return;
     };
 
-    let Some(typing_override) = semantic.public_symbol(&typing, "override") else {
-        return;
-    };
-
-    let override_ty = semantic.public_symbol_ty(typing_override);
+    let override_ty = semantic.global_symbol_ty(&typing, "override");
 
     let Type::Class(class_ty) = class.ty(semantic) else {
         return;
@@ -154,7 +150,10 @@ fn lint_bad_override(context: &SemanticLintContext, class: &ast::StmtClassDef) {
 
         if ty.has_decorator(db, override_ty) {
             let method_name = ty.name(db);
-            if class_ty.inherited_class_member(db, &method_name).is_none() {
+            if class_ty
+                .inherited_class_member(db, &method_name)
+                .is_unbound()
+            {
                 // TODO should have a qualname() method to support nested classes
                 context.push_diagnostic(
                     format!(
```
crates/red_knot/src/main.rs

```diff
@@ -10,13 +10,17 @@ use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
 use tracing_subscriber::{Layer, Registry};
 use tracing_tree::time::Uptime;
 
-use red_knot::program::{FileWatcherChange, Program};
-use red_knot::target_version::TargetVersion;
+use red_knot::db::RootDatabase;
 use red_knot::watch::FileWatcher;
-use red_knot::Workspace;
-use red_knot_module_resolver::{set_module_resolution_settings, RawModuleResolutionSettings};
-use ruff_db::files::system_path_to_file;
-use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
+use red_knot::watch::FileWatcherChange;
+use red_knot::workspace::WorkspaceMetadata;
+use ruff_db::program::{ProgramSettings, SearchPathSettings};
+use ruff_db::system::{OsSystem, System, SystemPathBuf};
+
+use cli::target_version::TargetVersion;
+use cli::verbosity::{Verbosity, VerbosityLevel};
+
+mod cli;
 
 #[derive(Debug, Parser)]
 #[command(
@@ -26,22 +30,33 @@ use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
 )]
 #[command(version)]
 struct Args {
+    #[arg(
+        long,
+        help = "Changes the current working directory.",
+        long_help = "Changes the current working directory before any specified operations. This affects the workspace and configuration discovery.",
+        value_name = "PATH"
+    )]
+    current_directory: Option<SystemPathBuf>,
+
     #[arg(
         long,
         value_name = "DIRECTORY",
         help = "Custom directory to use for stdlib typeshed stubs"
     )]
     custom_typeshed_dir: Option<SystemPathBuf>,
 
     #[arg(
         long,
         value_name = "PATH",
         help = "Additional path to use as a module-resolution source (can be passed multiple times)"
     )]
     extra_search_path: Vec<SystemPathBuf>,
 
     #[arg(long, help = "Python version to assume when resolving types", default_value_t = TargetVersion::default(), value_name="VERSION")]
     target_version: TargetVersion,
+
+    #[clap(flatten)]
+    verbosity: Verbosity,
 }
 
 #[allow(
@@ -51,60 +66,46 @@
     clippy::dbg_macro
 )]
 pub fn main() -> anyhow::Result<()> {
-    countme::enable(true);
-    setup_tracing();
-
     let Args {
-        entry_point,
+        current_directory,
         custom_typeshed_dir,
-        extra_search_path: extra_search_paths,
+        extra_search_path: extra_paths,
         target_version,
+        verbosity,
     } = Args::parse_from(std::env::args().collect::<Vec<_>>());
 
-    tracing::trace!("Target version: {target_version}");
-    if let Some(custom_typeshed) = custom_typeshed_dir.as_ref() {
-        tracing::trace!("Custom typeshed directory: {custom_typeshed}");
-    }
-    if !extra_search_paths.is_empty() {
-        tracing::trace!("extra search paths: {extra_search_paths:?}");
-    }
+    let verbosity = verbosity.level();
+    countme::enable(verbosity == Some(VerbosityLevel::Trace));
+    setup_tracing(verbosity);
 
-    let cwd = std::env::current_dir().unwrap();
-    let cwd = SystemPath::from_std_path(&cwd).unwrap();
-    let system = OsSystem::new(cwd);
+    let cwd = if let Some(cwd) = current_directory {
+        let canonicalized = cwd.as_utf8_path().canonicalize_utf8().unwrap();
+        SystemPathBuf::from_utf8_path_buf(canonicalized)
+    } else {
+        let cwd = std::env::current_dir().unwrap();
+        SystemPathBuf::from_path_buf(cwd).unwrap()
+    };
 
-    if !system.path_exists(&entry_point) {
-        eprintln!("The entry point does not exist.");
-        return Err(anyhow::anyhow!("Invalid arguments"));
-    }
+    let system = OsSystem::new(cwd.clone());
+    let workspace_metadata =
+        WorkspaceMetadata::from_path(system.current_directory(), &system).unwrap();
 
-    if !system.is_file(&entry_point) {
-        eprintln!("The entry point is not a file.");
-        return Err(anyhow::anyhow!("Invalid arguments"));
-    }
-
-    let workspace_folder = entry_point.parent().unwrap();
-    let workspace = Workspace::new(workspace_folder.to_path_buf());
-
-    let workspace_search_path = workspace.root().to_path_buf();
-
-    let mut program = Program::new(workspace, system);
-
-    set_module_resolution_settings(
-        &mut program,
-        RawModuleResolutionSettings {
-            extra_paths: extra_search_paths,
-            workspace_root: workspace_search_path,
-            site_packages: None,
+    // TODO: Respect the settings from the workspace metadata. when resolving the program settings.
+    let program_settings = ProgramSettings {
+        target_version: target_version.into(),
+        search_paths: SearchPathSettings {
+            extra_paths,
+            workspace_root: workspace_metadata.root().to_path_buf(),
             custom_typeshed: custom_typeshed_dir,
-            target_version: red_knot_module_resolver::TargetVersion::from(target_version),
+            site_packages: None,
         },
-    );
+    };
 
-    let entry_id = system_path_to_file(&program, entry_point.clone()).unwrap();
-    program.workspace_mut().open_file(entry_id);
+    // TODO: Use the `program_settings` to compute the key for the database's persistent
+    // cache and load the cache if it exists.
+    let mut db = RootDatabase::new(workspace_metadata, program_settings, system);
 
-    let (main_loop, main_loop_cancellation_token) = MainLoop::new();
+    let (main_loop, main_loop_cancellation_token) = MainLoop::new(verbosity);
 
     // Listen to Ctrl+C and abort the watch mode.
     let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -123,9 +124,9 @@ pub fn main() -> anyhow::Result<()> {
         file_changes_notifier.notify(changes);
     })?;
 
-    file_watcher.watch_folder(workspace_folder.as_std_path())?;
+    file_watcher.watch_folder(db.workspace().root(&db).as_std_path())?;
 
-    main_loop.run(&mut program);
+    main_loop.run(&mut db);
 
     println!("{}", countme::get_all());
 
@@ -133,18 +134,19 @@ pub fn main() -> anyhow::Result<()> {
 }
 
 struct MainLoop {
-    orchestrator_sender: crossbeam_channel::Sender<OrchestratorMessage>,
-    main_loop_receiver: crossbeam_channel::Receiver<MainLoopMessage>,
+    verbosity: Option<VerbosityLevel>,
+    orchestrator: crossbeam_channel::Sender<OrchestratorMessage>,
+    receiver: crossbeam_channel::Receiver<MainLoopMessage>,
 }
 
 impl MainLoop {
-    fn new() -> (Self, MainLoopCancellationToken) {
+    fn new(verbosity: Option<VerbosityLevel>) -> (Self, MainLoopCancellationToken) {
         let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
         let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);
 
         let mut orchestrator = Orchestrator {
             receiver: orchestrator_receiver,
-            sender: main_loop_sender.clone(),
+            main_loop: main_loop_sender.clone(),
             revision: 0,
         };
 
@@ -154,8 +156,9 @@ impl MainLoop {
 
         (
             Self {
-                orchestrator_sender,
-                main_loop_receiver,
+                verbosity,
+                orchestrator: orchestrator_sender,
+                receiver: main_loop_receiver,
             },
             MainLoopCancellationToken {
                 sender: main_loop_sender,
@@ -165,30 +168,28 @@ impl MainLoop {
 
     fn file_changes_notifier(&self) -> FileChangesNotifier {
         FileChangesNotifier {
-            sender: self.orchestrator_sender.clone(),
+            sender: self.orchestrator.clone(),
         }
     }
 
     #[allow(clippy::print_stderr)]
-    fn run(self, program: &mut Program) {
-        self.orchestrator_sender
-            .send(OrchestratorMessage::Run)
-            .unwrap();
+    fn run(self, db: &mut RootDatabase) {
+        self.orchestrator.send(OrchestratorMessage::Run).unwrap();
 
-        for message in &self.main_loop_receiver {
+        for message in &self.receiver {
             tracing::trace!("Main Loop: Tick");
 
             match message {
-                MainLoopMessage::CheckProgram { revision } => {
-                    let program = program.snapshot();
-                    let sender = self.orchestrator_sender.clone();
+                MainLoopMessage::CheckWorkspace { revision } => {
+                    let db = db.snapshot();
+                    let orchestrator = self.orchestrator.clone();
 
-                    // Spawn a new task that checks the program. This needs to be done in a separate thread
+                    // Spawn a new task that checks the workspace. This needs to be done in a separate thread
                     // to prevent blocking the main loop here.
                     rayon::spawn(move || {
-                        if let Ok(result) = program.check() {
-                            sender
-                                .send(OrchestratorMessage::CheckProgramCompleted {
+                        if let Ok(result) = db.check() {
+                            orchestrator
+                                .send(OrchestratorMessage::CheckCompleted {
                                     diagnostics: result,
                                     revision,
                                 })
@@ -198,14 +199,18 @@ impl MainLoop {
                 }
                 MainLoopMessage::ApplyChanges(changes) => {
                     // Automatically cancels any pending queries and waits for them to complete.
-                    program.apply_changes(changes);
+                    db.apply_changes(changes);
                 }
                 MainLoopMessage::CheckCompleted(diagnostics) => {
                     eprintln!("{}", diagnostics.join("\n"));
-                    eprintln!("{}", countme::get_all());
+                    if self.verbosity == Some(VerbosityLevel::Trace) {
+                        eprintln!("{}", countme::get_all());
+                    }
                 }
                 MainLoopMessage::Exit => {
-                    eprintln!("{}", countme::get_all());
+                    if self.verbosity == Some(VerbosityLevel::Trace) {
+                        eprintln!("{}", countme::get_all());
+                    }
                     return;
                 }
             }
@@ -215,7 +220,7 @@ impl MainLoop {
 
 impl Drop for MainLoop {
     fn drop(&mut self) {
-        self.orchestrator_sender
+        self.orchestrator
             .send(OrchestratorMessage::Shutdown)
             .unwrap();
    }
@@ -247,7 +252,7 @@ impl MainLoopCancellationToken {
 
 struct Orchestrator {
     /// Sends messages to the main loop.
-    sender: crossbeam_channel::Sender<MainLoopMessage>,
+    main_loop: crossbeam_channel::Sender<MainLoopMessage>,
     /// Receives messages from the main loop.
     receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
     revision: usize,
@@ -259,20 +264,20 @@ impl Orchestrator {
         while let Ok(message) = self.receiver.recv() {
             match message {
                 OrchestratorMessage::Run => {
-                    self.sender
-                        .send(MainLoopMessage::CheckProgram {
+                    self.main_loop
+                        .send(MainLoopMessage::CheckWorkspace {
                            revision: self.revision,
                        })
                        .unwrap();
                }
 
-                OrchestratorMessage::CheckProgramCompleted {
+                OrchestratorMessage::CheckCompleted {
                    diagnostics,
                    revision,
                } => {
                    // Only take the diagnostics if they are for the latest revision.
                    if self.revision == revision {
-                        self.sender
+                        self.main_loop
                            .send(MainLoopMessage::CheckCompleted(diagnostics))
                            .unwrap();
                    } else {
@@ -307,7 +312,7 @@ impl Orchestrator {
                    changes.extend(file_changes);
                }
 
-                Ok(OrchestratorMessage::CheckProgramCompleted { .. })=> {
+                Ok(OrchestratorMessage::CheckCompleted { .. })=> {
                    // disregard any outdated completion message.
                }
                Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),
@@ -320,8 +325,8 @@ impl Orchestrator {
            },
            default(std::time::Duration::from_millis(10)) => {
                // No more file changes after 10 ms, send the changes and schedule a new analysis
-                self.sender.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
-                self.sender.send(MainLoopMessage::CheckProgram { revision: self.revision}).unwrap();
+                self.main_loop.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
+                self.main_loop.send(MainLoopMessage::CheckWorkspace { revision: self.revision}).unwrap();
                return;
            }
        }
@@ -337,7 +342,7 @@ impl Orchestrator {
 /// Message sent from the orchestrator to the main loop.
 #[derive(Debug)]
 enum MainLoopMessage {
-    CheckProgram { revision: usize },
+    CheckWorkspace { revision: usize },
     CheckCompleted(Vec<String>),
     ApplyChanges(Vec<FileWatcherChange>),
     Exit,
@@ -348,7 +353,7 @@ enum OrchestratorMessage {
     Run,
     Shutdown,
 
-    CheckProgramCompleted {
+    CheckCompleted {
         diagnostics: Vec<String>,
         revision: usize,
     },
@@ -356,7 +361,14 @@ enum OrchestratorMessage {
     FileChanges(Vec<FileWatcherChange>),
 }
 
-fn setup_tracing() {
+fn setup_tracing(verbosity: Option<VerbosityLevel>) {
+    let trace_level = match verbosity {
+        None => Level::WARN,
+        Some(VerbosityLevel::Info) => Level::INFO,
+        Some(VerbosityLevel::Debug) => Level::DEBUG,
+        Some(VerbosityLevel::Trace) => Level::TRACE,
+    };
+
     let subscriber = Registry::default().with(
         tracing_tree::HierarchicalLayer::default()
             .with_indent_lines(true)
@@ -366,9 +378,7 @@ fn setup_tracing() {
             .with_targets(true)
             .with_writer(|| Box::new(std::io::stderr()))
             .with_timer(Uptime::default())
-            .with_filter(LoggingFilter {
-                trace_level: Level::TRACE,
-            }),
+            .with_filter(LoggingFilter { trace_level }),
     );
 
     tracing::subscriber::set_global_default(subscriber).unwrap();
```
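The `revision` field threaded through `CheckWorkspace` and `CheckCompleted` above is what lets the orchestrator drop results computed against an outdated view of the workspace. A std-only sketch of that staleness check (using `std::sync::mpsc` instead of `crossbeam_channel`, with plain strings as diagnostics; the names are stand-ins, not the PR's types):

```rust
use std::sync::mpsc;
use std::thread;

enum Message {
    CheckCompleted {
        diagnostics: Vec<String>,
        revision: usize,
    },
}

fn main() {
    let (sender, receiver) = mpsc::channel();

    // Two "check" tasks may complete in any order; only the result that matches
    // the orchestrator's current revision is kept, the other is discarded.
    for revision in [0usize, 1] {
        let sender = sender.clone();
        thread::spawn(move || {
            sender
                .send(Message::CheckCompleted {
                    diagnostics: vec![format!("diagnostic from revision {revision}")],
                    revision,
                })
                .unwrap();
        });
    }
    drop(sender); // close the channel so the receive loop below terminates

    let current_revision = 1;
    for Message::CheckCompleted { diagnostics, revision } in receiver {
        if revision == current_revision {
            println!("{}", diagnostics.join("\n"));
        }
        // Otherwise: an outdated completion message; disregard it,
        // as `Orchestrator` does above.
    }
}
```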
crates/red_knot/src/program/check.rs (deleted)

```diff
@@ -1,32 +0,0 @@
-use ruff_db::files::File;
-use salsa::Cancelled;
-
-use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
-use crate::program::Program;
-
-impl Program {
-    /// Checks all open files in the workspace and its dependencies.
-    #[tracing::instrument(level = "debug", skip_all)]
-    pub fn check(&self) -> Result<Vec<String>, Cancelled> {
-        self.with_db(|db| {
-            let mut result = Vec::new();
-            for open_file in db.workspace.open_files() {
-                result.extend_from_slice(&db.check_file_impl(open_file));
-            }
-
-            result
-        })
-    }
-
-    #[tracing::instrument(level = "debug", skip(self))]
-    pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
-        self.with_db(|db| db.check_file_impl(file))
-    }
-
-    fn check_file_impl(&self, file: File) -> Diagnostics {
-        let mut diagnostics = Vec::new();
-        diagnostics.extend_from_slice(lint_syntax(self, file));
-        diagnostics.extend_from_slice(lint_semantic(self, file));
-        Diagnostics::from(diagnostics)
-    }
-}
```
@@ -1,153 +0,0 @@
use std::panic::{AssertUnwindSafe, RefUnwindSafe};
use std::sync::Arc;

use salsa::{Cancelled, Database};

use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
use ruff_db::files::{File, Files};
use ruff_db::system::{System, SystemPathBuf};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};

use crate::db::{Db, Jar};
use crate::Workspace;

mod check;

#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
pub struct Program {
    storage: salsa::Storage<Program>,
    files: Files,
    system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
    workspace: Workspace,
}

impl Program {
    pub fn new<S>(workspace: Workspace, system: S) -> Self
    where
        S: System + 'static + Send + Sync + RefUnwindSafe,
    {
        Self {
            storage: salsa::Storage::default(),
            files: Files::default(),
            system: Arc::new(system),
            workspace,
        }
    }

    pub fn apply_changes<I>(&mut self, changes: I)
    where
        I: IntoIterator<Item = FileWatcherChange>,
    {
        for change in changes {
            File::touch_path(self, &change.path);
        }
    }

    pub fn workspace(&self) -> &Workspace {
        &self.workspace
    }

    pub fn workspace_mut(&mut self) -> &mut Workspace {
        &mut self.workspace
    }

    fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
    where
        F: FnOnce(&Program) -> T + std::panic::UnwindSafe,
    {
        // The `AssertUnwindSafe` here looks scary, but is a consequence of Salsa's design.
        // Salsa uses panics to implement cancellation and to recover from cycles. However, the Salsa
        // storage isn't `UnwindSafe` or `RefUnwindSafe` because its dependencies `DashMap` and `parking_lot::*` aren't
        // unwind safe.
        //
        // Having to use `AssertUnwindSafe` isn't as big a deal as it might seem because
        // the `UnwindSafe` and `RefUnwindSafe` traits are designed to catch logical bugs.
        // They don't protect against [UB](https://internals.rust-lang.org/t/pre-rfc-deprecating-unwindsafe/15974).
        // On top of that, `Cancelled` only catches specific Salsa-panics and propagates all other panics.
        //
        // That still leaves us with possible logical bugs in two sources:
        // * In Salsa itself: This must be considered a bug in Salsa and needs fixing upstream.
        //   Reviewing Salsa code specifically around unwind safety seems doable.
        // * Our code: This is the main concern. Luckily, it only involves code that uses internal mutability
        //   and calls into Salsa queries when mutating the internal state. Using `AssertUnwindSafe`
        //   certainly makes it harder to catch these issues in our user code.
        //
        // For now, this is the only solution at hand unless Salsa decides to change its design.
        // [Zulip support thread](https://salsa.zulipchat.com/#narrow/stream/145099-general/topic/How.20to.20use.20.60Cancelled.3A.3Acatch.60)
        let db = &AssertUnwindSafe(self);
        Cancelled::catch(|| f(db))
    }
}

impl Upcast<dyn SemanticDb> for Program {
    fn upcast(&self) -> &(dyn SemanticDb + 'static) {
        self
    }
}

impl Upcast<dyn SourceDb> for Program {
    fn upcast(&self) -> &(dyn SourceDb + 'static) {
        self
    }
}

impl Upcast<dyn ResolverDb> for Program {
    fn upcast(&self) -> &(dyn ResolverDb + 'static) {
        self
    }
}

impl ResolverDb for Program {}

impl SemanticDb for Program {}

impl SourceDb for Program {
    fn vendored(&self) -> &VendoredFileSystem {
        vendored_typeshed_stubs()
    }

    fn system(&self) -> &dyn System {
        &*self.system
    }

    fn files(&self) -> &Files {
        &self.files
    }
}

impl Database for Program {}

impl Db for Program {}

impl salsa::ParallelDatabase for Program {
    fn snapshot(&self) -> salsa::Snapshot<Self> {
        salsa::Snapshot::new(Self {
            storage: self.storage.snapshot(),
            files: self.files.snapshot(),
            system: self.system.clone(),
            workspace: self.workspace.clone(),
        })
    }
}

#[derive(Clone, Debug)]
pub struct FileWatcherChange {
    path: SystemPathBuf,
    #[allow(unused)]
    kind: FileChangeKind,
}

impl FileWatcherChange {
    pub fn new(path: SystemPathBuf, kind: FileChangeKind) -> Self {
        Self { path, kind }
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileChangeKind {
    Created,
    Modified,
    Deleted,
}
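Aside: the `AssertUnwindSafe` dance in `with_db` above is easier to see in isolation. Below is a minimal, std-only sketch of the same pattern; the names and the "cancellation" panic are illustrative only, no Salsa is involved:

```rust
use std::panic::{catch_unwind, AssertUnwindSafe};

fn main() {
    // Silence the default panic hook so the deliberate panic below is quiet.
    std::panic::set_hook(Box::new(|_| {}));

    // A captured mutable binding makes the closure not `UnwindSafe`,
    // so `catch_unwind` would reject it without the wrapper.
    let mut counter = 0_i32;

    // `AssertUnwindSafe` promises that observing a partially-updated value
    // after a panic is acceptable. Salsa makes the same promise: its
    // cancellation panics never leave storage in a broken state.
    let result = catch_unwind(AssertUnwindSafe(|| {
        counter += 1;
        if counter > 0 {
            panic!("pretend this is a Salsa cancellation");
        }
        counter
    }));

    // A caller in the style of `with_db` would map the cancellation payload
    // to a `Cancelled` error and resume any other panic.
    assert!(result.is_err());
    println!("counter after unwind: {counter}");
}
```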
@@ -1,50 +0,0 @@
use std::fmt;

/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl TargetVersion {
    const fn as_str(self) -> &'static str {
        match self {
            Self::Py37 => "py37",
            Self::Py38 => "py38",
            Self::Py39 => "py39",
            Self::Py310 => "py310",
            Self::Py311 => "py311",
            Self::Py312 => "py312",
            Self::Py313 => "py313",
        }
    }
}

impl fmt::Display for TargetVersion {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}

impl From<TargetVersion> for red_knot_module_resolver::TargetVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => red_knot_module_resolver::TargetVersion::Py37,
            TargetVersion::Py38 => red_knot_module_resolver::TargetVersion::Py38,
            TargetVersion::Py39 => red_knot_module_resolver::TargetVersion::Py39,
            TargetVersion::Py310 => red_knot_module_resolver::TargetVersion::Py310,
            TargetVersion::Py311 => red_knot_module_resolver::TargetVersion::Py311,
            TargetVersion::Py312 => red_knot_module_resolver::TargetVersion::Py312,
            TargetVersion::Py313 => red_knot_module_resolver::TargetVersion::Py313,
        }
    }
}
@@ -1,12 +1,10 @@
 use std::path::Path;

 use anyhow::Context;
-use notify::event::{CreateKind, RemoveKind};
+use notify::event::{CreateKind, ModifyKind, RemoveKind};
 use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};

-use ruff_db::system::SystemPath;
-
-use crate::program::{FileChangeKind, FileWatcherChange};
+use ruff_db::system::{SystemPath, SystemPathBuf};

 pub struct FileWatcher {
     watcher: RecommendedWatcher,
@@ -35,12 +33,25 @@ impl FileWatcher {
     }

     fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
-        let watcher = recommended_watcher(move |changes: notify::Result<Event>| {
-            match changes {
+        let watcher = recommended_watcher(move |event: notify::Result<Event>| {
+            match event {
                 Ok(event) => {
                     // TODO verify that this handles all events correctly
                     let change_kind = match event.kind {
                         EventKind::Create(CreateKind::File) => FileChangeKind::Created,
+                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::From)) => {
+                            FileChangeKind::Deleted
+                        }
+                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::To)) => {
+                            FileChangeKind::Created
+                        }
+                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Any)) => {
+                            // TODO Introduce a better catch-all event for cases that we don't understand.
+                            FileChangeKind::Created
+                        }
+                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Both)) => {
+                            todo!("Handle both create and delete event.");
+                        }
                         EventKind::Modify(_) => FileChangeKind::Modified,
                         EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
                         _ => {
@@ -51,13 +62,9 @@ impl FileWatcher {
                 let mut changes = Vec::new();

                 for path in event.paths {
-                    if path.is_file() {
-                        if let Some(fs_path) = SystemPath::from_std_path(&path) {
-                            changes.push(FileWatcherChange::new(
-                                fs_path.to_path_buf(),
-                                change_kind,
-                            ));
-                        }
+                    if let Some(fs_path) = SystemPath::from_std_path(&path) {
+                        changes
+                            .push(FileWatcherChange::new(fs_path.to_path_buf(), change_kind));
                     }
                 }
@@ -82,3 +89,23 @@ impl FileWatcher {
         Ok(())
     }
 }

+#[derive(Clone, Debug)]
+pub struct FileWatcherChange {
+    pub path: SystemPathBuf,
+    #[allow(unused)]
+    pub kind: FileChangeKind,
+}
+
+impl FileWatcherChange {
+    pub fn new(path: SystemPathBuf, kind: FileChangeKind) -> Self {
+        Self { path, kind }
+    }
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum FileChangeKind {
+    Created,
+    Modified,
+    Deleted,
+}
crates/red_knot/src/workspace.rs (new file, 344 lines)
@@ -0,0 +1,344 @@
// TODO: Fix clippy warnings created by salsa macros
#![allow(clippy::used_underscore_binding)]

use std::{collections::BTreeMap, sync::Arc};

use rustc_hash::{FxBuildHasher, FxHashSet};

pub use metadata::{PackageMetadata, WorkspaceMetadata};
use ruff_db::{
    files::{system_path_to_file, File},
    system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
};
use ruff_python_ast::{name::Name, PySourceType};

use crate::{
    db::Db,
    lint::{lint_semantic, lint_syntax, Diagnostics},
};

mod metadata;

/// The project workspace as a Salsa ingredient.
///
/// A workspace consists of one or multiple packages. Packages can be nested. A file in a workspace
/// belongs to no or exactly one package (files can't belong to multiple packages).
///
/// How workspaces and packages are discovered is TBD. For now, a workspace can be any directory,
/// and it always contains a single package which has the same root as the workspace.
///
/// ## Examples
///
/// ```text
/// app-1/
///     pyproject.toml
///     src/
///         ... python files
///
/// app-2/
///     pyproject.toml
///     src/
///         ... python files
///
/// shared/
///     pyproject.toml
///     src/
///         ... python files
///
/// pyproject.toml
/// ```
///
/// The above project structure has three packages: `app-1`, `app-2`, and `shared`.
/// Each of the packages can define their own settings in their `pyproject.toml` file, but
/// they must be compatible. For example, each package can define a different `requires-python` range,
/// but the ranges must overlap.
///
/// ## How is a workspace different from a program?
/// There are two (related) motivations:
///
/// 1. Program is defined in `ruff_db` and it can't reference the settings types for the linter and formatter
///    without introducing a cyclic dependency. The workspace is defined in a higher level crate
///    where it can reference these setting types.
/// 2. Running `ruff check` with different target versions results in different programs (settings) but
///    it remains the same workspace. That's why program is a narrowed view of the workspace only
///    holding on to the most fundamental settings required for checking.
#[salsa::input]
pub struct Workspace {
    #[id]
    #[return_ref]
    root_buf: SystemPathBuf,

    /// The files that are open in the workspace.
    ///
    /// Setting the open files to a non-`None` value changes `check` to only check the
    /// open files rather than all files in the workspace.
    #[return_ref]
    open_file_set: Option<Arc<FxHashSet<File>>>,

    /// The (first-party) packages in this workspace.
    #[return_ref]
    package_tree: BTreeMap<SystemPathBuf, Package>,
}

/// A first-party package in a workspace.
#[salsa::input]
pub struct Package {
    #[return_ref]
    pub name: Name,

    /// The path to the root directory of the package.
    #[id]
    #[return_ref]
    root_buf: SystemPathBuf,

    /// The files that are part of this package.
    #[return_ref]
    file_set: Arc<FxHashSet<File>>,
    // TODO: Add the loaded settings.
}

impl Workspace {
    /// Discovers the closest workspace at `path` and returns its metadata.
    pub fn from_metadata(db: &dyn Db, metadata: WorkspaceMetadata) -> Self {
        let mut packages = BTreeMap::new();

        for package in metadata.packages {
            packages.insert(package.root.clone(), Package::from_metadata(db, package));
        }

        Workspace::new(db, metadata.root, None, packages)
    }

    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.root_buf(db)
    }

    pub fn packages(self, db: &dyn Db) -> impl Iterator<Item = Package> + '_ {
        self.package_tree(db).values().copied()
    }

    pub fn reload(self, db: &mut dyn Db, metadata: WorkspaceMetadata) {
        assert_eq!(self.root(db), metadata.root());

        let mut old_packages = self.package_tree(db).clone();
        let mut new_packages = BTreeMap::new();

        for package_metadata in metadata.packages {
            let path = package_metadata.root().to_path_buf();

            let package = if let Some(old_package) = old_packages.remove(&path) {
                old_package.update(db, package_metadata);
                old_package
            } else {
                Package::from_metadata(db, package_metadata)
            };

            new_packages.insert(path, package);
        }

        self.set_package_tree(db).to(new_packages);
    }

    pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {
        let path = metadata.root().to_path_buf();

        if let Some(package) = self.package_tree(db).get(&path).copied() {
            package.update(db, metadata);
            Ok(())
        } else {
            Err(anyhow::anyhow!("Package {path} not found"))
        }
    }

    /// Returns the closest package to which the first-party `path` belongs.
    ///
    /// Returns `None` if the `path` is outside of any package or if `file` isn't a first-party file
    /// (e.g. third-party dependencies or `excluded`).
    pub fn package(self, db: &dyn Db, path: &SystemPath) -> Option<Package> {
        let packages = self.package_tree(db);

        let (package_path, package) = packages.range(..path.to_path_buf()).next_back()?;

        if path.starts_with(package_path) {
            Some(*package)
        } else {
            None
        }
    }

    /// Checks all open files in the workspace and its dependencies.
    #[tracing::instrument(level = "debug", skip_all)]
    pub fn check(self, db: &dyn Db) -> Vec<String> {
        let mut result = Vec::new();

        if let Some(open_files) = self.open_files(db) {
            for file in open_files {
                result.extend_from_slice(&check_file(db, *file));
            }
        } else {
            for package in self.packages(db) {
                result.extend(package.check(db));
            }
        }

        result
    }

    /// Opens a file in the workspace.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn open_file(self, db: &mut dyn Db, file: File) {
        let mut open_files = self.take_open_files(db);
        open_files.insert(file);
        self.set_open_files(db, open_files);
    }

    /// Closes a file in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
        let mut open_files = self.take_open_files(db);
        let removed = open_files.remove(&file);

        if removed {
            self.set_open_files(db, open_files);
        }

        removed
    }

    /// Returns the open files in the workspace or `None` if the entire workspace should be checked.
    pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
        self.open_file_set(db).as_deref()
    }

    /// Sets the open files in the workspace.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
        self.set_open_file_set(db).to(Some(Arc::new(open_files)));
    }

    /// This takes the open files from the workspace and returns them.
    ///
    /// This changes the behavior of `check` to check all files in the workspace instead of just the open files.
    pub fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
        let open_files = self.open_file_set(db).clone();

        if let Some(open_files) = open_files {
            // Salsa will cancel any pending queries and remove its own reference to `open_files`
            // so that the reference counter to `open_files` now drops to 1.
            self.set_open_file_set(db).to(None);

            Arc::try_unwrap(open_files).unwrap()
        } else {
            FxHashSet::default()
        }
    }
}

impl Package {
    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.root_buf(db)
    }

    /// Returns `true` if `file` is a first-party file part of this package.
    pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
        self.files(db).contains(&file)
    }

    pub fn files(self, db: &dyn Db) -> &FxHashSet<File> {
        self.file_set(db)
    }

    pub fn remove_file(self, db: &mut dyn Db, file: File) -> bool {
        let mut files_arc = self.file_set(db).clone();

        // Set a dummy value. Salsa will cancel any pending queries and remove its own reference to `files`
        // so that the reference counter to `files` now drops to 1.
        self.set_file_set(db).to(Arc::new(FxHashSet::default()));

        let files = Arc::get_mut(&mut files_arc).unwrap();
        let removed = files.remove(&file);
        self.set_file_set(db).to(files_arc);

        removed
    }

    pub(crate) fn check(self, db: &dyn Db) -> Vec<String> {
        let mut result = Vec::new();
        for file in self.files(db) {
            let diagnostics = check_file(db, *file);
            result.extend_from_slice(&diagnostics);
        }

        result
    }

    fn from_metadata(db: &dyn Db, metadata: PackageMetadata) -> Self {
        let files = discover_package_files(db, metadata.root());

        Self::new(db, metadata.name, metadata.root, Arc::new(files))
    }

    fn update(self, db: &mut dyn Db, metadata: PackageMetadata) {
        let root = self.root(db);
        assert_eq!(root, metadata.root());

        let files = discover_package_files(db, root);

        self.set_name(db).to(metadata.name);
        self.set_file_set(db).to(Arc::new(files));
    }
}

pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
    let mut diagnostics = Vec::new();
    diagnostics.extend_from_slice(lint_syntax(db, file));
    diagnostics.extend_from_slice(lint_semantic(db, file));
    Diagnostics::from(diagnostics)
}

fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
    let paths = std::sync::Mutex::new(Vec::new());

    db.system().walk_directory(path).run(|| {
        Box::new(|entry| {
            match entry {
                Ok(entry) => {
                    // Skip over any non python files to avoid creating too many entries in `Files`.
                    if entry.file_type().is_file()
                        && entry
                            .path()
                            .extension()
                            .and_then(PySourceType::try_from_extension)
                            .is_some()
                    {
                        let mut paths = paths.lock().unwrap();
                        paths.push(entry.into_path());
                    }
                }
                Err(error) => {
                    // TODO Handle error
                    tracing::error!("Failed to walk path: {error}");
                }
            }

            WalkState::Continue
        })
    });

    let paths = paths.into_inner().unwrap();
    let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);

    for path in paths {
        // If this returns `None`, then the file was deleted between the `walk_directory` call and now.
        // We can ignore this.
        if let Some(file) = system_path_to_file(db.upcast(), &path) {
            files.insert(file);
        }
    }

    files
}
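Aside: `Workspace::package` above leans on `BTreeMap::range(..path).next_back()` to find the innermost package root that is an ancestor of a path. A self-contained sketch of the same lookup, using plain strings and made-up paths in place of `SystemPath` and `Package`:

```rust
use std::collections::BTreeMap;

/// Returns the name of the innermost package containing `path`, if any.
/// Mirrors `Workspace::package`: take the greatest key ordered before
/// `path`, then verify it is actually an ancestor of `path`.
fn closest_package<'a>(packages: &'a BTreeMap<String, &'a str>, path: &str) -> Option<&'a str> {
    let (root, name) = packages.range(..path.to_string()).next_back()?;
    path.starts_with(root.as_str()).then_some(*name)
}

fn main() {
    let mut packages = BTreeMap::new();
    packages.insert("/ws/app-1/".to_string(), "app-1");
    packages.insert("/ws/shared/".to_string(), "shared");

    // A file under a package root resolves to that package.
    assert_eq!(closest_package(&packages, "/ws/app-1/src/main.py"), Some("app-1"));
    // A file outside every package root resolves to nothing.
    assert_eq!(closest_package(&packages, "/ws/other/file.py"), None);
    println!("lookups behave as expected");
}
```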
crates/red_knot/src/workspace/metadata.rs (new file, 68 lines)
@@ -0,0 +1,68 @@
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_python_ast::name::Name;

#[derive(Debug)]
pub struct WorkspaceMetadata {
    pub(super) root: SystemPathBuf,

    /// The (first-party) packages in this workspace.
    pub(super) packages: Vec<PackageMetadata>,
}

/// A first-party package in a workspace.
#[derive(Debug)]
pub struct PackageMetadata {
    pub(super) name: Name,

    /// The path to the root directory of the package.
    pub(super) root: SystemPathBuf,
    // TODO: Add the loaded package configuration (not the nested ruff settings)
}

impl WorkspaceMetadata {
    /// Discovers the closest workspace at `path` and returns its metadata.
    pub fn from_path(path: &SystemPath, system: &dyn System) -> anyhow::Result<WorkspaceMetadata> {
        let root = if system.is_file(path) {
            path.parent().unwrap().to_path_buf()
        } else {
            path.to_path_buf()
        };

        if !system.is_directory(&root) {
            anyhow::bail!("no workspace found at {:?}", root);
        }

        // TODO: Discover package name from `pyproject.toml`.
        let package_name: Name = path.file_name().unwrap_or("<root>").into();

        let package = PackageMetadata {
            name: package_name,
            root: root.clone(),
        };

        let workspace = WorkspaceMetadata {
            root,
            packages: vec![package],
        };

        Ok(workspace)
    }

    pub fn root(&self) -> &SystemPath {
        &self.root
    }

    pub fn packages(&self) -> &[PackageMetadata] {
        &self.packages
    }
}

impl PackageMetadata {
    pub fn name(&self) -> &Name {
        &self.name
    }

    pub fn root(&self) -> &SystemPath {
        &self.root
    }
}
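Aside: `Workspace::take_open_files` and `Package::remove_file` in the file above follow a shared-then-owned `Arc` protocol: clear every other handle first, then unwrap. A std-only sketch of why `Arc::try_unwrap` only succeeds once the second handle is gone (the file names are hypothetical):

```rust
use std::sync::Arc;

fn main() {
    // The file set lives behind an `Arc` so snapshots can share it cheaply.
    let files = Arc::new(vec!["a.py", "b.py"]);

    // A second handle, standing in for the one Salsa's storage would hold.
    let salsa_copy = Arc::clone(&files);

    // While the set is shared, unwrapping fails (this consumes `files` and
    // drops the returned `Err`, so only `salsa_copy` remains afterwards).
    assert!(Arc::try_unwrap(files).is_err());

    // Once the other handle is gone (in the diff: Salsa cancels pending
    // queries and clears its reference), the count is 1 and unwrap succeeds.
    let mut owned = Arc::try_unwrap(salsa_copy).unwrap();
    owned.retain(|f| *f != "a.py");
    assert_eq!(owned, ["b.py"]);
    println!("owned set after removal: {owned:?}");
}
```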
@@ -28,6 +28,8 @@ walkdir = { workspace = true }
 zip = { workspace = true }

 [dev-dependencies]
+ruff_db = { workspace = true, features = ["os"] }
+
 anyhow = { workspace = true }
 insta = { workspace = true }
 tempfile = { workspace = true }
@@ -1,16 +1,16 @@
 use ruff_db::Upcast;

 use crate::resolver::{
-    file_to_module,
-    internal::{ModuleNameIngredient, ModuleResolverSettings},
-    resolve_module_query,
+    editable_install_resolution_paths, file_to_module, internal::ModuleNameIngredient,
+    module_resolution_settings, resolve_module_query,
 };
 use crate::typeshed::parse_typeshed_versions;

 #[salsa::jar(db=Db)]
 pub struct Jar(
     ModuleNameIngredient<'_>,
-    ModuleResolverSettings,
+    module_resolution_settings,
+    editable_install_resolution_paths,
     resolve_module_query,
     file_to_module,
     parse_typeshed_versions,
@@ -4,7 +4,6 @@ mod module_name;
 mod path;
 mod resolver;
 mod state;
-mod supported_py_version;
 mod typeshed;

 #[cfg(test)]
@@ -13,8 +12,7 @@ mod testing;
 pub use db::{Db, Jar};
 pub use module::{Module, ModuleKind};
 pub use module_name::ModuleName;
-pub use resolver::{resolve_module, set_module_resolution_settings, RawModuleResolutionSettings};
-pub use supported_py_version::TargetVersion;
+pub use resolver::resolve_module;
 pub use typeshed::{
     vendored_typeshed_stubs, TypeshedVersionsParseError, TypeshedVersionsParseErrorKind,
 };
@@ -55,8 +55,7 @@ impl ModuleName {
     #[inline]
     #[must_use]
     pub fn new_static(name: &'static str) -> Option<Self> {
-        // TODO(Micha): Use CompactString::const_new once we upgrade to 0.8 https://github.com/ParkMyCar/compact_str/pull/336
-        Self::is_valid_name(name).then(|| Self(CompactString::from(name)))
+        Self::is_valid_name(name).then(|| Self(CompactString::const_new(name)))
     }

     #[must_use]
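Aside: the switch to `CompactString::const_new` lets short, static module names be built in `const` contexts. A hedged sketch, assuming `compact_str = "0.8"` as in the TODO link above (this example is not part of the diff):

```rust
use compact_str::CompactString;

// `const_new` is a `const fn`, so a short literal is inlined at compile
// time; no length branch or heap check happens at runtime.
const BUILTINS: CompactString = CompactString::const_new("builtins");

fn main() {
    // `from` is the general constructor; for the same text it produces a
    // value that compares equal, it just cannot run at compile time.
    let runtime = CompactString::from("builtins");
    assert_eq!(BUILTINS, runtime);
    println!("both representations compare equal: {BUILTINS}");
}
```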
@@ -6,7 +6,7 @@
 use std::fmt;

 use ruff_db::files::{system_path_to_file, vendored_path_to_file, File, FilePath};
-use ruff_db::system::{SystemPath, SystemPathBuf};
+use ruff_db::system::{System, SystemPath, SystemPathBuf};
 use ruff_db::vendored::{VendoredPath, VendoredPathBuf};

 use crate::db::Db;
@@ -73,6 +73,7 @@ enum ModuleResolutionPathBufInner {
     FirstParty(SystemPathBuf),
     StandardLibrary(FilePath),
     SitePackages(SystemPathBuf),
+    EditableInstall(SystemPathBuf),
 }

 impl ModuleResolutionPathBufInner {
@@ -134,6 +135,19 @@ impl ModuleResolutionPathBufInner {
                 );
                 path.push(component);
             }
+            Self::EditableInstall(ref mut path) => {
+                if let Some(extension) = extension {
+                    assert!(
+                        matches!(extension, "pyi" | "py"),
+                        "Extension must be `py` or `pyi`; got `{extension}`"
+                    );
+                }
+                assert!(
+                    path.extension().is_none(),
+                    "Cannot push part {component} to {path}, which already has an extension"
+                );
+                path.push(component);
+            }
         }
     }
 }
@@ -197,6 +211,18 @@ impl ModuleResolutionPathBuf {
             .then_some(Self(ModuleResolutionPathBufInner::SitePackages(path)))
     }

+    #[must_use]
+    pub(crate) fn editable_installation_root(
+        system: &dyn System,
+        path: impl Into<SystemPathBuf>,
+    ) -> Option<Self> {
+        let path = path.into();
+        // TODO: Add Salsa invalidation to this system call:
+        system
+            .is_directory(&path)
+            .then_some(Self(ModuleResolutionPathBufInner::EditableInstall(path)))
+    }
+
     #[must_use]
     pub(crate) fn is_regular_package(&self, search_path: &Self, resolver: &ResolverState) -> bool {
         ModuleResolutionPathRef::from(self).is_regular_package(search_path, resolver)
@@ -207,6 +233,10 @@ impl ModuleResolutionPathBuf {
         ModuleResolutionPathRef::from(self).is_directory(search_path, resolver)
     }

+    pub(crate) fn is_site_packages(&self) -> bool {
+        matches!(self.0, ModuleResolutionPathBufInner::SitePackages(_))
+    }
+
     #[must_use]
     pub(crate) fn with_pyi_extension(&self) -> Self {
         ModuleResolutionPathRef::from(self).with_pyi_extension()
@@ -229,6 +259,16 @@ impl ModuleResolutionPathBuf {
     pub(crate) fn to_file(&self, search_path: &Self, resolver: &ResolverState) -> Option<File> {
         ModuleResolutionPathRef::from(self).to_file(search_path, resolver)
     }
+
+    pub(crate) fn as_system_path(&self) -> Option<&SystemPathBuf> {
+        match &self.0 {
+            ModuleResolutionPathBufInner::Extra(path) => Some(path),
+            ModuleResolutionPathBufInner::FirstParty(path) => Some(path),
+            ModuleResolutionPathBufInner::StandardLibrary(_) => None,
+            ModuleResolutionPathBufInner::SitePackages(path) => Some(path),
+            ModuleResolutionPathBufInner::EditableInstall(path) => Some(path),
+        }
+    }
 }

 impl fmt::Debug for ModuleResolutionPathBuf {
@@ -250,6 +290,10 @@ impl fmt::Debug for ModuleResolutionPathBuf {
                 .debug_tuple("ModuleResolutionPathBuf::StandardLibrary")
                 .field(path)
                 .finish(),
+            ModuleResolutionPathBufInner::EditableInstall(path) => f
+                .debug_tuple("ModuleResolutionPathBuf::EditableInstall")
+                .field(path)
+                .finish(),
         }
     }
 }
@@ -272,6 +316,7 @@ enum ModuleResolutionPathRefInner<'a> {
     FirstParty(&'a SystemPath),
     StandardLibrary(FilePathRef<'a>),
     SitePackages(&'a SystemPath),
+    EditableInstall(&'a SystemPath),
 }

 impl<'a> ModuleResolutionPathRefInner<'a> {
@@ -306,6 +351,7 @@ impl<'a> ModuleResolutionPathRefInner<'a> {
             (Self::Extra(path), Self::Extra(_)) => resolver.system().is_directory(path),
             (Self::FirstParty(path), Self::FirstParty(_)) => resolver.system().is_directory(path),
             (Self::SitePackages(path), Self::SitePackages(_)) => resolver.system().is_directory(path),
+            (Self::EditableInstall(path), Self::EditableInstall(_)) => resolver.system().is_directory(path),
             (Self::StandardLibrary(path), Self::StandardLibrary(stdlib_root)) => {
                 match Self::query_stdlib_version(path, search_path, &stdlib_root, resolver) {
                     TypeshedVersionsQueryResult::DoesNotExist => false,
@@ -323,16 +369,16 @@ impl<'a> ModuleResolutionPathRefInner<'a> {

     #[must_use]
     fn is_regular_package(&self, search_path: Self, resolver: &ResolverState) -> bool {
-        fn is_non_stdlib_pkg(state: &ResolverState, path: &SystemPath) -> bool {
-            let file_system = state.system();
-            file_system.path_exists(&path.join("__init__.py"))
-                || file_system.path_exists(&path.join("__init__.pyi"))
+        fn is_non_stdlib_pkg(resolver: &ResolverState, path: &SystemPath) -> bool {
+            system_path_to_file(resolver.db.upcast(), path.join("__init__.py")).is_some()
+                || system_path_to_file(resolver.db.upcast(), path.join("__init__.pyi")).is_some()
         }

         match (self, search_path) {
             (Self::Extra(path), Self::Extra(_)) => is_non_stdlib_pkg(resolver, path),
             (Self::FirstParty(path), Self::FirstParty(_)) => is_non_stdlib_pkg(resolver, path),
             (Self::SitePackages(path), Self::SitePackages(_)) => is_non_stdlib_pkg(resolver, path),
+            (Self::EditableInstall(path), Self::EditableInstall(_)) => is_non_stdlib_pkg(resolver, path),
             // Unlike the other variants:
             // (1) Account for VERSIONS
             // (2) Only test for `__init__.pyi`, not `__init__.py`
@@ -340,8 +386,13 @@ impl<'a> ModuleResolutionPathRefInner<'a> {
             match Self::query_stdlib_version(path, search_path, &stdlib_root, resolver) {
                 TypeshedVersionsQueryResult::DoesNotExist => false,
                 TypeshedVersionsQueryResult::Exists | TypeshedVersionsQueryResult::MaybeExists => match path {
-                    FilePathRef::System(path) => resolver.db.system().path_exists(&path.join("__init__.pyi")),
-                    FilePathRef::Vendored(path) => resolver.db.vendored().exists(path.join("__init__.pyi")),
+                    FilePathRef::System(path) => system_path_to_file(resolver.db.upcast(), path.join("__init__.pyi")).is_some(),
+                    // No need to use `vendored_path_to_file` here:
+                    // (1) The vendored filesystem is immutable, so we don't need to worry about Salsa invalidation
+                    // (2) The caching Salsa provides probably won't speed us up that much
+                    //     (TODO: check that assumption when we're able to run red-knot on larger code bases)
+                    // (3) We don't need the `File` object that `vendored_path_to_file` would return; we just need to know if the file exists
+                    FilePathRef::Vendored(path) => resolver.db.vendored().exists(path.join("__init__.pyi"))
                 },
             }
         }
@@ -358,6 +409,7 @@ impl<'a> ModuleResolutionPathRefInner<'a> {
             (Self::SitePackages(path), Self::SitePackages(_)) => {
                 system_path_to_file(resolver.db.upcast(), path)
             }
+            (Self::EditableInstall(path), Self::EditableInstall(_)) => system_path_to_file(resolver.db.upcast(), path),
             (Self::StandardLibrary(path), Self::StandardLibrary(stdlib_root)) => {
                 match Self::query_stdlib_version(&path, search_path, &stdlib_root, resolver) {
                     TypeshedVersionsQueryResult::DoesNotExist => None,
@@ -374,7 +426,10 @@ impl<'a> ModuleResolutionPathRefInner<'a> {
     #[must_use]
     fn to_module_name(self) -> Option<ModuleName> {
         match self {
-            Self::Extra(path) | Self::FirstParty(path) | Self::SitePackages(path) => {
+            Self::Extra(path)
+            | Self::FirstParty(path)
+            | Self::SitePackages(path)
+            | Self::EditableInstall(path) => {
                 let parent = path.parent()?;
                 let parent_components = parent.components().map(|component| component.as_str());
                 let skip_final_part =
@@ -421,6 +476,9 @@ impl<'a> ModuleResolutionPathRefInner<'a> {
             Self::SitePackages(path) => {
                 ModuleResolutionPathBufInner::SitePackages(path.with_extension("pyi"))
             }
+            Self::EditableInstall(path) => {
+                ModuleResolutionPathBufInner::EditableInstall(path.with_extension("pyi"))
+            }
         }
     }
@@ -437,6 +495,9 @@ impl<'a> ModuleResolutionPathRefInner<'a> {
             Self::SitePackages(path) => Some(ModuleResolutionPathBufInner::SitePackages(
                 path.with_extension("py"),
             )),
+            Self::EditableInstall(path) => Some(ModuleResolutionPathBufInner::EditableInstall(
+                path.with_extension("py"),
+            )),
         }
     }
@@ -474,6 +535,13 @@ impl<'a> ModuleResolutionPathRefInner<'a> {
                     .then_some(Self::SitePackages(path))
                 })
             }
+            (Self::EditableInstall(root), FilePathRef::System(absolute_path)) => {
+                absolute_path.strip_prefix(root).ok().and_then(|path| {
+                    path.extension()
+                        .map_or(true, |ext| matches!(ext, "pyi" | "py"))
+                        .then_some(Self::EditableInstall(path))
+                })
+            }
             (Self::Extra(_), FilePathRef::Vendored(_)) => None,
             (Self::FirstParty(_), FilePathRef::Vendored(_)) => None,
             (Self::StandardLibrary(root), FilePathRef::Vendored(absolute_path)) => match root {
@@ -487,6 +555,7 @@ impl<'a> ModuleResolutionPathRefInner<'a> {
                 }
             },
             (Self::SitePackages(_), FilePathRef::Vendored(_)) => None,
+            (Self::EditableInstall(_), FilePathRef::Vendored(_)) => None,
         }
     }
 }
@@ -562,6 +631,10 @@ impl fmt::Debug for ModuleResolutionPathRef<'_> {
                 .debug_tuple("ModuleResolutionPathRef::StandardLibrary")
                 .field(path)
                 .finish(),
+            ModuleResolutionPathRefInner::EditableInstall(path) => f
+                .debug_tuple("ModuleResolutionPathRef::EditableInstall")
+                .field(path)
+                .finish(),
         }
     }
 }
@@ -582,6 +655,9 @@ impl<'a> From<&'a ModuleResolutionPathBuf> for ModuleResolutionPathRef<'a> {
             ModuleResolutionPathBufInner::SitePackages(path) => {
                 ModuleResolutionPathRefInner::SitePackages(path)
             }
+            ModuleResolutionPathBufInner::EditableInstall(path) => {
+                ModuleResolutionPathRefInner::EditableInstall(path)
+            }
         };
         ModuleResolutionPathRef(inner)
     }
@@ -593,6 +669,7 @@ impl PartialEq<SystemPath> for ModuleResolutionPathRef<'_> {
             ModuleResolutionPathRefInner::Extra(path) => path == other,
             ModuleResolutionPathRefInner::FirstParty(path) => path == other,
             ModuleResolutionPathRefInner::SitePackages(path) => path == other,
+            ModuleResolutionPathRefInner::EditableInstall(path) => path == other,
             ModuleResolutionPathRefInner::StandardLibrary(FilePathRef::System(path)) => {
                 path == other
             }
@@ -625,6 +702,7 @@ impl PartialEq<VendoredPath> for ModuleResolutionPathRef<'_> {
             ModuleResolutionPathRefInner::Extra(_) => false,
             ModuleResolutionPathRefInner::FirstParty(_) => false,
             ModuleResolutionPathRefInner::SitePackages(_) => false,
+            ModuleResolutionPathRefInner::EditableInstall(_) => false,
             ModuleResolutionPathRefInner::StandardLibrary(FilePathRef::System(_)) => false,
             ModuleResolutionPathRefInner::StandardLibrary(FilePathRef::Vendored(path)) => {
                 path == other
@@ -654,9 +732,9 @@ impl PartialEq<ModuleResolutionPathRef<'_>> for VendoredPathBuf {
 #[cfg(test)]
 mod tests {
     use insta::assert_debug_snapshot;
+    use ruff_db::program::TargetVersion;

     use crate::db::tests::TestDb;
-    use crate::supported_py_version::TargetVersion;
     use crate::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};

     use super::*;
@@ -707,6 +785,9 @@ mod tests {
             ModuleResolutionPathRefInner::SitePackages(path) => {
                 ModuleResolutionPathBufInner::SitePackages(path.to_path_buf())
             }
+            ModuleResolutionPathRefInner::EditableInstall(path) => {
+                ModuleResolutionPathBufInner::EditableInstall(path.to_path_buf())
+            }
         };
         ModuleResolutionPathBuf(inner)
     }
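Aside: the new `EditableInstall` arm of `relativize_path` combines `strip_prefix` with an extension check. The same logic expressed with `std::path` (illustrative only; the real code works on `SystemPath`, not `std::path::Path`):

```rust
use std::path::Path;

/// Mirrors the new `EditableInstall` arm: a file maps into an
/// editable-install root only if it lives under that root and has
/// no extension or a `py`/`pyi` extension.
fn relativize<'a>(root: &Path, absolute: &'a Path) -> Option<&'a Path> {
    let relative = absolute.strip_prefix(root).ok()?;
    relative
        .extension()
        .map_or(true, |ext| ext == "py" || ext == "pyi")
        .then_some(relative)
}

fn main() {
    let root = Path::new("/x/src");
    assert!(relativize(root, Path::new("/x/src/foo/bar.py")).is_some());
    assert!(relativize(root, Path::new("/x/src/foo")).is_some()); // no extension
    assert!(relativize(root, Path::new("/x/src/data.json")).is_none());
    assert!(relativize(root, Path::new("/elsewhere/foo.py")).is_none());
    println!("relativization matches the diff's semantics");
}
```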
@@ -1,31 +1,20 @@
-use std::ops::Deref;
+use std::borrow::Cow;
+use std::iter::FusedIterator;
 use std::sync::Arc;

+use rustc_hash::{FxBuildHasher, FxHashSet};
+
 use ruff_db::files::{File, FilePath};
-use ruff_db::system::SystemPathBuf;
+use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
+use ruff_db::system::{DirectoryEntry, System, SystemPath, SystemPathBuf};

 use crate::db::Db;
 use crate::module::{Module, ModuleKind};
 use crate::module_name::ModuleName;
 use crate::path::ModuleResolutionPathBuf;
-use crate::resolver::internal::ModuleResolverSettings;
 use crate::state::ResolverState;
-use crate::supported_py_version::TargetVersion;

-/// Configures the module resolver settings.
-///
-/// Must be called before calling any other module resolution functions.
-pub fn set_module_resolution_settings(db: &mut dyn Db, config: RawModuleResolutionSettings) {
-    // There's no concurrency issue here because we hold a `&mut dyn Db` reference. No other
-    // thread can mutate the `Db` while we're in this call, so using `try_get` to test if
-    // the settings have already been set is safe.
-    let resolved_settings = config.into_configuration_settings();
-    if let Some(existing) = ModuleResolverSettings::try_get(db) {
-        existing.set_settings(db).to(resolved_settings);
-    } else {
-        ModuleResolverSettings::new(db, resolved_settings);
-    }
-}
+type SearchPathRoot = Arc<ModuleResolutionPathBuf>;

 /// Resolves a module name to a module.
 pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
@@ -80,14 +69,16 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {

     let path = file.path(db.upcast());

-    let resolver_settings = module_resolver_settings(db);
+    let settings = module_resolution_settings(db);

-    let relative_path = resolver_settings
-        .search_paths()
-        .iter()
-        .find_map(|root| root.relativize_path(path))?;
+    let mut search_paths = settings.search_paths(db);

-    let module_name = relative_path.to_module_name()?;
+    let module_name = loop {
+        let candidate = search_paths.next()?;
+        if let Some(relative_path) = candidate.relativize_path(path) {
+            break relative_path.to_module_name()?;
+        }
+    };

     // Resolve the module name to see if Python would resolve the name to the same path.
     // If it doesn't, then that means that multiple modules have the same name in different
@@ -109,101 +100,327 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
     }
 }

-/// "Raw" configuration settings for module resolution: unvalidated, unnormalized
-#[derive(Eq, PartialEq, Debug)]
-pub struct RawModuleResolutionSettings {
-    /// The target Python version the user has specified
-    pub target_version: TargetVersion,
-
-    /// List of user-provided paths that should take first priority in the module resolution.
-    /// Examples in other type checkers are mypy's MYPYPATH environment variable,
-    /// or pyright's stubPath configuration setting.
-    pub extra_paths: Vec<SystemPathBuf>,
-
-    /// The root of the workspace, used for finding first-party modules.
-    pub workspace_root: SystemPathBuf,
-
-    /// Optional (already validated) path to standard-library typeshed stubs.
-    /// If this is not provided, we will fallback to our vendored typeshed stubs
-    /// bundled as a zip file in the binary
-    pub custom_typeshed: Option<SystemPathBuf>,
-
-    /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
-    pub site_packages: Option<SystemPathBuf>,
-}
+/// Validate and normalize the raw settings given by the user
+/// into settings we can use for module resolution
+///
+/// This method also implements the typing spec's [module resolution order].
+///
+/// TODO(Alex): this method does multiple `.unwrap()` calls when it should really return an error.
+/// Each `.unwrap()` call is a point where we're validating a setting that the user would pass
+/// and transforming it into an internal representation for a validated path.
+/// Rather than panicking if a path fails to validate, we should display an error message to the user
+/// and exit the process with a nonzero exit code.
+/// This validation should probably be done outside of Salsa?
+///
+/// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
+#[salsa::tracked(return_ref)]
+pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSettings {
+    let program = Program::get(db.upcast());
+
+    let SearchPathSettings {
+        extra_paths,
+        workspace_root,
+        custom_typeshed,
+        site_packages,
+    } = program.search_paths(db.upcast());
+
+    if let Some(custom_typeshed) = custom_typeshed {
+        tracing::info!("Custom typeshed directory: {custom_typeshed}");
+    }
+
+    if !extra_paths.is_empty() {
+        tracing::info!("extra search paths: {extra_paths:?}");
+    }
+
+    let current_directory = db.system().current_directory();
+
+    let mut static_search_paths: Vec<_> = extra_paths
+        .iter()
+        .map(|fs_path| {
+            Arc::new(
+                ModuleResolutionPathBuf::extra(SystemPath::absolute(fs_path, current_directory))
+                    .unwrap(),
+            )
+        })
+        .collect();
+
+    static_search_paths.push(Arc::new(
+        ModuleResolutionPathBuf::first_party(SystemPath::absolute(
+            workspace_root,
+            current_directory,
+        ))
+        .unwrap(),
+    ));
+
+    static_search_paths.push(Arc::new(custom_typeshed.as_ref().map_or_else(
+        ModuleResolutionPathBuf::vendored_stdlib,
+        |custom| {
+            ModuleResolutionPathBuf::stdlib_from_custom_typeshed_root(&SystemPath::absolute(
+                custom,
+                current_directory,
+            ))
+            .unwrap()
+        },
+    )));
+
+    if let Some(path) = site_packages {
+        let site_packages_root = Arc::new(
+            ModuleResolutionPathBuf::site_packages(SystemPath::absolute(path, current_directory))
+                .unwrap(),
+        );
+        static_search_paths.push(site_packages_root);
+    }
+
+    // TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
+
+    let target_version = program.target_version(db.upcast());
+    tracing::info!("Target version: {target_version}");
+
+    // Filter out module resolution paths that point to the same directory on disk (the same invariant maintained by [`sys.path` at runtime]).
+    // (Paths may, however, *overlap* -- e.g. you could have both `src/` and `src/foo`
+    // as module resolution paths simultaneously.)
+    //
+    // This code doesn't use an `IndexSet` because the key is the system path and not the search root.
+    //
+    // [`sys.path` at runtime]: https://docs.python.org/3/library/site.html#module-site
+    let mut seen_paths =
+        FxHashSet::with_capacity_and_hasher(static_search_paths.len(), FxBuildHasher);
+
+    static_search_paths.retain(|path| {
+        if let Some(path) = path.as_system_path() {
+            seen_paths.insert(path.to_path_buf())
+        } else {
+            true
+        }
+    });
+
+    ModuleResolutionSettings {
+        target_version,
+        static_search_paths,
+    }
+}
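Aside: the `retain`-plus-`FxHashSet` step at the end of `module_resolution_settings` is a standard order-preserving dedup. A tiny runnable sketch of the same idea (`FxHashSet` in the diff is just a faster drop-in for the std `HashSet` used here):

```rust
use std::collections::HashSet;

fn main() {
    // Keep only the first occurrence of each on-disk directory while
    // preserving the priority order of the search paths.
    let mut search_paths = vec!["/ws/src", "/ws/src", "/typeshed", "/site-packages"];

    let mut seen = HashSet::with_capacity(search_paths.len());
    // `insert` returns `false` for duplicates, so `retain` drops them.
    search_paths.retain(|path| seen.insert(*path));

    assert_eq!(search_paths, ["/ws/src", "/typeshed", "/site-packages"]);
    println!("deduplicated: {search_paths:?}");
}
```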
-impl RawModuleResolutionSettings {
-    /// Implementation of the typing spec's [module resolution order]
-    ///
-    /// TODO(Alex): this method does multiple `.unwrap()` calls when it should really return an error.
-    /// Each `.unwrap()` call is a point where we're validating a setting that the user would pass
-    /// and transforming it into an internal representation for a validated path.
-    /// Rather than panicking if a path fails to validate, we should display an error message to the user
-    /// and exit the process with a nonzero exit code.
-    /// This validation should probably be done outside of Salsa?
-    ///
-    /// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
-    fn into_configuration_settings(self) -> ModuleResolutionSettings {
-        let RawModuleResolutionSettings {
-            target_version,
-            extra_paths,
-            workspace_root,
-            site_packages,
-            custom_typeshed,
-        } = self;
-
-        let mut paths: Vec<ModuleResolutionPathBuf> = extra_paths
-            .into_iter()
-            .map(|fs_path| ModuleResolutionPathBuf::extra(fs_path).unwrap())
-            .collect();
-
-        paths.push(ModuleResolutionPathBuf::first_party(workspace_root).unwrap());
-
-        paths.push(
-            custom_typeshed.map_or_else(ModuleResolutionPathBuf::vendored_stdlib, |custom| {
-                ModuleResolutionPathBuf::stdlib_from_custom_typeshed_root(&custom).unwrap()
-            }),
-        );
-
-        // TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
-        if let Some(site_packages) = site_packages {
-            paths.push(ModuleResolutionPathBuf::site_packages(site_packages).unwrap());
-        }
-
-        ModuleResolutionSettings {
-            target_version,
-            search_paths: OrderedSearchPaths(paths.into_iter().map(Arc::new).collect()),
-        }
-    }
-}
+/// Collect all dynamic search paths:
+/// search paths listed in `.pth` files in the `site-packages` directory
+/// due to editable installations of third-party packages.
+#[salsa::tracked(return_ref)]
+pub(crate) fn editable_install_resolution_paths(db: &dyn Db) -> Vec<Arc<ModuleResolutionPathBuf>> {
+    // This query needs to be re-executed each time a `.pth` file
+    // is added, modified or removed from the `site-packages` directory.
+    // However, we don't use Salsa queries to read the source text of `.pth` files;
+    // we use the APIs on the `System` trait directly. As such, for now we simply ask
+    // Salsa to recompute this query on each new revision.
+    //
+    // TODO: add some kind of watcher for the `site-packages` directory that looks
+    // for `site-packages/*.pth` files being added/modified/removed; get rid of this.
+    // When doing so, also make the test
+    // `deleting_pth_file_on_which_module_resolution_depends_invalidates_cache()`
+    // more principled!
+    db.report_untracked_read();
+
+    let static_search_paths = &module_resolution_settings(db).static_search_paths;
+    let site_packages = static_search_paths
+        .iter()
+        .find(|path| path.is_site_packages());
+
+    let mut dynamic_paths = Vec::default();
+
+    if let Some(site_packages) = site_packages {
+        let site_packages = site_packages
+            .as_system_path()
+            .expect("Expected site-packages never to be a VendoredPath!");
+
+        // As well as modules installed directly into `site-packages`,
+        // the directory may also contain `.pth` files.
+        // Each `.pth` file in `site-packages` may contain one or more lines
+        // containing a (relative or absolute) path.
+        // Each of these paths may point to an editable install of a package,
+        // so should be considered an additional search path.
+        let Ok(pth_file_iterator) = PthFileIterator::new(db, site_packages) else {
+            return dynamic_paths;
+        };
+
+        // The Python documentation specifies that `.pth` files in `site-packages`
+        // are processed in alphabetical order, so collecting and then sorting is necessary.
+        // https://docs.python.org/3/library/site.html#module-site
+        let mut all_pth_files: Vec<PthFile> = pth_file_iterator.collect();
+        all_pth_files.sort_by(|a, b| a.path.cmp(&b.path));
+
+        let mut existing_paths: FxHashSet<_> = static_search_paths
+            .iter()
+            .filter_map(|path| path.as_system_path())
+            .map(Cow::Borrowed)
+            .collect();
+
+        dynamic_paths.reserve(all_pth_files.len());
+
+        for pth_file in &all_pth_files {
+            for installation in pth_file.editable_installations() {
+                if existing_paths.insert(Cow::Owned(
+                    installation.as_system_path().unwrap().to_path_buf(),
+                )) {
+                    dynamic_paths.push(Arc::new(installation));
+                }
+            }
+        }
+    }
+
+    dynamic_paths
+}
-/// A resolved module resolution order as per the [typing spec]
+/// Iterate over the available module-resolution search paths,
+/// following the invariants maintained by [`sys.path` at runtime]:
+/// "No item is added to `sys.path` more than once."
+/// Dynamic search paths (required for editable installs into `site-packages`)
+/// are only calculated lazily.
 ///
-/// [typing spec]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
-#[derive(Clone, Debug, Default, Eq, PartialEq)]
-pub(crate) struct OrderedSearchPaths(Vec<Arc<ModuleResolutionPathBuf>>);
-
-impl Deref for OrderedSearchPaths {
-    type Target = [Arc<ModuleResolutionPathBuf>];
-
-    fn deref(&self) -> &Self::Target {
-        &self.0
-    }
-}
+/// [`sys.path` at runtime]: https://docs.python.org/3/library/site.html#module-site
+struct SearchPathIterator<'db> {
+    db: &'db dyn Db,
+    static_paths: std::slice::Iter<'db, SearchPathRoot>,
+    dynamic_paths: Option<std::slice::Iter<'db, SearchPathRoot>>,
+}
+
+impl<'db> Iterator for SearchPathIterator<'db> {
+    type Item = &'db SearchPathRoot;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let SearchPathIterator {
+            db,
+            static_paths,
+            dynamic_paths,
+        } = self;
+
+        static_paths.next().or_else(|| {
+            dynamic_paths
+                .get_or_insert_with(|| editable_install_resolution_paths(*db).iter())
+                .next()
+        })
+    }
+}
+
+impl<'db> FusedIterator for SearchPathIterator<'db> {}
+
+/// Represents a single `.pth` file in a `site-packages` directory.
+/// One or more lines in a `.pth` file may be a (relative or absolute)
+/// path that represents an editable installation of a package.
+struct PthFile<'db> {
+    system: &'db dyn System,
+    path: SystemPathBuf,
+    contents: String,
+    site_packages: &'db SystemPath,
+}
+
+impl<'db> PthFile<'db> {
+    /// Yield paths in this `.pth` file that appear to represent editable installations,
+    /// and should therefore be added as module-resolution search paths.
+    fn editable_installations(&'db self) -> impl Iterator<Item = ModuleResolutionPathBuf> + 'db {
+        let PthFile {
+            system,
+            path: _,
+            contents,
+            site_packages,
+        } = self;
+
+        // Empty lines or lines starting with '#' are ignored by the Python interpreter.
+        // Lines that start with "import " or "import\t" do not represent editable installs at all;
+        // instead, these are lines that are executed by Python at startup.
+        // https://docs.python.org/3/library/site.html#module-site
+        contents.lines().filter_map(move |line| {
+            let line = line.trim_end();
+            if line.is_empty()
+                || line.starts_with('#')
+                || line.starts_with("import ")
+                || line.starts_with("import\t")
+            {
+                return None;
+            }
+            let possible_editable_install = SystemPath::absolute(line, site_packages);
+            ModuleResolutionPathBuf::editable_installation_root(*system, possible_editable_install)
+        })
+    }
+}
+
+/// Iterator that yields a [`PthFile`] instance for every `.pth` file
+/// found in a given `site-packages` directory.
+struct PthFileIterator<'db> {
+    db: &'db dyn Db,
+    directory_iterator: Box<dyn Iterator<Item = std::io::Result<DirectoryEntry>> + 'db>,
+    site_packages: &'db SystemPath,
+}
+
+impl<'db> PthFileIterator<'db> {
+    fn new(db: &'db dyn Db, site_packages: &'db SystemPath) -> std::io::Result<Self> {
+        Ok(Self {
+            db,
+            directory_iterator: db.system().read_directory(site_packages)?,
+            site_packages,
+        })
+    }
+}
+
+impl<'db> Iterator for PthFileIterator<'db> {
+    type Item = PthFile<'db>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let PthFileIterator {
+            db,
+            directory_iterator,
+            site_packages,
+        } = self;
+
+        let system = db.system();
+
+        loop {
+            let entry_result = directory_iterator.next()?;
+            let Ok(entry) = entry_result else {
+                continue;
+            };
+            let file_type = entry.file_type();
+            if file_type.is_directory() {
+                continue;
+            }
+            let path = entry.into_path();
+            if path.extension() != Some("pth") {
+                continue;
+            }
+
+            let Ok(contents) = db.system().read_to_string(&path) else {
+                continue;
+            };
+
+            return Some(PthFile {
+                system,
+                path,
+                contents,
+                site_packages,
+            });
+        }
+    }
+}
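Aside: the line filter in `PthFile::editable_installations` mirrors what CPython's `site` module does when it processes `.pth` files. A standalone sketch of just the filtering step (the sample `.pth` contents are made up):

```rust
/// Mirrors the filtering above: blank lines, comments, and `import` lines
/// are skipped; everything else is treated as a candidate (relative or
/// absolute) editable-install path.
fn editable_candidates(contents: &str) -> impl Iterator<Item = &str> + '_ {
    contents.lines().filter_map(|line| {
        let line = line.trim_end();
        if line.is_empty()
            || line.starts_with('#')
            || line.starts_with("import ")
            || line.starts_with("import\t")
        {
            None
        } else {
            Some(line)
        }
    })
}

fn main() {
    let pth = "# a comment\nimport foo; foo.setup()\n\n/x/src\n../relative/pkg  \n";
    let candidates: Vec<_> = editable_candidates(pth).collect();
    // Trailing whitespace is trimmed; leading whitespace is deliberately
    // kept, so such lines later fail the is-directory check (see the
    // `editable_install_pth_file_with_whitespace` test further down).
    assert_eq!(candidates, ["/x/src", "../relative/pkg"]);
    println!("candidates: {candidates:?}");
}
```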
 /// Validated and normalized module-resolution settings.
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub(crate) struct ModuleResolutionSettings {
-    search_paths: OrderedSearchPaths,
     target_version: TargetVersion,
+    /// Search paths that have been statically determined purely from reading Ruff's configuration settings.
+    /// These shouldn't ever change unless the config settings themselves change.
+    ///
+    /// Note that `site-packages` *is included* as a search path in this sequence,
+    /// but it is also stored separately so that we're able to find editable installs later.
+    static_search_paths: Vec<SearchPathRoot>,
 }

 impl ModuleResolutionSettings {
-    pub(crate) fn search_paths(&self) -> &[Arc<ModuleResolutionPathBuf>] {
-        &self.search_paths
+    fn target_version(&self) -> TargetVersion {
+        self.target_version
     }

-    pub(crate) fn target_version(&self) -> TargetVersion {
-        self.target_version
+    fn search_paths<'db>(&'db self, db: &'db dyn Db) -> SearchPathIterator<'db> {
+        SearchPathIterator {
+            db,
+            static_paths: self.static_search_paths.iter(),
+            dynamic_paths: None,
+        }
     }
 }
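Aside: `SearchPathIterator` chains a static prefix with a lazily computed dynamic tail, so the editable-install query only runs if resolution actually exhausts the static paths. The same shape without Salsa, using a plain function pointer for the expensive part (all names illustrative):

```rust
/// Yield statically-known items first; compute the dynamic tail only if
/// a caller actually asks for it.
struct LazyChain<'a> {
    static_items: std::slice::Iter<'a, &'static str>,
    dynamic_items: Option<std::vec::IntoIter<&'static str>>,
    compute_dynamic: fn() -> Vec<&'static str>,
}

impl<'a> Iterator for LazyChain<'a> {
    type Item = &'static str;

    fn next(&mut self) -> Option<Self::Item> {
        let LazyChain {
            static_items,
            dynamic_items,
            compute_dynamic,
        } = self;

        static_items.next().copied().or_else(|| {
            dynamic_items
                .get_or_insert_with(|| compute_dynamic().into_iter())
                .next()
        })
    }
}

fn main() {
    let static_paths = ["/ws/src", "/typeshed"];
    let iter = LazyChain {
        static_items: static_paths.iter(),
        dynamic_items: None,
        compute_dynamic: || {
            println!("(computing editable-install paths lazily)");
            vec!["/x/src"]
        },
    };
    let all: Vec<_> = iter.collect();
    assert_eq!(all, ["/ws/src", "/typeshed", "/x/src"]);
}
```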
@@ -214,13 +431,6 @@ impl ModuleResolutionSettings {
 #[allow(unreachable_pub, clippy::used_underscore_binding)]
 pub(crate) mod internal {
     use crate::module_name::ModuleName;
-    use crate::resolver::ModuleResolutionSettings;
-
-    #[salsa::input(singleton)]
-    pub(crate) struct ModuleResolverSettings {
-        #[return_ref]
-        pub(super) settings: ModuleResolutionSettings,
-    }

     /// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
     ///
@@ -232,20 +442,16 @@ pub(crate) mod internal {
     }
 }

-fn module_resolver_settings(db: &dyn Db) -> &ModuleResolutionSettings {
-    ModuleResolverSettings::get(db).settings(db)
-}
-
 /// Given a module name and a list of search paths in which to lookup modules,
 /// attempt to resolve the module name
 fn resolve_name(
     db: &dyn Db,
     name: &ModuleName,
 ) -> Option<(Arc<ModuleResolutionPathBuf>, File, ModuleKind)> {
-    let resolver_settings = module_resolver_settings(db);
+    let resolver_settings = module_resolution_settings(db);
     let resolver_state = ResolverState::new(db, resolver_settings.target_version());

-    for search_path in resolver_settings.search_paths() {
+    for search_path in resolver_settings.search_paths(db) {
         let mut components = name.components();
         let module_name = components.next_back()?;
@@ -388,6 +594,7 @@ mod tests {
     use ruff_db::files::{system_path_to_file, File, FilePath};
     use ruff_db::system::{DbWithTestSystem, OsSystem, SystemPath};
+    use ruff_db::testing::assert_function_query_was_not_run;
     use ruff_db::Db;

     use crate::db::tests::TestDb;
     use crate::module::ModuleKind;
@@ -877,11 +1084,13 @@ mod tests {
     #[test]
     #[cfg(target_family = "unix")]
     fn symlink() -> anyhow::Result<()> {
+        use ruff_db::program::Program;
+
         let mut db = TestDb::new();

         let temp_dir = tempfile::tempdir()?;
         let root = SystemPath::from_std_path(temp_dir.path()).unwrap();
-        db.use_os_system(OsSystem::new(root));
+        db.use_system(OsSystem::new(root));

         let src = root.join("src");
         let site_packages = root.join("site-packages");
@@ -897,15 +1106,14 @@ mod tests {
|
||||
std::fs::write(foo.as_std_path(), "")?;
|
||||
std::os::unix::fs::symlink(foo.as_std_path(), bar.as_std_path())?;
|
||||
|
||||
let settings = RawModuleResolutionSettings {
|
||||
target_version: TargetVersion::Py38,
|
||||
let search_paths = SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
workspace_root: src.clone(),
|
||||
site_packages: Some(site_packages.clone()),
|
||||
custom_typeshed: Some(custom_typeshed.clone()),
|
||||
site_packages: Some(site_packages.clone()),
|
||||
};
|
||||
|
||||
set_module_resolution_settings(&mut db, settings);
|
||||
Program::new(&db, TargetVersion::Py38, search_paths);
|
||||
|
||||
let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
|
||||
let bar_module = resolve_module(&db, ModuleName::new_static("bar").unwrap()).unwrap();
|
||||
@@ -1140,4 +1348,283 @@ mod tests {
            system_path_to_file(&db, stdlib.join("functools.pyi"))
        );
    }

    #[test]
    fn editable_install_absolute_path() {
        const SITE_PACKAGES: &[FileSpec] = &[("_foo.pth", "/x/src")];
        let x_directory = [("/x/src/foo/__init__.py", ""), ("/x/src/foo/bar.py", "")];

        let TestCase { mut db, .. } = TestCaseBuilder::new()
            .with_site_packages_files(SITE_PACKAGES)
            .build();

        db.write_files(x_directory).unwrap();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_bar_module_name = ModuleName::new_static("foo.bar").unwrap();

        let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();
        let foo_bar_module = resolve_module(&db, foo_bar_module_name.clone()).unwrap();

        assert_eq!(
            foo_module.file().path(&db),
            &FilePath::system("/x/src/foo/__init__.py")
        );
        assert_eq!(
            foo_bar_module.file().path(&db),
            &FilePath::system("/x/src/foo/bar.py")
        );
    }

    #[test]
    fn editable_install_pth_file_with_whitespace() {
        const SITE_PACKAGES: &[FileSpec] = &[
            ("_foo.pth", " /x/src"),
            ("_bar.pth", "/y/src "),
        ];
        let external_files = [("/x/src/foo.py", ""), ("/y/src/bar.py", "")];

        let TestCase { mut db, .. } = TestCaseBuilder::new()
            .with_site_packages_files(SITE_PACKAGES)
            .build();

        db.write_files(external_files).unwrap();

        // Lines with leading whitespace in `.pth` files do not parse:
        let foo_module_name = ModuleName::new_static("foo").unwrap();
        assert_eq!(resolve_module(&db, foo_module_name), None);

        // Lines with trailing whitespace in `.pth` files do:
        let bar_module_name = ModuleName::new_static("bar").unwrap();
        let bar_module = resolve_module(&db, bar_module_name.clone()).unwrap();
        assert_eq!(
            bar_module.file().path(&db),
            &FilePath::system("/y/src/bar.py")
        );
    }

    #[test]
    fn editable_install_relative_path() {
        const SITE_PACKAGES: &[FileSpec] = &[
            ("_foo.pth", "../../x/../x/y/src"),
            ("../x/y/src/foo.pyi", ""),
        ];

        let TestCase { db, .. } = TestCaseBuilder::new()
            .with_site_packages_files(SITE_PACKAGES)
            .build();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();

        assert_eq!(
            foo_module.file().path(&db),
            &FilePath::system("/x/y/src/foo.pyi")
        );
    }

    #[test]
    fn editable_install_multiple_pth_files_with_multiple_paths() {
        const COMPLEX_PTH_FILE: &str = "\
/

# a comment
/baz

import not_an_editable_install; do_something_else_crazy_dynamic()

# another comment
spam

not_a_directory
";

        const SITE_PACKAGES: &[FileSpec] = &[
            ("_foo.pth", "../../x/../x/y/src"),
            ("_lots_of_others.pth", COMPLEX_PTH_FILE),
            ("../x/y/src/foo.pyi", ""),
            ("spam/spam.py", ""),
        ];

        let root_files = [("/a.py", ""), ("/baz/b.py", "")];

        let TestCase {
            mut db,
            site_packages,
            ..
        } = TestCaseBuilder::new()
            .with_site_packages_files(SITE_PACKAGES)
            .build();

        db.write_files(root_files).unwrap();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let a_module_name = ModuleName::new_static("a").unwrap();
        let b_module_name = ModuleName::new_static("b").unwrap();
        let spam_module_name = ModuleName::new_static("spam").unwrap();

        let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();
        let a_module = resolve_module(&db, a_module_name.clone()).unwrap();
        let b_module = resolve_module(&db, b_module_name.clone()).unwrap();
        let spam_module = resolve_module(&db, spam_module_name.clone()).unwrap();

        assert_eq!(
            foo_module.file().path(&db),
            &FilePath::system("/x/y/src/foo.pyi")
        );
        assert_eq!(a_module.file().path(&db), &FilePath::system("/a.py"));
        assert_eq!(b_module.file().path(&db), &FilePath::system("/baz/b.py"));
        assert_eq!(
            spam_module.file().path(&db),
            &FilePath::System(site_packages.join("spam/spam.py"))
        );
    }
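Together with `editable_install_pth_file_with_whitespace` above, this test pins down which `.pth` lines count as editable-install roots: comments, `import` lines, blank lines, and lines with leading whitespace are ignored, trailing whitespace is trimmed, and surviving entries still have to name an existing directory. A hedged sketch of that per-line rule (illustrative only, not the crate's actual parser):

    /// Returns the candidate directory named by one `.pth` line, or `None`
    /// if the line should be ignored. The real resolver additionally checks
    /// that the resolved path is an existing directory before adding it.
    fn pth_line_candidate(line: &str) -> Option<&str> {
        let line = line.trim_end(); // trailing whitespace is tolerated
        if line.is_empty()
            || line.starts_with('#')                  // comment
            || line.starts_with("import ")            // executable line, not a path
            || line.starts_with("import\t")
            || line.starts_with(char::is_whitespace)  // leading whitespace: rejected
        {
            return None;
        }
        Some(line)
    }

    fn main() {
        assert_eq!(pth_line_candidate("/y/src  "), Some("/y/src"));
        assert_eq!(pth_line_candidate("  /x/src"), None);
        assert_eq!(pth_line_candidate("# a comment"), None);
        assert_eq!(pth_line_candidate("import foo; bar()"), None);
    }
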

    #[test]
    fn module_resolution_paths_cached_between_different_module_resolutions() {
        const SITE_PACKAGES: &[FileSpec] = &[("_foo.pth", "/x/src"), ("_bar.pth", "/y/src")];
        let external_directories = [("/x/src/foo.py", ""), ("/y/src/bar.py", "")];

        let TestCase { mut db, .. } = TestCaseBuilder::new()
            .with_site_packages_files(SITE_PACKAGES)
            .build();

        db.write_files(external_directories).unwrap();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let bar_module_name = ModuleName::new_static("bar").unwrap();

        let foo_module = resolve_module(&db, foo_module_name).unwrap();
        assert_eq!(
            foo_module.file().path(&db),
            &FilePath::system("/x/src/foo.py")
        );

        db.clear_salsa_events();
        let bar_module = resolve_module(&db, bar_module_name).unwrap();
        assert_eq!(
            bar_module.file().path(&db),
            &FilePath::system("/y/src/bar.py")
        );
        let events = db.take_salsa_events();
        assert_function_query_was_not_run::<editable_install_resolution_paths, _, _>(
            &db,
            |res| &res.function,
            &(),
            &events,
        );
    }

    #[test]
    fn deleting_pth_file_on_which_module_resolution_depends_invalidates_cache() {
        const SITE_PACKAGES: &[FileSpec] = &[("_foo.pth", "/x/src")];
        let x_directory = [("/x/src/foo.py", "")];

        let TestCase {
            mut db,
            site_packages,
            ..
        } = TestCaseBuilder::new()
            .with_site_packages_files(SITE_PACKAGES)
            .build();

        db.write_files(x_directory).unwrap();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();
        assert_eq!(
            foo_module.file().path(&db),
            &FilePath::system("/x/src/foo.py")
        );

        db.memory_file_system()
            .remove_file(site_packages.join("_foo.pth"))
            .unwrap();

        // Why are we touching a random file in the path that's been editably installed,
        // rather than the `.pth` file, when the `.pth` file is the one that has been deleted?
        // It's because the `.pth` file isn't directly tracked as a dependency by Salsa
        // currently (we don't use `system_path_to_file()` to get the file, and we don't use
        // `source_text()` to read the source of the file). Instead of using these APIs, which
        // would automatically register the existence and contents of the file as Salsa-tracked
        // dependencies, we use `.report_untracked_read()` to force Salsa to re-parse all
        // `.pth` files on each new "revision". Making a random modification to a tracked
        // Salsa file forces a new revision.
        //
        // TODO: get rid of the `.report_untracked_read()` call...
        File::touch_path(&mut db, SystemPath::new("/x/src/foo.py"));

        assert_eq!(resolve_module(&db, foo_module_name.clone()), None);
    }
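The comment in the test above is the crux of the invalidation story: tracked reads (`system_path_to_file()`, `source_text()`) give Salsa precise per-file dependencies, while the `.pth` scan is an untracked read that is only valid until the next revision. A self-contained toy model of that cache discipline, assuming nothing about Salsa's real API beyond what the comment itself states:

    // Toy model only: `Db` here is a stand-in, not Salsa.
    struct Db {
        revision: u64,
        cached_paths: Option<(u64, Vec<String>)>, // (revision, result)
    }

    impl Db {
        /// A query backed by an "untracked read": its cached result is reused
        /// within one revision, but any new revision forces a full rescan.
        fn editable_roots(&mut self) -> Vec<String> {
            if let Some((rev, paths)) = &self.cached_paths {
                if *rev == self.revision {
                    return paths.clone(); // same revision: safe to reuse
                }
            }
            let paths = vec!["/x/src".to_string()]; // pretend `.pth` scan
            self.cached_paths = Some((self.revision, paths.clone()));
            paths
        }

        /// Any tracked write (like `File::touch_path` above) bumps the
        /// revision, which is what forces the `.pth` rescan in the test.
        fn touch(&mut self) {
            self.revision += 1;
        }
    }

    fn main() {
        let mut db = Db { revision: 0, cached_paths: None };
        let first = db.editable_roots();
        assert_eq!(first, db.editable_roots()); // cached within a revision
        db.touch(); // new revision: the cached scan is no longer trusted
        let _ = db.editable_roots(); // rescans
    }
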

    #[test]
    fn deleting_editable_install_on_which_module_resolution_depends_invalidates_cache() {
        const SITE_PACKAGES: &[FileSpec] = &[("_foo.pth", "/x/src")];
        let x_directory = [("/x/src/foo.py", "")];

        let TestCase { mut db, .. } = TestCaseBuilder::new()
            .with_site_packages_files(SITE_PACKAGES)
            .build();

        db.write_files(x_directory).unwrap();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();
        let src_path = SystemPathBuf::from("/x/src");
        assert_eq!(
            foo_module.file().path(&db),
            &FilePath::System(src_path.join("foo.py"))
        );

        db.memory_file_system()
            .remove_file(src_path.join("foo.py"))
            .unwrap();
        db.memory_file_system().remove_directory(&src_path).unwrap();
        File::touch_path(&mut db, &src_path.join("foo.py"));
        File::touch_path(&mut db, &src_path);
        assert_eq!(resolve_module(&db, foo_module_name.clone()), None);
    }

    #[test]
    fn no_duplicate_search_paths_added() {
        let TestCase { db, .. } = TestCaseBuilder::new()
            .with_src_files(&[("foo.py", "")])
            .with_site_packages_files(&[("_foo.pth", "/src")])
            .build();

        let search_paths: Vec<&SearchPathRoot> =
            module_resolution_settings(&db).search_paths(&db).collect();

        assert!(search_paths.contains(&&Arc::new(
            ModuleResolutionPathBuf::first_party("/src").unwrap()
        )));

        assert!(!search_paths.contains(&&Arc::new(
            ModuleResolutionPathBuf::editable_installation_root(db.system(), "/src").unwrap()
        )));
    }

    #[test]
    fn no_duplicate_editable_search_paths_added() {
        let TestCase { mut db, .. } = TestCaseBuilder::new()
            .with_site_packages_files(&[("_foo.pth", "/x"), ("_bar.pth", "/x")])
            .build();

        db.write_file("/x/foo.py", "").unwrap();

        let search_paths: Vec<&SearchPathRoot> =
            module_resolution_settings(&db).search_paths(&db).collect();

        let editable_install =
            ModuleResolutionPathBuf::editable_installation_root(db.system(), "/x").unwrap();

        assert_eq!(
            search_paths
                .iter()
                .filter(|path| ****path == editable_install)
                .count(),
            1,
            "Unexpected search paths: {search_paths:?}"
        );
    }
}
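The two `no_duplicate_*` tests encode the dedup rule: a directory only ever appears once in the search-path sequence, and a statically-configured path wins over a later editable-install discovery of the same directory. Sketched with an order-preserving seen-set (illustrative; the real code operates on the crate's path types):

    use std::collections::HashSet;

    /// Appends `candidates` to `paths`, skipping anything already present,
    /// so earlier (static) entries win over later (editable) discoveries.
    fn extend_unique(paths: &mut Vec<String>, candidates: Vec<String>) {
        let mut seen: HashSet<String> = paths.iter().cloned().collect();
        for candidate in candidates {
            if seen.insert(candidate.clone()) {
                paths.push(candidate);
            }
        }
    }

    fn main() {
        let mut paths = vec!["/src".to_string(), "/site-packages".to_string()];
        // A duplicate "/src" from a `.pth` file and a repeated "/x" both collapse.
        extend_unique(&mut paths, vec!["/src".into(), "/x".into(), "/x".into()]);
        assert_eq!(paths, ["/src", "/site-packages", "/x"]);
    }
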

@@ -1,8 +1,8 @@
+use ruff_db::program::TargetVersion;
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;

use crate::db::Db;
-use crate::supported_py_version::TargetVersion;
use crate::typeshed::LazyTypeshedVersions;

pub(crate) struct ResolverState<'db> {

@@ -1,14 +0,0 @@
-/// Enumeration of all supported Python versions
-///
-/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
-#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default)]
-pub enum TargetVersion {
-    Py37,
-    #[default]
-    Py38,
-    Py39,
-    Py310,
-    Py311,
-    Py312,
-    Py313,
-}
@@ -1,9 +1,8 @@
+use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredPathBuf;

use crate::db::tests::TestDb;
-use crate::resolver::{set_module_resolution_settings, RawModuleResolutionSettings};
-use crate::supported_py_version::TargetVersion;

/// A test case for the module resolver.
///
@@ -215,10 +214,10 @@ impl TestCaseBuilder<MockedTypeshed> {
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
        let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);

-        set_module_resolution_settings(
-            &mut db,
-            RawModuleResolutionSettings {
-                target_version,
+        Program::new(
+            &db,
+            target_version,
+            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: src.clone(),
                custom_typeshed: Some(typeshed.clone()),
@@ -268,10 +267,10 @@ impl TestCaseBuilder<VendoredTypeshed> {
        Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);

-        set_module_resolution_settings(
-            &mut db,
-            RawModuleResolutionSettings {
-                target_version,
+        Program::new(
+            &db,
+            target_version,
+            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: src.clone(),
                custom_typeshed: None,

@@ -6,15 +6,14 @@ use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;

use once_cell::sync::Lazy;
+use ruff_db::program::TargetVersion;
use ruff_db::system::SystemPath;
use rustc_hash::FxHashMap;

use ruff_db::files::{system_path_to_file, File};
-use ruff_db::source::source_text;

use crate::db::Db;
use crate::module_name::ModuleName;
-use crate::supported_py_version::TargetVersion;

use super::vendored::vendored_typeshed_stubs;

@@ -74,7 +73,10 @@ pub(crate) fn parse_typeshed_versions(
    db: &dyn Db,
    versions_file: File,
) -> Result<TypeshedVersions, TypeshedVersionsParseError> {
-    let file_content = source_text(db.upcast(), versions_file);
+    // TODO: Handle IO errors
+    let file_content = versions_file
+        .read_to_string(db.upcast())
+        .unwrap_or_default();
    file_content.parse()
}

@@ -438,6 +440,7 @@ mod tests {
    use std::path::Path;

    use insta::assert_snapshot;
+    use ruff_db::program::TargetVersion;

    use super::*;

@@ -1 +1 @@
-dcab6e88883c629ede9637fb011958f8b4918f52
+f863db6bc5242348ceaa6a3bca4e59aa9e62faaa

@@ -35,6 +35,8 @@ _dummy_threading: 3.0-3.8
_heapq: 3.0-
_imp: 3.0-
_interpchannels: 3.13-
+_interpqueues: 3.13-
+_interpreters: 3.13-
_json: 3.0-
_locale: 3.0-
_lsprof: 3.0-
@@ -112,6 +114,7 @@ curses: 3.0-
dataclasses: 3.7-
datetime: 3.0-
dbm: 3.0-
+dbm.sqlite3: 3.13-
decimal: 3.0-
difflib: 3.0-
dis: 3.0-
@@ -155,6 +158,7 @@ importlib: 3.0-
importlib._abc: 3.10-
importlib.metadata: 3.8-
importlib.metadata._meta: 3.10-
+importlib.metadata.diagnose: 3.13-
importlib.readers: 3.10-
importlib.resources: 3.7-
importlib.resources.abc: 3.11-

@@ -70,6 +70,8 @@ _VT_co = TypeVar("_VT_co", covariant=True)  # Value type covariant containers.
@final
class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]):  # undocumented
    def __eq__(self, value: object, /) -> bool: ...
+    if sys.version_info >= (3, 13):
+        def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: ...
    if sys.version_info >= (3, 10):
        @property
        def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
@@ -83,6 +85,8 @@ class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]):  # undocumented
@final
class dict_items(ItemsView[_KT_co, _VT_co]):  # undocumented
    def __eq__(self, value: object, /) -> bool: ...
+    if sys.version_info >= (3, 13):
+        def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: ...
    if sys.version_info >= (3, 10):
        @property
        def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

@@ -64,7 +64,6 @@ class _CData(metaclass=_CDataMeta):
    # Structure.from_buffer(...)      # valid at runtime
    # Structure(...).from_buffer(...) # invalid at runtime
    #
-
    @classmethod
    def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ...
    @classmethod
@@ -100,8 +99,8 @@ class _Pointer(_PointerLike, _CData, Generic[_CT]):
    def __getitem__(self, key: slice, /) -> list[Any]: ...
    def __setitem__(self, key: int, value: Any, /) -> None: ...

-def POINTER(type: type[_CT]) -> type[_Pointer[_CT]]: ...
-def pointer(arg: _CT, /) -> _Pointer[_CT]: ...
+def POINTER(type: type[_CT], /) -> type[_Pointer[_CT]]: ...
+def pointer(obj: _CT, /) -> _Pointer[_CT]: ...

class _CArgObject: ...

@@ -203,9 +202,9 @@ class Array(_CData, Generic[_CT]):
    if sys.version_info >= (3, 9):
        def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...

-def addressof(obj: _CData) -> int: ...
-def alignment(obj_or_type: _CData | type[_CData]) -> int: ...
+def addressof(obj: _CData, /) -> int: ...
+def alignment(obj_or_type: _CData | type[_CData], /) -> int: ...
def get_errno() -> int: ...
-def resize(obj: _CData, size: int) -> None: ...
-def set_errno(value: int) -> int: ...
-def sizeof(obj_or_type: _CData | type[_CData]) -> int: ...
+def resize(obj: _CData, size: int, /) -> None: ...
+def set_errno(value: int, /) -> int: ...
+def sizeof(obj_or_type: _CData | type[_CData], /) -> int: ...

16 crates/red_knot_module_resolver/vendor/typeshed/stdlib/_interpqueues.pyi (vendored, new file)
@@ -0,0 +1,16 @@
from typing import Any, SupportsIndex

class QueueError(RuntimeError): ...
class QueueNotFoundError(QueueError): ...

def bind(qid: SupportsIndex) -> None: ...
def create(maxsize: SupportsIndex, fmt: SupportsIndex) -> int: ...
def destroy(qid: SupportsIndex) -> None: ...
def get(qid: SupportsIndex) -> tuple[Any, int]: ...
def get_count(qid: SupportsIndex) -> int: ...
def get_maxsize(qid: SupportsIndex) -> int: ...
def get_queue_defaults(qid: SupportsIndex) -> tuple[int]: ...
def is_full(qid: SupportsIndex) -> bool: ...
def list_all() -> list[tuple[int, int]]: ...
def put(qid: SupportsIndex, obj: Any, fmt: SupportsIndex) -> None: ...
def release(qid: SupportsIndex) -> None: ...
50 crates/red_knot_module_resolver/vendor/typeshed/stdlib/_interpreters.pyi (vendored, new file)
@@ -0,0 +1,50 @@
import types
from collections.abc import Callable, Mapping
from typing import Final, Literal, SupportsIndex
from typing_extensions import TypeAlias

_Configs: TypeAlias = Literal["default", "isolated", "legacy", "empty", ""]

class InterpreterError(Exception): ...
class InterpreterNotFoundError(InterpreterError): ...
class NotShareableError(Exception): ...

class CrossInterpreterBufferView:
    def __buffer__(self, flags: int, /) -> memoryview: ...

def new_config(name: _Configs = "isolated", /, **overides: object) -> types.SimpleNamespace: ...
def create(config: types.SimpleNamespace | _Configs | None = "isolated", *, reqrefs: bool = False) -> int: ...
def destroy(id: SupportsIndex, *, restrict: bool = False) -> None: ...
def list_all(*, require_ready: bool) -> list[tuple[int, int]]: ...
def get_current() -> tuple[int, int]: ...
def get_main() -> tuple[int, int]: ...
def is_running(id: SupportsIndex, *, restrict: bool = False) -> bool: ...
def get_config(id: SupportsIndex, *, restrict: bool = False) -> types.SimpleNamespace: ...
def whence(id: SupportsIndex) -> int: ...
def exec(id: SupportsIndex, code: str, shared: bool | None = None, *, restrict: bool = False) -> None: ...
def call(
    id: SupportsIndex,
    callable: Callable[..., object],
    args: tuple[object, ...] | None = None,
    kwargs: dict[str, object] | None = None,
    *,
    restrict: bool = False,
) -> object: ...
def run_string(
    id: SupportsIndex, script: str | types.CodeType | Callable[[], object], shared: bool | None = None, *, restrict: bool = False
) -> None: ...
def run_func(
    id: SupportsIndex, func: types.CodeType | Callable[[], object], shared: bool | None = None, *, restrict: bool = False
) -> None: ...
def set___main___attrs(id: SupportsIndex, updates: Mapping[str, object], *, restrict: bool = False) -> None: ...
def incref(id: SupportsIndex, *, implieslink: bool = False, restrict: bool = False) -> None: ...
def decref(id: SupportsIndex, *, restrict: bool = False) -> None: ...
def is_shareable(obj: object) -> bool: ...
def capture_exception(exc: BaseException | None = None) -> types.SimpleNamespace: ...

WHENCE_UNKNOWN: Final = 0
WHENCE_RUNTIME: Final = 1
WHENCE_LEGACY_CAPI: Final = 2
WHENCE_CAPI: Final = 3
WHENCE_XI: Final = 4
WHENCE_STDLIB: Final = 5
@@ -13,7 +13,7 @@ error = RuntimeError
def _count() -> int: ...
@final
class LockType:
-    def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
+    def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ...
    def release(self) -> None: ...
    def locked(self) -> bool: ...
    def __enter__(self) -> bool: ...
@@ -22,14 +22,14 @@ class LockType:
    ) -> None: ...

@overload
-def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]]) -> int: ...
+def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: ...
@overload
-def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any]) -> int: ...
+def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ...
def interrupt_main() -> None: ...
def exit() -> NoReturn: ...
def allocate_lock() -> LockType: ...
def get_ident() -> int: ...
-def stack_size(size: int = ...) -> int: ...
+def stack_size(size: int = 0, /) -> int: ...

TIMEOUT_MAX: float

@@ -28,17 +28,17 @@ class ABCMeta(type):
    def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ...

def abstractmethod(funcobj: _FuncT) -> _FuncT: ...
-@deprecated("Deprecated, use 'classmethod' with 'abstractmethod' instead")
+@deprecated("Use 'classmethod' with 'abstractmethod' instead")
class abstractclassmethod(classmethod[_T, _P, _R_co]):
    __isabstractmethod__: Literal[True]
    def __init__(self, callable: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ...

-@deprecated("Deprecated, use 'staticmethod' with 'abstractmethod' instead")
+@deprecated("Use 'staticmethod' with 'abstractmethod' instead")
class abstractstaticmethod(staticmethod[_P, _R_co]):
    __isabstractmethod__: Literal[True]
    def __init__(self, callable: Callable[_P, _R_co]) -> None: ...

-@deprecated("Deprecated, use 'property' with 'abstractmethod' instead")
+@deprecated("Use 'property' with 'abstractmethod' instead")
class abstractproperty(property):
    __isabstractmethod__: Literal[True]

@@ -49,6 +49,10 @@ class Server(AbstractServer):
        ssl_handshake_timeout: float | None,
    ) -> None: ...

+    if sys.version_info >= (3, 13):
+        def close_clients(self) -> None: ...
+        def abort_clients(self) -> None: ...
+
    def get_loop(self) -> AbstractEventLoop: ...
    def is_serving(self) -> bool: ...
    async def start_serving(self) -> None: ...
@@ -222,7 +226,48 @@ class BaseEventLoop(AbstractEventLoop):
        happy_eyeballs_delay: float | None = None,
        interleave: int | None = None,
    ) -> tuple[Transport, _ProtocolT]: ...
-    if sys.version_info >= (3, 11):
+
+    if sys.version_info >= (3, 13):
+        # 3.13 added `keep_alive`.
+        @overload
+        async def create_server(
+            self,
+            protocol_factory: _ProtocolFactory,
+            host: str | Sequence[str] | None = None,
+            port: int = ...,
+            *,
+            family: int = ...,
+            flags: int = ...,
+            sock: None = None,
+            backlog: int = 100,
+            ssl: _SSLContext = None,
+            reuse_address: bool | None = None,
+            reuse_port: bool | None = None,
+            keep_alive: bool | None = None,
+            ssl_handshake_timeout: float | None = None,
+            ssl_shutdown_timeout: float | None = None,
+            start_serving: bool = True,
+        ) -> Server: ...
+        @overload
+        async def create_server(
+            self,
+            protocol_factory: _ProtocolFactory,
+            host: None = None,
+            port: None = None,
+            *,
+            family: int = ...,
+            flags: int = ...,
+            sock: socket = ...,
+            backlog: int = 100,
+            ssl: _SSLContext = None,
+            reuse_address: bool | None = None,
+            reuse_port: bool | None = None,
+            keep_alive: bool | None = None,
+            ssl_handshake_timeout: float | None = None,
+            ssl_shutdown_timeout: float | None = None,
+            start_serving: bool = True,
+        ) -> Server: ...
+    elif sys.version_info >= (3, 11):
        @overload
        async def create_server(
            self,
@@ -259,26 +304,6 @@ class BaseEventLoop(AbstractEventLoop):
            ssl_shutdown_timeout: float | None = None,
            start_serving: bool = True,
        ) -> Server: ...
-    async def start_tls(
-        self,
-        transport: BaseTransport,
-        protocol: BaseProtocol,
-        sslcontext: ssl.SSLContext,
-        *,
-        server_side: bool = False,
-        server_hostname: str | None = None,
-        ssl_handshake_timeout: float | None = None,
-        ssl_shutdown_timeout: float | None = None,
-    ) -> Transport | None: ...
-    async def connect_accepted_socket(
-        self,
-        protocol_factory: Callable[[], _ProtocolT],
-        sock: socket,
-        *,
-        ssl: _SSLContext = None,
-        ssl_handshake_timeout: float | None = None,
-        ssl_shutdown_timeout: float | None = None,
-    ) -> tuple[Transport, _ProtocolT]: ...
    else:
        @overload
        async def create_server(
@@ -314,6 +339,29 @@ class BaseEventLoop(AbstractEventLoop):
            ssl_handshake_timeout: float | None = None,
            start_serving: bool = True,
        ) -> Server: ...
+
+    if sys.version_info >= (3, 11):
+        async def start_tls(
+            self,
+            transport: BaseTransport,
+            protocol: BaseProtocol,
+            sslcontext: ssl.SSLContext,
+            *,
+            server_side: bool = False,
+            server_hostname: str | None = None,
+            ssl_handshake_timeout: float | None = None,
+            ssl_shutdown_timeout: float | None = None,
+        ) -> Transport | None: ...
+        async def connect_accepted_socket(
+            self,
+            protocol_factory: Callable[[], _ProtocolT],
+            sock: socket,
+            *,
+            ssl: _SSLContext = None,
+            ssl_handshake_timeout: float | None = None,
+            ssl_shutdown_timeout: float | None = None,
+        ) -> tuple[Transport, _ProtocolT]: ...
+    else:
        async def start_tls(
            self,
            transport: BaseTransport,

@@ -94,6 +94,12 @@ class TimerHandle(Handle):
class AbstractServer:
    @abstractmethod
    def close(self) -> None: ...
+    if sys.version_info >= (3, 13):
+        @abstractmethod
+        def close_clients(self) -> None: ...
+        @abstractmethod
+        def abort_clients(self) -> None: ...
+
    async def __aenter__(self) -> Self: ...
    async def __aexit__(self, *exc: Unused) -> None: ...
    @abstractmethod
@@ -272,7 +278,50 @@ class AbstractEventLoop:
        happy_eyeballs_delay: float | None = None,
        interleave: int | None = None,
    ) -> tuple[Transport, _ProtocolT]: ...
-    if sys.version_info >= (3, 11):
+
+    if sys.version_info >= (3, 13):
+        # 3.13 added `keep_alive`.
+        @overload
+        @abstractmethod
+        async def create_server(
+            self,
+            protocol_factory: _ProtocolFactory,
+            host: str | Sequence[str] | None = None,
+            port: int = ...,
+            *,
+            family: int = ...,
+            flags: int = ...,
+            sock: None = None,
+            backlog: int = 100,
+            ssl: _SSLContext = None,
+            reuse_address: bool | None = None,
+            reuse_port: bool | None = None,
+            keep_alive: bool | None = None,
+            ssl_handshake_timeout: float | None = None,
+            ssl_shutdown_timeout: float | None = None,
+            start_serving: bool = True,
+        ) -> Server: ...
+        @overload
+        @abstractmethod
+        async def create_server(
+            self,
+            protocol_factory: _ProtocolFactory,
+            host: None = None,
+            port: None = None,
+            *,
+            family: int = ...,
+            flags: int = ...,
+            sock: socket = ...,
+            backlog: int = 100,
+            ssl: _SSLContext = None,
+            reuse_address: bool | None = None,
+            reuse_port: bool | None = None,
+            keep_alive: bool | None = None,
+            ssl_handshake_timeout: float | None = None,
+            ssl_shutdown_timeout: float | None = None,
+            start_serving: bool = True,
+        ) -> Server: ...
+    elif sys.version_info >= (3, 11):
        @overload
        @abstractmethod
        async def create_server(
@@ -311,30 +360,6 @@ class AbstractEventLoop:
            ssl_shutdown_timeout: float | None = None,
            start_serving: bool = True,
        ) -> Server: ...
-    @abstractmethod
-    async def start_tls(
-        self,
-        transport: WriteTransport,
-        protocol: BaseProtocol,
-        sslcontext: ssl.SSLContext,
-        *,
-        server_side: bool = False,
-        server_hostname: str | None = None,
-        ssl_handshake_timeout: float | None = None,
-        ssl_shutdown_timeout: float | None = None,
-    ) -> Transport | None: ...
-    async def create_unix_server(
-        self,
-        protocol_factory: _ProtocolFactory,
-        path: StrPath | None = None,
-        *,
-        sock: socket | None = None,
-        backlog: int = 100,
-        ssl: _SSLContext = None,
-        ssl_handshake_timeout: float | None = None,
-        ssl_shutdown_timeout: float | None = None,
-        start_serving: bool = True,
-    ) -> Server: ...
    else:
        @overload
        @abstractmethod
@@ -372,6 +397,33 @@ class AbstractEventLoop:
            ssl_handshake_timeout: float | None = None,
            start_serving: bool = True,
        ) -> Server: ...
+
+    if sys.version_info >= (3, 11):
+        @abstractmethod
+        async def start_tls(
+            self,
+            transport: WriteTransport,
+            protocol: BaseProtocol,
+            sslcontext: ssl.SSLContext,
+            *,
+            server_side: bool = False,
+            server_hostname: str | None = None,
+            ssl_handshake_timeout: float | None = None,
+            ssl_shutdown_timeout: float | None = None,
+        ) -> Transport | None: ...
+        async def create_unix_server(
+            self,
+            protocol_factory: _ProtocolFactory,
+            path: StrPath | None = None,
+            *,
+            sock: socket | None = None,
+            backlog: int = 100,
+            ssl: _SSLContext = None,
+            ssl_handshake_timeout: float | None = None,
+            ssl_shutdown_timeout: float | None = None,
+            start_serving: bool = True,
+        ) -> Server: ...
+    else:
        @abstractmethod
        async def start_tls(
            self,
@@ -394,6 +446,7 @@ class AbstractEventLoop:
            ssl_handshake_timeout: float | None = None,
            start_serving: bool = True,
        ) -> Server: ...
+
    if sys.version_info >= (3, 11):
        async def connect_accepted_socket(
            self,

@@ -1,4 +1,5 @@
import functools
+import sys
import traceback
from collections.abc import Iterable
from types import FrameType, FunctionType
@@ -14,7 +15,17 @@ _FuncType: TypeAlias = FunctionType | _HasWrapper | functools.partial[Any] | fun
def _get_function_source(func: _FuncType) -> tuple[str, int]: ...
@overload
def _get_function_source(func: object) -> tuple[str, int] | None: ...
-def _format_callback_source(func: object, args: Iterable[Any]) -> str: ...
-def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ...
-def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ...
+
+if sys.version_info >= (3, 13):
+    def _format_callback_source(func: object, args: Iterable[Any], *, debug: bool = False) -> str: ...
+    def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False) -> str: ...
+    def _format_callback(
+        func: object, args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False, suffix: str = ""
+    ) -> str: ...
+
+else:
+    def _format_callback_source(func: object, args: Iterable[Any]) -> str: ...
+    def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ...
+    def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ...

def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ...

@@ -10,13 +10,20 @@ if sys.version_info >= (3, 10):
else:
    _LoopBoundMixin = object

-__all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty")
-
class QueueEmpty(Exception): ...
class QueueFull(Exception): ...

+if sys.version_info >= (3, 13):
+    __all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty", "QueueShutDown")
+
+else:
+    __all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty")
+
_T = TypeVar("_T")

+if sys.version_info >= (3, 13):
+    class QueueShutDown(Exception): ...
+
# If Generic[_T] is last and _LoopBoundMixin is object, pyright is unhappy.
# We can remove the noqa pragma when dropping 3.9 support.
class Queue(Generic[_T], _LoopBoundMixin):  # noqa: Y059
@@ -42,6 +49,8 @@ class Queue(Generic[_T], _LoopBoundMixin):  # noqa: Y059
    def task_done(self) -> None: ...
    if sys.version_info >= (3, 9):
        def __class_getitem__(cls, type: Any, /) -> GenericAlias: ...
+    if sys.version_info >= (3, 13):
+        def shutdown(self, immediate: bool = False) -> None: ...

class PriorityQueue(Queue[_T]): ...
class LifoQueue(Queue[_T]): ...

@@ -2,6 +2,7 @@ import ssl
import sys
from _typeshed import ReadableBuffer, StrPath
from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Sequence, Sized
+from types import ModuleType
from typing import Any, Protocol, SupportsIndex
from typing_extensions import Self, TypeAlias

@@ -130,7 +131,10 @@ class StreamWriter:
    async def start_tls(
        self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None
    ) -> None: ...
-    if sys.version_info >= (3, 11):
+
+    if sys.version_info >= (3, 13):
+        def __del__(self, warnings: ModuleType = ...) -> None: ...
+    elif sys.version_info >= (3, 11):
        def __del__(self) -> None: ...

class StreamReader(AsyncIterator[bytes]):

@@ -1,15 +1,55 @@
import sys
+import types
+from _typeshed import StrPath
from abc import ABCMeta, abstractmethod
from collections.abc import Callable
+from socket import socket
from typing import Literal
from typing_extensions import Self, TypeVarTuple, Unpack, deprecated

+from .base_events import Server, _ProtocolFactory, _SSLContext
from .events import AbstractEventLoop, BaseDefaultEventLoopPolicy
from .selector_events import BaseSelectorEventLoop

_Ts = TypeVarTuple("_Ts")

+if sys.platform != "win32":
+    if sys.version_info >= (3, 14):
+        __all__ = ("SelectorEventLoop", "DefaultEventLoopPolicy", "EventLoop")
+    elif sys.version_info >= (3, 13):
+        __all__ = (
+            "SelectorEventLoop",
+            "AbstractChildWatcher",
+            "SafeChildWatcher",
+            "FastChildWatcher",
+            "PidfdChildWatcher",
+            "MultiLoopChildWatcher",
+            "ThreadedChildWatcher",
+            "DefaultEventLoopPolicy",
+            "EventLoop",
+        )
+    elif sys.version_info >= (3, 9):
+        __all__ = (
+            "SelectorEventLoop",
+            "AbstractChildWatcher",
+            "SafeChildWatcher",
+            "FastChildWatcher",
+            "PidfdChildWatcher",
+            "MultiLoopChildWatcher",
+            "ThreadedChildWatcher",
+            "DefaultEventLoopPolicy",
+        )
+    else:
+        __all__ = (
+            "SelectorEventLoop",
+            "AbstractChildWatcher",
+            "SafeChildWatcher",
+            "FastChildWatcher",
+            "MultiLoopChildWatcher",
+            "ThreadedChildWatcher",
+            "DefaultEventLoopPolicy",
+        )
+
# This is also technically not available on Win,
# but other parts of typeshed need this definition.
# So, it is special cased.
@@ -58,30 +98,6 @@ if sys.version_info < (3, 14):
        def is_active(self) -> bool: ...

-if sys.platform != "win32":
-    if sys.version_info >= (3, 14):
-        __all__ = ("SelectorEventLoop", "DefaultEventLoopPolicy")
-    elif sys.version_info >= (3, 9):
-        __all__ = (
-            "SelectorEventLoop",
-            "AbstractChildWatcher",
-            "SafeChildWatcher",
-            "FastChildWatcher",
-            "PidfdChildWatcher",
-            "MultiLoopChildWatcher",
-            "ThreadedChildWatcher",
-            "DefaultEventLoopPolicy",
-        )
-    else:
-        __all__ = (
-            "SelectorEventLoop",
-            "AbstractChildWatcher",
-            "SafeChildWatcher",
-            "FastChildWatcher",
-            "MultiLoopChildWatcher",
-            "ThreadedChildWatcher",
-            "DefaultEventLoopPolicy",
-        )
-
if sys.version_info < (3, 14):
    if sys.version_info >= (3, 12):
        # Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub.
@@ -141,7 +157,21 @@ if sys.platform != "win32":
        ) -> None: ...
        def remove_child_handler(self, pid: int) -> bool: ...

-    class _UnixSelectorEventLoop(BaseSelectorEventLoop): ...
+    class _UnixSelectorEventLoop(BaseSelectorEventLoop):
+        if sys.version_info >= (3, 13):
+            async def create_unix_server(  # type: ignore[override]
+                self,
+                protocol_factory: _ProtocolFactory,
+                path: StrPath | None = None,
+                *,
+                sock: socket | None = None,
+                backlog: int = 100,
+                ssl: _SSLContext = None,
+                ssl_handshake_timeout: float | None = None,
+                ssl_shutdown_timeout: float | None = None,
+                start_serving: bool = True,
+                cleanup_socket: bool = True,
+            ) -> Server: ...

    class _UnixDefaultEventLoopPolicy(BaseDefaultEventLoopPolicy):
        if sys.version_info < (3, 14):
@@ -158,6 +188,9 @@ if sys.platform != "win32":

    DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy

+    if sys.version_info >= (3, 13):
+        EventLoop = SelectorEventLoop
+
    if sys.version_info < (3, 14):
        if sys.version_info >= (3, 12):
            @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")

@@ -7,14 +7,26 @@ from typing import IO, Any, ClassVar, Literal, NoReturn
from . import events, futures, proactor_events, selector_events, streams, windows_utils

if sys.platform == "win32":
-    __all__ = (
-        "SelectorEventLoop",
-        "ProactorEventLoop",
-        "IocpProactor",
-        "DefaultEventLoopPolicy",
-        "WindowsSelectorEventLoopPolicy",
-        "WindowsProactorEventLoopPolicy",
-    )
+    if sys.version_info >= (3, 13):
+        # 3.13 added `EventLoop`.
+        __all__ = (
+            "SelectorEventLoop",
+            "ProactorEventLoop",
+            "IocpProactor",
+            "DefaultEventLoopPolicy",
+            "WindowsSelectorEventLoopPolicy",
+            "WindowsProactorEventLoopPolicy",
+            "EventLoop",
+        )
+    else:
+        __all__ = (
+            "SelectorEventLoop",
+            "ProactorEventLoop",
+            "IocpProactor",
+            "DefaultEventLoopPolicy",
+            "WindowsSelectorEventLoopPolicy",
+            "WindowsProactorEventLoopPolicy",
+        )

    NULL: Literal[0]
    INFINITE: Literal[0xFFFFFFFF]
@@ -84,3 +96,5 @@ if sys.platform == "win32":
        def set_child_watcher(self, watcher: Any) -> NoReturn: ...

    DefaultEventLoopPolicy = WindowsSelectorEventLoopPolicy
+    if sys.version_info >= (3, 13):
+        EventLoop = ProactorEventLoop

@@ -1,5 +1,5 @@
import sys
-from _typeshed import ExcInfo, TraceFunction
+from _typeshed import ExcInfo, TraceFunction, Unused
from collections.abc import Callable, Iterable, Mapping
from types import CodeType, FrameType, TracebackType
from typing import IO, Any, Literal, SupportsInt, TypeVar
@@ -32,6 +32,9 @@ class Bdb:
    def dispatch_call(self, frame: FrameType, arg: None) -> TraceFunction: ...
    def dispatch_return(self, frame: FrameType, arg: Any) -> TraceFunction: ...
    def dispatch_exception(self, frame: FrameType, arg: ExcInfo) -> TraceFunction: ...
+    if sys.version_info >= (3, 13):
+        def dispatch_opcode(self, frame: FrameType, arg: Unused) -> Callable[[FrameType, str, Any], TraceFunction]: ...
+
    def is_skipped_module(self, module_name: str) -> bool: ...
    def stop_here(self, frame: FrameType) -> bool: ...
    def break_here(self, frame: FrameType) -> bool: ...
@@ -42,7 +45,13 @@ class Bdb:
    def user_return(self, frame: FrameType, return_value: Any) -> None: ...
    def user_exception(self, frame: FrameType, exc_info: ExcInfo) -> None: ...
    def set_until(self, frame: FrameType, lineno: int | None = None) -> None: ...
+    if sys.version_info >= (3, 13):
+        def user_opcode(self, frame: FrameType) -> None: ...  # undocumented
+
    def set_step(self) -> None: ...
+    if sys.version_info >= (3, 13):
+        def set_stepinstr(self) -> None: ...  # undocumented
+
    def set_next(self, frame: FrameType) -> None: ...
    def set_return(self, frame: FrameType) -> None: ...
    def set_trace(self, frame: FrameType | None = None) -> None: ...

@@ -75,6 +75,7 @@ if sys.version_info >= (3, 9):
    from types import GenericAlias

_T = TypeVar("_T")
+_I = TypeVar("_I", default=int)
_T_co = TypeVar("_T_co", covariant=True)
_T_contra = TypeVar("_T_contra", contravariant=True)
_R_co = TypeVar("_R_co", covariant=True)
@@ -823,8 +824,12 @@ class bytearray(MutableSequence[int]):
    def __buffer__(self, flags: int, /) -> memoryview: ...
    def __release_buffer__(self, buffer: memoryview, /) -> None: ...

+_IntegerFormats: TypeAlias = Literal[
+    "b", "B", "@b", "@B", "h", "H", "@h", "@H", "i", "I", "@i", "@I", "l", "L", "@l", "@L", "q", "Q", "@q", "@Q", "P", "@P"
+]
+
@final
-class memoryview(Sequence[int]):
+class memoryview(Sequence[_I]):
    @property
    def format(self) -> str: ...
    @property
@@ -854,13 +859,20 @@ class memoryview(Sequence[int]):
    def __exit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None, /
    ) -> None: ...
-    def cast(self, format: str, shape: list[int] | tuple[int, ...] = ...) -> memoryview: ...
-    @overload
-    def __getitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], /) -> int: ...
-    @overload
-    def __getitem__(self, key: slice, /) -> memoryview: ...
+    @overload
+    def cast(self, format: Literal["c", "@c"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bytes]: ...
+    @overload
+    def cast(self, format: Literal["f", "@f", "d", "@d"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[float]: ...
+    @overload
+    def cast(self, format: Literal["?"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bool]: ...
+    @overload
+    def cast(self, format: _IntegerFormats, shape: list[int] | tuple[int, ...] = ...) -> memoryview: ...
+    @overload
+    def __getitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], /) -> _I: ...
+    @overload
+    def __getitem__(self, key: slice, /) -> memoryview[_I]: ...
    def __contains__(self, x: object, /) -> bool: ...
-    def __iter__(self) -> Iterator[int]: ...
+    def __iter__(self) -> Iterator[_I]: ...
    def __len__(self) -> int: ...
    def __eq__(self, value: object, /) -> bool: ...
    def __hash__(self) -> int: ...
@@ -2006,9 +2018,9 @@ if sys.version_info >= (3, 10):
    class EncodingWarning(Warning): ...

if sys.version_info >= (3, 11):
-    _BaseExceptionT_co = TypeVar("_BaseExceptionT_co", bound=BaseException, covariant=True)
+    _BaseExceptionT_co = TypeVar("_BaseExceptionT_co", bound=BaseException, covariant=True, default=BaseException)
    _BaseExceptionT = TypeVar("_BaseExceptionT", bound=BaseException)
-    _ExceptionT_co = TypeVar("_ExceptionT_co", bound=Exception, covariant=True)
+    _ExceptionT_co = TypeVar("_ExceptionT_co", bound=Exception, covariant=True, default=Exception)
    _ExceptionT = TypeVar("_ExceptionT", bound=Exception)

    # See `check_exception_group.py` for use-cases and comments.
@@ -2072,5 +2084,4 @@ if sys.version_info >= (3, 11):
    ) -> tuple[ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None]: ...

if sys.version_info >= (3, 13):
-    class IncompleteInputError(SyntaxError): ...
    class PythonFinalizationError(RuntimeError): ...

@@ -1,3 +1,5 @@
+import sys
+
from ._base import (
    ALL_COMPLETED as ALL_COMPLETED,
    FIRST_COMPLETED as FIRST_COMPLETED,
@@ -14,19 +16,36 @@ from ._base import (
from .process import ProcessPoolExecutor as ProcessPoolExecutor
from .thread import ThreadPoolExecutor as ThreadPoolExecutor

-__all__ = (
-    "FIRST_COMPLETED",
-    "FIRST_EXCEPTION",
-    "ALL_COMPLETED",
-    "CancelledError",
-    "TimeoutError",
-    "BrokenExecutor",
-    "Future",
-    "Executor",
-    "wait",
-    "as_completed",
-    "ProcessPoolExecutor",
-    "ThreadPoolExecutor",
-)
+if sys.version_info >= (3, 13):
+    __all__ = (
+        "FIRST_COMPLETED",
+        "FIRST_EXCEPTION",
+        "ALL_COMPLETED",
+        "CancelledError",
+        "TimeoutError",
+        "InvalidStateError",
+        "BrokenExecutor",
+        "Future",
+        "Executor",
+        "wait",
+        "as_completed",
+        "ProcessPoolExecutor",
+        "ThreadPoolExecutor",
+    )
+else:
+    __all__ = (
+        "FIRST_COMPLETED",
+        "FIRST_EXCEPTION",
+        "ALL_COMPLETED",
+        "CancelledError",
+        "TimeoutError",
+        "BrokenExecutor",
+        "Future",
+        "Executor",
+        "wait",
+        "as_completed",
+        "ProcessPoolExecutor",
+        "ThreadPoolExecutor",
+    )

def __dir__() -> tuple[str, ...]: ...

@@ -19,6 +19,9 @@ if sys.platform != "win32":
        def reorganize(self) -> None: ...
        def sync(self) -> None: ...
        def close(self) -> None: ...
+        if sys.version_info >= (3, 13):
+            def clear(self) -> None: ...
+
        def __getitem__(self, item: _KeyType) -> bytes: ...
        def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ...
        def __delitem__(self, key: _KeyType) -> None: ...

@@ -15,6 +15,9 @@ if sys.platform != "win32":
    # Actual typename dbm, not exposed by the implementation
    class _dbm:
        def close(self) -> None: ...
+        if sys.version_info >= (3, 13):
+            def clear(self) -> None: ...
+
        def __getitem__(self, item: _KeyType) -> bytes: ...
        def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ...
        def __delitem__(self, key: _KeyType) -> None: ...

29 crates/red_knot_module_resolver/vendor/typeshed/stdlib/dbm/sqlite3.pyi (vendored, new file)
@@ -0,0 +1,29 @@
from _typeshed import ReadableBuffer, StrOrBytesPath, Unused
from collections.abc import Generator, MutableMapping
from typing import Final, Literal
from typing_extensions import LiteralString, Self, TypeAlias

BUILD_TABLE: Final[LiteralString]
GET_SIZE: Final[LiteralString]
LOOKUP_KEY: Final[LiteralString]
STORE_KV: Final[LiteralString]
DELETE_KEY: Final[LiteralString]
ITER_KEYS: Final[LiteralString]

_SqliteData: TypeAlias = str | ReadableBuffer | int | float

class error(OSError): ...

class _Database(MutableMapping[bytes, bytes]):
    def __init__(self, path: StrOrBytesPath, /, *, flag: Literal["r", "w", "c", "n"], mode: int) -> None: ...
    def __len__(self) -> int: ...
    def __getitem__(self, key: _SqliteData) -> bytes: ...
    def __setitem__(self, key: _SqliteData, value: _SqliteData) -> None: ...
    def __delitem__(self, key: _SqliteData) -> None: ...
    def __iter__(self) -> Generator[bytes]: ...
    def close(self) -> None: ...
    def keys(self) -> list[bytes]: ...  # type: ignore[override]
    def __enter__(self) -> Self: ...
    def __exit__(self, *args: Unused) -> None: ...

def open(filename: StrOrBytesPath, /, flag: Literal["r", "w,", "c", "n"] = "r", mode: int = 0o666) -> _Database: ...

@@ -31,6 +31,9 @@ __all__ = [
    "EXTENDED_ARG",
    "stack_effect",
]
+if sys.version_info >= (3, 13):
+    __all__ += ["hasjump"]
+
if sys.version_info >= (3, 12):
    __all__ += ["hasarg", "hasexc"]
else:
@@ -86,12 +89,41 @@ else:
    is_jump_target: bool

class Instruction(_Instruction):
-    def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: ...
+    if sys.version_info < (3, 13):
+        def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: ...
+    if sys.version_info >= (3, 13):
+        @property
+        def oparg(self) -> int: ...
+        @property
+        def baseopcode(self) -> int: ...
+        @property
+        def baseopname(self) -> str: ...
+        @property
+        def cache_offset(self) -> int: ...
+        @property
+        def end_offset(self) -> int: ...
+        @property
+        def jump_target(self) -> int: ...
+        @property
+        def is_jump_target(self) -> bool: ...

class Bytecode:
    codeobj: types.CodeType
    first_line: int
-    if sys.version_info >= (3, 11):
+    if sys.version_info >= (3, 13):
+        show_offsets: bool
+        # 3.13 added `show_offsets`
+        def __init__(
+            self,
+            x: _HaveCodeType | str,
+            *,
+            first_line: int | None = None,
+            current_offset: int | None = None,
+            show_caches: bool = False,
+            adaptive: bool = False,
+            show_offsets: bool = False,
+        ) -> None: ...
+    elif sys.version_info >= (3, 11):
        def __init__(
            self,
            x: _HaveCodeType | str,
@@ -101,12 +133,15 @@ class Bytecode:
            show_caches: bool = False,
            adaptive: bool = False,
        ) -> None: ...
-        @classmethod
-        def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: ...
    else:
        def __init__(
            self, x: _HaveCodeType | str, *, first_line: int | None = None, current_offset: int | None = None
        ) -> None: ...
+
+    if sys.version_info >= (3, 11):
+        @classmethod
+        def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: ...
+    else:
        @classmethod
        def from_traceback(cls, tb: types.TracebackType) -> Self: ...
@@ -121,7 +156,41 @@ def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: ...
def pretty_flags(flags: int) -> str: ...
def code_info(x: _HaveCodeType | str) -> str: ...

-if sys.version_info >= (3, 11):
+if sys.version_info >= (3, 13):
+    # 3.13 added `show_offsets`
+    def dis(
+        x: _HaveCodeType | str | bytes | bytearray | None = None,
+        *,
+        file: IO[str] | None = None,
+        depth: int | None = None,
+        show_caches: bool = False,
+        adaptive: bool = False,
+        show_offsets: bool = False,
+    ) -> None: ...
+    def disassemble(
+        co: _HaveCodeType,
+        lasti: int = -1,
+        *,
+        file: IO[str] | None = None,
+        show_caches: bool = False,
+        adaptive: bool = False,
+        show_offsets: bool = False,
+    ) -> None: ...
+    def distb(
+        tb: types.TracebackType | None = None,
+        *,
+        file: IO[str] | None = None,
+        show_caches: bool = False,
+        adaptive: bool = False,
+        show_offsets: bool = False,
+    ) -> None: ...
+    # 3.13 made `show_cache` `None` by default
+    def get_instructions(
+        x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool | None = None, adaptive: bool = False
+    ) -> Iterator[Instruction]: ...
+
+elif sys.version_info >= (3, 11):
+    # 3.11 added `show_caches` and `adaptive`
    def dis(
        x: _HaveCodeType | str | bytes | bytearray | None = None,
        *,
@@ -130,19 +199,9 @@ if sys.version_info >= (3, 11):
        show_caches: bool = False,
        adaptive: bool = False,
    ) -> None: ...
-
-else:
-    def dis(
-        x: _HaveCodeType | str | bytes | bytearray | None = None, *, file: IO[str] | None = None, depth: int | None = None
-    ) -> None: ...
-
-if sys.version_info >= (3, 11):
    def disassemble(
        co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False
    ) -> None: ...
-    def disco(
-        co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False
-    ) -> None: ...
    def distb(
        tb: types.TracebackType | None = None, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False
    ) -> None: ...
@@ -151,9 +210,13 @@ if sys.version_info >= (3, 11):
    ) -> Iterator[Instruction]: ...

else:
    def dis(
        x: _HaveCodeType | str | bytes | bytearray | None = None, *, file: IO[str] | None = None, depth: int | None = None
    ) -> None: ...
    def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ...
-    def disco(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ...
    def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: ...
    def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: ...

def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: ...

+disco = disassemble

@@ -1,6 +1,7 @@
|
||||
import datetime
|
||||
import sys
|
||||
from _typeshed import Unused
|
||||
from collections.abc import Iterable
|
||||
from email import _ParamType
|
||||
from email.charset import Charset
|
||||
from typing import overload
|
||||
@@ -28,9 +29,21 @@ _PDTZ: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int | None
|
||||
|
||||
def quote(str: str) -> str: ...
|
||||
def unquote(str: str) -> str: ...
|
||||
def parseaddr(addr: str | None) -> tuple[str, str]: ...
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
def parseaddr(addr: str | list[str], *, strict: bool = True) -> tuple[str, str]: ...
|
||||
|
||||
else:
|
||||
def parseaddr(addr: str) -> tuple[str, str]: ...
|
||||
|
||||
def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: ...
|
||||
def getaddresses(fieldvalues: list[str]) -> list[tuple[str, str]]: ...
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
def getaddresses(fieldvalues: Iterable[str], *, strict: bool = True) -> list[tuple[str, str]]: ...
|
||||
|
||||
else:
|
||||
def getaddresses(fieldvalues: Iterable[str]) -> list[tuple[str, str]]: ...
|
||||
|
||||
@overload
|
||||
def parsedate(data: None) -> None: ...
|
||||
@overload
|
||||
|
||||
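For reference, a small sketch of the 3.13 `strict` keyword (it only exists on 3.13+, where it makes address parsing more defensive about malformed input):

import sys
from email.utils import getaddresses

headers = ["Alice <alice@example.com>, Bob <bob@example.com>"]
if sys.version_info >= (3, 13):
    pairs = getaddresses(headers, strict=True)  # defensive parsing of malformed lists
else:
    pairs = getaddresses(headers)
print(pairs)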
@@ -1,6 +1,7 @@
import abc
import pathlib
import sys
import types
from _collections_abc import dict_keys, dict_values
from _typeshed import StrPath
from collections.abc import Iterable, Iterator, Mapping
@@ -36,11 +37,8 @@ if sys.version_info >= (3, 10):
    from importlib.metadata._meta import PackageMetadata as PackageMetadata, SimplePath

    def packages_distributions() -> Mapping[str, list[str]]: ...

    if sys.version_info >= (3, 12):
        # It's generic but shouldn't be
        _SimplePath: TypeAlias = SimplePath[Any]
    else:
        _SimplePath: TypeAlias = SimplePath
    _SimplePath: TypeAlias = SimplePath

else:
    _SimplePath: TypeAlias = Path

@@ -48,7 +46,9 @@ class PackageNotFoundError(ModuleNotFoundError):
    @property
    def name(self) -> str: ...  # type: ignore[override]

if sys.version_info >= (3, 11):
if sys.version_info >= (3, 13):
    _EntryPointBase = object
elif sys.version_info >= (3, 11):
    class DeprecatedTuple:
        def __getitem__(self, item: int) -> str: ...
@@ -226,6 +226,9 @@ class Distribution(_distribution_parent):
    if sys.version_info >= (3, 10):
        @property
        def name(self) -> str: ...
    if sys.version_info >= (3, 13):
        @property
        def origin(self) -> types.SimpleNamespace: ...

class DistributionFinder(MetaPathFinder):
    class Context:
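A quick sketch of the new `Distribution.origin` property; this assumes a distribution named `pip` is installed, and `origin` exists only on 3.13+:

import sys
from importlib import metadata

dist = metadata.distribution("pip")
print(dist.metadata["Name"])
if sys.version_info >= (3, 13):
    # SimpleNamespace built from the package's direct_url.json, when present
    print(dist.origin)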
@@ -1,9 +1,12 @@
import sys
from _typeshed import StrPath
from collections.abc import Iterator
from typing import Any, Protocol, TypeVar, overload
from os import PathLike
from typing import Any, Protocol, overload
from typing_extensions import TypeVar

_T = TypeVar("_T")
_T_co = TypeVar("_T_co", covariant=True)
_T_co = TypeVar("_T_co", covariant=True, default=Any)

class PackageMetadata(Protocol):
    def __len__(self) -> int: ...
@@ -22,7 +25,18 @@ class PackageMetadata(Protocol):
    @overload
    def get(self, name: str, failobj: _T) -> _T | str: ...

if sys.version_info >= (3, 12):
if sys.version_info >= (3, 13):
    class SimplePath(Protocol):
        def joinpath(self, other: StrPath, /) -> SimplePath: ...
        def __truediv__(self, other: StrPath, /) -> SimplePath: ...
        # Incorrect at runtime
        @property
        def parent(self) -> PathLike[str]: ...
        def read_text(self, encoding: str | None = None) -> str: ...
        def read_bytes(self) -> bytes: ...
        def exists(self) -> bool: ...

elif sys.version_info >= (3, 12):
    class SimplePath(Protocol[_T_co]):
        # At runtime this is defined as taking `str | _T`, but that causes trouble.
        # See #11436.
2 crates/red_knot_module_resolver/vendor/typeshed/stdlib/importlib/metadata/diagnose.pyi vendored Normal file
@@ -0,0 +1,2 @@
def inspect(path: str) -> None: ...
def run() -> None: ...
@@ -176,20 +176,24 @@ TPFLAGS_IS_ABSTRACT: Literal[1048576]
modulesbyfile: dict[str, Any]

_GetMembersPredicateTypeGuard: TypeAlias = Callable[[Any], TypeGuard[_T]]
_GetMembersPredicateTypeIs: TypeAlias = Callable[[Any], TypeIs[_T]]
_GetMembersPredicate: TypeAlias = Callable[[Any], bool]
_GetMembersReturnTypeGuard: TypeAlias = list[tuple[str, _T]]
_GetMembersReturn: TypeAlias = list[tuple[str, Any]]
_GetMembersReturn: TypeAlias = list[tuple[str, _T]]

@overload
def getmembers(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturnTypeGuard[_T]: ...
def getmembers(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: ...
@overload
def getmembers(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn: ...
def getmembers(object: object, predicate: _GetMembersPredicateTypeIs[_T]) -> _GetMembersReturn[_T]: ...
@overload
def getmembers(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn[Any]: ...

if sys.version_info >= (3, 11):
    @overload
    def getmembers_static(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturnTypeGuard[_T]: ...
    def getmembers_static(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: ...
    @overload
    def getmembers_static(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn: ...
    def getmembers_static(object: object, predicate: _GetMembersPredicateTypeIs[_T]) -> _GetMembersReturn[_T]: ...
    @overload
    def getmembers_static(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn[Any]: ...

def getmodulename(path: StrPath) -> str | None: ...
def ismodule(object: object) -> TypeIs[ModuleType]: ...
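Because predicates like `ismodule` are now `TypeIs` functions, `getmembers` can narrow its result type; a small sketch:

import inspect

# Checkers infer list[tuple[str, types.ModuleType]] from the TypeIs predicate.
modules = inspect.getmembers(inspect, inspect.ismodule)
for name, mod in modules:
    print(name, mod.__name__)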
@@ -6,7 +6,7 @@ from _typeshed import FileDescriptorOrPath, ReadableBuffer, WriteableBuffer
from collections.abc import Callable, Iterable, Iterator
from os import _Opener
from types import TracebackType
from typing import IO, Any, BinaryIO, Literal, Protocol, TextIO, TypeVar, overload, type_check_only
from typing import IO, Any, BinaryIO, Generic, Literal, Protocol, TextIO, TypeVar, overload, type_check_only
from typing_extensions import Self

__all__ = [
@@ -173,12 +173,12 @@ class _WrappedBuffer(Protocol):
    # def seek(self, offset: Literal[0], whence: Literal[2]) -> int: ...
    # def tell(self) -> int: ...

# TODO: Should be generic over the buffer type, but needs to wait for
# TypeVar defaults.
class TextIOWrapper(TextIOBase, TextIO):  # type: ignore[misc]  # incompatible definitions of write in the base classes
_BufferT_co = TypeVar("_BufferT_co", bound=_WrappedBuffer, default=_WrappedBuffer, covariant=True)

class TextIOWrapper(TextIOBase, TextIO, Generic[_BufferT_co]):  # type: ignore[misc]  # incompatible definitions of write in the base classes
    def __init__(
        self,
        buffer: _WrappedBuffer,
        buffer: _BufferT_co,
        encoding: str | None = None,
        errors: str | None = None,
        newline: str | None = None,
@@ -187,7 +187,7 @@ class TextIOWrapper(TextIOBase, TextIO):  # type: ignore[misc]  # incompatible d
    ) -> None: ...
    # Equals the "buffer" argument passed in to the constructor.
    @property
    def buffer(self) -> BinaryIO: ...
    def buffer(self) -> _BufferT_co: ...  # type: ignore[override]
    @property
    def closed(self) -> bool: ...
    @property
@@ -211,7 +211,7 @@ class TextIOWrapper(TextIOBase, TextIO):  # type: ignore[misc]  # incompatible d
    def readline(self, size: int = -1, /) -> str: ...  # type: ignore[override]
    def readlines(self, hint: int = -1, /) -> list[str]: ...  # type: ignore[override]
    # Equals the "buffer" argument passed in to the constructor.
    def detach(self) -> BinaryIO: ...
    def detach(self) -> _BufferT_co: ...  # type: ignore[override]
    # TextIOWrapper's version of seek only supports a limited subset of
    # operations.
    def seek(self, cookie: int, whence: int = 0, /) -> int: ...
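Making `TextIOWrapper` generic means `.buffer` and `.detach()` preserve the wrapped buffer's type instead of decaying to `BinaryIO`; a minimal sketch:

import io

wrapper = io.TextIOWrapper(io.BytesIO(b"hello"), encoding="utf-8")
print(wrapper.read())   # hello
buf = wrapper.detach()  # under the new stubs, typed as BytesIO rather than BinaryIO
print(type(buf).__name__)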
@@ -326,6 +326,10 @@ if sys.version_info >= (3, 10):

if sys.version_info >= (3, 12):
    class batched(Iterator[tuple[_T_co, ...]], Generic[_T_co]):
        def __new__(cls, iterable: Iterable[_T_co], n: int) -> Self: ...
        if sys.version_info >= (3, 13):
            def __new__(cls, iterable: Iterable[_T_co], n: int, *, strict: bool = False) -> Self: ...
        else:
            def __new__(cls, iterable: Iterable[_T_co], n: int) -> Self: ...

        def __iter__(self) -> Self: ...
        def __next__(self) -> tuple[_T_co, ...]: ...
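A sketch of the new keyword (`batched` needs 3.12+, `strict` needs 3.13+):

import sys

if sys.version_info >= (3, 12):
    from itertools import batched

    print(list(batched("ABCDEFG", 3)))  # [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
    if sys.version_info >= (3, 13):
        try:
            list(batched("ABCDEFG", 3, strict=True))
        except ValueError:
            print("strict=True rejects a short final batch")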
@@ -115,6 +115,14 @@ class Maildir(Mailbox[MaildirMessage]):
    def get_message(self, key: str) -> MaildirMessage: ...
    def get_bytes(self, key: str) -> bytes: ...
    def get_file(self, key: str) -> _ProxyFile[bytes]: ...
    if sys.version_info >= (3, 13):
        def get_info(self, key: str) -> str: ...
        def set_info(self, key: str, info: str) -> None: ...
        def get_flags(self, key: str) -> str: ...
        def set_flags(self, key: str, flags: str) -> None: ...
        def add_flag(self, key: str, flag: str) -> None: ...
        def remove_flag(self, key: str, flag: str) -> None: ...

    def iterkeys(self) -> Iterator[str]: ...
    def __contains__(self, key: str) -> bool: ...
    def __len__(self) -> int: ...
@@ -45,6 +45,7 @@ class MimeTypes:
    types_map: tuple[dict[str, str], dict[str, str]]
    types_map_inv: tuple[dict[str, str], dict[str, str]]
    def __init__(self, filenames: tuple[str, ...] = (), strict: bool = True) -> None: ...
    def add_type(self, type: str, ext: str, strict: bool = True) -> None: ...
    def guess_extension(self, type: str, strict: bool = True) -> str | None: ...
    def guess_type(self, url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ...
    def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: ...
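With `guess_type` accepting any `StrPath`, `pathlib.Path` arguments now type-check too; a quick sketch:

import mimetypes
from pathlib import Path

mt = mimetypes.MimeTypes()
print(mt.guess_type(Path("notes.txt")))  # ('text/plain', None)
print(mt.guess_type("archive.tar.gz"))   # ('application/x-tar', 'gzip')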
@@ -1,7 +1,7 @@
import sys
from _typeshed import ReadableBuffer, Unused
from collections.abc import Iterable, Iterator, Sized
from typing import Final, NoReturn, overload
from typing import Final, Literal, NoReturn, overload
from typing_extensions import Self

ACCESS_DEFAULT: int
@@ -77,7 +77,7 @@ class mmap(Iterable[int], Sized):
    def __buffer__(self, flags: int, /) -> memoryview: ...
    def __release_buffer__(self, buffer: memoryview, /) -> None: ...
    if sys.version_info >= (3, 13):
        def seekable(self) -> bool: ...
        def seekable(self) -> Literal[True]: ...

if sys.platform != "win32":
    MADV_NORMAL: int
@@ -113,7 +113,7 @@ class Path(PurePath):

    if sys.version_info >= (3, 13):
        @classmethod
        def from_uri(cls, uri: str) -> Path: ...
        def from_uri(cls, uri: str) -> Self: ...

    def is_dir(self, *, follow_symlinks: bool = True) -> bool: ...
    def is_file(self, *, follow_symlinks: bool = True) -> bool: ...
    def read_text(self, encoding: str | None = None, errors: str | None = None, newline: str | None = None) -> str: ...
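Returning `Self` lets `from_uri` round-trip through `Path` subclasses; a sketch, 3.13+ only (POSIX path shown, since `as_uri` needs an absolute path):

import sys
from pathlib import Path

if sys.version_info >= (3, 13):
    p = Path("/tmp/example.txt")
    q = Path.from_uri(p.as_uri())  # inferred as Path; a subclass would get itself back
    assert p == q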
@@ -5,7 +5,7 @@ from cmd import Cmd
from collections.abc import Callable, Iterable, Mapping, Sequence
from inspect import _SourceObjectType
from types import CodeType, FrameType, TracebackType
from typing import IO, Any, ClassVar, TypeVar
from typing import IO, Any, ClassVar, Final, TypeVar
from typing_extensions import ParamSpec, Self

__all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace", "post_mortem", "help"]
@@ -30,6 +30,9 @@ class Pdb(Bdb, Cmd):

    commands_resuming: ClassVar[list[str]]

    if sys.version_info >= (3, 13):
        MAX_CHAINED_EXCEPTION_DEPTH: Final = 999

    aliases: dict[str, str]
    mainpyfile: str
    _wait_for_mainpyfile: bool
@@ -58,8 +61,16 @@ class Pdb(Bdb, Cmd):
    if sys.version_info < (3, 11):
        def execRcLines(self) -> None: ...

    if sys.version_info >= (3, 13):
        user_opcode = Bdb.user_line

    def bp_commands(self, frame: FrameType) -> bool: ...
    def interaction(self, frame: FrameType | None, traceback: TracebackType | None) -> None: ...

    if sys.version_info >= (3, 13):
        def interaction(self, frame: FrameType | None, tb_or_exc: TracebackType | BaseException | None) -> None: ...
    else:
        def interaction(self, frame: FrameType | None, traceback: TracebackType | None) -> None: ...

    def displayhook(self, obj: object) -> None: ...
    def handle_command_def(self, line: str) -> bool: ...
    def defaultFile(self) -> str: ...
@@ -72,6 +83,9 @@ class Pdb(Bdb, Cmd):
    if sys.version_info < (3, 11):
        def _runscript(self, filename: str) -> None: ...

    if sys.version_info >= (3, 13):
        def completedefault(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ...  # type: ignore[override]

    def do_commands(self, arg: str) -> bool | None: ...
    def do_break(self, arg: str, temporary: bool = ...) -> bool | None: ...
    def do_tbreak(self, arg: str) -> bool | None: ...
@@ -81,6 +95,9 @@ class Pdb(Bdb, Cmd):
    def do_ignore(self, arg: str) -> bool | None: ...
    def do_clear(self, arg: str) -> bool | None: ...
    def do_where(self, arg: str) -> bool | None: ...
    if sys.version_info >= (3, 13):
        def do_exceptions(self, arg: str) -> bool | None: ...

    def do_up(self, arg: str) -> bool | None: ...
    def do_down(self, arg: str) -> bool | None: ...
    def do_until(self, arg: str) -> bool | None: ...
@@ -125,8 +142,14 @@ class Pdb(Bdb, Cmd):
    def help_exec(self) -> None: ...
    def help_pdb(self) -> None: ...
    def sigint_handler(self, signum: signal.Signals, frame: FrameType) -> None: ...
    def message(self, msg: str) -> None: ...
    if sys.version_info >= (3, 13):
        def message(self, msg: str, end: str = "\n") -> None: ...
    else:
        def message(self, msg: str) -> None: ...

    def error(self, msg: str) -> None: ...
    if sys.version_info >= (3, 13):
        def completenames(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ...  # type: ignore[override]
    if sys.version_info >= (3, 12):
        def set_convenience_variable(self, frame: FrameType, name: str, value: Any) -> None: ...
@@ -5,7 +5,7 @@ from builtins import list as _list  # "list" conflicts with method name
from collections.abc import Callable, Container, Mapping, MutableMapping
from reprlib import Repr
from types import MethodType, ModuleType, TracebackType
from typing import IO, Any, AnyStr, Final, NoReturn, TypeVar
from typing import IO, Any, AnyStr, Final, NoReturn, Protocol, TypeVar
from typing_extensions import TypeGuard

__all__ = ["help"]
@@ -17,6 +17,9 @@ __date__: Final[str]
__version__: Final[str]
__credits__: Final[str]

class _Pager(Protocol):
    def __call__(self, text: str, title: str = "") -> None: ...

def pathdirs() -> list[str]: ...
def getdoc(object: object) -> str: ...
def splitdoc(doc: AnyStr) -> tuple[AnyStr, AnyStr]: ...
@@ -229,16 +232,36 @@ class TextDoc(Doc):
    doc: Any | None = None,
) -> str: ...

def pager(text: str) -> None: ...
def getpager() -> Callable[[str], None]: ...
if sys.version_info >= (3, 13):
    def pager(text: str, title: str = "") -> None: ...

else:
    def pager(text: str) -> None: ...

def plain(text: str) -> str: ...
def pipepager(text: str, cmd: str) -> None: ...
def tempfilepager(text: str, cmd: str) -> None: ...
def ttypager(text: str) -> None: ...
def plainpager(text: str) -> None: ...
def describe(thing: Any) -> str: ...
def locate(path: str, forceload: bool = ...) -> object: ...

if sys.version_info >= (3, 13):
    def get_pager() -> _Pager: ...
    def pipe_pager(text: str, cmd: str, title: str = "") -> None: ...
    def tempfile_pager(text: str, cmd: str, title: str = "") -> None: ...
    def tty_pager(text: str, title: str = "") -> None: ...
    def plain_pager(text: str, title: str = "") -> None: ...

    # For backwards compatibility.
    getpager = get_pager
    pipepager = pipe_pager
    tempfilepager = tempfile_pager
    ttypager = tty_pager
    plainpager = plain_pager
else:
    def getpager() -> Callable[[str], None]: ...
    def pipepager(text: str, cmd: str) -> None: ...
    def tempfilepager(text: str, cmd: str) -> None: ...
    def ttypager(text: str) -> None: ...
    def plainpager(text: str) -> None: ...

text: TextDoc
html: HTMLDoc
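The snake_case pagers are the 3.13 names; since the old names survive as aliases, version-agnostic code can keep using them (sketch):

import pydoc

# `plainpager` is the pre-3.13 name; on 3.13 it aliases `plain_pager`.
pydoc.plainpager(pydoc.plain("Hello from pydoc"))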
@@ -1,3 +1,4 @@
import sys
from _typeshed import StrPath
from collections.abc import Iterable

@@ -13,7 +14,15 @@ def addsitedir(sitedir: str, known_paths: set[str] | None = None) -> None: ...
def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = None) -> set[str] | None: ...  # undocumented
def addusersitepackages(known_paths: set[str] | None) -> set[str] | None: ...  # undocumented
def check_enableusersite() -> bool | None: ...  # undocumented

if sys.version_info >= (3, 13):
    def gethistoryfile() -> str: ...  # undocumented

def enablerlcompleter() -> None: ...  # undocumented

if sys.version_info >= (3, 13):
    def register_readline() -> None: ...  # undocumented

def execsitecustomize() -> None: ...  # undocumented
def execusercustomize() -> None: ...  # undocumented
def getsitepackages(prefixes: Iterable[str] | None = None) -> list[str]: ...
@@ -30,7 +30,8 @@ AT_LOCALE: dict[_NamedIntConstant, _NamedIntConstant]
AT_UNICODE: dict[_NamedIntConstant, _NamedIntConstant]
CH_LOCALE: dict[_NamedIntConstant, _NamedIntConstant]
CH_UNICODE: dict[_NamedIntConstant, _NamedIntConstant]
SRE_FLAG_TEMPLATE: int
if sys.version_info < (3, 13):
    SRE_FLAG_TEMPLATE: int
SRE_FLAG_IGNORECASE: int
SRE_FLAG_LOCALE: int
SRE_FLAG_MULTILINE: int
@@ -5,11 +5,30 @@ from typing import Any

__all__ = ["symtable", "SymbolTable", "Class", "Function", "Symbol"]

if sys.version_info >= (3, 13):
    __all__ += ["SymbolTableType"]

def symtable(code: str, filename: str, compile_type: str) -> SymbolTable: ...

if sys.version_info >= (3, 13):
    from enum import StrEnum

    class SymbolTableType(StrEnum):
        MODULE = "module"
        FUNCTION = "function"
        CLASS = "class"
        ANNOTATION = "annotation"
        TYPE_ALIAS = "type alias"
        TYPE_PARAMETERS = "type parameters"
        TYPE_VARIABLE = "type variable"

class SymbolTable:
    def __init__(self, raw_table: Any, filename: str) -> None: ...
    def get_type(self) -> str: ...
    if sys.version_info >= (3, 13):
        def get_type(self) -> SymbolTableType: ...
    else:
        def get_type(self) -> str: ...

    def get_id(self) -> int: ...
    def get_name(self) -> str: ...
    def get_lineno(self) -> int: ...
@@ -42,13 +61,23 @@ class Symbol:
    def get_name(self) -> str: ...
    def is_referenced(self) -> bool: ...
    def is_parameter(self) -> bool: ...
    if sys.version_info >= (3, 14):
        def is_type_parameter(self) -> bool: ...

    def is_global(self) -> bool: ...
    def is_declared_global(self) -> bool: ...
    def is_local(self) -> bool: ...
    def is_annotated(self) -> bool: ...
    def is_free(self) -> bool: ...
    if sys.version_info >= (3, 14):
        def is_free_class(self) -> bool: ...

    def is_imported(self) -> bool: ...
    def is_assigned(self) -> bool: ...
    if sys.version_info >= (3, 14):
        def is_comp_iter(self) -> bool: ...
        def is_comp_cell(self) -> bool: ...

    def is_namespace(self) -> bool: ...
    def get_namespaces(self) -> Sequence[SymbolTable]: ...
    def get_namespace(self) -> SymbolTable: ...
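A quick sketch of the changed return type; on 3.13 `get_type()` returns a `SymbolTableType` enum member (a `StrEnum`, so it still prints as the old string):

import symtable

table = symtable.symtable("def f(): pass", "<demo>", "exec")
print(table.get_type())  # "module" on every version; an enum member on 3.13
print([s.get_name() for s in table.get_symbols()])  # ['f']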
@@ -355,7 +355,11 @@ def set_int_max_str_digits(maxdigits: int) -> None: ...
def get_int_max_str_digits() -> int: ...

if sys.version_info >= (3, 12):
    def getunicodeinternedsize() -> int: ...
    if sys.version_info >= (3, 13):
        def getunicodeinternedsize(*, _only_immortal: bool = False) -> int: ...
    else:
        def getunicodeinternedsize() -> int: ...

    def deactivate_stack_trampoline() -> None: ...
    def is_stack_trampoline_active() -> bool: ...
    # It always exists, but raises on non-linux platforms:
@@ -61,7 +61,7 @@ if sys.version_info >= (3, 10):
    def gettrace() -> TraceFunction | None: ...
    def getprofile() -> ProfileFunction | None: ...

def stack_size(size: int = ...) -> int: ...
def stack_size(size: int = 0, /) -> int: ...

TIMEOUT_MAX: float
@@ -1,7 +1,7 @@
import _tkinter
import sys
from _typeshed import Incomplete, StrEnum, StrOrBytesPath
from collections.abc import Callable, Mapping, Sequence
from collections.abc import Callable, Iterable, Mapping, Sequence
from tkinter.constants import *
from tkinter.font import _FontDescription
from types import TracebackType
@@ -3331,9 +3331,33 @@ class PhotoImage(Image, _PhotoImageLike):
    def blank(self) -> None: ...
    def cget(self, option: str) -> str: ...
    def __getitem__(self, key: str) -> str: ...  # always string: image['height'] can be '0'
    def copy(self) -> PhotoImage: ...
    def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ...
    def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ...
    if sys.version_info >= (3, 13):
        def copy(
            self,
            *,
            from_coords: Iterable[int] | None = None,
            zoom: int | tuple[int, int] | list[int] | None = None,
            subsample: int | tuple[int, int] | list[int] | None = None,
        ) -> PhotoImage: ...
        def subsample(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: ...
        def zoom(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: ...
        def copy_replace(
            self,
            sourceImage: PhotoImage | str,
            *,
            from_coords: Iterable[int] | None = None,
            to: Iterable[int] | None = None,
            shrink: bool = False,
            zoom: int | tuple[int, int] | list[int] | None = None,
            subsample: int | tuple[int, int] | list[int] | None = None,
            # `None` defaults to overlay.
            compositingrule: Literal["overlay", "set"] | None = None,
        ) -> None: ...
    else:
        def copy(self) -> PhotoImage: ...
        def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ...
        def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ...

    def get(self, x: int, y: int) -> tuple[int, int, int]: ...
    def put(
        self,
@@ -3348,7 +3372,44 @@ class PhotoImage(Image, _PhotoImageLike):
        ),
        to: tuple[int, int] | None = None,
    ) -> None: ...
    def write(self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None) -> None: ...
    if sys.version_info >= (3, 13):
        def read(
            self,
            filename: StrOrBytesPath,
            format: str | None = None,
            *,
            from_coords: Iterable[int] | None = None,
            to: Iterable[int] | None = None,
            shrink: bool = False,
        ) -> None: ...
        def write(
            self,
            filename: StrOrBytesPath,
            format: str | None = None,
            from_coords: Iterable[int] | None = None,
            *,
            background: str | None = None,
            grayscale: bool = False,
        ) -> None: ...
        @overload
        def data(
            self, format: str, *, from_coords: Iterable[int] | None = None, background: str | None = None, grayscale: bool = False
        ) -> bytes: ...
        @overload
        def data(
            self,
            format: None = None,
            *,
            from_coords: Iterable[int] | None = None,
            background: str | None = None,
            grayscale: bool = False,
        ) -> tuple[str, ...]: ...

    else:
        def write(
            self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None
        ) -> None: ...

    def transparency_get(self, x: int, y: int) -> bool: ...
    def transparency_set(self, x: int, y: int, boolean: bool) -> None: ...
@@ -27,7 +27,18 @@ class CoverageResults:
        outfile: StrPath | None = None,
    ) -> None: ...  # undocumented
    def update(self, other: CoverageResults) -> None: ...
    def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: ...
    if sys.version_info >= (3, 13):
        def write_results(
            self,
            show_missing: bool = True,
            summary: bool = False,
            coverdir: StrPath | None = None,
            *,
            ignore_missing_files: bool = False,
        ) -> None: ...
    else:
        def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: ...

    def write_results_file(
        self, path: StrPath, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: str | None = None
    ) -> tuple[int, int]: ...
@@ -101,7 +101,6 @@ __all__ = [
    "setheading",
    "setpos",
    "setposition",
    "settiltangle",
    "setundobuffer",
    "setx",
    "sety",
@@ -132,6 +131,9 @@ __all__ = [
if sys.version_info >= (3, 12):
    __all__ += ["teleport"]

if sys.version_info < (3, 13):
    __all__ += ["settiltangle"]

# Note: '_Color' is the alias we use for arguments and _AnyColor is the
# alias we use for return types. Really, these two aliases should be the
# same, but as per the "no union returns" typeshed policy, we'll return
@@ -399,7 +401,10 @@ class RawTurtle(TPen, TNavigator):
        self, t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None
    ) -> None: ...
    def get_shapepoly(self) -> _PolygonCoords | None: ...
    def settiltangle(self, angle: float) -> None: ...

    if sys.version_info < (3, 13):
        def settiltangle(self, angle: float) -> None: ...

    @overload
    def tiltangle(self, angle: None = None) -> float: ...
    @overload
@@ -672,7 +677,10 @@ def shapetransform(
    t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None
) -> None: ...
def get_shapepoly() -> _PolygonCoords | None: ...
def settiltangle(angle: float) -> None: ...

if sys.version_info < (3, 13):
    def settiltangle(angle: float) -> None: ...

@overload
def tiltangle(angle: None = None) -> float: ...
@overload
@@ -245,7 +245,7 @@ class CodeType:
            co_qualname: str = ...,
            co_linetable: bytes = ...,
            co_exceptiontable: bytes = ...,
        ) -> CodeType: ...
        ) -> Self: ...
    elif sys.version_info >= (3, 10):
        def replace(
            self,
@@ -266,7 +266,7 @@ class CodeType:
            co_filename: str = ...,
            co_name: str = ...,
            co_linetable: bytes = ...,
        ) -> CodeType: ...
        ) -> Self: ...
    else:
        def replace(
            self,
@@ -287,7 +287,10 @@ class CodeType:
            co_filename: str = ...,
            co_name: str = ...,
            co_lnotab: bytes = ...,
        ) -> CodeType: ...
        ) -> Self: ...

    if sys.version_info >= (3, 13):
        __replace__ = replace

@final
class MappingProxyType(Mapping[_KT, _VT_co]):
@@ -309,11 +312,17 @@ class MappingProxyType(Mapping[_KT, _VT_co]):

class SimpleNamespace:
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __init__(self, **kwargs: Any) -> None: ...
    if sys.version_info >= (3, 13):
        def __init__(self, mapping_or_iterable: Mapping[str, Any] | Iterable[tuple[str, Any]] = (), /, **kwargs: Any) -> None: ...
    else:
        def __init__(self, **kwargs: Any) -> None: ...

    def __eq__(self, value: object, /) -> bool: ...
    def __getattribute__(self, name: str, /) -> Any: ...
    def __setattr__(self, name: str, value: Any, /) -> None: ...
    def __delattr__(self, name: str, /) -> None: ...
    if sys.version_info >= (3, 13):
        def __replace__(self, **kwargs: Any) -> Self: ...

class ModuleType:
    __name__: str
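The 3.13 additions let `SimpleNamespace` be built from a mapping and copied via `copy.replace`, which dispatches to `__replace__`; a minimal sketch, 3.13+ only:

import copy
import sys
from types import SimpleNamespace

ns = SimpleNamespace(x=1, y=2)
if sys.version_info >= (3, 13):
    ns = SimpleNamespace({"x": 1, "y": 2})  # positional mapping accepted on 3.13
    ns2 = copy.replace(ns, y=20)            # uses __replace__ under the hood
    print(ns2)                              # namespace(x=1, y=20)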
@@ -542,16 +542,18 @@ class AsyncIterator(AsyncIterable[_T_co], Protocol[_T_co]):
class AsyncGenerator(AsyncIterator[_YieldT_co], Generic[_YieldT_co, _SendT_contra]):
    def __anext__(self) -> Awaitable[_YieldT_co]: ...
    @abstractmethod
    def asend(self, value: _SendT_contra, /) -> Awaitable[_YieldT_co]: ...
    def asend(self, value: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: ...
    @overload
    @abstractmethod
    def athrow(
        self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, /
    ) -> Awaitable[_YieldT_co]: ...
    ) -> Coroutine[Any, Any, _YieldT_co]: ...
    @overload
    @abstractmethod
    def athrow(self, typ: BaseException, val: None = None, tb: TracebackType | None = None, /) -> Awaitable[_YieldT_co]: ...
    def aclose(self) -> Awaitable[None]: ...
    def athrow(
        self, typ: BaseException, val: None = None, tb: TracebackType | None = None, /
    ) -> Coroutine[Any, Any, _YieldT_co]: ...
    def aclose(self) -> Coroutine[Any, Any, None]: ...
    @property
    def ag_await(self) -> Any: ...
    @property
@@ -11,13 +11,7 @@ from .case import (
    skipIf as skipIf,
    skipUnless as skipUnless,
)
from .loader import (
    TestLoader as TestLoader,
    defaultTestLoader as defaultTestLoader,
    findTestCases as findTestCases,
    getTestCaseNames as getTestCaseNames,
    makeSuite as makeSuite,
)
from .loader import TestLoader as TestLoader, defaultTestLoader as defaultTestLoader
from .main import TestProgram as TestProgram, main as main
from .result import TestResult as TestResult
from .runner import TextTestResult as TextTestResult, TextTestRunner as TextTestRunner
@@ -52,12 +46,14 @@ __all__ = [
    "registerResult",
    "removeResult",
    "removeHandler",
    "getTestCaseNames",
    "makeSuite",
    "findTestCases",
    "addModuleCleanup",
]

if sys.version_info < (3, 13):
    from .loader import findTestCases as findTestCases, getTestCaseNames as getTestCaseNames, makeSuite as makeSuite

    __all__ += ["getTestCaseNames", "makeSuite", "findTestCases"]

if sys.version_info >= (3, 11):
    __all__ += ["enterModuleContext", "doModuleCleanups"]
@@ -1,4 +1,5 @@
import sys
from asyncio.events import AbstractEventLoop
from collections.abc import Awaitable, Callable
from typing import TypeVar
from typing_extensions import ParamSpec
@@ -12,6 +13,9 @@ _T = TypeVar("_T")
_P = ParamSpec("_P")

class IsolatedAsyncioTestCase(TestCase):
    if sys.version_info >= (3, 13):
        loop_factory: Callable[[], AbstractEventLoop] | None = None

    async def asyncSetUp(self) -> None: ...
    async def asyncTearDown(self) -> None: ...
    def addAsyncCleanup(self, func: Callable[_P, Awaitable[object]], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ...
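`loop_factory` lets a 3.13 test case choose how its event loop is built; a minimal sketch (it uses `asyncio.EventLoop`, which also arrived in 3.13):

import asyncio
import sys
import unittest

class SleepTest(unittest.IsolatedAsyncioTestCase):
    if sys.version_info >= (3, 13):
        loop_factory = asyncio.EventLoop  # one plain event loop per test case

    async def test_sleep(self) -> None:
        await asyncio.sleep(0)

if __name__ == "__main__":
    unittest.main()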
@@ -5,7 +5,7 @@ from collections.abc import Callable, Sequence
from re import Pattern
from types import ModuleType
from typing import Any
from typing_extensions import TypeAlias
from typing_extensions import TypeAlias, deprecated

_SortComparisonMethod: TypeAlias = Callable[[str, str], int]
_SuiteClass: TypeAlias = Callable[[list[unittest.case.TestCase]], unittest.suite.TestSuite]
@@ -34,18 +34,22 @@ class TestLoader:

defaultTestLoader: TestLoader

def getTestCaseNames(
    testCaseClass: type[unittest.case.TestCase],
    prefix: str,
    sortUsing: _SortComparisonMethod = ...,
    testNamePatterns: list[str] | None = None,
) -> Sequence[str]: ...
def makeSuite(
    testCaseClass: type[unittest.case.TestCase],
    prefix: str = "test",
    sortUsing: _SortComparisonMethod = ...,
    suiteClass: _SuiteClass = ...,
) -> unittest.suite.TestSuite: ...
def findTestCases(
    module: ModuleType, prefix: str = "test", sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ...
) -> unittest.suite.TestSuite: ...
if sys.version_info < (3, 13):
    @deprecated("Deprecated in Python 3.11; removal scheduled for Python 3.13")
    def getTestCaseNames(
        testCaseClass: type[unittest.case.TestCase],
        prefix: str,
        sortUsing: _SortComparisonMethod = ...,
        testNamePatterns: list[str] | None = None,
    ) -> Sequence[str]: ...
    @deprecated("Deprecated in Python 3.11; removal scheduled for Python 3.13")
    def makeSuite(
        testCaseClass: type[unittest.case.TestCase],
        prefix: str = "test",
        sortUsing: _SortComparisonMethod = ...,
        suiteClass: _SuiteClass = ...,
    ) -> unittest.suite.TestSuite: ...
    @deprecated("Deprecated in Python 3.11; removal scheduled for Python 3.13")
    def findTestCases(
        module: ModuleType, prefix: str = "test", sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ...
    ) -> unittest.suite.TestSuite: ...
@@ -6,6 +6,7 @@ import unittest.suite
from collections.abc import Iterable
from types import ModuleType
from typing import Any, Protocol
from typing_extensions import deprecated

MAIN_EXAMPLES: str
MODULE_EXAMPLES: str
@@ -61,7 +62,10 @@ class TestProgram:
        tb_locals: bool = False,
    ) -> None: ...

    def usageExit(self, msg: Any = None) -> None: ...
    if sys.version_info < (3, 13):
        @deprecated("Deprecated in Python 3.11; removal scheduled for Python 3.13")
        def usageExit(self, msg: Any = None) -> None: ...

    def parseArgs(self, argv: list[str]) -> None: ...
    def createTests(self, from_discovery: bool = False, Loader: unittest.loader.TestLoader | None = None) -> None: ...
    def runTests(self) -> None: ...  # undocumented
@@ -12,23 +12,44 @@ _F = TypeVar("_F", bound=Callable[..., Any])
_AF = TypeVar("_AF", bound=Callable[..., Coroutine[Any, Any, Any]])
_P = ParamSpec("_P")

__all__ = (
    "Mock",
    "MagicMock",
    "patch",
    "sentinel",
    "DEFAULT",
    "ANY",
    "call",
    "create_autospec",
    "AsyncMock",
    "FILTER_DIR",
    "NonCallableMock",
    "NonCallableMagicMock",
    "mock_open",
    "PropertyMock",
    "seal",
)
if sys.version_info >= (3, 13):
    # ThreadingMock added in 3.13
    __all__ = (
        "Mock",
        "MagicMock",
        "patch",
        "sentinel",
        "DEFAULT",
        "ANY",
        "call",
        "create_autospec",
        "ThreadingMock",
        "AsyncMock",
        "FILTER_DIR",
        "NonCallableMock",
        "NonCallableMagicMock",
        "mock_open",
        "PropertyMock",
        "seal",
    )
else:
    __all__ = (
        "Mock",
        "MagicMock",
        "patch",
        "sentinel",
        "DEFAULT",
        "ANY",
        "call",
        "create_autospec",
        "AsyncMock",
        "FILTER_DIR",
        "NonCallableMock",
        "NonCallableMagicMock",
        "mock_open",
        "PropertyMock",
        "seal",
    )

if sys.version_info < (3, 9):
    __version__: Final[str]
@@ -124,7 +145,6 @@ class NonCallableMock(Base, Any):
    def __delattr__(self, name: str) -> None: ...
    def __setattr__(self, name: str, value: Any) -> None: ...
    def __dir__(self) -> list[str]: ...
    def _calls_repr(self, prefix: str = "Calls") -> str: ...
    def assert_called_with(self, *args: Any, **kwargs: Any) -> None: ...
    def assert_not_called(self) -> None: ...
    def assert_called_once_with(self, *args: Any, **kwargs: Any) -> None: ...
@@ -150,6 +170,10 @@ class NonCallableMock(Base, Any):
    def _format_mock_call_signature(self, args: Any, kwargs: Any) -> str: ...
    def _call_matcher(self, _call: tuple[_Call, ...]) -> _Call: ...
    def _get_child_mock(self, **kw: Any) -> NonCallableMock: ...
    if sys.version_info >= (3, 13):
        def _calls_repr(self) -> str: ...
    else:
        def _calls_repr(self, prefix: str = "Calls") -> str: ...

class CallableMixin(Base):
    side_effect: Any
@@ -427,4 +451,16 @@ class PropertyMock(Mock):
    def __get__(self, obj: _T, obj_type: type[_T] | None = None) -> Self: ...
    def __set__(self, obj: Any, val: Any) -> None: ...

if sys.version_info >= (3, 13):
    class ThreadingMixin(Base):
        DEFAULT_TIMEOUT: Final[float | None] = None

        def __init__(self, /, *args: Any, timeout: float | None | _SentinelObject = ..., **kwargs: Any) -> None: ...
        # Same as `NonCallableMock.reset_mock`.
        def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ...
        def wait_until_called(self, *, timeout: float | None | _SentinelObject = ...) -> None: ...
        def wait_until_any_call_with(self, *args: Any, **kwargs: Any) -> None: ...

    class ThreadingMock(ThreadingMixin, MagicMixin, Mock): ...

def seal(mock: Any) -> None: ...
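A minimal sketch of `ThreadingMock`, which blocks until a cross-thread call arrives (3.13+ only):

import sys
import threading
import time

if sys.version_info >= (3, 13):
    from unittest.mock import ThreadingMock

    target = ThreadingMock()
    threading.Thread(target=lambda: (time.sleep(0.1), target("payload"))).start()
    target.wait_until_any_call_with("payload")  # blocks until the call happens
    target.assert_called_with("payload")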
@@ -21,8 +21,10 @@ if sys.version_info >= (3, 13):

_T = TypeVar("_T")
_W = TypeVar("_W", bound=list[WarningMessage] | None)
_ActionKind: TypeAlias = Literal["default", "error", "ignore", "always", "module", "once"]

if sys.version_info >= (3, 14):
    _ActionKind: TypeAlias = Literal["default", "error", "ignore", "always", "module", "once"]
else:
    _ActionKind: TypeAlias = Literal["default", "error", "ignore", "always", "all", "module", "once"]

filters: Sequence[tuple[str, str | None, type[Warning], str | None, int]]  # undocumented, do not mutate

def showwarning(
@@ -239,9 +239,15 @@ if sys.version_info >= (3, 9):
    def indent(tree: Element | ElementTree, space: str = "  ", level: int = 0) -> None: ...

def parse(source: _FileRead, parser: XMLParser | None = None) -> ElementTree: ...
def iterparse(
    source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None
) -> Iterator[tuple[str, Any]]: ...

class _IterParseIterator(Iterator[tuple[str, Any]]):
    def __next__(self) -> tuple[str, Any]: ...
    if sys.version_info >= (3, 13):
        def close(self) -> None: ...
    if sys.version_info >= (3, 11):
        def __del__(self) -> None: ...

def iterparse(source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None) -> _IterParseIterator: ...

class XMLPullParser:
    def __init__(self, events: Sequence[str] | None = None, *, _parser: XMLParser | None = None) -> None: ...
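With `iterparse` returning a concrete iterator class, the 3.13 `close()` method becomes visible to type checkers; a small sketch:

import io
import sys
import xml.etree.ElementTree as ET

events = ET.iterparse(io.StringIO("<root><a/><b/></root>"), events=("end",))
for _, elem in events:
    print(elem.tag)
if sys.version_info >= (3, 13):
    events.close()  # 3.13+: release the source explicitly instead of relying on __del__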
@@ -206,6 +206,9 @@ class ZipInfo:
    compress_size: int
    file_size: int
    orig_filename: str  # undocumented
    if sys.version_info >= (3, 13):
        compress_level: int | None

    def __init__(self, filename: str = "NoName", date_time: _DateTuple = (1980, 1, 1, 0, 0, 0)) -> None: ...
    @classmethod
    def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: ...
@@ -3,12 +3,14 @@ from _typeshed import StrPath
from collections.abc import Iterator, Sequence
from io import TextIOWrapper
from os import PathLike
from typing import IO, Literal, overload
from typing import IO, Literal, TypeVar, overload
from typing_extensions import Self, TypeAlias
from zipfile import ZipFile

_ReadWriteBinaryMode: TypeAlias = Literal["r", "w", "rb", "wb"]

_ZF = TypeVar("_ZF", bound=ZipFile)

if sys.version_info >= (3, 12):
    class InitializedState:
        def __init__(self, *args: object, **kwargs: object) -> None: ...
@@ -23,6 +25,9 @@ if sys.version_info >= (3, 12):
        @overload
        @classmethod
        def make(cls, source: StrPath | IO[bytes]) -> Self: ...
        if sys.version_info >= (3, 13):
            @classmethod
            def inject(cls, zf: _ZF) -> _ZF: ...

class Path:
    root: CompleteDirs
@@ -15,6 +15,7 @@ red_knot_module_resolver = { workspace = true }
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_text_size = { workspace = true }

bitflags = { workspace = true }
@@ -27,12 +27,13 @@ pub struct AstNodeRef<T> {

#[allow(unsafe_code)]
impl<T> AstNodeRef<T> {
    /// Creates a new `AstNodeRef` that references `node`. The `parsed` is the [`ParsedModule`] to which
    /// the `AstNodeRef` belongs.
    /// Creates a new `AstNodeRef` that references `node`. The `parsed` is the [`ParsedModule`] to
    /// which the `AstNodeRef` belongs.
    ///
    /// ## Safety
    /// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the [`ParsedModule`] to
    /// which `node` belongs. It's the caller's responsibility to ensure that the invariant `node belongs to parsed` is upheld.
    /// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
    /// [`ParsedModule`] to which `node` belongs. It's the caller's responsibility to ensure that
    /// the invariant `node belongs to parsed` is upheld.
    pub(super) unsafe fn new(parsed: ParsedModule, node: &T) -> Self {
        Self {
@@ -43,8 +44,8 @@ impl<T> AstNodeRef<T> {

    /// Returns a reference to the wrapped node.
    pub fn node(&self) -> &T {
        // SAFETY: Holding on to `parsed` ensures that the AST to which `node` belongs is still alive
        // and not moved.
        // SAFETY: Holding on to `parsed` ensures that the AST to which `node` belongs is still
        // alive and not moved.
        unsafe { self.node.as_ref() }
    }
}
@@ -4,27 +4,30 @@ use red_knot_module_resolver::Db as ResolverDb;
use ruff_db::{Db as SourceDb, Upcast};

use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::{public_symbols_map, PublicSymbolId, ScopeId};
use crate::semantic_index::{root_scope, semantic_index, symbol_table};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::ScopeId;
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
use crate::types::{
    infer_types, public_symbol_ty, ClassType, FunctionType, IntersectionType, UnionType,
    infer_definition_types, infer_expression_types, infer_scope_types, ClassType, FunctionType,
    IntersectionType, UnionType,
};

#[salsa::jar(db=Db)]
pub struct Jar(
    ScopeId<'_>,
    PublicSymbolId<'_>,
    Definition<'_>,
    Expression<'_>,
    FunctionType<'_>,
    ClassType<'_>,
    UnionType<'_>,
    IntersectionType<'_>,
    symbol_table,
    root_scope,
    use_def_map,
    global_scope,
    semantic_index,
    infer_types,
    public_symbol_ty,
    public_symbols_map,
    infer_definition_types,
    infer_expression_types,
    infer_scope_types,
);

/// Database giving access to semantic information about a Python program.
@@ -44,6 +47,7 @@ pub(crate) mod tests {
    use ruff_db::system::{DbWithTestSystem, System, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;
    use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
    use ruff_python_trivia::textwrap;

    use super::{Db, Jar};

@@ -85,6 +89,12 @@ pub(crate) mod tests {
        pub(crate) fn clear_salsa_events(&mut self) {
            self.take_salsa_events();
        }

        /// Write auto-dedented text to a file.
        pub(crate) fn write_dedented(&mut self, path: &str, content: &str) -> anyhow::Result<()> {
            self.write_file(path, textwrap::dedent(content))?;
            Ok(())
        }
    }

    impl DbWithTestSystem for TestDb {
@@ -10,17 +10,21 @@ use ruff_index::{IndexSlice, IndexVec};
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::ast_ids::AstIds;
use crate::semantic_index::builder::SemanticIndexBuilder;
use crate::semantic_index::definition::{Definition, DefinitionNodeKey, DefinitionNodeRef};
use crate::semantic_index::definition::{Definition, DefinitionNodeKey};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
    FileScopeId, NodeWithScopeKey, NodeWithScopeRef, PublicSymbolId, Scope, ScopeId,
    ScopedSymbolId, SymbolTable,
    FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolTable,
};
use crate::Db;

pub mod ast_ids;
mod builder;
pub mod definition;
pub mod expression;
pub mod symbol;
mod use_def;

pub(crate) use self::use_def::UseDefMap;

type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;

@@ -42,57 +46,63 @@ pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
/// Salsa can avoid invalidating dependent queries if this scope's symbol table
/// is unchanged.
#[salsa::tracked]
pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<SymbolTable<'db>> {
pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<SymbolTable> {
    let _span = tracing::trace_span!("symbol_table", ?scope).entered();
    let index = semantic_index(db, scope.file(db));

    index.symbol_table(scope.file_scope_id(db))
}

/// Returns the root scope of `file`.
/// Returns the use-def map for a specific `scope`.
///
/// Using [`use_def_map`] over [`semantic_index`] has the advantage that
/// Salsa can avoid invalidating dependent queries if this scope's use-def map
/// is unchanged.
#[salsa::tracked]
pub(crate) fn root_scope(db: &dyn Db, file: File) -> ScopeId<'_> {
    let _span = tracing::trace_span!("root_scope", ?file).entered();
pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseDefMap<'db>> {
    let _span = tracing::trace_span!("use_def_map", ?scope).entered();
    let index = semantic_index(db, scope.file(db));

    FileScopeId::root().to_scope_id(db, file)
    index.use_def_map(scope.file_scope_id(db))
}

/// Returns the symbol with the given name in `file`'s public scope or `None` if
/// no symbol with the given name exists.
pub(crate) fn public_symbol<'db>(
    db: &'db dyn Db,
    file: File,
    name: &str,
) -> Option<PublicSymbolId<'db>> {
    let root_scope = root_scope(db, file);
    let symbol_table = symbol_table(db, root_scope);
    let local = symbol_table.symbol_id_by_name(name)?;
    Some(local.to_public_symbol(db, file))
/// Returns the module global scope of `file`.
#[salsa::tracked]
pub(crate) fn global_scope(db: &dyn Db, file: File) -> ScopeId<'_> {
    let _span = tracing::trace_span!("global_scope", ?file).entered();

    FileScopeId::global().to_scope_id(db, file)
}

/// The symbol tables for an entire file.
/// The symbol tables and use-def maps for all scopes in a file.
#[derive(Debug)]
pub(crate) struct SemanticIndex<'db> {
    /// List of all symbol tables in this file, indexed by scope.
    symbol_tables: IndexVec<FileScopeId, Arc<SymbolTable<'db>>>,
    symbol_tables: IndexVec<FileScopeId, Arc<SymbolTable>>,

    /// List of all scopes in this file.
    scopes: IndexVec<FileScopeId, Scope>,

    /// Maps expressions to their corresponding scope.
    /// Map expressions to their corresponding scope.
    /// We can't use [`ExpressionId`] here, because the challenge is how to get from
    /// an [`ast::Expr`] to an [`ExpressionId`] (which requires knowing the scope).
    scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,

    /// Maps from a node creating a definition node to its definition.
    /// Map from a node creating a definition to its definition.
    definitions_by_node: FxHashMap<DefinitionNodeKey, Definition<'db>>,

    /// Map from a standalone expression to its [`Expression`] ingredient.
    expressions_by_node: FxHashMap<ExpressionNodeKey, Expression<'db>>,

    /// Map from nodes that create a scope to the scope they create.
    scopes_by_node: FxHashMap<NodeWithScopeKey, FileScopeId>,

    /// Map from the file-local [`FileScopeId`] to the salsa-ingredient [`ScopeId`].
    scope_ids_by_scope: IndexVec<FileScopeId, ScopeId<'db>>,

    /// Use-def map for each scope in this file.
    use_def_maps: IndexVec<FileScopeId, Arc<UseDefMap<'db>>>,

    /// Lookup table to map between node ids and ast nodes.
    ///
    /// Note: We should not depend on this map when analysing other files or
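For readers new to the term, a use-def map links each use of a symbol to the definitions that might reach it. A hypothetical Python fragment, purely to illustrate what the map records (this is not red_knot's API):

# Hypothetical illustration only: what a use-def map records for this scope.
x = 1          # definition d1 of `x`
flag = input() == "y"
if flag:
    x = 2      # definition d2 of `x`
print(x)       # this use of `x` may be reached by d1 or d2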
@@ -105,10 +115,18 @@ impl<'db> SemanticIndex<'db> {
|
||||
///
|
||||
/// Use the Salsa cached [`symbol_table`] query if you only need the
|
||||
/// symbol table for a single scope.
|
||||
pub(super) fn symbol_table(&self, scope_id: FileScopeId) -> Arc<SymbolTable<'db>> {
|
||||
pub(super) fn symbol_table(&self, scope_id: FileScopeId) -> Arc<SymbolTable> {
|
||||
self.symbol_tables[scope_id].clone()
|
||||
}
|
||||
|
||||
/// Returns the use-def map for a specific scope.
|
||||
///
|
||||
/// Use the Salsa cached [`use_def_map`] query if you only need the
|
||||
/// use-def map for a single scope.
|
||||
pub(super) fn use_def_map(&self, scope_id: FileScopeId) -> Arc<UseDefMap> {
|
||||
self.use_def_maps[scope_id].clone()
|
||||
}
|
||||
|
||||
pub(crate) fn ast_ids(&self, scope_id: FileScopeId) -> &AstIds {
|
||||
&self.ast_ids[scope_id]
|
||||
}
|
||||
@@ -157,16 +175,28 @@ impl<'db> SemanticIndex<'db> {
|
||||
}
|
||||
|
||||
/// Returns an iterator over all ancestors of `scope`, starting with `scope` itself.
|
||||
#[allow(unused)]
|
||||
pub(crate) fn ancestor_scopes(&self, scope: FileScopeId) -> AncestorsIter {
|
||||
AncestorsIter::new(self, scope)
|
||||
}
|
||||
|
||||
/// Returns the [`Definition`] salsa ingredient for `definition_node`.
|
||||
pub(crate) fn definition<'def>(
|
||||
/// Returns the [`Definition`] salsa ingredient for `definition_key`.
|
||||
pub(crate) fn definition(
|
||||
&self,
|
||||
definition_node: impl Into<DefinitionNodeRef<'def>>,
|
||||
definition_key: impl Into<DefinitionNodeKey>,
|
||||
) -> Definition<'db> {
|
||||
self.definitions_by_node[&definition_node.into().key()]
|
||||
self.definitions_by_node[&definition_key.into()]
|
||||
}
|
||||
|
||||
/// Returns the [`Expression`] ingredient for an expression node.
|
||||
/// Panics if we have no expression ingredient for that node. We can only call this method for
|
||||
/// standalone-inferable expressions, which we call `add_standalone_expression` for in
|
||||
/// [`SemanticIndexBuilder`].
|
||||
pub(crate) fn expression(
|
||||
&self,
|
||||
expression_key: impl Into<ExpressionNodeKey>,
|
||||
) -> Expression<'db> {
|
||||
self.expressions_by_node[&expression_key.into()]
|
||||
}
|
||||
|
||||
/// Returns the id of the scope that `node` creates. This is different from [`Definition::scope`] which
|
||||
@@ -176,8 +206,6 @@ impl<'db> SemanticIndex<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
/// ID that uniquely identifies an expression inside a [`Scope`].
|
||||
|
||||
pub struct AncestorsIter<'a> {
|
||||
scopes: &'a IndexSlice<FileScopeId, Scope>,
|
||||
next_id: Option<FileScopeId>,
|
||||
@@ -275,10 +303,13 @@ mod tests {
|
||||
    use ruff_db::files::{system_path_to_file, File};
    use ruff_db::parsed::parsed_module;
    use ruff_db::system::DbWithTestSystem;
    use ruff_python_ast as ast;

    use crate::db::tests::TestDb;
    use crate::semantic_index::ast_ids::HasScopedUseId;
    use crate::semantic_index::definition::DefinitionKind;
    use crate::semantic_index::symbol::{FileScopeId, Scope, ScopeKind, SymbolTable};
    use crate::semantic_index::{root_scope, semantic_index, symbol_table};
    use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
    use crate::Db;

    struct TestCase {
@@ -305,95 +336,113 @@ mod tests {
    #[test]
    fn empty() {
        let TestCase { db, file } = test_case("");
        let root_table = symbol_table(&db, root_scope(&db, file));
        let global_table = symbol_table(&db, global_scope(&db, file));

        let root_names = names(&root_table);
        let global_names = names(&global_table);

        assert_eq!(root_names, Vec::<&str>::new());
        assert_eq!(global_names, Vec::<&str>::new());
    }

    #[test]
    fn simple() {
        let TestCase { db, file } = test_case("x");
        let root_table = symbol_table(&db, root_scope(&db, file));
        let global_table = symbol_table(&db, global_scope(&db, file));

        assert_eq!(names(&root_table), vec!["x"]);
        assert_eq!(names(&global_table), vec!["x"]);
    }

    #[test]
    fn annotation_only() {
        let TestCase { db, file } = test_case("x: int");
        let root_table = symbol_table(&db, root_scope(&db, file));
        let global_table = symbol_table(&db, global_scope(&db, file));

        assert_eq!(names(&root_table), vec!["int", "x"]);
        assert_eq!(names(&global_table), vec!["int", "x"]);
        // TODO record definition
    }

    #[test]
    fn import() {
        let TestCase { db, file } = test_case("import foo");
        let root_table = symbol_table(&db, root_scope(&db, file));
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(names(&root_table), vec!["foo"]);
        let foo = root_table.symbol_by_name("foo").unwrap();
        assert_eq!(names(&global_table), vec!["foo"]);
        let foo = global_table.symbol_id_by_name("foo").unwrap();

        assert_eq!(foo.definitions().len(), 1);
        let use_def = use_def_map(&db, scope);
        let [definition] = use_def.public_definitions(foo) else {
            panic!("expected one definition");
        };
        assert!(matches!(definition.node(&db), DefinitionKind::Import(_)));
    }

    #[test]
    fn import_sub() {
        let TestCase { db, file } = test_case("import foo.bar");
        let root_table = symbol_table(&db, root_scope(&db, file));
        let global_table = symbol_table(&db, global_scope(&db, file));

        assert_eq!(names(&root_table), vec!["foo"]);
        assert_eq!(names(&global_table), vec!["foo"]);
    }

    #[test]
    fn import_as() {
        let TestCase { db, file } = test_case("import foo.bar as baz");
        let root_table = symbol_table(&db, root_scope(&db, file));
        let global_table = symbol_table(&db, global_scope(&db, file));

        assert_eq!(names(&root_table), vec!["baz"]);
        assert_eq!(names(&global_table), vec!["baz"]);
    }

    #[test]
    fn import_from() {
        let TestCase { db, file } = test_case("from bar import foo");
        let root_table = symbol_table(&db, root_scope(&db, file));
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(names(&root_table), vec!["foo"]);
        assert_eq!(
            root_table
                .symbol_by_name("foo")
                .unwrap()
                .definitions()
                .len(),
            1
        );
        assert_eq!(names(&global_table), vec!["foo"]);
        assert!(
            root_table
            global_table
                .symbol_by_name("foo")
                .is_some_and(|symbol| { symbol.is_defined() || !symbol.is_used() }),
                .is_some_and(|symbol| { symbol.is_defined() && !symbol.is_used() }),
            "symbols that are defined get the defined flag"
        );

        let use_def = use_def_map(&db, scope);
        let [definition] = use_def.public_definitions(
            global_table
                .symbol_id_by_name("foo")
                .expect("symbol to exist"),
        ) else {
            panic!("expected one definition");
        };
        assert!(matches!(
            definition.node(&db),
            DefinitionKind::ImportFrom(_)
        ));
    }

    #[test]
    fn assign() {
        let TestCase { db, file } = test_case("x = foo");
        let root_table = symbol_table(&db, root_scope(&db, file));
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(names(&root_table), vec!["foo", "x"]);
        assert_eq!(
            root_table.symbol_by_name("x").unwrap().definitions().len(),
            1
        );
        assert_eq!(names(&global_table), vec!["foo", "x"]);
        assert!(
            root_table
            global_table
                .symbol_by_name("foo")
                .is_some_and(|symbol| { !symbol.is_defined() && symbol.is_used() }),
            "a symbol used but not defined in a scope should have only the used flag"
        );
        let use_def = use_def_map(&db, scope);
        let [definition] =
            use_def.public_definitions(global_table.symbol_id_by_name("x").expect("symbol exists"))
        else {
            panic!("expected one definition");
        };
        assert!(matches!(
            definition.node(&db),
            DefinitionKind::Assignment(_)
        ));
    }

    #[test]
@@ -405,26 +454,34 @@ class C:
y = 2
",
        );
        let root_table = symbol_table(&db, root_scope(&db, file));
        let global_table = symbol_table(&db, global_scope(&db, file));

        assert_eq!(names(&root_table), vec!["C", "y"]);
        assert_eq!(names(&global_table), vec!["C", "y"]);

        let index = semantic_index(&db, file);

        let scopes: Vec<_> = index.child_scopes(FileScopeId::root()).collect();
        assert_eq!(scopes.len(), 1);

        let (class_scope_id, class_scope) = scopes[0];
        let [(class_scope_id, class_scope)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected one child scope")
        };
        assert_eq!(class_scope.kind(), ScopeKind::Class);

        assert_eq!(class_scope_id.to_scope_id(&db, file).name(&db), "C");

        let class_table = index.symbol_table(class_scope_id);
        assert_eq!(names(&class_table), vec!["x"]);
        assert_eq!(
            class_table.symbol_by_name("x").unwrap().definitions().len(),
            1
        );

        let use_def = index.use_def_map(class_scope_id);
        let [definition] =
            use_def.public_definitions(class_table.symbol_id_by_name("x").expect("symbol exists"))
        else {
            panic!("expected one definition");
        };
        assert!(matches!(
            definition.node(&db),
            DefinitionKind::Assignment(_)
        ));
    }

    #[test]
@@ -437,27 +494,34 @@ y = 2
",
        );
        let index = semantic_index(&db, file);
        let root_table = index.symbol_table(FileScopeId::root());
        let global_table = index.symbol_table(FileScopeId::global());

        assert_eq!(names(&root_table), vec!["func", "y"]);
        assert_eq!(names(&global_table), vec!["func", "y"]);

        let scopes = index.child_scopes(FileScopeId::root()).collect::<Vec<_>>();
        assert_eq!(scopes.len(), 1);

        let (function_scope_id, function_scope) = scopes[0];
        let [(function_scope_id, function_scope)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected one child scope")
        };
        assert_eq!(function_scope.kind(), ScopeKind::Function);
        assert_eq!(function_scope_id.to_scope_id(&db, file).name(&db), "func");

        let function_table = index.symbol_table(function_scope_id);
        assert_eq!(names(&function_table), vec!["x"]);
        assert_eq!(

        let use_def = index.use_def_map(function_scope_id);
        let [definition] = use_def.public_definitions(
            function_table
                .symbol_by_name("x")
                .unwrap()
                .definitions()
                .len(),
            1
        );
                .symbol_id_by_name("x")
                .expect("symbol exists"),
        ) else {
            panic!("expected one definition");
        };
        assert!(matches!(
            definition.node(&db),
            DefinitionKind::Assignment(_)
        ));
    }

    #[test]
@@ -471,14 +535,15 @@ def func():
",
        );
        let index = semantic_index(&db, file);
        let root_table = index.symbol_table(FileScopeId::root());
        let global_table = index.symbol_table(FileScopeId::global());

        assert_eq!(names(&root_table), vec!["func"]);
        let scopes: Vec<_> = index.child_scopes(FileScopeId::root()).collect();
        assert_eq!(scopes.len(), 2);

        let (func_scope1_id, func_scope_1) = scopes[0];
        let (func_scope2_id, func_scope_2) = scopes[1];
        assert_eq!(names(&global_table), vec!["func"]);
        let [(func_scope1_id, func_scope_1), (func_scope2_id, func_scope_2)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected two child scopes");
        };

        assert_eq!(func_scope_1.kind(), ScopeKind::Function);

@@ -490,14 +555,16 @@ def func():
        let func2_table = index.symbol_table(func_scope2_id);
        assert_eq!(names(&func1_table), vec!["x"]);
        assert_eq!(names(&func2_table), vec!["y"]);
        assert_eq!(
            root_table
                .symbol_by_name("func")
                .unwrap()
                .definitions()
                .len(),
            2
        );

        let use_def = index.use_def_map(FileScopeId::global());
        let [definition] = use_def.public_definitions(
            global_table
                .symbol_id_by_name("func")
                .expect("symbol exists"),
        ) else {
            panic!("expected one definition");
        };
        assert!(matches!(definition.node(&db), DefinitionKind::Function(_)));
    }

    #[test]
@@ -510,22 +577,27 @@ def func[T]():
        );

        let index = semantic_index(&db, file);
        let root_table = index.symbol_table(FileScopeId::root());
        let global_table = index.symbol_table(FileScopeId::global());

        assert_eq!(names(&root_table), vec!["func"]);
        assert_eq!(names(&global_table), vec!["func"]);

        let scopes: Vec<_> = index.child_scopes(FileScopeId::root()).collect();
        assert_eq!(scopes.len(), 1);
        let (ann_scope_id, ann_scope) = scopes[0];
        let [(ann_scope_id, ann_scope)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected one child scope");
        };

        assert_eq!(ann_scope.kind(), ScopeKind::Annotation);
        assert_eq!(ann_scope_id.to_scope_id(&db, file).name(&db), "func");
        let ann_table = index.symbol_table(ann_scope_id);
        assert_eq!(names(&ann_table), vec!["T"]);

        let scopes: Vec<_> = index.child_scopes(ann_scope_id).collect();
        assert_eq!(scopes.len(), 1);
        let (func_scope_id, func_scope) = scopes[0];
        let [(func_scope_id, func_scope)] =
            index.child_scopes(ann_scope_id).collect::<Vec<_>>()[..]
        else {
            panic!("expected one child scope");
        };
        assert_eq!(func_scope.kind(), ScopeKind::Function);
        assert_eq!(func_scope_id.to_scope_id(&db, file).name(&db), "func");
        let func_table = index.symbol_table(func_scope_id);
@@ -542,14 +614,17 @@ class C[T]:
        );

        let index = semantic_index(&db, file);
        let root_table = index.symbol_table(FileScopeId::root());
        let global_table = index.symbol_table(FileScopeId::global());

        assert_eq!(names(&root_table), vec!["C"]);
        assert_eq!(names(&global_table), vec!["C"]);

        let scopes: Vec<_> = index.child_scopes(FileScopeId::root()).collect();
        let [(ann_scope_id, ann_scope)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected one child scope");
        };

        assert_eq!(scopes.len(), 1);
        let (ann_scope_id, ann_scope) = scopes[0];
        assert_eq!(ann_scope.kind(), ScopeKind::Annotation);
        assert_eq!(ann_scope_id.to_scope_id(&db, file).name(&db), "C");
        let ann_table = index.symbol_table(ann_scope_id);
@@ -561,48 +636,49 @@ class C[T]:
            "type parameters are defined by the scope that introduces them"
        );

        let scopes: Vec<_> = index.child_scopes(ann_scope_id).collect();
        assert_eq!(scopes.len(), 1);
        let (class_scope_id, class_scope) = scopes[0];
        let [(class_scope_id, class_scope)] =
            index.child_scopes(ann_scope_id).collect::<Vec<_>>()[..]
        else {
            panic!("expected one child scope");
        };

        assert_eq!(class_scope.kind(), ScopeKind::Class);
        assert_eq!(class_scope_id.to_scope_id(&db, file).name(&db), "C");
        assert_eq!(names(&index.symbol_table(class_scope_id)), vec!["x"]);
    }

    // TODO: After porting the control flow graph.
    // #[test]
    // fn reachability_trivial() {
    //     let parsed = parse("x = 1; x");
    //     let ast = parsed.syntax();
    //     let index = SemanticIndex::from_ast(ast);
    //     let table = &index.symbol_table;
    //     let x_sym = table
    //         .root_symbol_id_by_name("x")
    //         .expect("x symbol should exist");
    //     let ast::Stmt::Expr(ast::StmtExpr { value: x_use, .. }) = &ast.body[1] else {
    //         panic!("should be an expr")
    //     };
    //     let x_defs: Vec<_> = index
    //         .reachable_definitions(x_sym, x_use)
    //         .map(|constrained_definition| constrained_definition.definition)
    //         .collect();
    //     assert_eq!(x_defs.len(), 1);
    //     let Definition::Assignment(node_key) = &x_defs[0] else {
    //         panic!("def should be an assignment")
    //     };
    //     let Some(def_node) = node_key.resolve(ast.into()) else {
    //         panic!("node key should resolve")
    //     };
    //     let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
    //         value: ast::Number::Int(num),
    //         ..
    //     }) = &*def_node.value
    //     else {
    //         panic!("should be a number literal")
    //     };
    //     assert_eq!(*num, 1);
    // }
    #[test]
    fn reachability_trivial() {
        let TestCase { db, file } = test_case("x = 1; x");
        let parsed = parsed_module(&db, file);
        let scope = global_scope(&db, file);
        let ast = parsed.syntax();
        let ast::Stmt::Expr(ast::StmtExpr {
            value: x_use_expr, ..
        }) = &ast.body[1]
        else {
            panic!("should be an expr")
        };
        let ast::Expr::Name(x_use_expr_name) = x_use_expr.as_ref() else {
            panic!("expected a Name");
        };
        let x_use_id = x_use_expr_name.scoped_use_id(&db, scope);
        let use_def = use_def_map(&db, scope);
        let [definition] = use_def.use_definitions(x_use_id) else {
            panic!("expected one definition");
        };
        let DefinitionKind::Assignment(assignment) = definition.node(&db) else {
            panic!("should be an assignment definition")
        };
        let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
            value: ast::Number::Int(num),
            ..
        }) = &*assignment.assignment().value
        else {
            panic!("should be a number literal")
        };
        assert_eq!(*num, 1);
    }

    #[test]
    fn expression_scope() {
@@ -616,7 +692,7 @@ class C[T]:
        let x = &x_stmt.targets[0];

        assert_eq!(index.expression_scope(x).kind(), ScopeKind::Module);
        assert_eq!(index.expression_scope_id(x), FileScopeId::root());
        assert_eq!(index.expression_scope_id(x), FileScopeId::global());

        let def = ast.body[1].as_function_def_stmt().unwrap();
        let y_stmt = def.body[0].as_assign_stmt().unwrap();
@@ -653,16 +729,16 @@ def x():

        let index = semantic_index(&db, file);

        let descendents = index.descendent_scopes(FileScopeId::root());
        let descendents = index.descendent_scopes(FileScopeId::global());
        assert_eq!(
            scope_names(descendents, &db, file),
            vec!["Test", "foo", "bar", "baz", "x"]
        );

        let children = index.child_scopes(FileScopeId::root());
        let children = index.child_scopes(FileScopeId::global());
        assert_eq!(scope_names(children, &db, file), vec!["Test", "x"]);

        let test_class = index.child_scopes(FileScopeId::root()).next().unwrap().0;
        let test_class = index.child_scopes(FileScopeId::global()).next().unwrap().0;
        let test_child_scopes = index.child_scopes(test_class);
        assert_eq!(
            scope_names(test_child_scopes, &db, file),
@@ -670,7 +746,7 @@ def x():
        );

        let bar_scope = index
            .descendent_scopes(FileScopeId::root())
            .descendent_scopes(FileScopeId::global())
            .nth(2)
            .unwrap()
            .0;

@@ -1,6 +1,6 @@
use rustc_hash::FxHashMap;

use ruff_index::{newtype_index, Idx};
use ruff_index::newtype_index;
use ruff_python_ast as ast;
use ruff_python_ast::ExpressionRef;

@@ -28,18 +28,54 @@ use crate::Db;
pub(crate) struct AstIds {
    /// Maps expressions to their expression id. Uses `NodeKey` because it avoids cloning [`Parsed`].
    expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
    /// Maps expressions which "use" a symbol (that is, [`ExprName`]) to a use id.
    uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
}

impl AstIds {
    fn expression_id(&self, key: impl Into<ExpressionNodeKey>) -> ScopedExpressionId {
        self.expressions_map[&key.into()]
    }

    fn use_id(&self, key: impl Into<ExpressionNodeKey>) -> ScopedUseId {
        self.uses_map[&key.into()]
    }
}

fn ast_ids<'db>(db: &'db dyn Db, scope: ScopeId) -> &'db AstIds {
    semantic_index(db, scope.file(db)).ast_ids(scope.file_scope_id(db))
}

pub trait HasScopedUseId {
    /// The type of the ID uniquely identifying the use.
    type Id: Copy;

    /// Returns the ID that uniquely identifies the use in `scope`.
    fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id;
}
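
// Usage sketch (mirroring the semantic-index tests; `db` and `scope` are
// assumed to be an existing `Db` and `ScopeId`): for an `ast::ExprName` that
// loads `x`, `name.scoped_use_id(&db, scope)` returns the dense use id that
// was recorded for that exact use during indexing.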

/// Uniquely identifies a use of a name in a [`crate::semantic_index::symbol::FileScopeId`].
#[newtype_index]
pub struct ScopedUseId;

impl HasScopedUseId for ast::ExprName {
    type Id = ScopedUseId;

    fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
        let expression_ref = ExpressionRef::from(self);
        expression_ref.scoped_use_id(db, scope)
    }
}

impl HasScopedUseId for ast::ExpressionRef<'_> {
    type Id = ScopedUseId;

    fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
        let ast_ids = ast_ids(db, scope);
        ast_ids.use_id(*self)
    }
}

pub trait HasScopedAstId {
    /// The type of the ID uniquely identifying the node.
    type Id: Copy;
@@ -110,38 +146,43 @@ impl HasScopedAstId for ast::ExpressionRef<'_> {

#[derive(Debug)]
pub(super) struct AstIdsBuilder {
    next_id: ScopedExpressionId,
    expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
    uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
}

impl AstIdsBuilder {
    pub(super) fn new() -> Self {
        Self {
            next_id: ScopedExpressionId::new(0),
            expressions_map: FxHashMap::default(),
            uses_map: FxHashMap::default(),
        }
    }

    /// Adds `expr` to the AST ids map and returns its id.
    ///
    /// ## Safety
    /// The function is marked as unsafe because it calls [`AstNodeRef::new`] which requires
    /// that `expr` is a child of `parsed`.
    #[allow(unsafe_code)]
    /// Adds `expr` to the expression ids map and returns its id.
    pub(super) fn record_expression(&mut self, expr: &ast::Expr) -> ScopedExpressionId {
        let expression_id = self.next_id;
        self.next_id = expression_id + 1;
        let expression_id = self.expressions_map.len().into();
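        // Ids are handed out densely in visitation order: the next id is
        // simply the current size of the map.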

        self.expressions_map.insert(expr.into(), expression_id);

        expression_id
    }

    /// Adds `expr` to the use ids map and returns its id.
    pub(super) fn record_use(&mut self, expr: &ast::Expr) -> ScopedUseId {
        let use_id = self.uses_map.len().into();

        self.uses_map.insert(expr.into(), use_id);

        use_id
    }

    pub(super) fn finish(mut self) -> AstIds {
        self.expressions_map.shrink_to_fit();
        self.uses_map.shrink_to_fit();

        AstIds {
            expressions_map: self.expressions_map,
            uses_map: self.uses_map,
        }
    }
}

@@ -9,55 +9,62 @@ use ruff_python_ast as ast;
use ruff_python_ast::name::Name;
use ruff_python_ast::visitor::{walk_expr, walk_stmt, Visitor};

use crate::ast_node_ref::AstNodeRef;
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::ast_ids::AstIdsBuilder;
use crate::semantic_index::definition::{Definition, DefinitionNodeKey, DefinitionNodeRef};
use crate::semantic_index::definition::{
    AssignmentDefinitionNodeRef, Definition, DefinitionNodeKey, DefinitionNodeRef,
    ImportFromDefinitionNodeRef,
};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
    FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolFlags,
    SymbolTableBuilder,
};
use crate::semantic_index::use_def::{FlowSnapshot, UseDefMapBuilder};
use crate::semantic_index::SemanticIndex;
use crate::Db;

pub(super) struct SemanticIndexBuilder<'db, 'ast> {
pub(super) struct SemanticIndexBuilder<'db> {
    // Builder state
    db: &'db dyn Db,
    file: File,
    module: &'db ParsedModule,
    scope_stack: Vec<FileScopeId>,
    /// the target we're currently inferring
    current_target: Option<CurrentTarget<'ast>>,
    /// the assignment we're currently visiting
    current_assignment: Option<CurrentAssignment<'db>>,

    // Semantic Index fields
    scopes: IndexVec<FileScopeId, Scope>,
    scope_ids_by_scope: IndexVec<FileScopeId, ScopeId<'db>>,
    symbol_tables: IndexVec<FileScopeId, SymbolTableBuilder<'db>>,
    symbol_tables: IndexVec<FileScopeId, SymbolTableBuilder>,
    ast_ids: IndexVec<FileScopeId, AstIdsBuilder>,
    use_def_maps: IndexVec<FileScopeId, UseDefMapBuilder<'db>>,
    scopes_by_node: FxHashMap<NodeWithScopeKey, FileScopeId>,
    scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,
    definitions_by_node: FxHashMap<DefinitionNodeKey, Definition<'db>>,
    expressions_by_node: FxHashMap<ExpressionNodeKey, Expression<'db>>,
}

impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast>
where
    'db: 'ast,
{
impl<'db> SemanticIndexBuilder<'db> {
    pub(super) fn new(db: &'db dyn Db, file: File, parsed: &'db ParsedModule) -> Self {
        let mut builder = Self {
            db,
            file,
            module: parsed,
            scope_stack: Vec::new(),
            current_target: None,
            current_assignment: None,

            scopes: IndexVec::new(),
            symbol_tables: IndexVec::new(),
            ast_ids: IndexVec::new(),
            scope_ids_by_scope: IndexVec::new(),
            use_def_maps: IndexVec::new(),

            scopes_by_expression: FxHashMap::default(),
            scopes_by_node: FxHashMap::default(),
            definitions_by_node: FxHashMap::default(),
            expressions_by_node: FxHashMap::default(),
        };

        builder.push_scope_with_parent(NodeWithScopeRef::Module, None);
@@ -72,16 +79,12 @@ where
            .expect("Always to have a root scope")
    }

    fn push_scope(&mut self, node: NodeWithScopeRef<'ast>) {
    fn push_scope(&mut self, node: NodeWithScopeRef) {
        let parent = self.current_scope();
        self.push_scope_with_parent(node, Some(parent));
    }

    fn push_scope_with_parent(
        &mut self,
        node: NodeWithScopeRef<'ast>,
        parent: Option<FileScopeId>,
    ) {
    fn push_scope_with_parent(&mut self, node: NodeWithScopeRef, parent: Option<FileScopeId>) {
        let children_start = self.scopes.next_index() + 1;

        let scope = Scope {
@@ -92,6 +95,7 @@ where

        let file_scope_id = self.scopes.push(scope);
        self.symbol_tables.push(SymbolTableBuilder::new());
        self.use_def_maps.push(UseDefMapBuilder::new());
        let ast_id_scope = self.ast_ids.push(AstIdsBuilder::new());

        #[allow(unsafe_code)]
@@ -116,32 +120,54 @@ where
        id
    }

    fn current_symbol_table(&mut self) -> &mut SymbolTableBuilder<'db> {
    fn current_symbol_table(&mut self) -> &mut SymbolTableBuilder {
        let scope_id = self.current_scope();
        &mut self.symbol_tables[scope_id]
    }

    fn current_use_def_map(&mut self) -> &mut UseDefMapBuilder<'db> {
        let scope_id = self.current_scope();
        &mut self.use_def_maps[scope_id]
    }

    fn current_ast_ids(&mut self) -> &mut AstIdsBuilder {
        let scope_id = self.current_scope();
        &mut self.ast_ids[scope_id]
    }

    fn add_or_update_symbol(&mut self, name: Name, flags: SymbolFlags) -> ScopedSymbolId {
        let symbol_table = self.current_symbol_table();
        symbol_table.add_or_update_symbol(name, flags)
    fn flow_snapshot(&mut self) -> FlowSnapshot {
        self.current_use_def_map().snapshot()
    }

    fn add_definition(
    fn flow_restore(&mut self, state: FlowSnapshot) {
        self.current_use_def_map().restore(state);
    }

    fn flow_merge(&mut self, state: &FlowSnapshot) {
        self.current_use_def_map().merge(state);
    }
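
    // Branching constructs combine these three helpers: take a snapshot
    // before the branch, restore it at the entry of each alternative, and
    // merge the alternatives' exit states afterwards (see the handling of
    // `ast::Stmt::If` and `ast::Expr::If` below).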

    fn add_or_update_symbol(&mut self, name: Name, flags: SymbolFlags) -> ScopedSymbolId {
        let symbol_table = self.current_symbol_table();
        let (symbol_id, added) = symbol_table.add_or_update_symbol(name, flags);
        if added {
            let use_def_map = self.current_use_def_map();
            use_def_map.add_symbol(symbol_id);
        }
        symbol_id
    }

    fn add_definition<'a>(
        &mut self,
        definition_node: impl Into<DefinitionNodeRef<'ast>>,
        symbol_id: ScopedSymbolId,
        symbol: ScopedSymbolId,
        definition_node: impl Into<DefinitionNodeRef<'a>>,
    ) -> Definition<'db> {
        let definition_node = definition_node.into();
        let definition = Definition::new(
            self.db,
            self.file,
            self.current_scope(),
            symbol_id,
            symbol,
            #[allow(unsafe_code)]
            unsafe {
                definition_node.into_owned(self.module.clone())
@@ -150,26 +176,31 @@ where

        self.definitions_by_node
            .insert(definition_node.key(), definition);
        self.current_use_def_map()
            .record_definition(symbol, definition);

        definition
    }

    fn add_or_update_symbol_with_definition(
        &mut self,
        name: Name,
        definition: impl Into<DefinitionNodeRef<'ast>>,
    ) -> (ScopedSymbolId, Definition<'db>) {
        let symbol_table = self.current_symbol_table();

        let id = symbol_table.add_or_update_symbol(name, SymbolFlags::IS_DEFINED);
        let definition = self.add_definition(definition, id);
        self.current_symbol_table().add_definition(id, definition);
        (id, definition)
    /// Record an expression that needs to be a Salsa ingredient, because we need to infer its type
    /// standalone (type narrowing tests, the RHS of an assignment).
    fn add_standalone_expression(&mut self, expression_node: &ast::Expr) {
        let expression = Expression::new(
            self.db,
            self.file,
            self.current_scope(),
            #[allow(unsafe_code)]
            unsafe {
                AstNodeRef::new(self.module.clone(), expression_node)
            },
        );
        self.expressions_by_node
            .insert(expression_node.into(), expression);
    }
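    // (Currently called below for the right-hand side of assignment
    // statements; the doc comment above anticipates the same mechanism for
    // type-narrowing constraint expressions.)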

    fn with_type_params(
        &mut self,
        with_params: &WithTypeParams<'ast>,
        with_params: &WithTypeParams,
        nested: impl FnOnce(&mut Self) -> FileScopeId,
    ) -> FileScopeId {
        let type_params = with_params.type_parameters();
@@ -213,7 +244,7 @@ where
        self.pop_scope();
        assert!(self.scope_stack.is_empty());

        assert!(self.current_target.is_none());
        assert!(self.current_assignment.is_none());

        let mut symbol_tables: IndexVec<_, _> = self
            .symbol_tables
@@ -221,6 +252,12 @@ where
            .map(|builder| Arc::new(builder.finish()))
            .collect();

        let mut use_def_maps: IndexVec<_, _> = self
            .use_def_maps
            .into_iter()
            .map(|builder| Arc::new(builder.finish()))
            .collect();

        let mut ast_ids: IndexVec<_, _> = self
            .ast_ids
            .into_iter()
@@ -228,8 +265,9 @@ where
            .collect();

        self.scopes.shrink_to_fit();
        ast_ids.shrink_to_fit();
        symbol_tables.shrink_to_fit();
        use_def_maps.shrink_to_fit();
        ast_ids.shrink_to_fit();
        self.scopes_by_expression.shrink_to_fit();
        self.definitions_by_node.shrink_to_fit();

@@ -240,17 +278,19 @@ where
            symbol_tables,
            scopes: self.scopes,
            definitions_by_node: self.definitions_by_node,
            expressions_by_node: self.expressions_by_node,
            scope_ids_by_scope: self.scope_ids_by_scope,
            ast_ids,
            scopes_by_expression: self.scopes_by_expression,
            scopes_by_node: self.scopes_by_node,
            use_def_maps,
        }
    }
}

impl<'db, 'ast> Visitor<'ast> for SemanticIndexBuilder<'db, 'ast>
impl<'db, 'ast> Visitor<'ast> for SemanticIndexBuilder<'db>
where
    'db: 'ast,
    'ast: 'db,
{
    fn visit_stmt(&mut self, stmt: &'ast ast::Stmt) {
        match stmt {
@@ -259,10 +299,9 @@ where
                    self.visit_decorator(decorator);
                }

                self.add_or_update_symbol_with_definition(
                    function_def.name.id.clone(),
                    function_def,
                );
                let symbol = self
                    .add_or_update_symbol(function_def.name.id.clone(), SymbolFlags::IS_DEFINED);
                self.add_definition(symbol, function_def);

                self.with_type_params(
                    &WithTypeParams::FunctionDef { node: function_def },
@@ -283,7 +322,9 @@ where
                    self.visit_decorator(decorator);
                }

                self.add_or_update_symbol_with_definition(class.name.id.clone(), class);
                let symbol =
                    self.add_or_update_symbol(class.name.id.clone(), SymbolFlags::IS_DEFINED);
                self.add_definition(symbol, class);

                self.with_type_params(&WithTypeParams::ClassDef { node: class }, |builder| {
                    if let Some(arguments) = &class.arguments {
@@ -296,41 +337,84 @@ where
                    builder.pop_scope()
                });
            }
            ast::Stmt::Import(ast::StmtImport { names, .. }) => {
                for alias in names {
            ast::Stmt::Import(node) => {
                for alias in &node.names {
                    let symbol_name = if let Some(asname) = &alias.asname {
                        asname.id.clone()
                    } else {
                        Name::new(alias.name.id.split('.').next().unwrap())
                    };

                    self.add_or_update_symbol_with_definition(symbol_name, alias);
                    let symbol = self.add_or_update_symbol(symbol_name, SymbolFlags::IS_DEFINED);
                    self.add_definition(symbol, alias);
                }
            }
            ast::Stmt::ImportFrom(ast::StmtImportFrom {
                module: _,
                names,
                level: _,
                ..
            }) => {
                for alias in names {
            ast::Stmt::ImportFrom(node) => {
                for (alias_index, alias) in node.names.iter().enumerate() {
                    let symbol_name = if let Some(asname) = &alias.asname {
                        &asname.id
                    } else {
                        &alias.name.id
                    };

                    self.add_or_update_symbol_with_definition(symbol_name.clone(), alias);
                    let symbol =
                        self.add_or_update_symbol(symbol_name.clone(), SymbolFlags::IS_DEFINED);
                    self.add_definition(symbol, ImportFromDefinitionNodeRef { node, alias_index });
                }
            }
            ast::Stmt::Assign(node) => {
                debug_assert!(self.current_target.is_none());
                debug_assert!(self.current_assignment.is_none());
                self.visit_expr(&node.value);
                self.add_standalone_expression(&node.value);
                self.current_assignment = Some(node.into());
                for target in &node.targets {
                    self.current_target = Some(CurrentTarget::Expr(target));
                    self.visit_expr(target);
                }
                self.current_target = None;
                self.current_assignment = None;
            }
            ast::Stmt::AnnAssign(node) => {
                debug_assert!(self.current_assignment.is_none());
                // TODO deferred annotation visiting
                self.visit_expr(&node.annotation);
                match &node.value {
                    Some(value) => {
                        self.visit_expr(value);
                        self.current_assignment = Some(node.into());
                        self.visit_expr(&node.target);
                        self.current_assignment = None;
                    }
                    None => {
                        // TODO annotation-only assignments
                        self.visit_expr(&node.target);
                    }
                }
            }
            ast::Stmt::If(node) => {
                self.visit_expr(&node.test);
                let pre_if = self.flow_snapshot();
                self.visit_body(&node.body);
                let mut post_clauses: Vec<FlowSnapshot> = vec![];
                for clause in &node.elif_else_clauses {
                    // snapshot after every block except the last; the last one will just become
                    // the state that we merge the other snapshots into
                    post_clauses.push(self.flow_snapshot());
                    // we can only take an elif/else branch if none of the previous ones were
                    // taken, so the block entry state is always `pre_if`
                    self.flow_restore(pre_if.clone());
                    self.visit_elif_else_clause(clause);
                }
                for post_clause_state in post_clauses {
                    self.flow_merge(&post_clause_state);
                }
                let has_else = node
                    .elif_else_clauses
                    .last()
                    .is_some_and(|clause| clause.test.is_none());
                if !has_else {
                    // if there's no else clause, then it's possible we took none of the branches,
                    // and the pre_if state can reach here
                    self.flow_merge(&pre_if);
                }
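                // Net effect, e.g. for `if c: x = 1` with no `else`: the
                // state after the statement is the merge of the post-body
                // state (`x = 1` visible) and `pre_if` (`x` possibly unbound
                // or carrying earlier definitions).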
            }
            _ => {
                walk_stmt(self, stmt);
@@ -344,57 +428,64 @@ where
        self.current_ast_ids().record_expression(expr);

        match expr {
            ast::Expr::Name(ast::ExprName { id, ctx, .. }) => {
            ast::Expr::Name(name_node) => {
                let ast::ExprName { id, ctx, .. } = name_node;
                let flags = match ctx {
                    ast::ExprContext::Load => SymbolFlags::IS_USED,
                    ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
                    ast::ExprContext::Del => SymbolFlags::IS_DEFINED,
                    ast::ExprContext::Invalid => SymbolFlags::empty(),
                };
                match self.current_target {
                    Some(target) if flags.contains(SymbolFlags::IS_DEFINED) => {
                        self.add_or_update_symbol_with_definition(id.clone(), target);
                    }
                    _ => {
                        self.add_or_update_symbol(id.clone(), flags);
                let symbol = self.add_or_update_symbol(id.clone(), flags);
                if flags.contains(SymbolFlags::IS_DEFINED) {
                    match self.current_assignment {
                        Some(CurrentAssignment::Assign(assignment)) => {
                            self.add_definition(
                                symbol,
                                AssignmentDefinitionNodeRef {
                                    assignment,
                                    target: name_node,
                                },
                            );
                        }
                        Some(CurrentAssignment::AnnAssign(ann_assign)) => {
                            self.add_definition(symbol, ann_assign);
                        }
                        Some(CurrentAssignment::Named(named)) => {
                            self.add_definition(symbol, named);
                        }
                        None => {}
                    }
                }

                if flags.contains(SymbolFlags::IS_USED) {
                    let use_id = self.current_ast_ids().record_use(expr);
                    self.current_use_def_map().record_use(symbol, use_id);
                }
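                // This pairing is what the use-def map is keyed on: every
                // load of a name gets a dense use id, and the map records
                // which definitions of `symbol` are visible at that use.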

                walk_expr(self, expr);
            }
            ast::Expr::Named(node) => {
                debug_assert!(self.current_target.is_none());
                self.current_target = Some(CurrentTarget::ExprNamed(node));
                debug_assert!(self.current_assignment.is_none());
                self.current_assignment = Some(node.into());
                // TODO walrus in comprehensions is implicitly nonlocal
                self.visit_expr(&node.target);
                self.current_target = None;
                self.current_assignment = None;
                self.visit_expr(&node.value);
            }
            ast::Expr::If(ast::ExprIf {
                body, test, orelse, ..
            }) => {
                // TODO detect statically known truthy or falsy test (via type inference, not naive
                // AST inspection, so we can't simplify here, need to record test expression in CFG
                // for later checking)

                // AST inspection, so we can't simplify here, need to record test expression for
                // later checking)
                self.visit_expr(test);

                // let if_branch = self.flow_graph_builder.add_branch(self.current_flow_node());

                // self.set_current_flow_node(if_branch);
                // self.insert_constraint(test);
                let pre_if = self.flow_snapshot();
                self.visit_expr(body);

                // let post_body = self.current_flow_node();

                // self.set_current_flow_node(if_branch);
                let post_body = self.flow_snapshot();
                self.flow_restore(pre_if);
                self.visit_expr(orelse);

                // let post_else = self
                //     .flow_graph_builder
                //     .add_phi(self.current_flow_node(), post_body);

                // self.set_current_flow_node(post_else);
                self.flow_merge(&post_body);
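                // As with `if` statements, the result is the merge of the
                // post-body and post-`orelse` states; a ternary always
                // evaluates exactly one of its two branches.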
            }
            _ => {
                walk_expr(self, expr);
@@ -418,16 +509,26 @@ impl<'node> WithTypeParams<'node> {
}

#[derive(Copy, Clone, Debug)]
enum CurrentTarget<'a> {
    Expr(&'a ast::Expr),
    ExprNamed(&'a ast::ExprNamed),
enum CurrentAssignment<'a> {
    Assign(&'a ast::StmtAssign),
    AnnAssign(&'a ast::StmtAnnAssign),
    Named(&'a ast::ExprNamed),
}

impl<'a> From<CurrentTarget<'a>> for DefinitionNodeRef<'a> {
    fn from(val: CurrentTarget<'a>) -> Self {
        match val {
            CurrentTarget::Expr(expression) => DefinitionNodeRef::Target(expression),
            CurrentTarget::ExprNamed(named) => DefinitionNodeRef::NamedExpression(named),
        }
impl<'a> From<&'a ast::StmtAssign> for CurrentAssignment<'a> {
    fn from(value: &'a ast::StmtAssign) -> Self {
        Self::Assign(value)
    }
}

impl<'a> From<&'a ast::StmtAnnAssign> for CurrentAssignment<'a> {
    fn from(value: &'a ast::StmtAnnAssign) -> Self {
        Self::AnnAssign(value)
    }
}

impl<'a> From<&'a ast::ExprNamed> for CurrentAssignment<'a> {
    fn from(value: &'a ast::ExprNamed) -> Self {
        Self::Named(value)
    }
}

@@ -4,63 +4,111 @@ use ruff_python_ast as ast;

use crate::ast_node_ref::AstNodeRef;
use crate::node_key::NodeKey;
use crate::semantic_index::symbol::{FileScopeId, ScopedSymbolId};
use crate::semantic_index::symbol::{FileScopeId, ScopeId, ScopedSymbolId};
use crate::Db;

#[salsa::tracked]
pub struct Definition<'db> {
    /// The file in which the definition is defined.
    /// The file in which the definition occurs.
    #[id]
    pub(super) file: File,
    pub(crate) file: File,

    /// The scope in which the definition is defined.
    /// The scope in which the definition occurs.
    #[id]
    pub(crate) scope: FileScopeId,
    pub(crate) file_scope: FileScopeId,

    /// The id of the corresponding symbol. Mainly used as ID.
    /// The symbol defined.
    #[id]
    symbol_id: ScopedSymbolId,
    pub(crate) symbol: ScopedSymbolId,

    #[no_eq]
    #[return_ref]
    pub(crate) node: DefinitionKind,
}

impl<'db> Definition<'db> {
    pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
        self.file_scope(db).to_scope_id(db, self.file(db))
    }
}

#[derive(Copy, Clone, Debug)]
pub(crate) enum DefinitionNodeRef<'a> {
    Alias(&'a ast::Alias),
    Import(&'a ast::Alias),
    ImportFrom(ImportFromDefinitionNodeRef<'a>),
    Function(&'a ast::StmtFunctionDef),
    Class(&'a ast::StmtClassDef),
    NamedExpression(&'a ast::ExprNamed),
    Target(&'a ast::Expr),
    Assignment(AssignmentDefinitionNodeRef<'a>),
    AnnotatedAssignment(&'a ast::StmtAnnAssign),
}

impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::Alias) -> Self {
        Self::Alias(node)
    }
}
impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::StmtFunctionDef) -> Self {
        Self::Function(node)
    }
}

impl<'a> From<&'a ast::StmtClassDef> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::StmtClassDef) -> Self {
        Self::Class(node)
    }
}

impl<'a> From<&'a ast::ExprNamed> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::ExprNamed) -> Self {
        Self::NamedExpression(node)
    }
}

impl<'a> From<&'a ast::StmtAnnAssign> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::StmtAnnAssign) -> Self {
        Self::AnnotatedAssignment(node)
    }
}

impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
    fn from(node_ref: &'a ast::Alias) -> Self {
        Self::Import(node_ref)
    }
}

impl<'a> From<ImportFromDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node_ref: ImportFromDefinitionNodeRef<'a>) -> Self {
        Self::ImportFrom(node_ref)
    }
}

impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node_ref: AssignmentDefinitionNodeRef<'a>) -> Self {
        Self::Assignment(node_ref)
    }
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
    pub(crate) node: &'a ast::StmtImportFrom,
    pub(crate) alias_index: usize,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct AssignmentDefinitionNodeRef<'a> {
    pub(crate) assignment: &'a ast::StmtAssign,
    pub(crate) target: &'a ast::ExprName,
}

impl DefinitionNodeRef<'_> {
    #[allow(unsafe_code)]
    pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind {
        match self {
            DefinitionNodeRef::Alias(alias) => {
                DefinitionKind::Alias(AstNodeRef::new(parsed, alias))
            DefinitionNodeRef::Import(alias) => {
                DefinitionKind::Import(AstNodeRef::new(parsed, alias))
            }
            DefinitionNodeRef::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index }) => {
                DefinitionKind::ImportFrom(ImportFromDefinitionKind {
                    node: AstNodeRef::new(parsed, node),
                    alias_index,
                })
            }
            DefinitionNodeRef::Function(function) => {
                DefinitionKind::Function(AstNodeRef::new(parsed, function))
@@ -71,33 +119,111 @@ impl DefinitionNodeRef<'_> {
            DefinitionNodeRef::NamedExpression(named) => {
                DefinitionKind::NamedExpression(AstNodeRef::new(parsed, named))
            }
            DefinitionNodeRef::Target(target) => {
                DefinitionKind::Target(AstNodeRef::new(parsed, target))
            DefinitionNodeRef::Assignment(AssignmentDefinitionNodeRef { assignment, target }) => {
                DefinitionKind::Assignment(AssignmentDefinitionKind {
                    assignment: AstNodeRef::new(parsed.clone(), assignment),
                    target: AstNodeRef::new(parsed, target),
                })
            }
            DefinitionNodeRef::AnnotatedAssignment(assign) => {
                DefinitionKind::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
            }
        }
    }
}

impl DefinitionNodeRef<'_> {
    pub(super) fn key(self) -> DefinitionNodeKey {
        match self {
            Self::Alias(node) => DefinitionNodeKey(NodeKey::from_node(node)),
            Self::Function(node) => DefinitionNodeKey(NodeKey::from_node(node)),
            Self::Class(node) => DefinitionNodeKey(NodeKey::from_node(node)),
            Self::NamedExpression(node) => DefinitionNodeKey(NodeKey::from_node(node)),
            Self::Target(node) => DefinitionNodeKey(NodeKey::from_node(node)),
            Self::Import(node) => node.into(),
            Self::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index }) => {
                (&node.names[alias_index]).into()
            }
            Self::Function(node) => node.into(),
            Self::Class(node) => node.into(),
            Self::NamedExpression(node) => node.into(),
            Self::Assignment(AssignmentDefinitionNodeRef {
                assignment: _,
                target,
            }) => target.into(),
            Self::AnnotatedAssignment(node) => node.into(),
        }
    }
}

#[derive(Clone, Debug)]
pub enum DefinitionKind {
    Alias(AstNodeRef<ast::Alias>),
    Import(AstNodeRef<ast::Alias>),
    ImportFrom(ImportFromDefinitionKind),
    Function(AstNodeRef<ast::StmtFunctionDef>),
    Class(AstNodeRef<ast::StmtClassDef>),
    NamedExpression(AstNodeRef<ast::ExprNamed>),
    Target(AstNodeRef<ast::Expr>),
    Assignment(AssignmentDefinitionKind),
    AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
}

#[derive(Clone, Debug)]
pub struct ImportFromDefinitionKind {
    node: AstNodeRef<ast::StmtImportFrom>,
    alias_index: usize,
}

impl ImportFromDefinitionKind {
    pub(crate) fn import(&self) -> &ast::StmtImportFrom {
        self.node.node()
    }

    pub(crate) fn alias(&self) -> &ast::Alias {
        &self.node.node().names[self.alias_index]
    }
}

#[derive(Clone, Debug)]
#[allow(dead_code)]
pub struct AssignmentDefinitionKind {
    assignment: AstNodeRef<ast::StmtAssign>,
    target: AstNodeRef<ast::ExprName>,
}

impl AssignmentDefinitionKind {
    pub(crate) fn assignment(&self) -> &ast::StmtAssign {
        self.assignment.node()
    }
}

#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub(super) struct DefinitionNodeKey(NodeKey);
pub(crate) struct DefinitionNodeKey(NodeKey);

impl From<&ast::Alias> for DefinitionNodeKey {
    fn from(node: &ast::Alias) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::StmtFunctionDef> for DefinitionNodeKey {
    fn from(node: &ast::StmtFunctionDef) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::StmtClassDef> for DefinitionNodeKey {
    fn from(node: &ast::StmtClassDef) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::ExprName> for DefinitionNodeKey {
    fn from(node: &ast::ExprName) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::ExprNamed> for DefinitionNodeKey {
    fn from(node: &ast::ExprNamed) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::StmtAnnAssign> for DefinitionNodeKey {
    fn from(node: &ast::StmtAnnAssign) -> Self {
        Self(NodeKey::from_node(node))
    }
}

@@ -0,0 +1,31 @@
use crate::ast_node_ref::AstNodeRef;
use crate::db::Db;
use crate::semantic_index::symbol::{FileScopeId, ScopeId};
use ruff_db::files::File;
use ruff_python_ast as ast;
use salsa;

/// An independently type-inferable expression.
///
/// Includes constraint expressions (e.g. if tests) and the RHS of an unpacking assignment.
#[salsa::tracked]
pub(crate) struct Expression<'db> {
    /// The file in which the expression occurs.
    #[id]
    pub(crate) file: File,

    /// The scope in which the expression occurs.
    #[id]
    pub(crate) file_scope: FileScopeId,

    /// The expression node.
    #[no_eq]
    #[return_ref]
    pub(crate) node: AstNodeRef<ast::Expr>,
}

impl<'db> Expression<'db> {
    pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
        self.file_scope(db).to_scope_id(db, self.file(db))
    }
}
@@ -12,33 +12,23 @@ use rustc_hash::FxHasher;

use crate::ast_node_ref::AstNodeRef;
use crate::node_key::NodeKey;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::{root_scope, semantic_index, symbol_table, SymbolMap};
use crate::semantic_index::{semantic_index, SymbolMap};
use crate::Db;

#[derive(Eq, PartialEq, Debug)]
pub struct Symbol<'db> {
pub struct Symbol {
    name: Name,
    flags: SymbolFlags,
    /// The nodes that define this symbol, in source order.
    ///
    /// TODO: Use smallvec here, but it creates the same lifetime issues as in [QualifiedName](https://github.com/astral-sh/ruff/blob/5109b50bb3847738eeb209352cf26bda392adf62/crates/ruff_python_ast/src/name.rs#L562-L569)
    definitions: Vec<Definition<'db>>,
}

impl<'db> Symbol<'db> {
impl Symbol {
    fn new(name: Name) -> Self {
        Self {
            name,
            flags: SymbolFlags::empty(),
            definitions: Vec::new(),
        }
    }

    fn push_definition(&mut self, definition: Definition<'db>) {
        self.definitions.push(definition);
    }

    fn insert_flags(&mut self, flags: SymbolFlags) {
        self.flags.insert(flags);
    }
@@ -57,10 +47,6 @@ impl<'db> Symbol<'db> {
    pub fn is_defined(&self) -> bool {
        self.flags.contains(SymbolFlags::IS_DEFINED)
    }

    pub fn definitions(&self) -> &[Definition] {
        &self.definitions
    }
}

bitflags! {
@@ -75,15 +61,6 @@ bitflags! {
    }
}

/// ID that uniquely identifies a public symbol defined in a module's root scope.
#[salsa::tracked]
pub struct PublicSymbolId<'db> {
    #[id]
    pub(crate) file: File,
    #[id]
    pub(crate) scoped_symbol_id: ScopedSymbolId,
}

/// ID that uniquely identifies a symbol in a file.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct FileSymbolId {
@@ -111,47 +88,6 @@ impl From<FileSymbolId> for ScopedSymbolId {
#[newtype_index]
pub struct ScopedSymbolId;

impl ScopedSymbolId {
    /// Converts the symbol to a public symbol.
    ///
    /// # Panics
    /// May panic if the symbol does not belong to `file` or is not a symbol of `file`'s root scope.
    pub(crate) fn to_public_symbol(self, db: &dyn Db, file: File) -> PublicSymbolId {
        let symbols = public_symbols_map(db, file);
        symbols.public(self)
    }
}

#[salsa::tracked(return_ref)]
pub(crate) fn public_symbols_map(db: &dyn Db, file: File) -> PublicSymbolsMap<'_> {
    let _span = tracing::trace_span!("public_symbols_map", ?file).entered();

    let module_scope = root_scope(db, file);
    let symbols = symbol_table(db, module_scope);

    let public_symbols: IndexVec<_, _> = symbols
        .symbol_ids()
        .map(|id| PublicSymbolId::new(db, file, id))
        .collect();

    PublicSymbolsMap {
        symbols: public_symbols,
    }
}

/// Maps [`LocalSymbolId`] of a file's root scope to the corresponding [`PublicSymbolId`] (Salsa ingredients).
#[derive(Eq, PartialEq, Debug)]
pub(crate) struct PublicSymbolsMap<'db> {
    symbols: IndexVec<ScopedSymbolId, PublicSymbolId<'db>>,
}

impl<'db> PublicSymbolsMap<'db> {
    /// Resolve the [`PublicSymbolId`] for the module-level `symbol_id`.
    fn public(&self, symbol_id: ScopedSymbolId) -> PublicSymbolId<'db> {
        self.symbols[symbol_id]
    }
}

/// A cross-module identifier of a scope that can be used as a salsa query parameter.
#[salsa::tracked]
pub struct ScopeId<'db> {
@@ -167,6 +103,17 @@ pub struct ScopeId<'db> {
}

impl<'db> ScopeId<'db> {
    pub(crate) fn is_function_like(self, db: &'db dyn Db) -> bool {
        // Type parameter scopes behave like function scopes in terms of name resolution; CPython
        // symbol table also uses the term "function-like" for these scopes.
        matches!(
            self.node(db),
            NodeWithScopeKind::ClassTypeParameters(_)
                | NodeWithScopeKind::FunctionTypeParameters(_)
                | NodeWithScopeKind::Function(_)
        )
    }

    #[cfg(test)]
    pub(crate) fn name(self, db: &'db dyn Db) -> &'db str {
        match self.node(db) {
@@ -185,8 +132,8 @@ impl<'db> ScopeId<'db> {
pub struct FileScopeId;

impl FileScopeId {
    /// Returns the scope id of the Root scope.
    pub fn root() -> Self {
    /// Returns the scope id of the module-global scope.
    pub fn global() -> Self {
        FileScopeId::from_u32(0)
    }

@@ -223,15 +170,15 @@ pub enum ScopeKind {

/// Symbol table for a specific [`Scope`].
#[derive(Debug)]
pub struct SymbolTable<'db> {
pub struct SymbolTable {
    /// The symbols in this scope.
    symbols: IndexVec<ScopedSymbolId, Symbol<'db>>,
    symbols: IndexVec<ScopedSymbolId, Symbol>,

    /// The symbols indexed by name.
    symbols_by_name: SymbolMap,
}

impl<'db> SymbolTable<'db> {
impl SymbolTable {
    fn new() -> Self {
        Self {
            symbols: IndexVec::new(),
@@ -243,21 +190,21 @@ impl<'db> SymbolTable<'db> {
        self.symbols.shrink_to_fit();
    }

    pub(crate) fn symbol(&self, symbol_id: impl Into<ScopedSymbolId>) -> &Symbol<'db> {
    pub(crate) fn symbol(&self, symbol_id: impl Into<ScopedSymbolId>) -> &Symbol {
        &self.symbols[symbol_id.into()]
    }

    pub(crate) fn symbol_ids(&self) -> impl Iterator<Item = ScopedSymbolId> + 'db {
    #[allow(unused)]
    pub(crate) fn symbol_ids(&self) -> impl Iterator<Item = ScopedSymbolId> {
        self.symbols.indices()
    }

    pub fn symbols(&self) -> impl Iterator<Item = &Symbol<'db>> {
    pub fn symbols(&self) -> impl Iterator<Item = &Symbol> {
        self.symbols.iter()
    }

    /// Returns the symbol named `name`.
    #[allow(unused)]
    pub(crate) fn symbol_by_name(&self, name: &str) -> Option<&Symbol<'db>> {
    pub(crate) fn symbol_by_name(&self, name: &str) -> Option<&Symbol> {
        let id = self.symbol_id_by_name(name)?;
        Some(self.symbol(id))
    }
@@ -281,21 +228,21 @@ impl<'db> SymbolTable<'db> {
    }
}

impl PartialEq for SymbolTable<'_> {
impl PartialEq for SymbolTable {
    fn eq(&self, other: &Self) -> bool {
        // We don't need to compare the symbols_by_name because the name is already captured in `Symbol`.
        self.symbols == other.symbols
    }
}

impl Eq for SymbolTable<'_> {}
impl Eq for SymbolTable {}

#[derive(Debug)]
pub(super) struct SymbolTableBuilder<'db> {
    table: SymbolTable<'db>,
pub(super) struct SymbolTableBuilder {
    table: SymbolTable,
}

impl<'db> SymbolTableBuilder<'db> {
impl SymbolTableBuilder {
    pub(super) fn new() -> Self {
        Self {
            table: SymbolTable::new(),
@@ -306,7 +253,7 @@ impl<'db> SymbolTableBuilder<'db> {
        &mut self,
        name: Name,
        flags: SymbolFlags,
    ) -> ScopedSymbolId {
    ) -> (ScopedSymbolId, bool) {
        let hash = SymbolTable::hash_name(&name);
        let entry = self
            .table
@@ -319,7 +266,7 @@ impl<'db> SymbolTableBuilder<'db> {
                let symbol = &mut self.table.symbols[*entry.key()];
                symbol.insert_flags(flags);

                *entry.key()
                (*entry.key(), false)
            }
            RawEntryMut::Vacant(entry) => {
                let mut symbol = Symbol::new(name);
@@ -329,16 +276,12 @@ impl<'db> SymbolTableBuilder<'db> {
                entry.insert_with_hasher(hash, id, (), |id| {
                    SymbolTable::hash_name(self.table.symbols[*id].name().as_str())
                });
                id
                (id, true)
            }
        }
    }
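
    // The boolean reports whether the symbol was newly inserted; the semantic
    // index builder uses it to register fresh symbols with the use-def map.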

    pub(super) fn add_definition(&mut self, symbol: ScopedSymbolId, definition: Definition<'db>) {
        self.table.symbols[symbol].push_definition(definition);
    }

    pub(super) fn finish(mut self) -> SymbolTable<'db> {
    pub(super) fn finish(mut self) -> SymbolTable {
        self.table.shrink_to_fit();
        self.table
    }

353 crates/red_knot_python_semantic/src/semantic_index/use_def.rs Normal file
@@ -0,0 +1,353 @@
//! Build a map from each use of a symbol to the definitions visible from that use.
//!
//! Let's take this code sample:
//!
//! ```python
//! x = 1
//! x = 2
//! y = x
//! if flag:
//!     x = 3
//! else:
//!     x = 4
//! z = x
//! ```
//!
//! In this snippet, we have four definitions of `x` (the statements assigning `1`, `2`, `3`,
//! and `4` to it), and two uses of `x` (the `y = x` and `z = x` assignments). The first
//! [`Definition`] of `x` is never visible to any use, because it's immediately replaced by the
//! second definition, before any use happens. (A linter could thus flag the statement `x = 1`
//! as likely superfluous.)
//!
//! The first use of `x` has one definition visible to it: the assignment `x = 2`.
//!
//! Things get a bit more complex when we have branches. We will definitely take either the `if` or
//! the `else` branch. Thus, the second use of `x` has two definitions visible to it: `x = 3` and
//! `x = 4`. The `x = 2` definition is no longer visible, because it must be replaced by either `x
//! = 3` or `x = 4`, no matter which branch was taken. We don't know which branch was taken, so we
//! must consider both definitions as visible, which means eventually we would (in type inference)
//! look at these two definitions and infer a type of `Literal[3, 4]` -- the union of `Literal[3]`
//! and `Literal[4]` -- for the second use of `x`.
//!
//! So that's one question our use-def map needs to answer: given a specific use of a symbol, which
//! definition(s) is/are visible from that use. In
//! [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number all uses (that means a `Name` node
//! with `Load` context) so we have a `ScopedUseId` to efficiently represent each use.
//!
//! The other case we need to handle is when a symbol is referenced from a different scope (the
//! most obvious example of this is an import). We call this "public" use of a symbol. So the other
//! question we need to be able to answer is, what are the publicly-visible definitions of each
//! symbol?
//!
//! Technically, public use of a symbol could also occur from any point in control flow of the
//! scope where the symbol is defined (via inline imports and import cycles, in the case of an
//! import, or via a function call partway through the local scope that ends up using a symbol from
//! the scope via a global or nonlocal reference.) But modeling this fully accurately requires
//! whole-program analysis that isn't tractable for an efficient incremental compiler, since it
//! means a given symbol could have a different type every place it's referenced throughout the
//! program, depending on the shape of arbitrarily-sized call/import graphs. So we follow other
//! Python type-checkers in making the simplifying assumption that usually the scope will finish
//! execution before its symbols are made visible to other scopes; for instance, most imports will
//! import from a complete module, not a partially-executed module. (We may want to get a little
//! smarter than this in the future, in particular for closures, but for now this is where we
//! start.)
//!
//! So this means that the publicly-visible definitions of a symbol are the definitions still
//! visible at the end of the scope.
|
||||
//!
|
||||
//! The data structure we build to answer these two questions is the `UseDefMap`. It has a
|
||||
//! `definitions_by_use` vector indexed by [`ScopedUseId`] and a `public_definitions` vector
|
||||
//! indexed by [`ScopedSymbolId`]. The values in each of these vectors are (in principle) a list of
|
||||
//! visible definitions at that use, or at the end of the scope for that symbol.
|
||||
//!
|
||||
//! In order to avoid vectors-of-vectors and all the allocations that would entail, we don't
|
||||
//! actually store these "list of visible definitions" as a vector of [`Definition`] IDs. Instead,
|
||||
//! the values in `definitions_by_use` and `public_definitions` are a [`Definitions`] struct that
|
||||
//! keeps a [`Range`] into a third vector of [`Definition`] IDs, `all_definitions`. The trick with
|
||||
//! this representation is that it requires that the definitions visible at any given use of a
|
||||
//! symbol are stored sequentially in `all_definitions`.
|
||||
//!
|
||||
//! There is another special kind of possible "definition" for a symbol: it might be unbound in the
|
||||
//! scope. (This isn't equivalent to "zero visible definitions", since we may go through an `if`
|
||||
//! that has a definition for the symbol, leaving us with one visible definition, but still also
|
||||
//! the "unbound" possibility, since we might not have taken the `if` branch.)
|
||||
//!
|
||||
//! The simplest way to model "unbound" would be as an actual [`Definition`] itself: the initial
|
||||
//! visible [`Definition`] for each symbol in a scope. But actually modeling it this way would
|
||||
//! dramatically increase the number of [`Definition`] that Salsa must track. Since "unbound" is a
|
||||
//! special definition in that all symbols share it, and it doesn't have any additional per-symbol
|
||||
//! state, we can represent it more efficiently: we use the `may_be_unbound` boolean on the
|
||||
//! [`Definitions`] struct. If this flag is `true`, it means the symbol/use really has one
|
||||
//! additional visible "definition", which is the unbound state. If this flag is `false`, it means
|
||||
//! we've eliminated the possibility of unbound: every path we've followed includes a definition
|
||||
//! for this symbol.
|
||||
//!
|
||||
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use and definition
|
||||
//! as they are encountered by the
|
||||
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder) AST visit. For
|
||||
//! each symbol, the builder tracks the currently-visible definitions for that symbol. When we hit
|
||||
//! a use of a symbol, it records the currently-visible definitions for that symbol as the visible
|
||||
//! definitions for that use. When we reach the end of the scope, it records the currently-visible
|
||||
//! definitions for each symbol as the public definitions of that symbol.
|
||||
//!
|
||||
//! Let's walk through the above example. Initially we record for `x` that it has no visible
|
||||
//! definitions, and may be unbound. When we see `x = 1`, we record that as the sole visible
|
||||
//! definition of `x`, and flip `may_be_unbound` to `false`. Then we see `x = 2`, and it replaces
|
||||
//! `x = 1` as the sole visible definition of `x`. When we get to `y = x`, we record that the
|
||||
//! visible definitions for that use of `x` are just the `x = 2` definition.
|
||||
//!
|
||||
//! Then we hit the `if` branch. We visit the `test` node (`flag` in this case), since that will
|
||||
//! happen regardless. Then we take a pre-branch snapshot of the currently visible definitions for
|
||||
//! all symbols, which we'll need later. Then we go ahead and visit the `if` body. When we see `x =
|
||||
//! 3`, it replaces `x = 2` as the sole visible definition of `x`. At the end of the `if` body, we
|
||||
//! take another snapshot of the currently-visible definitions; we'll call this the post-if-body
|
||||
//! snapshot.
|
||||
//!
|
||||
//! Now we need to visit the `else` clause. The conditions when entering the `else` clause should
|
||||
//! be the pre-if conditions; if we are entering the `else` clause, we know that the `if` test
|
||||
//! failed and we didn't execute the `if` body. So we first reset the builder to the pre-if state,
|
||||
//! using the snapshot we took previously (meaning we now have `x = 2` as the sole visible
|
||||
//! definition for `x` again), then visit the `else` clause, where `x = 4` replaces `x = 2` as the
|
||||
//! sole visible definition of `x`.
|
||||
//!
|
||||
//! Now we reach the end of the if/else, and want to visit the following code. The state here needs
|
||||
//! to reflect that we might have gone through the `if` branch, or we might have gone through the
|
||||
//! `else` branch, and we don't know which. So we need to "merge" our current builder state
|
||||
//! (reflecting the end-of-else state, with `x = 4` as the only visible definition) with our
|
||||
//! post-if-body snapshot (which has `x = 3` as the only visible definition). The result of this
|
||||
//! merge is that we now have two visible definitions of `x`: `x = 3` and `x = 4`.
|
||||
//!
|
||||
//! The [`UseDefMapBuilder`] itself just exposes methods for taking a snapshot, resetting to a
|
||||
//! snapshot, and merging a snapshot into the current state. The logic using these methods lives in
|
||||
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder), e.g. where it
|
||||
//! visits a `StmtIf` node.
|
||||
//!
|
||||
//! (In the future we may have some other questions we want to answer as well, such as "is this
|
||||
//! definition used?", which will require tracking a bit more info in our map, e.g. a "used" bit
|
||||
//! for each [`Definition`] which is flipped to true when we record that definition for a use.)
|
||||
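The snapshot/restore/merge protocol described above is easiest to see as the shape of an `if`/`else` visit. A minimal sketch of a hypothetical driver follows (the real logic lives in `SemanticIndexBuilder`; the elided visit steps stand in for the AST traversal):

// Hypothetical sketch, not the actual SemanticIndexBuilder code: how an
// `if`/`else` statement might drive the builder, assuming definitions and
// uses are recorded while visiting each body.
fn visit_if_else(builder: &mut UseDefMapBuilder<'_>) {
    // The `test` expression runs on every path, so it is visited first.
    let pre_if = builder.snapshot(); // state before either branch
    // ... visit the `if` body (e.g. records `x = 3`) ...
    let post_if_body = builder.snapshot(); // state after the `if` body
    // The `else` branch starts from the pre-if state, not the post-if state.
    builder.restore(pre_if);
    // ... visit the `else` body (e.g. records `x = 4`) ...
    // Either branch may have executed, so both sets of definitions survive.
    builder.merge(&post_if_body);
}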
use crate::semantic_index::ast_ids::ScopedUseId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::ScopedSymbolId;
use ruff_index::IndexVec;
use std::ops::Range;

/// All definitions that can reach a given use of a name.
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct UseDefMap<'db> {
    // TODO store constraints with definitions for type narrowing
    /// Definition IDs array for `definitions_by_use` and `public_definitions` to slice into.
    all_definitions: Vec<Definition<'db>>,

    /// Definitions that can reach a [`ScopedUseId`].
    definitions_by_use: IndexVec<ScopedUseId, Definitions>,

    /// Definitions of each symbol visible at end of scope.
    public_definitions: IndexVec<ScopedSymbolId, Definitions>,
}

impl<'db> UseDefMap<'db> {
    pub(crate) fn use_definitions(&self, use_id: ScopedUseId) -> &[Definition<'db>] {
        &self.all_definitions[self.definitions_by_use[use_id].definitions_range.clone()]
    }

    pub(crate) fn use_may_be_unbound(&self, use_id: ScopedUseId) -> bool {
        self.definitions_by_use[use_id].may_be_unbound
    }

    pub(crate) fn public_definitions(&self, symbol: ScopedSymbolId) -> &[Definition<'db>] {
        &self.all_definitions[self.public_definitions[symbol].definitions_range.clone()]
    }

    pub(crate) fn public_may_be_unbound(&self, symbol: ScopedSymbolId) -> bool {
        self.public_definitions[symbol].may_be_unbound
    }
}

/// Definitions visible for a symbol at a particular use (or end-of-scope).
#[derive(Clone, Debug, PartialEq, Eq)]
struct Definitions {
    /// [`Range`] in `all_definitions` of the visible definition IDs.
    definitions_range: Range<usize>,
    /// Is the symbol possibly unbound at this point?
    may_be_unbound: bool,
}

impl Definitions {
    /// The default state of a symbol is "no definitions, may be unbound", aka definitely-unbound.
    fn unbound() -> Self {
        Self {
            definitions_range: Range::default(),
            may_be_unbound: true,
        }
    }
}

impl Default for Definitions {
    fn default() -> Self {
        Definitions::unbound()
    }
}

/// A snapshot of the visible definitions for each symbol at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {
    definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
}

pub(super) struct UseDefMapBuilder<'db> {
    /// Definition IDs array for `definitions_by_use` and `definitions_by_symbol` to slice into.
    all_definitions: Vec<Definition<'db>>,

    /// Visible definitions at each so-far-recorded use.
    definitions_by_use: IndexVec<ScopedUseId, Definitions>,

    /// Currently visible definitions for each symbol.
    definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
}

impl<'db> UseDefMapBuilder<'db> {
    pub(super) fn new() -> Self {
        Self {
            all_definitions: Vec::new(),
            definitions_by_use: IndexVec::new(),
            definitions_by_symbol: IndexVec::new(),
        }
    }

    pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
        let new_symbol = self.definitions_by_symbol.push(Definitions::unbound());
        debug_assert_eq!(symbol, new_symbol);
    }

    pub(super) fn record_definition(
        &mut self,
        symbol: ScopedSymbolId,
        definition: Definition<'db>,
    ) {
        // We have a new definition of a symbol; this replaces any previous definitions in this
        // path.
        let def_idx = self.all_definitions.len();
        self.all_definitions.push(definition);
        self.definitions_by_symbol[symbol] = Definitions {
            #[allow(clippy::range_plus_one)]
            definitions_range: def_idx..(def_idx + 1),
            may_be_unbound: false,
        };
    }

    pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {
        // We have a use of a symbol; clone the currently visible definitions for that symbol, and
        // record them as the visible definitions for this use.
        let new_use = self
            .definitions_by_use
            .push(self.definitions_by_symbol[symbol].clone());
        debug_assert_eq!(use_id, new_use);
    }

    /// Take a snapshot of the current visible-symbols state.
    pub(super) fn snapshot(&self) -> FlowSnapshot {
        FlowSnapshot {
            definitions_by_symbol: self.definitions_by_symbol.clone(),
        }
    }

    /// Restore the current builder visible-definitions state to the given snapshot.
    pub(super) fn restore(&mut self, snapshot: FlowSnapshot) {
        // We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
        // IDs must line up), so the current number of known symbols must always be equal to or
        // greater than the number of known symbols in a previously-taken snapshot.
        let num_symbols = self.definitions_by_symbol.len();
        debug_assert!(num_symbols >= snapshot.definitions_by_symbol.len());

        // Restore the current visible-definitions state to the given snapshot.
        self.definitions_by_symbol = snapshot.definitions_by_symbol;

        // If the snapshot we are restoring is missing some symbols we've recorded since, we need
        // to fill them in so the symbol IDs continue to line up. Since they don't exist in the
        // snapshot, the correct state to fill them in with is "unbound", the default.
        self.definitions_by_symbol
            .resize(num_symbols, Definitions::unbound());
    }

    /// Merge the given snapshot into the current state, reflecting that we might have taken either
    /// path to get here. The new visible-definitions state for each symbol should include
    /// definitions from both the prior state and the snapshot.
    pub(super) fn merge(&mut self, snapshot: &FlowSnapshot) {
        // The tricky thing about merging two Ranges pointing into `all_definitions` is that if the
        // two Ranges aren't already adjacent in `all_definitions`, we will have to copy at least
        // one or the other of the ranges to the end of `all_definitions` so as to make them
        // adjacent. We can't ever move things around in `all_definitions` because previously
        // recorded uses may still have ranges pointing to any part of it; all we can do is append.
        // It's possible we may end up with some old entries in `all_definitions` that nobody is
        // pointing to, but that's OK.

        // We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
        // IDs must line up), so the current number of known symbols must always be equal to or
        // greater than the number of known symbols in a previously-taken snapshot.
        debug_assert!(self.definitions_by_symbol.len() >= snapshot.definitions_by_symbol.len());

        for (symbol_id, current) in self.definitions_by_symbol.iter_mut_enumerated() {
            let Some(snapshot) = snapshot.definitions_by_symbol.get(symbol_id) else {
                // Symbol not present in snapshot, so it's unbound from that path.
                current.may_be_unbound = true;
                continue;
            };

            // If the symbol can be unbound in either predecessor, it can be unbound post-merge.
            current.may_be_unbound |= snapshot.may_be_unbound;

            // Merge the definition ranges.
            let current = &mut current.definitions_range;
            let snapshot = &snapshot.definitions_range;

            // We never create reversed ranges.
            debug_assert!(current.end >= current.start);
            debug_assert!(snapshot.end >= snapshot.start);

            if current == snapshot {
                // Ranges already identical, nothing to do.
            } else if snapshot.is_empty() {
                // Merging from an empty range; nothing to do.
            } else if (*current).is_empty() {
                // Merging to an empty range; just use the incoming range.
                *current = snapshot.clone();
            } else if snapshot.end >= current.start && snapshot.start <= current.end {
                // Ranges are adjacent or overlapping, merge them in-place.
                *current = current.start.min(snapshot.start)..current.end.max(snapshot.end);
            } else if current.end == self.all_definitions.len() {
                // Ranges are not adjacent or overlapping, `current` is at the end of
                // `all_definitions`, we need to copy `snapshot` to the end so they are adjacent
                // and can be merged into one range.
                self.all_definitions.extend_from_within(snapshot.clone());
                current.end = self.all_definitions.len();
            } else if snapshot.end == self.all_definitions.len() {
                // Ranges are not adjacent or overlapping, `snapshot` is at the end of
                // `all_definitions`, we need to copy `current` to the end so they are adjacent and
                // can be merged into one range.
                self.all_definitions.extend_from_within(current.clone());
                current.start = snapshot.start;
                current.end = self.all_definitions.len();
            } else {
                // Ranges are not adjacent and neither one is at the end of `all_definitions`, we
                // have to copy both to the end so they are adjacent and we can merge them.
                let start = self.all_definitions.len();
                self.all_definitions.extend_from_within(current.clone());
                self.all_definitions.extend_from_within(snapshot.clone());
                current.start = start;
                current.end = self.all_definitions.len();
            }
        }
    }

    pub(super) fn finish(mut self) -> UseDefMap<'db> {
        self.all_definitions.shrink_to_fit();
        self.definitions_by_symbol.shrink_to_fit();
        self.definitions_by_use.shrink_to_fit();

        UseDefMap {
            all_definitions: self.all_definitions,
            definitions_by_use: self.definitions_by_use,
            public_definitions: self.definitions_by_symbol,
        }
    }
}
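To make the range-merge cases in `merge` concrete, here is a small self-contained trace of the `snapshot.end == all_definitions.len()` arm, using a plain `Vec<u32>` to stand in for `Vec<Definition>` (hypothetical values, not from the diff):

// Trace of the append-only merge: `current` is copied to the end of the
// definitions vector so that it becomes adjacent to `snapshot`, and the
// merged range covers both. The stale copy at index 0 is never reclaimed;
// appending is the only mutation, so existing ranges stay valid.
fn merge_trace() {
    let mut all_definitions = vec![10, 11, 12, 13]; // d0, d1, d2, d3
    let mut current = 0..1; // {d0}
    let snapshot = 3..4; // {d3}; note snapshot.end == all_definitions.len()
    all_definitions.extend_from_within(current.clone()); // [10, 11, 12, 13, 10]
    current.start = snapshot.start;
    current.end = all_definitions.len(); // current = 3..5, covering {d3, d0}
    assert_eq!(&all_definitions[current.clone()], &[13, 10]);
}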
@@ -4,9 +4,8 @@ use ruff_python_ast as ast;
 use ruff_python_ast::{Expr, ExpressionRef, StmtClassDef};

 use crate::semantic_index::ast_ids::HasScopedAstId;
-use crate::semantic_index::symbol::PublicSymbolId;
-use crate::semantic_index::{public_symbol, semantic_index};
-use crate::types::{infer_types, public_symbol_ty, Type};
+use crate::semantic_index::semantic_index;
+use crate::types::{definition_ty, global_symbol_ty_by_name, infer_scope_types, Type};
 use crate::Db;

 pub struct SemanticModel<'db> {
@@ -29,12 +28,8 @@ impl<'db> SemanticModel<'db> {
         resolve_module(self.db.upcast(), module_name)
     }

-    pub fn public_symbol(&self, module: &Module, symbol_name: &str) -> Option<PublicSymbolId<'db>> {
-        public_symbol(self.db, module.file(), symbol_name)
-    }
-
-    pub fn public_symbol_ty(&self, symbol: PublicSymbolId<'db>) -> Type {
-        public_symbol_ty(self.db, symbol)
+    pub fn global_symbol_ty(&self, module: &Module, symbol_name: &str) -> Type<'db> {
+        global_symbol_ty_by_name(self.db, module.file(), symbol_name)
     }
 }

@@ -53,7 +48,7 @@ impl HasTy for ast::ExpressionRef<'_> {
         let scope = file_scope.to_scope_id(model.db, model.file);

         let expression_id = self.scoped_ast_id(model.db, scope);
-        infer_types(model.db, scope).expression_ty(expression_id)
+        infer_scope_types(model.db, scope).expression_ty(expression_id)
     }
 }

@@ -145,11 +140,7 @@ impl HasTy for ast::StmtFunctionDef {
     fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
         let index = semantic_index(model.db, model.file);
         let definition = index.definition(self);
-
-        let scope = definition.scope(model.db).to_scope_id(model.db, model.file);
-        let types = infer_types(model.db, scope);
-
-        types.definition_ty(definition)
+        definition_ty(model.db, definition)
     }
 }

@@ -157,11 +148,7 @@ impl HasTy for StmtClassDef {
     fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
         let index = semantic_index(model.db, model.file);
         let definition = index.definition(self);
-
-        let scope = definition.scope(model.db).to_scope_id(model.db, model.file);
-        let types = infer_types(model.db, scope);
-
-        types.definition_ty(definition)
+        definition_ty(model.db, definition)
     }
 }

@@ -169,21 +156,15 @@ impl HasTy for ast::Alias {
     fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
         let index = semantic_index(model.db, model.file);
         let definition = index.definition(self);
-
-        let scope = definition.scope(model.db).to_scope_id(model.db, model.file);
-        let types = infer_types(model.db, scope);
-
-        types.definition_ty(definition)
+        definition_ty(model.db, definition)
     }
 }

 #[cfg(test)]
 mod tests {
-    use red_knot_module_resolver::{
-        set_module_resolution_settings, RawModuleResolutionSettings, TargetVersion,
-    };
     use ruff_db::files::system_path_to_file;
     use ruff_db::parsed::parsed_module;
+    use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
     use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

     use crate::db::tests::TestDb;
@@ -191,15 +172,15 @@ mod tests {
     use crate::{HasTy, SemanticModel};

     fn setup_db() -> TestDb {
-        let mut db = TestDb::new();
-        set_module_resolution_settings(
-            &mut db,
-            RawModuleResolutionSettings {
+        let db = TestDb::new();
+        Program::new(
+            &db,
+            TargetVersion::Py38,
+            SearchPathSettings {
                 extra_paths: vec![],
                 workspace_root: SystemPathBuf::from("/src"),
                 site_packages: None,
                 custom_typeshed: None,
-                target_version: TargetVersion::Py38,
             },
         );
@@ -1,94 +1,100 @@
 use ruff_db::files::File;
-use ruff_db::parsed::parsed_module;
 use ruff_python_ast::name::Name;

-use crate::semantic_index::symbol::{NodeWithScopeKind, PublicSymbolId, ScopeId};
-use crate::semantic_index::{public_symbol, root_scope, semantic_index, symbol_table};
-use crate::types::infer::{TypeInference, TypeInferenceBuilder};
+use crate::semantic_index::definition::Definition;
+use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
+use crate::semantic_index::{global_scope, symbol_table, use_def_map};
 use crate::{Db, FxOrderSet};

 mod display;
 mod infer;

-/// Infers the type of a public symbol.
-///
-/// This is a Salsa query to get symbol-level invalidation instead of file-level dependency invalidation.
-/// Without this being a query, changing any public type of a module would invalidate the type inference
-/// for the module scope of its dependents and the transitive dependents.
-///
-/// For example if we have
-/// ```python
-/// # a.py
-/// import x from b
-///
-/// # b.py
-///
-/// x = 20
-/// ```
-///
-/// And x is now changed from `x = 20` to `x = 30`. The following happens:
-///
-/// * The module level types of `b.py` change because `x` now is a `Literal[30]`.
-/// * The module level types of `a.py` change because the imported symbol `x` now has a `Literal[30]` type
-/// * The module level types of any dependents of `a.py` change because the imported symbol `x` now has a `Literal[30]` type
-/// * And so on for all transitive dependencies.
-///
-/// This being a query ensures that the invalidation short-circuits if the type of this symbol didn't change.
-#[salsa::tracked]
-pub(crate) fn public_symbol_ty<'db>(db: &'db dyn Db, symbol: PublicSymbolId<'db>) -> Type<'db> {
-    let _span = tracing::trace_span!("public_symbol_ty", ?symbol).entered();
-
-    let file = symbol.file(db);
-    let scope = root_scope(db, file);
-
-    // TODO switch to inferring just the definition(s), not the whole scope
-    let inference = infer_types(db, scope);
-    inference.symbol_ty(symbol.scoped_symbol_id(db))
-}
-
-/// Shorthand for `public_symbol_ty` that takes a symbol name instead of a [`PublicSymbolId`].
-pub(crate) fn public_symbol_ty_by_name<'db>(
-    db: &'db dyn Db,
-    file: File,
-    name: &str,
-) -> Option<Type<'db>> {
-    let symbol = public_symbol(db, file, name)?;
-    Some(public_symbol_ty(db, symbol))
-}
+pub(crate) use self::infer::{infer_definition_types, infer_expression_types, infer_scope_types};

-/// Infers all types for `scope`.
-#[salsa::tracked(return_ref)]
-pub(crate) fn infer_types<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> TypeInference<'db> {
-    let _span = tracing::trace_span!("infer_types", ?scope).entered();
-
-    let file = scope.file(db);
-    // Using the index here is fine because the code below depends on the AST anyway.
-    // The isolation of the query is by the return inferred types.
-    let index = semantic_index(db, file);
-
-    let node = scope.node(db);
-
-    let mut context = TypeInferenceBuilder::new(db, scope, index);
-
-    match node {
-        NodeWithScopeKind::Module => {
-            let parsed = parsed_module(db.upcast(), file);
-            context.infer_module(parsed.syntax());
-        }
-        NodeWithScopeKind::Function(function) => context.infer_function_body(function.node()),
-        NodeWithScopeKind::Class(class) => context.infer_class_body(class.node()),
-        NodeWithScopeKind::ClassTypeParameters(class) => {
-            context.infer_class_type_params(class.node());
-        }
-        NodeWithScopeKind::FunctionTypeParameters(function) => {
-            context.infer_function_type_params(function.node());
-        }
-    }
-
-    context.finish()
-}
+/// Infer the public type of a symbol (its type as seen from outside its scope).
+pub(crate) fn symbol_ty<'db>(
+    db: &'db dyn Db,
+    scope: ScopeId<'db>,
+    symbol: ScopedSymbolId,
+) -> Type<'db> {
+    let _span = tracing::trace_span!("symbol_ty", ?symbol).entered();
+
+    let use_def = use_def_map(db, scope);
+    definitions_ty(
+        db,
+        use_def.public_definitions(symbol),
+        use_def
+            .public_may_be_unbound(symbol)
+            .then_some(Type::Unbound),
+    )
+}
+
+/// Shorthand for `symbol_ty` that takes a symbol name instead of an ID.
+pub(crate) fn symbol_ty_by_name<'db>(
+    db: &'db dyn Db,
+    scope: ScopeId<'db>,
+    name: &str,
+) -> Type<'db> {
+    let table = symbol_table(db, scope);
+    table
+        .symbol_id_by_name(name)
+        .map(|symbol| symbol_ty(db, scope, symbol))
+        .unwrap_or(Type::Unbound)
+}
+
+/// Shorthand for `symbol_ty` that looks up a module-global symbol in a file.
+pub(crate) fn global_symbol_ty_by_name<'db>(db: &'db dyn Db, file: File, name: &str) -> Type<'db> {
+    symbol_ty_by_name(db, global_scope(db, file), name)
+}
+
+/// Infer the type of a [`Definition`].
+pub(crate) fn definition_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
+    let inference = infer_definition_types(db, definition);
+    inference.definition_ty(definition)
+}
+
+/// Infer the combined type of an array of [`Definition`]s, plus one optional "unbound type".
+///
+/// Will return a union if there is more than one definition, or at least one plus an unbound
+/// type.
+///
+/// The "unbound type" represents the type in case control flow may not have passed through any
+/// definitions in this scope. If this isn't possible, then it will be `None`. If it is possible,
+/// and the result in that case should be Unbound (e.g. an unbound function local), then it will be
+/// `Some(Type::Unbound)`. If it is possible and the result should be something else (e.g. an
+/// implicit global lookup), then `unbound_ty` will be `Some(the_global_symbol_type)`.
+///
+/// # Panics
+/// Will panic if called with zero definitions and no `unbound_ty`. This is a logic error,
+/// as any symbol with zero visible definitions clearly may be unbound, and the caller should
+/// provide an `unbound_ty`.
+pub(crate) fn definitions_ty<'db>(
+    db: &'db dyn Db,
+    definitions: &[Definition<'db>],
+    unbound_ty: Option<Type<'db>>,
+) -> Type<'db> {
+    let def_types = definitions.iter().map(|def| definition_ty(db, *def));
+    let mut all_types = unbound_ty.into_iter().chain(def_types);
+
+    let Some(first) = all_types.next() else {
+        panic!("definitions_ty should never be called with zero definitions and no unbound_ty.")
+    };
+
+    if let Some(second) = all_types.next() {
+        let mut builder = UnionTypeBuilder::new(db);
+        builder = builder.add(first).add(second);
+
+        for variant in all_types {
+            builder = builder.add(variant);
+        }
+
+        Type::Union(builder.build())
+    } else {
+        first
+    }
+}

-/// unique ID for a type
+/// Unique ID for a type.
 #[derive(Copy, Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Hash)]
 pub enum Type<'db> {
     /// the dynamic type: a statically-unknown set of values
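The `definitions_ty` union-building above is the piece type inference uses to collapse multiple reaching definitions into one type. A hedged sketch of a call site (`defs` and `may_be_unbound` are hypothetical variables; in the diff the real caller is `symbol_ty`):

// Hypothetical call site for definitions_ty: two reaching definitions
// (inferred as, say, Literal[3] and Literal[4]) plus a possibly-unbound path
// produce the union Unbound | Literal[3] | Literal[4]; with may_be_unbound
// false and a single definition, that definition's type is returned as-is.
let ty = definitions_ty(db, defs, may_be_unbound.then_some(Type::Unbound));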
@@ -96,9 +102,10 @@ pub enum Type<'db> {
     /// the empty set of values
     Never,
     /// unknown type (no annotation)
-    /// equivalent to Any, or to object in strict mode
+    /// equivalent to Any, or possibly to object in strict mode
     Unknown,
-    /// name is not bound to any value
+    /// name does not exist or is not bound to any value (this represents an error, but with some
+    /// leniency options it could be silently resolved to Unknown in some cases)
     Unbound,
     /// the None object (TODO remove this in favor of Instance(types.NoneType)
     None,
@@ -125,15 +132,16 @@ impl<'db> Type<'db> {
         matches!(self, Type::Unknown)
     }

-    pub fn member(&self, db: &'db dyn Db, name: &Name) -> Option<Type<'db>> {
+    #[must_use]
+    pub fn member(&self, db: &'db dyn Db, name: &Name) -> Type<'db> {
         match self {
-            Type::Any => Some(Type::Any),
+            Type::Any => Type::Any,
             Type::Never => todo!("attribute lookup on Never type"),
-            Type::Unknown => Some(Type::Unknown),
-            Type::Unbound => todo!("attribute lookup on Unbound type"),
+            Type::Unknown => Type::Unknown,
+            Type::Unbound => Type::Unbound,
             Type::None => todo!("attribute lookup on None type"),
             Type::Function(_) => todo!("attribute lookup on Function type"),
-            Type::Module(file) => public_symbol_ty_by_name(db, *file, name),
+            Type::Module(file) => global_symbol_ty_by_name(db, *file, name),
             Type::Class(class) => class.class_member(db, name),
             Type::Instance(_) => {
                 // TODO MRO? get_own_instance_member, get_instance_member
@@ -152,7 +160,7 @@ impl<'db> Type<'db> {
             }
             Type::IntLiteral(_) => {
                 // TODO raise error
-                Some(Type::Unknown)
+                Type::Unknown
             }
         }
     }
@@ -188,32 +196,30 @@ impl<'db> ClassType<'db> {
     /// Returns the class member of this class named `name`.
     ///
     /// The member resolves to a member of the class itself or any of its bases.
-    pub fn class_member(self, db: &'db dyn Db, name: &Name) -> Option<Type<'db>> {
-        if let Some(member) = self.own_class_member(db, name) {
-            return Some(member);
+    pub fn class_member(self, db: &'db dyn Db, name: &Name) -> Type<'db> {
+        let member = self.own_class_member(db, name);
+        if !member.is_unbound() {
+            return member;
         }

         self.inherited_class_member(db, name)
     }

     /// Returns the inferred type of the class member named `name`.
-    pub fn own_class_member(self, db: &'db dyn Db, name: &Name) -> Option<Type<'db>> {
+    pub fn own_class_member(self, db: &'db dyn Db, name: &Name) -> Type<'db> {
         let scope = self.body_scope(db);
-        let symbols = symbol_table(db, scope);
-        let symbol = symbols.symbol_id_by_name(name)?;
-        let types = infer_types(db, scope);
-
-        Some(types.symbol_ty(symbol))
+        symbol_ty_by_name(db, scope, name)
     }

-    pub fn inherited_class_member(self, db: &'db dyn Db, name: &Name) -> Option<Type<'db>> {
+    pub fn inherited_class_member(self, db: &'db dyn Db, name: &Name) -> Type<'db> {
         for base in self.bases(db) {
-            if let Some(member) = base.member(db, name) {
-                return Some(member);
+            let member = base.member(db, name);
+            if !member.is_unbound() {
+                return member;
             }
         }

-        None
+        Type::Unbound
     }
 }
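With `Type::Unbound` replacing `Option` as the "not found" sentinel, the lookup order in `class_member` reads linearly. A hypothetical trace, assuming a class `C(B)` where `C` defines `x` and only `B` defines `y` (`c_class`, `name_x`, and `name_y` are placeholder values, not from the diff):

// C.x: own_class_member finds `x` in C's body scope; the result is
// non-Unbound, so class_member returns it without consulting the bases.
let x_ty = c_class.class_member(db, &name_x);
// C.y: own_class_member yields Type::Unbound, so class_member falls through
// to inherited_class_member, which walks `bases(db)` in order and returns the
// first non-Unbound `base.member(db, name)`; if no base defined `y`, the
// overall result would itself be Type::Unbound.
let y_ty = c_class.class_member(db, &name_y);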
@@ -268,165 +274,3 @@ pub struct IntersectionType<'db> {
     // the intersection type does not include any value in any of these types
     negative: FxOrderSet<Type<'db>>,
 }
-
-#[cfg(test)]
-mod tests {
-    use red_knot_module_resolver::{
-        set_module_resolution_settings, RawModuleResolutionSettings, TargetVersion,
-    };
-    use ruff_db::files::system_path_to_file;
-    use ruff_db::parsed::parsed_module;
-    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
-    use ruff_db::testing::{assert_function_query_was_not_run, assert_function_query_was_run};
-
-    use crate::db::tests::TestDb;
-    use crate::semantic_index::root_scope;
-    use crate::types::{infer_types, public_symbol_ty_by_name};
-    use crate::{HasTy, SemanticModel};
-
-    fn setup_db() -> TestDb {
-        let mut db = TestDb::new();
-        set_module_resolution_settings(
-            &mut db,
-            RawModuleResolutionSettings {
-                target_version: TargetVersion::Py38,
-                extra_paths: vec![],
-                workspace_root: SystemPathBuf::from("/src"),
-                site_packages: None,
-                custom_typeshed: None,
-            },
-        );
-
-        db
-    }
-
-    #[test]
-    fn local_inference() -> anyhow::Result<()> {
-        let mut db = setup_db();
-
-        db.write_file("/src/a.py", "x = 10")?;
-        let a = system_path_to_file(&db, "/src/a.py").unwrap();
-
-        let parsed = parsed_module(&db, a);
-
-        let statement = parsed.suite().first().unwrap().as_assign_stmt().unwrap();
-        let model = SemanticModel::new(&db, a);
-
-        let literal_ty = statement.value.ty(&model);
-
-        assert_eq!(format!("{}", literal_ty.display(&db)), "Literal[10]");
-
-        Ok(())
-    }
-
-    #[test]
-    fn dependency_public_symbol_type_change() -> anyhow::Result<()> {
-        let mut db = setup_db();
-
-        db.write_files([
-            ("/src/a.py", "from foo import x"),
-            ("/src/foo.py", "x = 10\ndef foo(): ..."),
-        ])?;
-
-        let a = system_path_to_file(&db, "/src/a.py").unwrap();
-        let x_ty = public_symbol_ty_by_name(&db, a, "x").unwrap();
-
-        assert_eq!(x_ty.display(&db).to_string(), "Literal[10]");
-
-        // Change `x` to a different value
-        db.write_file("/src/foo.py", "x = 20\ndef foo(): ...")?;
-
-        let a = system_path_to_file(&db, "/src/a.py").unwrap();
-
-        db.clear_salsa_events();
-        let x_ty_2 = public_symbol_ty_by_name(&db, a, "x").unwrap();
-
-        assert_eq!(x_ty_2.display(&db).to_string(), "Literal[20]");
-
-        let events = db.take_salsa_events();
-
-        let a_root_scope = root_scope(&db, a);
-        assert_function_query_was_run::<infer_types, _, _>(
-            &db,
-            |ty| &ty.function,
-            &a_root_scope,
-            &events,
-        );
-
-        Ok(())
-    }
-
-    #[test]
-    fn dependency_non_public_symbol_change() -> anyhow::Result<()> {
-        let mut db = setup_db();
-
-        db.write_files([
-            ("/src/a.py", "from foo import x"),
-            ("/src/foo.py", "x = 10\ndef foo(): y = 1"),
-        ])?;
-
-        let a = system_path_to_file(&db, "/src/a.py").unwrap();
-        let x_ty = public_symbol_ty_by_name(&db, a, "x").unwrap();
-
-        assert_eq!(x_ty.display(&db).to_string(), "Literal[10]");
-
-        db.write_file("/src/foo.py", "x = 10\ndef foo(): pass")?;
-
-        let a = system_path_to_file(&db, "/src/a.py").unwrap();
-
-        db.clear_salsa_events();
-
-        let x_ty_2 = public_symbol_ty_by_name(&db, a, "x").unwrap();
-
-        assert_eq!(x_ty_2.display(&db).to_string(), "Literal[10]");
-
-        let events = db.take_salsa_events();
-
-        let a_root_scope = root_scope(&db, a);
-
-        assert_function_query_was_not_run::<infer_types, _, _>(
-            &db,
-            |ty| &ty.function,
-            &a_root_scope,
-            &events,
-        );
-
-        Ok(())
-    }
-
-    #[test]
-    fn dependency_unrelated_public_symbol() -> anyhow::Result<()> {
-        let mut db = setup_db();
-
-        db.write_files([
-            ("/src/a.py", "from foo import x"),
-            ("/src/foo.py", "x = 10\ny = 20"),
-        ])?;
-
-        let a = system_path_to_file(&db, "/src/a.py").unwrap();
-        let x_ty = public_symbol_ty_by_name(&db, a, "x").unwrap();
-
-        assert_eq!(x_ty.display(&db).to_string(), "Literal[10]");
-
-        db.write_file("/src/foo.py", "x = 10\ny = 30")?;
-
-        let a = system_path_to_file(&db, "/src/a.py").unwrap();
-
-        db.clear_salsa_events();
-
-        let x_ty_2 = public_symbol_ty_by_name(&db, a, "x").unwrap();
-
-        assert_eq!(x_ty_2.display(&db).to_string(), "Literal[10]");
-
-        let events = db.take_salsa_events();
-
-        let a_root_scope = root_scope(&db, a);
-        assert_function_query_was_not_run::<infer_types, _, _>(
-            &db,
-            |ty| &ty.function,
-            &a_root_scope,
-            &events,
-        );
-        Ok(())
-    }
-}
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
 [package]
 name = "ruff"
-version = "0.5.2"
+version = "0.5.3"
 publish = true
 authors = { workspace = true }
 edition = { workspace = true }
Some files were not shown because too many files have changed in this diff