Compare commits


1 Commit

Author: Jane Lewis · SHA1: 9d63accb54 · Message: Add a snapshot test for editor settings resolution · Date: 2024-06-14 17:13:23 -07:00
813 changed files with 3582 additions and 18395 deletions

.gitattributes (vendored), 4 changes
View File

@@ -8,10 +8,6 @@ crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf
crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
crates/ruff_python_parser/resources/invalid/re_lexing/line_continuation_windows_eol.py text eol=crlf
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_windows_eol.py text eol=crlf
crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text eol=cr
crates/ruff_python_parser/resources/inline linguist-generated=true
ruff.schema.json linguist-generated=true text=auto eol=lf

View File

@@ -257,13 +257,10 @@ jobs:
- uses: Swatinem/rust-cache@v2
with:
workspaces: "fuzz -> target"
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@main
- name: "Install cargo-fuzz"
uses: taiki-e/install-action@v2
with:
tool: cargo-fuzz@0.11.2
- name: "Install cargo-fuzz"
# Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
- run: cargo fuzz build -s none
fuzz-parser:
@@ -341,7 +338,7 @@ jobs:
name: ruff
path: target/debug
- uses: dawidd6/action-download-artifact@v6
- uses: dawidd6/action-download-artifact@v5
name: Download baseline Ruff binary
with:
name: ruff

View File

@@ -17,7 +17,7 @@ jobs:
comment:
runs-on: ubuntu-latest
steps:
- uses: dawidd6/action-download-artifact@v6
- uses: dawidd6/action-download-artifact@v5
name: Download pull request number
with:
name: pr-number
@@ -32,7 +32,7 @@ jobs:
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
fi
- uses: dawidd6/action-download-artifact@v6
- uses: dawidd6/action-download-artifact@v5
name: "Download ecosystem results"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
@@ -48,14 +48,6 @@ jobs:
id: generate-comment
if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
run: |
# Guard against malicious ecosystem results that symlink to a secret
# file on this runner
if [[ -L pr/ecosystem/ecosystem-result ]]
then
echo "Error: ecosystem-result cannot be a symlink"
exit 1
fi
# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment ecosystem -->' >> comment.txt

View File

@@ -163,9 +163,6 @@ jobs:
with:
target: ${{ matrix.platform.target }}
args: --release --locked --out dist
env:
# aarch64 build fails, see https://github.com/PyO3/maturin/issues/2110
XWIN_VERSION: 16
- name: "Test wheel"
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
shell: bash
@@ -572,7 +569,7 @@ jobs:
fi
- name: "Build and push Docker image"
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm64

View File

@@ -37,13 +37,13 @@ jobs:
- name: Sync typeshed
id: sync
run: |
rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed
mkdir ruff/crates/red_knot_module_resolver/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot_module_resolver/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot_module_resolver/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib
rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
rm -rf ruff/crates/red_knot/vendor/typeshed
mkdir ruff/crates/red_knot/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot/vendor/typeshed/stdlib
rm -rf ruff/crates/red_knot/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/red_knot/vendor/typeshed/source_commit.txt
- name: Commit the changes
id: commit
if: ${{ steps.sync.outcome == 'success' }}

View File

@@ -2,7 +2,7 @@ fail_fast: true
exclude: |
(?x)^(
crates/red_knot_module_resolver/vendor/.*|
crates/red_knot/vendor/.*|
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
crates/ruff/resources/.*|
@@ -42,7 +42,7 @@ repos:
)$
- repo: https://github.com/crate-ci/typos
rev: v1.22.9
rev: v1.22.3
hooks:
- id: typos
@@ -56,7 +56,7 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.10
rev: v0.4.8
hooks:
- id: ruff-format
- id: ruff

View File

@@ -1,59 +1,5 @@
# Changelog
## 0.4.10
### Parser
- Implement re-lexing logic for better error recovery ([#11845](https://github.com/astral-sh/ruff/pull/11845))
### Rule changes
- \[`flake8-copyright`\] Update `CPY001` to check the first 4096 bytes instead of 1024 ([#11927](https://github.com/astral-sh/ruff/pull/11927))
- \[`pycodestyle`\] Update `E999` to show all syntax errors instead of just the first one ([#11900](https://github.com/astral-sh/ruff/pull/11900))
### Server
- Add tracing setup guide to Helix documentation ([#11883](https://github.com/astral-sh/ruff/pull/11883))
- Add tracing setup guide to Neovim documentation ([#11884](https://github.com/astral-sh/ruff/pull/11884))
- Defer notebook cell deletion to avoid an error message ([#11864](https://github.com/astral-sh/ruff/pull/11864))
### Security
- Guard against malicious ecosystem comment artifacts ([#11879](https://github.com/astral-sh/ruff/pull/11879))
## 0.4.9
### Preview features
- \[`pylint`\] Implement `consider-dict-items` (`C0206`) ([#11688](https://github.com/astral-sh/ruff/pull/11688))
- \[`refurb`\] Implement `repeated-global` (`FURB154`) ([#11187](https://github.com/astral-sh/ruff/pull/11187))
### Rule changes
- \[`pycodestyle`\] Adapt fix for `E203` to work identical to `ruff format` ([#10999](https://github.com/astral-sh/ruff/pull/10999))
### Formatter
- Fix formatter instability for lines only consisting of zero-width characters ([#11748](https://github.com/astral-sh/ruff/pull/11748))
### Server
- Add supported commands in server capabilities ([#11850](https://github.com/astral-sh/ruff/pull/11850))
- Use real file path when available in `ruff server` ([#11800](https://github.com/astral-sh/ruff/pull/11800))
- Improve error message when a command is run on an unavailable document ([#11823](https://github.com/astral-sh/ruff/pull/11823))
- Introduce the `ruff.printDebugInformation` command ([#11831](https://github.com/astral-sh/ruff/pull/11831))
- Tracing system now respects log level and trace level, with options to log to a file ([#11747](https://github.com/astral-sh/ruff/pull/11747))
### CLI
- Handle non-printable characters in diff view ([#11687](https://github.com/astral-sh/ruff/pull/11687))
### Bug fixes
- \[`refurb`\] Avoid suggesting starmap when arguments are used outside call (`FURB140`) ([#11830](https://github.com/astral-sh/ruff/pull/11830))
- \[`flake8-bugbear`\] Avoid panic in `B909` when checking large loop blocks ([#11772](https://github.com/astral-sh/ruff/pull/11772))
- \[`refurb`\] Fix misbehavior of `operator.itemgetter` when getter param is a tuple (`FURB118`) ([#11774](https://github.com/astral-sh/ruff/pull/11774))
## 0.4.8
### Performance

View File

@@ -351,9 +351,7 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
- The commit hash of the merged release pull request on `main`
1. The release workflow will do the following:
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
uploaded anything, you can restart after pushing a fix. If you just need to rerun the build,
make sure you're [re-running all the failed
jobs](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-failed-jobs-in-a-workflow) and not just a single failed job.
uploaded anything, you can restart after pushing a fix.
1. Upload to PyPI.
1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).

Cargo.lock (generated), 218 changes
View File

@@ -133,12 +133,6 @@ version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]]
name = "arc-swap"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
[[package]]
name = "argfile"
version = "0.2.0"
@@ -220,12 +214,6 @@ dependencies = [
"tempfile",
]
[[package]]
name = "camino"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239"
[[package]]
name = "cast"
version = "0.3.0"
@@ -305,9 +293,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.7"
version = "4.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5db83dced34638ad474f39f250d7fea9598bdd239eaced1bdf45d597da0f433f"
checksum = "a9689a29b593160de5bc4aacab7b5d54fb52231de70122626c178e6a368994c7"
dependencies = [
"clap_builder",
"clap_derive",
@@ -315,9 +303,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.7"
version = "4.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7e204572485eb3fbf28f871612191521df159bc3e15a9f5064c66dba3a8c05f"
checksum = "2e5387378c84f6faa26890ebf9f0a92989f8873d4d380467bcd0d8d8620424df"
dependencies = [
"anstream",
"anstyle",
@@ -373,7 +361,7 @@ version = "4.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c780290ccf4fb26629baa7a1081e68ced113f1d3ec302fa5948f1c381ebf06c6"
dependencies = [
"heck 0.5.0",
"heck",
"proc-macro2",
"quote",
"syn",
@@ -754,16 +742,6 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "eyre"
version = "0.6.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec"
dependencies = [
"indenter",
"once_cell",
]
[[package]]
name = "fastrand"
version = "2.0.2"
@@ -901,21 +879,6 @@ dependencies = [
"allocator-api2",
]
[[package]]
name = "hashlink"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
dependencies = [
"hashbrown 0.14.5",
]
[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]]
name = "heck"
version = "0.5.0"
@@ -1018,12 +981,6 @@ dependencies = [
"rust-stemmers",
]
[[package]]
name = "indenter"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
[[package]]
name = "indexmap"
version = "2.2.6"
@@ -1260,9 +1217,9 @@ dependencies = [
[[package]]
name = "libmimalloc-sys"
version = "0.1.39"
version = "0.1.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44"
checksum = "0e7bb23d733dfcc8af652a78b7bf232f0e967710d044732185e561e47c0336b6"
dependencies = [
"cc",
"libc",
@@ -1353,15 +1310,15 @@ checksum = "540f1c43aed89909c0cc0cc604e3bb2f7e7a341a3728a9e6cfe760e733cd11ed"
[[package]]
name = "memchr"
version = "2.7.4"
version = "2.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
[[package]]
name = "mimalloc"
version = "0.1.43"
version = "0.1.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633"
checksum = "e9186d86b79b52f4a77af65604b51225e8db1d6ee7e3f41aec1e40829c71a176"
dependencies = [
"libmimalloc-sys",
]
@@ -1722,9 +1679,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.86"
version = "1.0.85"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23"
dependencies = [
"unicode-ident",
]
@@ -1840,59 +1797,21 @@ dependencies = [
"notify",
"parking_lot",
"rayon",
"red_knot_module_resolver",
"ruff_index",
"ruff_notebook",
"ruff_python_ast",
"ruff_python_parser",
"ruff_text_size",
"rustc-hash 2.0.0",
"rustc-hash",
"smol_str",
"tempfile",
"tracing",
"tracing-subscriber",
"tracing-tree",
]
[[package]]
name = "red_knot_module_resolver"
version = "0.0.0"
dependencies = [
"anyhow",
"insta",
"path-slash",
"ruff_db",
"ruff_python_stdlib",
"rustc-hash 2.0.0",
"salsa",
"smol_str",
"tempfile",
"tracing",
"walkdir",
"zip",
]
[[package]]
name = "red_knot_python_semantic"
version = "0.0.0"
dependencies = [
"anyhow",
"bitflags 2.5.0",
"hashbrown 0.14.5",
"indexmap",
"red_knot_module_resolver",
"ruff_db",
"ruff_index",
"ruff_python_ast",
"ruff_python_parser",
"ruff_text_size",
"rustc-hash 2.0.0",
"salsa",
"smallvec",
"smol_str",
"tracing",
]
[[package]]
name = "redox_syscall"
version = "0.4.1"
@@ -1974,7 +1893,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.4.10"
version = "0.4.8"
dependencies = [
"anyhow",
"argfile",
@@ -2009,7 +1928,7 @@ dependencies = [
"ruff_source_file",
"ruff_text_size",
"ruff_workspace",
"rustc-hash 2.0.0",
"rustc-hash",
"serde",
"serde_json",
"shellexpand",
@@ -2057,26 +1976,6 @@ dependencies = [
"seahash",
]
[[package]]
name = "ruff_db"
version = "0.0.0"
dependencies = [
"camino",
"countme",
"dashmap",
"filetime",
"insta",
"once_cell",
"ruff_python_ast",
"ruff_python_parser",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"salsa",
"tracing",
"zip",
]
[[package]]
name = "ruff_dev"
version = "0.0.0"
@@ -2103,6 +2002,7 @@ dependencies = [
"ruff_python_parser",
"ruff_python_stdlib",
"ruff_python_trivia",
"ruff_text_size",
"ruff_workspace",
"schemars",
"serde",
@@ -2135,7 +2035,7 @@ dependencies = [
"ruff_cache",
"ruff_macros",
"ruff_text_size",
"rustc-hash 2.0.0",
"rustc-hash",
"schemars",
"serde",
"static_assertions",
@@ -2153,7 +2053,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.4.10"
version = "0.4.8"
dependencies = [
"aho-corasick",
"annotate-snippets 0.9.2",
@@ -2195,7 +2095,7 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"rustc-hash",
"schemars",
"serde",
"serde_json",
@@ -2254,7 +2154,7 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"rustc-hash",
"serde",
]
@@ -2301,7 +2201,7 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"rustc-hash",
"schemars",
"serde",
"serde_json",
@@ -2347,7 +2247,7 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"rustc-hash",
"static_assertions",
"unicode-ident",
"unicode-normalization",
@@ -2377,7 +2277,7 @@ dependencies = [
"ruff_python_stdlib",
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.0.0",
"rustc-hash",
]
[[package]]
@@ -2433,7 +2333,7 @@ dependencies = [
"ruff_source_file",
"ruff_text_size",
"ruff_workspace",
"rustc-hash 2.0.0",
"rustc-hash",
"serde",
"serde_json",
"shellexpand",
@@ -2512,7 +2412,7 @@ dependencies = [
"ruff_python_ast",
"ruff_python_formatter",
"ruff_source_file",
"rustc-hash 2.0.0",
"rustc-hash",
"schemars",
"serde",
"shellexpand",
@@ -2537,12 +2437,6 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustc-hash"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
[[package]]
name = "rustix"
version = "0.38.34"
@@ -2599,37 +2493,6 @@ version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
[[package]]
name = "salsa"
version = "0.18.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=f706aa2d32d473ee633a77c1af01d180c85da308#f706aa2d32d473ee633a77c1af01d180c85da308"
dependencies = [
"arc-swap",
"crossbeam",
"crossbeam-utils",
"dashmap",
"hashlink",
"indexmap",
"log",
"parking_lot",
"rustc-hash 1.1.0",
"salsa-macros",
"smallvec",
]
[[package]]
name = "salsa-macros"
version = "0.18.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=f706aa2d32d473ee633a77c1af01d180c85da308#f706aa2d32d473ee633a77c1af01d180c85da308"
dependencies = [
"eyre",
"heck 0.4.1",
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "same-file"
version = "1.0.6"
@@ -2866,9 +2729,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "strum"
version = "0.26.3"
version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06"
checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29"
dependencies = [
"strum_macros",
]
@@ -2879,7 +2742,7 @@ version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be"
dependencies = [
"heck 0.5.0",
"heck",
"proc-macro2",
"quote",
"rustversion",
@@ -2894,26 +2757,15 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "syn"
version = "2.0.68"
version = "2.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "901fa70d88b9d6c98022e23b4136f9f3e54e4662c3bc1bd1d84a42a9a0f0c1e9"
checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "synstructure"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tempfile"
version = "3.10.1"
@@ -3305,9 +3157,9 @@ dependencies = [
[[package]]
name = "url"
version = "2.5.2"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c"
checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
dependencies = [
"form_urlencoded",
"idna",
@@ -3804,9 +3656,9 @@ dependencies = [
[[package]]
name = "zstd-sys"
version = "2.0.11+zstd.1.5.6"
version = "2.0.10+zstd.1.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75652c55c0b6f3e6f12eb786fe1bc960396bf05a1eb3bf1f3691c3610ac2e6d4"
checksum = "c253a4914af5bafc8fa8c86ee400827e83cf6ec01195ec1f1ed8441bf00d65aa"
dependencies = [
"cc",
"pkg-config",

View File

@@ -14,7 +14,6 @@ license = "MIT"
[workspace.dependencies]
ruff = { path = "crates/ruff" }
ruff_cache = { path = "crates/ruff_cache" }
ruff_db = { path = "crates/ruff_db" }
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
ruff_formatter = { path = "crates/ruff_formatter" }
ruff_index = { path = "crates/ruff_index" }
@@ -35,8 +34,6 @@ ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
ruff_workspace = { path = "crates/ruff_workspace" }
red_knot_module_resolver = { path = "crates/red_knot_module_resolver" }
aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
@@ -45,7 +42,6 @@ bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.5.1" }
@@ -72,7 +68,7 @@ imperative = { version = "1.0.4" }
indexmap = { version = "2.2.6" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
insta = { version = "1.35.1", feature = ["filters", "glob"] }
insta-cmd = { version = "0.6.0" }
is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
@@ -83,9 +79,7 @@ libc = { version = "0.2.153" }
libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
"proposed",
] }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = ["proposed"] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
@@ -105,21 +99,17 @@ quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f706aa2d32d473ee633a77c1af01d180c85da308" }
rustc-hash = { version = "1.1.0" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.4" }
serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = [
"macros",
] }
serde_with = { version = "3.6.0", default-features = false, features = ["macros"] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
smol_str = { version = "0.2.2" }
static_assertions = "1.1.0"
strum = { version = "0.26.0", features = ["strum_macros"] }
strum_macros = { version = "0.26.0" }
@@ -141,12 +131,7 @@ unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = [
"v4",
"fast-rng",
"macro-diagnostics",
"js",
] }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }

View File

@@ -28,7 +28,7 @@ An extremely fast Python linter and code formatter, written in Rust.
- ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.13 compatibility
- 🤝 Python 3.12 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
@@ -152,7 +152,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.4.10
rev: v0.4.8
hooks:
# Run the linter.
- id: ruff
@@ -442,7 +442,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [NumPyro](https://github.com/pyro-ppl/numpyro)
- [ONNX](https://github.com/onnx/onnx)
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
- [Open Wine Components](https://github.com/Open-Wine-Components/umu-launcher)
- [PDM](https://github.com/pdm-project/pdm)
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
- [Pandas](https://github.com/pandas-dev/pandas)

View File

@@ -1,6 +1,6 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["crates/red_knot_module_resolver/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = ["crates/red_knot/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`

View File

@@ -12,8 +12,6 @@ license.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
red_knot_module_resolver = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_text_size = { workspace = true }
@@ -36,6 +34,11 @@ smol_str = { version = "0.2.1" }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
tracing-tree = { workspace = true }
zip = { workspace = true }
[build-dependencies]
zip = { workspace = true }
walkdir = { workspace = true }
[dev-dependencies]
tempfile = { workspace = true }

View File

@@ -0,0 +1,9 @@
# Red Knot
The Red Knot crate contains code working towards multifile analysis, type inference and, ultimately, type-checking. It's very much a work in progress for now.
## Vendored types for the stdlib
Red Knot vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot/vendor/typeshed`. The file `crates/red_knot/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
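The `build.rs` script and the `typeshed_zip_created_at_build_time` test elsewhere in this diff show that the vendored stubs are packed into a zip archive under `OUT_DIR` at build time and read back through the `zip` crate. As a rough illustration of that pattern only (a sketch, not Red Knot's actual API; the `read_stub` helper and the `anyhow` error handling are assumptions), pulling a single stub out of the embedded archive might look like this:

```rust
use std::io::{Cursor, Read};

use zip::ZipArchive;

// The build script zips the vendored typeshed stubs into OUT_DIR at compile
// time; "/zipped_typeshed.zip" is the path hardcoded in `build.rs` per this diff.
const TYPESHED_ZIP_BYTES: &[u8] =
    include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));

// Hypothetical helper: read one stub file out of the embedded archive.
fn read_stub(path_in_zip: &str) -> anyhow::Result<String> {
    // Open the archive directly from the in-memory byte slice.
    let mut archive = ZipArchive::new(Cursor::new(TYPESHED_ZIP_BYTES))?;
    let mut stub = archive.by_name(path_in_zip)?;
    let mut source = String::new();
    stub.read_to_string(&mut source)?;
    Ok(source)
}

fn main() -> anyhow::Result<()> {
    // For example, print the first line of the functools stub.
    let functools = read_stub("stdlib/functools.pyi")?;
    println!("{}", functools.lines().next().unwrap_or_default());
    Ok(())
}
```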

View File

@@ -3,12 +3,11 @@
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_module_resolver/vendor/typeshed` change.
//! in `crates/red_knot/vendor/typeshed` change.
use std::fs::File;
use std::path::Path;
use path_slash::PathExt;
use zip::result::ZipResult;
use zip::write::{FileOptions, ZipWriter};
use zip::CompressionMethod;
@@ -29,25 +28,25 @@ fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
for entry in walkdir::WalkDir::new(directory_path) {
let dir_entry = entry.unwrap();
let absolute_path = dir_entry.path();
let normalized_relative_path = absolute_path
let relative_path = dir_entry.path();
let name = relative_path
.strip_prefix(Path::new(directory_path))
.unwrap()
.to_slash()
.to_str()
.expect("Unexpected non-utf8 typeshed path!");
// Write file or directory explicitly
// Some unzip tools unzip files with directory paths correctly, some do not!
if absolute_path.is_file() {
println!("adding file {absolute_path:?} as {normalized_relative_path:?} ...");
zip.start_file(normalized_relative_path, options)?;
let mut f = File::open(absolute_path)?;
if relative_path.is_file() {
println!("adding file {relative_path:?} as {name:?} ...");
zip.start_file(name, options)?;
let mut f = File::open(relative_path)?;
std::io::copy(&mut f, &mut zip).unwrap();
} else if !normalized_relative_path.is_empty() {
} else if !name.is_empty() {
// Only if not root! Avoids path spec / warning
// and mapname conversion failed error on unzip
println!("adding dir {absolute_path:?} as {normalized_relative_path:?} ...");
zip.add_directory(normalized_relative_path, options)?;
println!("adding dir {relative_path:?} as {name:?} ...");
zip.add_directory(name, options)?;
}
}
zip.finish()

View File

@@ -7,8 +7,6 @@ use std::sync::Arc;
use dashmap::mapref::entry::Entry;
use smol_str::SmolStr;
use red_knot_module_resolver::ModuleKind;
use crate::db::{QueryResult, SemanticDb, SemanticJar};
use crate::files::FileId;
use crate::semantic::Dependency;
@@ -179,6 +177,15 @@ impl std::fmt::Display for ModuleName {
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ModuleKind {
/// A single-file module (e.g. `foo.py` or `foo.pyi`)
Module,
/// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
Package,
}
/// A search path in which to search modules.
/// Corresponds to a path in [`sys.path`](https://docs.python.org/3/library/sys_path_init.html) at runtime.
///
@@ -763,8 +770,11 @@ impl PackageKind {
#[cfg(test)]
mod tests {
use std::io::{Cursor, Read};
use std::num::NonZeroU32;
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use zip::ZipArchive;
use crate::db::tests::TestDb;
use crate::db::SourceDb;
@@ -916,6 +926,28 @@ mod tests {
Ok(())
}
#[test]
fn typeshed_zip_created_at_build_time() -> anyhow::Result<()> {
// The file path here is hardcoded in this crate's `build.rs` script.
// Luckily this crate will fail to build if this file isn't available at build time.
const TYPESHED_ZIP_BYTES: &[u8] =
include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
assert!(!TYPESHED_ZIP_BYTES.is_empty());
let mut typeshed_zip_archive = ZipArchive::new(Cursor::new(TYPESHED_ZIP_BYTES))?;
let path_to_functools = Path::new("stdlib").join("functools.pyi");
let mut functools_module_stub = typeshed_zip_archive
.by_name(path_to_functools.to_str().unwrap())
.unwrap();
assert!(functools_module_stub.is_file());
let mut functools_module_stub_source = String::new();
functools_module_stub.read_to_string(&mut functools_module_stub_source)?;
assert!(functools_module_stub_source.contains("def update_wrapper("));
Ok(())
}
#[test]
fn resolve_package() -> anyhow::Result<()> {
let TestCase {

View File

@@ -2,7 +2,6 @@ use std::num::NonZeroU32;
use ruff_python_ast as ast;
use ruff_python_ast::visitor::source_order::SourceOrderVisitor;
use ruff_python_ast::AstNode;
use crate::ast_ids::{NodeKey, TypedNodeKey};
use crate::cache::KeyValueCache;
@@ -14,9 +13,8 @@ use crate::parse::parse;
use crate::Name;
pub(crate) use definitions::Definition;
use definitions::{ImportDefinition, ImportFromDefinition};
pub(crate) use flow_graph::ConstrainedDefinition;
use flow_graph::{FlowGraph, FlowGraphBuilder, FlowNodeId, ReachableDefinitionsIterator};
use ruff_index::{newtype_index, IndexVec};
use ruff_index::newtype_index;
use rustc_hash::FxHashMap;
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
@@ -67,7 +65,6 @@ pub struct SemanticIndex {
symbol_table: SymbolTable,
flow_graph: FlowGraph,
expressions: FxHashMap<NodeKey, ExpressionId>,
expressions_by_id: IndexVec<ExpressionId, NodeKey>,
}
impl SemanticIndex {
@@ -81,24 +78,12 @@ impl SemanticIndex {
current_flow_node_id: FlowGraph::start(),
}],
expressions: FxHashMap::default(),
expressions_by_id: IndexVec::default(),
current_definition: None,
};
indexer.visit_body(&module.body);
indexer.finish()
}
fn resolve_expression_id<'a>(
&self,
ast: &'a ast::ModModule,
expression_id: ExpressionId,
) -> ast::AnyNodeRef<'a> {
let node_key = self.expressions_by_id[expression_id];
node_key
.resolve(ast.as_any_node_ref())
.expect("node to resolve")
}
/// Return an iterator over all definitions of `symbol_id` reachable from `use_expr`. The value
/// of `symbol_id` in `use_expr` must originate from one of the iterated definitions (or from
/// an external reassignment of the name outside of this scope).
@@ -138,23 +123,19 @@ struct SemanticIndexer {
/// the definition whose target(s) we are currently walking
current_definition: Option<Definition>,
expressions: FxHashMap<NodeKey, ExpressionId>,
expressions_by_id: IndexVec<ExpressionId, NodeKey>,
}
impl SemanticIndexer {
pub(crate) fn finish(mut self) -> SemanticIndex {
pub(crate) fn finish(self) -> SemanticIndex {
let SemanticIndexer {
flow_graph_builder,
symbol_table_builder,
..
} = self;
self.expressions.shrink_to_fit();
self.expressions_by_id.shrink_to_fit();
SemanticIndex {
flow_graph: flow_graph_builder.finish(),
symbol_table: symbol_table_builder.finish(),
expressions: self.expressions,
expressions_by_id: self.expressions_by_id,
}
}
@@ -230,15 +211,6 @@ impl SemanticIndexer {
.record_scope_for_node(node_key, scope_id);
}
fn insert_constraint(&mut self, expr: &ast::Expr) {
let node_key = NodeKey::from_node(expr.into());
let expression_id = self.expressions[&node_key];
let constraint = self
.flow_graph_builder
.add_constraint(self.current_flow_node(), expression_id);
self.set_current_flow_node(constraint);
}
fn with_type_params(
&mut self,
name: &str,
@@ -268,21 +240,18 @@ impl SemanticIndexer {
impl SourceOrderVisitor<'_> for SemanticIndexer {
fn visit_expr(&mut self, expr: &ast::Expr) {
let node_key = NodeKey::from_node(expr.into());
let expression_id = self.expressions_by_id.push(node_key);
let flow_expression_id = self
let expression_id = self
.flow_graph_builder
.record_expr(self.current_flow_node());
debug_assert_eq!(expression_id, flow_expression_id);
let symbol_expression_id = self
.symbol_table_builder
.record_expression(self.cur_scope());
debug_assert_eq!(
expression_id,
self.symbol_table_builder
.record_expression(self.cur_scope())
);
debug_assert_eq!(expression_id, symbol_expression_id);
self.expressions.insert(node_key, expression_id);
self.expressions
.insert(NodeKey::from_node(expr.into()), expression_id);
match expr {
ast::Expr::Name(ast::ExprName { id, ctx, .. }) => {
@@ -321,7 +290,6 @@ impl SourceOrderVisitor<'_> for SemanticIndexer {
let if_branch = self.flow_graph_builder.add_branch(self.current_flow_node());
self.set_current_flow_node(if_branch);
self.insert_constraint(test);
self.visit_expr(body);
let post_body = self.current_flow_node();
@@ -463,7 +431,6 @@ impl SourceOrderVisitor<'_> for SemanticIndexer {
}
ast::Stmt::Assign(node) => {
debug_assert!(self.current_definition.is_none());
self.visit_expr(&node.value);
self.current_definition =
Some(Definition::Assignment(TypedNodeKey::from_node(node)));
for expr in &node.targets {
@@ -471,6 +438,7 @@ impl SourceOrderVisitor<'_> for SemanticIndexer {
}
self.current_definition = None;
self.visit_expr(&node.value);
}
ast::Stmt::If(node) => {
// TODO detect statically known truthy or falsy test (via type inference, not naive
@@ -485,7 +453,6 @@ impl SourceOrderVisitor<'_> for SemanticIndexer {
// visit the body of the `if` clause
self.set_current_flow_node(if_branch);
self.insert_constraint(&node.test);
self.visit_body(&node.body);
// Flow node for the last if/elif condition branch; represents the "no branch
@@ -504,15 +471,13 @@ impl SourceOrderVisitor<'_> for SemanticIndexer {
let mut last_branch_is_else = false;
for clause in &node.elif_else_clauses {
if let Some(test) = &clause.test {
self.visit_expr(test);
if clause.test.is_some() {
// This is an elif clause. Create a new branch node. Its predecessor is the
// previous branch node, because we can only take one branch in an entire
// if/elif/else chain, so if we take this branch, it can only be because we
// didn't take the previous one.
prior_branch = self.flow_graph_builder.add_branch(prior_branch);
self.set_current_flow_node(prior_branch);
self.insert_constraint(test);
} else {
// This is an else clause. No need to create a branch node; there's no
// branch here, if we haven't taken any previous branch, we definitely go
@@ -834,10 +799,7 @@ mod tests {
let ast::Stmt::Expr(ast::StmtExpr { value: x_use, .. }) = &ast.body[1] else {
panic!("should be an expr")
};
let x_defs: Vec<_> = index
.reachable_definitions(x_sym, x_use)
.map(|constrained_definition| constrained_definition.definition)
.collect();
let x_defs: Vec<_> = index.reachable_definitions(x_sym, x_use).collect();
assert_eq!(x_defs.len(), 1);
let Definition::Assignment(node_key) = &x_defs[0] else {
panic!("def should be an assignment")

View File

@@ -2,7 +2,6 @@ use super::symbol_table::SymbolId;
use crate::semantic::{Definition, ExpressionId};
use ruff_index::{newtype_index, IndexVec};
use std::iter::FusedIterator;
use std::ops::Range;
#[newtype_index]
pub struct FlowNodeId;
@@ -13,10 +12,9 @@ pub(crate) enum FlowNode {
Definition(DefinitionFlowNode),
Branch(BranchFlowNode),
Phi(PhiFlowNode),
Constraint(ConstraintFlowNode),
}
/// A point in control flow where a symbol is defined
/// A Definition node represents a point in control flow where a symbol is defined
#[derive(Debug)]
pub(crate) struct DefinitionFlowNode {
symbol_id: SymbolId,
@@ -24,26 +22,19 @@ pub(crate) struct DefinitionFlowNode {
predecessor: FlowNodeId,
}
/// A branch in control flow
/// A Branch node represents a branch in control flow
#[derive(Debug)]
pub(crate) struct BranchFlowNode {
predecessor: FlowNodeId,
}
/// A join point where control flow paths come together
/// A Phi node represents a join point where control flow paths come together
#[derive(Debug)]
pub(crate) struct PhiFlowNode {
first_predecessor: FlowNodeId,
second_predecessor: FlowNodeId,
}
/// A branch test which may apply constraints to a symbol's type
#[derive(Debug)]
pub(crate) struct ConstraintFlowNode {
predecessor: FlowNodeId,
test_expression: ExpressionId,
}
#[derive(Debug)]
pub struct FlowGraph {
flow_nodes_by_id: IndexVec<FlowNodeId, FlowNode>,
@@ -107,17 +98,6 @@ impl FlowGraphBuilder {
}))
}
pub(crate) fn add_constraint(
&mut self,
predecessor: FlowNodeId,
test_expression: ExpressionId,
) -> FlowNodeId {
self.add(FlowNode::Constraint(ConstraintFlowNode {
predecessor,
test_expression,
}))
}
pub(super) fn record_expr(&mut self, node_id: FlowNodeId) -> ExpressionId {
self.flow_graph.expression_map.push(node_id)
}
@@ -129,26 +109,11 @@ impl FlowGraphBuilder {
}
}
/// A definition, and the set of constraints between a use and the definition
#[derive(Debug, Clone)]
pub struct ConstrainedDefinition {
pub definition: Definition,
pub constraints: Vec<ExpressionId>,
}
/// A flow node and the constraints we passed through to reach it
#[derive(Debug)]
struct FlowState {
node_id: FlowNodeId,
constraints_range: Range<usize>,
}
#[derive(Debug)]
pub struct ReachableDefinitionsIterator<'a> {
flow_graph: &'a FlowGraph,
symbol_id: SymbolId,
pending: Vec<FlowState>,
constraints: Vec<ExpressionId>,
pending: Vec<FlowNodeId>,
}
impl<'a> ReachableDefinitionsIterator<'a> {
@@ -156,56 +121,31 @@ impl<'a> ReachableDefinitionsIterator<'a> {
Self {
flow_graph,
symbol_id,
pending: vec![FlowState {
node_id: start_node_id,
constraints_range: 0..0,
}],
constraints: vec![],
pending: vec![start_node_id],
}
}
}
impl<'a> Iterator for ReachableDefinitionsIterator<'a> {
type Item = ConstrainedDefinition;
type Item = Definition;
fn next(&mut self) -> Option<Self::Item> {
let FlowState {
mut node_id,
mut constraints_range,
} = self.pending.pop()?;
self.constraints.truncate(constraints_range.end + 1);
loop {
match &self.flow_graph.flow_nodes_by_id[node_id] {
FlowNode::Start => {
// constraints on unbound are irrelevant
return Some(ConstrainedDefinition {
definition: Definition::Unbound,
constraints: vec![],
});
}
let flow_node_id = self.pending.pop()?;
match &self.flow_graph.flow_nodes_by_id[flow_node_id] {
FlowNode::Start => return Some(Definition::Unbound),
FlowNode::Definition(def_node) => {
if def_node.symbol_id == self.symbol_id {
return Some(ConstrainedDefinition {
definition: def_node.definition.clone(),
constraints: self.constraints[constraints_range].to_vec(),
});
return Some(def_node.definition.clone());
}
node_id = def_node.predecessor;
self.pending.push(def_node.predecessor);
}
FlowNode::Branch(branch_node) => {
node_id = branch_node.predecessor;
self.pending.push(branch_node.predecessor);
}
FlowNode::Phi(phi_node) => {
self.pending.push(FlowState {
node_id: phi_node.first_predecessor,
constraints_range: constraints_range.clone(),
});
node_id = phi_node.second_predecessor;
}
FlowNode::Constraint(constraint_node) => {
node_id = constraint_node.predecessor;
self.constraints.push(constraint_node.test_expression);
constraints_range.end += 1;
self.pending.push(phi_node.first_predecessor);
self.pending.push(phi_node.second_predecessor);
}
}
}
@@ -254,15 +194,6 @@ impl std::fmt::Display for FlowGraph {
id.as_u32()
)?;
}
FlowNode::Constraint(constraint_node) => {
writeln!(f, r"((Constraint))")?;
writeln!(
f,
r" id{}-->id{}",
constraint_node.predecessor.as_u32(),
id.as_u32()
)?;
}
}
}
Ok(())

View File

@@ -213,7 +213,7 @@ impl TypeStore {
self.modules.get(&file_id)
}
fn add_function_type(
fn add_function(
&self,
file_id: FileId,
name: &str,
@@ -225,18 +225,7 @@ impl TypeStore {
.add_function(name, symbol_id, scope_id, decorators)
}
fn add_function(
&self,
file_id: FileId,
name: &str,
symbol_id: SymbolId,
scope_id: ScopeId,
decorators: Vec<Type>,
) -> Type {
Type::Function(self.add_function_type(file_id, name, symbol_id, scope_id, decorators))
}
fn add_class_type(
fn add_class(
&self,
file_id: FileId,
name: &str,
@@ -247,17 +236,7 @@ impl TypeStore {
.add_class(name, scope_id, bases)
}
fn add_class(&self, file_id: FileId, name: &str, scope_id: ScopeId, bases: Vec<Type>) -> Type {
Type::Class(self.add_class_type(file_id, name, scope_id, bases))
}
/// add "raw" union type with exactly given elements
fn add_union_type(&self, file_id: FileId, elems: &[Type]) -> UnionTypeId {
self.add_or_get_module(file_id).add_union(elems)
}
/// add union with normalization; may not return a `UnionType`
fn add_union(&self, file_id: FileId, elems: &[Type]) -> Type {
fn add_union(&self, file_id: FileId, elems: &[Type]) -> UnionTypeId {
let mut flattened = Vec::with_capacity(elems.len());
for ty in elems {
match ty {
@@ -265,17 +244,10 @@ impl TypeStore {
_ => flattened.push(*ty),
}
}
// TODO don't add identical unions
// TODO de-duplicate union elements
match flattened[..] {
[] => Type::Never,
[ty] => ty,
_ => Type::Union(self.add_union_type(file_id, &flattened)),
}
self.add_or_get_module(file_id).add_union(&flattened)
}
/// add "raw" intersection type with exactly given elements
fn add_intersection_type(
fn add_intersection(
&self,
file_id: FileId,
positive: &[Type],
@@ -285,38 +257,6 @@ impl TypeStore {
.add_intersection(positive, negative)
}
/// add intersection with normalization; may not return an `IntersectionType`
fn add_intersection(&self, file_id: FileId, positive: &[Type], negative: &[Type]) -> Type {
let mut pos_flattened = Vec::with_capacity(positive.len());
let mut neg_flattened = Vec::with_capacity(negative.len());
for ty in positive {
match ty {
Type::Intersection(intersection_id) => {
pos_flattened.extend(intersection_id.positive(self));
neg_flattened.extend(intersection_id.negative(self));
}
_ => pos_flattened.push(*ty),
}
}
for ty in negative {
match ty {
Type::Intersection(intersection_id) => {
pos_flattened.extend(intersection_id.negative(self));
neg_flattened.extend(intersection_id.positive(self));
}
_ => neg_flattened.push(*ty),
}
}
// TODO don't add identical intersections
// TODO deduplicate intersection elements
// TODO maintain DNF form (union of intersections)
match (&pos_flattened[..], &neg_flattened[..]) {
([], []) => Type::Any, // TODO should be object
([ty], []) => *ty,
(pos, neg) => Type::Intersection(self.add_intersection_type(file_id, pos, neg)),
}
}
fn get_function(&self, id: FunctionTypeId) -> FunctionTypeRef {
FunctionTypeRef {
module_store: self.get_module(id.file_id),
@@ -603,18 +543,6 @@ pub struct IntersectionTypeId {
intersection_id: ModuleIntersectionTypeId,
}
impl IntersectionTypeId {
pub fn positive(self, type_store: &TypeStore) -> Vec<Type> {
let intersection = type_store.get_intersection(self);
intersection.positive.iter().copied().collect()
}
pub fn negative(self, type_store: &TypeStore) -> Vec<Type> {
let intersection = type_store.get_intersection(self);
intersection.negative.iter().copied().collect()
}
}
#[newtype_index]
struct ModuleFunctionTypeId;
@@ -904,68 +832,15 @@ mod tests {
use crate::files::Files;
use crate::semantic::symbol_table::SymbolTableBuilder;
use crate::semantic::{FileId, ScopeId, SymbolFlags, SymbolTable, TypeStore};
use crate::semantic::{SymbolFlags, SymbolTable, TypeStore};
use crate::FxIndexSet;
struct TestCase {
store: TypeStore,
files: Files,
file_id: FileId,
root_scope: ScopeId,
}
fn create_test() -> TestCase {
let files = Files::default();
let file_id = files.intern(Path::new("/foo"));
TestCase {
store: TypeStore::default(),
files,
file_id,
root_scope: SymbolTable::root_scope_id(),
}
}
fn assert_union_elements(store: &TypeStore, union: Type, elements: &[Type]) {
let Type::Union(union_id) = union else {
panic!("should be a union")
};
assert_eq!(
store.get_union(union_id).elements,
elements.iter().copied().collect::<FxIndexSet<_>>()
);
}
fn assert_intersection_elements(
store: &TypeStore,
intersection: Type,
positive: &[Type],
negative: &[Type],
) {
let Type::Intersection(intersection_id) = intersection else {
panic!("should be a intersection")
};
assert_eq!(
store.get_intersection(intersection_id).positive,
positive.iter().copied().collect::<FxIndexSet<_>>()
);
assert_eq!(
store.get_intersection(intersection_id).negative,
negative.iter().copied().collect::<FxIndexSet<_>>()
);
}
#[test]
fn add_class() {
let TestCase {
store,
file_id,
root_scope,
..
} = create_test();
let id = store.add_class_type(file_id, "C", root_scope, Vec::new());
let store = TypeStore::default();
let files = Files::default();
let file_id = files.intern(Path::new("/foo"));
let id = store.add_class(file_id, "C", SymbolTable::root_scope_id(), Vec::new());
assert_eq!(store.get_class(id).name(), "C");
let inst = Type::Instance(id);
assert_eq!(format!("{}", inst.display(&store)), "C");
@@ -973,13 +848,9 @@ mod tests {
#[test]
fn add_function() {
let TestCase {
store,
file_id,
root_scope,
..
} = create_test();
let store = TypeStore::default();
let files = Files::default();
let file_id = files.intern(Path::new("/foo"));
let mut builder = SymbolTableBuilder::new();
let func_symbol = builder.add_or_update_symbol(
SymbolTable::root_scope_id(),
@@ -988,11 +859,11 @@ mod tests {
);
builder.finish();
let id = store.add_function_type(
let id = store.add_function(
file_id,
"func",
func_symbol,
root_scope,
SymbolTable::root_scope_id(),
vec![Type::Unknown],
);
assert_eq!(store.get_function(id).name(), "func");
@@ -1003,117 +874,41 @@ mod tests {
#[test]
fn add_union() {
let TestCase {
store,
file_id,
root_scope,
..
} = create_test();
let c1 = store.add_class_type(file_id, "C1", root_scope, Vec::new());
let c2 = store.add_class_type(file_id, "C2", root_scope, Vec::new());
let store = TypeStore::default();
let files = Files::default();
let file_id = files.intern(Path::new("/foo"));
let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
let elems = vec![Type::Instance(c1), Type::Instance(c2)];
let id = store.add_union_type(file_id, &elems);
let id = store.add_union(file_id, &elems);
assert_eq!(
store.get_union(id).elements,
elems.into_iter().collect::<FxIndexSet<_>>()
);
let union = Type::Union(id);
assert_union_elements(&store, union, &elems);
assert_eq!(format!("{}", union.display(&store)), "C1 | C2");
}
#[test]
fn add_intersection() {
let TestCase {
store,
file_id,
root_scope,
..
} = create_test();
let c1 = store.add_class_type(file_id, "C1", root_scope, Vec::new());
let c2 = store.add_class_type(file_id, "C2", root_scope, Vec::new());
let c3 = store.add_class_type(file_id, "C3", root_scope, Vec::new());
let store = TypeStore::default();
let files = Files::default();
let file_id = files.intern(Path::new("/foo"));
let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
let c3 = store.add_class(file_id, "C3", SymbolTable::root_scope_id(), Vec::new());
let pos = vec![Type::Instance(c1), Type::Instance(c2)];
let neg = vec![Type::Instance(c3)];
let id = store.add_intersection_type(file_id, &pos, &neg);
let id = store.add_intersection(file_id, &pos, &neg);
assert_eq!(
store.get_intersection(id).positive,
pos.into_iter().collect::<FxIndexSet<_>>()
);
assert_eq!(
store.get_intersection(id).negative,
neg.into_iter().collect::<FxIndexSet<_>>()
);
let intersection = Type::Intersection(id);
assert_intersection_elements(&store, intersection, &pos, &neg);
assert_eq!(format!("{}", intersection.display(&store)), "C1 & C2 & ~C3");
}
#[test]
fn flatten_union_zero_elements() {
let TestCase { store, file_id, .. } = create_test();
let ty = store.add_union(file_id, &[]);
assert!(matches!(ty, Type::Never), "{ty:?} should be Never");
}
#[test]
fn flatten_union_one_element() {
let TestCase { store, file_id, .. } = create_test();
let ty = store.add_union(file_id, &[Type::None]);
assert!(matches!(ty, Type::None), "{ty:?} should be None");
}
#[test]
fn flatten_nested_union() {
let TestCase { store, file_id, .. } = create_test();
let l1 = Type::IntLiteral(1);
let l2 = Type::IntLiteral(2);
let u1 = store.add_union(file_id, &[l1, l2]);
let u2 = store.add_union(file_id, &[u1, Type::None]);
assert_union_elements(&store, u2, &[l1, l2, Type::None]);
}
#[test]
fn flatten_intersection_zero_elements() {
let TestCase { store, file_id, .. } = create_test();
let ty = store.add_intersection(file_id, &[], &[]);
// TODO should be object, not Any
assert!(matches!(ty, Type::Any), "{ty:?} should be object");
}
#[test]
fn flatten_intersection_one_positive_element() {
let TestCase { store, file_id, .. } = create_test();
let ty = store.add_intersection(file_id, &[Type::None], &[]);
assert!(matches!(ty, Type::None), "{ty:?} should be None");
}
#[test]
fn flatten_intersection_one_negative_element() {
let TestCase { store, file_id, .. } = create_test();
let ty = store.add_intersection(file_id, &[], &[Type::None]);
assert_intersection_elements(&store, ty, &[], &[Type::None]);
}
#[test]
fn flatten_nested_intersection() {
let TestCase {
store,
file_id,
root_scope,
..
} = create_test();
let c1 = Type::Instance(store.add_class_type(file_id, "C1", root_scope, vec![]));
let c2 = Type::Instance(store.add_class_type(file_id, "C2", root_scope, vec![]));
let c1sub = Type::Instance(store.add_class_type(file_id, "C1sub", root_scope, vec![c1]));
let i1 = store.add_intersection(file_id, &[c1, c2], &[c1sub]);
let i2 = store.add_intersection(file_id, &[i1, Type::None], &[]);
assert_intersection_elements(&store, i2, &[c1, c2, Type::None], &[c1sub]);
}
}

View File

@@ -10,8 +10,8 @@ use crate::module::{resolve_module, ModuleName};
use crate::parse::parse;
use crate::semantic::types::{ModuleTypeId, Type};
use crate::semantic::{
resolve_global_symbol, semantic_index, ConstrainedDefinition, Definition, GlobalSymbolId,
ImportDefinition, ImportFromDefinition,
resolve_global_symbol, semantic_index, Definition, GlobalSymbolId, ImportDefinition,
ImportFromDefinition,
};
use crate::{FileId, Name};
@@ -41,47 +41,25 @@ pub fn infer_symbol_public_type(db: &dyn SemanticDb, symbol: GlobalSymbolId) ->
Ok(ty)
}
/// Infer type of a symbol as union of the given `Definitions`.
fn infer_type_from_definitions<T>(
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_type_from_definitions<T>(
db: &dyn SemanticDb,
symbol: GlobalSymbolId,
definitions: T,
) -> QueryResult<Type>
where
T: Debug + IntoIterator<Item = Definition>,
{
infer_type_from_constrained_definitions(
db,
symbol,
definitions
.into_iter()
.map(|definition| ConstrainedDefinition {
definition,
constraints: vec![],
}),
)
}
/// Infer type of a symbol as union of the given `ConstrainedDefinitions`.
fn infer_type_from_constrained_definitions<T>(
db: &dyn SemanticDb,
symbol: GlobalSymbolId,
constrained_definitions: T,
) -> QueryResult<Type>
where
T: IntoIterator<Item = ConstrainedDefinition>,
T: Debug + Iterator<Item = Definition>,
{
let jar: &SemanticJar = db.jar()?;
let mut tys = constrained_definitions
.into_iter()
.map(|def| infer_constrained_definition_type(db, symbol, def.clone()))
let mut tys = definitions
.map(|def| infer_definition_type(db, symbol, def.clone()))
.peekable();
if let Some(first) = tys.next() {
if tys.peek().is_some() {
Ok(jar.type_store.add_union(
Ok(Type::Union(jar.type_store.add_union(
symbol.file_id,
&Iterator::chain(std::iter::once(first), tys).collect::<QueryResult<Vec<_>>>()?,
))
)))
} else {
first
}
@@ -90,37 +68,6 @@ where
}
}
/// Infer type for a ConstrainedDefinition (intersection of the definition type and the
/// constraints)
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_constrained_definition_type(
db: &dyn SemanticDb,
symbol: GlobalSymbolId,
constrained_definition: ConstrainedDefinition,
) -> QueryResult<Type> {
let ConstrainedDefinition {
definition,
constraints,
} = constrained_definition;
let index = semantic_index(db, symbol.file_id)?;
let parsed = parse(db.upcast(), symbol.file_id)?;
let mut intersected_types = vec![infer_definition_type(db, symbol, definition)?];
for constraint in constraints {
if let Some(constraint_type) = infer_constraint_type(
db,
symbol,
index.resolve_expression_id(parsed.syntax(), constraint),
)? {
intersected_types.push(constraint_type);
}
}
let jar: &SemanticJar = db.jar()?;
Ok(jar
.type_store
.add_intersection(symbol.file_id, &intersected_types, &[]))
}
/// Infer a type for a Definition
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_definition_type(
db: &dyn SemanticDb,
@@ -175,7 +122,7 @@ pub fn infer_definition_type(
bases.push(infer_expr_type(db, file_id, base)?);
}
let scope_id = index.symbol_table().scope_id_for_node(node_key.erased());
let ty = type_store.add_class(file_id, &node.name.id, scope_id, bases);
let ty = Type::Class(type_store.add_class(file_id, &node.name.id, scope_id, bases));
type_store.cache_node_type(file_id, *node_key.erased(), ty);
Ok(ty)
}
@@ -197,13 +144,15 @@ pub fn infer_definition_type(
.map(|decorator| infer_expr_type(db, file_id, &decorator.expression))
.collect::<QueryResult<_>>()?;
let scope_id = index.symbol_table().scope_id_for_node(node_key.erased());
let ty = type_store.add_function(
file_id,
&node.name.id,
symbol.symbol_id,
scope_id,
decorator_tys,
);
let ty = type_store
.add_function(
file_id,
&node.name.id,
symbol.symbol_id,
scope_id,
decorator_tys,
)
.into();
type_store.cache_node_type(file_id, *node_key.erased(), ty);
Ok(ty)
}
@@ -235,57 +184,6 @@ pub fn infer_definition_type(
}
}
/// Return the type that the given constraint (an expression from a control-flow test) requires the
/// given symbol to have. For example, returns the Type "~None" as the constraint type if given the
/// symbol ID for x and the expression ID for `x is not None`. Returns (Rust) None if the given
/// expression applies no constraints on the given symbol.
#[tracing::instrument(level = "trace", skip(db))]
fn infer_constraint_type(
db: &dyn SemanticDb,
symbol_id: GlobalSymbolId,
// TODO this should preferably take an &ast::Expr instead of AnyNodeRef
expression: ast::AnyNodeRef,
) -> QueryResult<Option<Type>> {
let file_id = symbol_id.file_id;
let index = semantic_index(db, file_id)?;
let jar: &SemanticJar = db.jar()?;
let symbol_name = symbol_id.symbol_id.symbol(&index.symbol_table).name();
// TODO narrowing attributes
// TODO narrowing dict keys
// TODO isinstance, ==/!=, type(...), literals, bools...
match expression {
ast::AnyNodeRef::ExprCompare(ast::ExprCompare {
left,
ops,
comparators,
..
}) => {
// TODO chained comparisons
match left.as_ref() {
ast::Expr::Name(ast::ExprName { id, .. }) if id == symbol_name => match ops[0] {
ast::CmpOp::Is | ast::CmpOp::IsNot => {
Ok(match infer_expr_type(db, file_id, &comparators[0])? {
Type::None => Some(Type::None),
_ => None,
}
.map(|ty| {
if matches!(ops[0], ast::CmpOp::IsNot) {
jar.type_store.add_intersection(file_id, &[], &[ty])
} else {
ty
}
}))
}
_ => Ok(None),
},
_ => Ok(None),
}
}
_ => Ok(None),
}
}
/// Infer type of the given expression.
fn infer_expr_type(db: &dyn SemanticDb, file_id: FileId, expr: &ast::Expr) -> QueryResult<Type> {
// TODO cache the resolution of the type on the node
let index = semantic_index(db, file_id)?;
@@ -304,7 +202,8 @@ fn infer_expr_type(db: &dyn SemanticDb, file_id: FileId, expr: &ast::Expr) -> Qu
ast::Expr::Name(name) => {
// TODO look up in the correct scope, don't assume global
if let Some(symbol_id) = index.symbol_table().root_symbol_id_by_name(&name.id) {
infer_type_from_constrained_definitions(
// TODO should use only reachable definitions, not public type
infer_type_from_definitions(
db,
GlobalSymbolId { file_id, symbol_id },
index.reachable_definitions(symbol_id, expr),
@@ -334,7 +233,9 @@ fn infer_expr_type(db: &dyn SemanticDb, file_id: FileId, expr: &ast::Expr) -> Qu
let body_ty = infer_expr_type(db, file_id, body)?;
let else_ty = infer_expr_type(db, file_id, orelse)?;
let jar: &SemanticJar = db.jar()?;
Ok(jar.type_store.add_union(file_id, &[body_ty, else_ty]))
Ok(Type::Union(
jar.type_store.add_union(file_id, &[body_ty, else_ty]),
))
}
_ => todo!("expression type resolution for {:?}", expr),
}
@@ -738,25 +639,4 @@ mod tests {
assert_public_type(&case, "a", "x", "Literal[1] | None")
}
#[test]
fn narrow_none() -> anyhow::Result<()> {
let case = create_test()?;
write_to_path(
&case,
"a.py",
"
x = 1 if flag else None
y = 0
if x is not None:
y = x
z = y
",
)?;
// TODO normalization of unions and intersections: this type is technically correct but
// begging for normalization
assert_public_type(&case, "a", "z", "Literal[0] | Literal[1] | None & ~None")
}
}

View File

@@ -0,0 +1 @@
4b6558c12ac43cd40716cd6452fe98a632ae65d7

View File

@@ -65,9 +65,9 @@ array: 3.0-
ast: 3.0-
asynchat: 3.0-3.11
asyncio: 3.4-
asyncio.mixins: 3.10-
asyncio.exceptions: 3.8-
asyncio.format_helpers: 3.7-
asyncio.mixins: 3.10-
asyncio.runners: 3.7-
asyncio.staggered: 3.8-
asyncio.taskgroups: 3.11-
@@ -270,7 +270,6 @@ threading: 3.0-
time: 3.0-
timeit: 3.0-
tkinter: 3.0-
tkinter.tix: 3.0-3.12
token: 3.0-
tokenize: 3.0-
tomllib: 3.11-

View File

@@ -7,11 +7,8 @@ PyCF_ONLY_AST: Literal[1024]
PyCF_TYPE_COMMENTS: Literal[4096]
PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192]
if sys.version_info >= (3, 13):
PyCF_OPTIMIZED_AST: Literal[33792]
# Used for node end positions in constructor keyword arguments
_EndPositionT = typing_extensions.TypeVar("_EndPositionT", int, int | None, default=int | None)
_EndPositionT = typing_extensions.TypeVar("_EndPositionT", int, int | None, default=int | None) # noqa: Y023
# Alias used for fields that must always be valid identifiers
# A string `x` counts as a valid identifier if both the following are True

View File

@@ -63,7 +63,8 @@ A_COLOR: int
A_DIM: int
A_HORIZONTAL: int
A_INVIS: int
A_ITALIC: int
if sys.platform != "darwin":
A_ITALIC: int
A_LEFT: int
A_LOW: int
A_NORMAL: int

View File

@@ -45,5 +45,5 @@ class make_scanner:
def __init__(self, context: make_scanner) -> None: ...
def __call__(self, string: str, index: int) -> tuple[Any, int]: ...
def encode_basestring_ascii(s: str, /) -> str: ...
def encode_basestring_ascii(s: str) -> str: ...
def scanstring(string: str, end: int, strict: bool = ...) -> tuple[str, int]: ...

View File

@@ -1,7 +1,5 @@
import sys
from collections.abc import Callable
from typing import Any, ClassVar, Literal, final
from typing_extensions import TypeAlias
# _tkinter is meant to be only used internally by tkinter, but some tkinter
# functions e.g. return _tkinter.Tcl_Obj objects. Tcl_Obj represents a Tcl
@@ -32,8 +30,6 @@ class Tcl_Obj:
class TclError(Exception): ...
_TkinterTraceFunc: TypeAlias = Callable[[tuple[str, ...]], object]
# This class allows running Tcl code. Tkinter uses it internally a lot, and
# it's often handy to drop a piece of Tcl code into a tkinter program. Example:
#
@@ -90,9 +86,6 @@ class TkappType:
def unsetvar(self, *args, **kwargs): ...
def wantobjects(self, *args, **kwargs): ...
def willdispatch(self): ...
if sys.version_info >= (3, 12):
def gettrace(self, /) -> _TkinterTraceFunc | None: ...
def settrace(self, func: _TkinterTraceFunc | None, /) -> None: ...
# These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS
ALL_EVENTS: Literal[-3]

View File

@@ -365,6 +365,3 @@ def walk(node: AST) -> Iterator[AST]: ...
if sys.version_info >= (3, 9):
def main() -> None: ...
if sys.version_info >= (3, 14):
def compare(left: AST, right: AST, /, *, compare_attributes: bool = False) -> bool: ...

Some files were not shown because too many files have changed in this diff.