Compare commits: `editables-...0.6.6` (517 commits)
```diff
@@ -20,7 +20,7 @@
   "extensions": [
     "ms-python.python",
     "rust-lang.rust-analyzer",
-    "serayuzgur.crates",
+    "fill-labs.dependi",
     "tamasfe.even-better-toml",
     "Swellaby.vscode-rust-test-adapter",
     "charliermarsh.ruff"
```
.github/renovate.json5 (27 changes)

```diff
@@ -8,15 +8,32 @@
   semanticCommits: "disabled",
   separateMajorMinor: false,
   prHourlyLimit: 10,
-  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
+  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
   cargo: {
     // See https://docs.renovatebot.com/configuration-options/#rangestrategy
     rangeStrategy: "update-lockfile",
   },
   pep621: {
     // The default for this package manager is to only search for `pyproject.toml` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
     fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
   },
+  pip_requirements: {
+    // The default for this package manager is to run on all requirements.txt files:
+    // https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
+    // `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
+    // outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
+    // which takes a regex string, `ignorePaths` takes a glob string, so we have to use
+    // a "negative glob pattern".
+    // See:
+    // - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
+    // - https://docs.renovatebot.com/configuration-options/#ignorepaths
+    // - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
+    ignorePaths: ["!docs/requirements*.txt"]
+  },
   npm: {
     // The default for this package manager is to only search for `package.json` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
     fileMatch: ["^playground/.*package\\.json$"],
   },
   "pre-commit": {
@@ -48,6 +65,14 @@
     matchManagers: ["cargo"],
     enabled: false,
   },
+  {
+    // `mkdocs-material` requires a manual update to keep the version in sync
+    // with `mkdocs-material-insider`.
+    // See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
+    matchManagers: ["pip_requirements"],
+    matchPackagePatterns: ["mkdocs-material"],
+    enabled: false,
+  },
   {
     groupName: "pre-commit dependencies",
     matchManagers: ["pre-commit"],
```
.github/workflows/ci.yaml (19 changes)

```diff
@@ -37,7 +37,7 @@ jobs:
         with:
           fetch-depth: 0

-      - uses: tj-actions/changed-files@v44
+      - uses: tj-actions/changed-files@v45
         id: changed
         with:
           files_yaml: |
@@ -111,7 +111,7 @@
       - name: "Clippy"
         run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
       - name: "Clippy (wasm)"
-        run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
+        run: cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings

   cargo-test-linux:
     name: "cargo test (linux)"
@@ -142,6 +142,13 @@
       # Check for broken links in the documentation.
       - run: cargo doc --all --no-deps
         env:
           RUSTDOCFLAGS: "-D warnings"
+      # Use --document-private-items so that all our doc comments are kept in
+      # sync, not just public items. Eventually we should do this for all
+      # crates; for now add crates here as they are warning-clean to prevent
+      # regression.
+      - run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p ruff_db --document-private-items
+        env:
+          # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
+          RUSTDOCFLAGS: "-D warnings"
@@ -191,10 +198,14 @@
           cache-dependency-path: playground/package-lock.json
       - uses: jetli/wasm-pack-action@v0.4.0
       - uses: Swatinem/rust-cache@v2
-      - name: "Run wasm-pack"
+      - name: "Test ruff_wasm"
         run: |
           cd crates/ruff_wasm
           wasm-pack test --node
+      - name: "Test red_knot_wasm"
+        run: |
+          cd crates/red_knot_wasm
+          wasm-pack test --node

   cargo-build-release:
     name: "cargo build (release)"
@@ -619,7 +630,7 @@
         run: cargo codspeed build --features codspeed -p ruff_benchmark

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@v2
+        uses: CodSpeedHQ/action@v3
         with:
           run: cargo codspeed run
           token: ${{ secrets.CODSPEED_TOKEN }}
```
.github/workflows/pr-comment.yaml (2 changes)

```diff
@@ -23,6 +23,7 @@ jobs:
           name: pr-number
           run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
           if_no_artifact_found: ignore
+          allow_forks: true

       - name: Parse pull request number
         id: pr-number
@@ -43,6 +44,7 @@
           path: pr/ecosystem
           workflow_conclusion: completed
           if_no_artifact_found: ignore
+          allow_forks: true

       - name: Generate comment content
         id: generate-comment
```
.github/workflows/publish-docs.yml (13 changes)

```diff
@@ -34,10 +34,10 @@ jobs:
       - name: "Set docs version"
         run: |
           version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
-          # if version is missing, exit with error
-          if [[ -z "$version" ]]; then
-            echo "Can't build docs without a version."
-            exit 1
+          # if version is missing, use 'latest'
+          if [ -z "$version" ]; then
+            echo "Using 'latest' as version"
+            version="latest"
           fi

           # Use version as display name for now
@@ -104,8 +104,8 @@
         run: |
           branch_name="${{ env.branch_name }}"

-          git config user.name "$GITHUB_ACTOR"
-          git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
+          git config user.name "astral-docs-bot"
+          git config user.email "176161322+astral-docs-bot@users.noreply.github.com"

           git checkout -b $branch_name
           git add site/ruff
@@ -145,6 +145,7 @@
           GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
         run: |
           branch_name="${{ env.branch_name }}"

           # auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
+          # give the PR a few seconds to be created before trying to auto-merge it
           sleep 10
```
.github/workflows/sync_typeshed.yaml (14 changes)

```diff
@@ -37,13 +37,13 @@ jobs:
       - name: Sync typeshed
         id: sync
         run: |
-          rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed
-          mkdir ruff/crates/red_knot_module_resolver/vendor/typeshed
-          cp typeshed/README.md ruff/crates/red_knot_module_resolver/vendor/typeshed
-          cp typeshed/LICENSE ruff/crates/red_knot_module_resolver/vendor/typeshed
-          cp -r typeshed/stdlib ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib
-          rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib/@tests
-          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
+          rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed
+          mkdir ruff/crates/red_knot_python_semantic/vendor/typeshed
+          cp typeshed/README.md ruff/crates/red_knot_python_semantic/vendor/typeshed
+          cp typeshed/LICENSE ruff/crates/red_knot_python_semantic/vendor/typeshed
+          cp -r typeshed/stdlib ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib
+          rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib/@tests
+          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt
       - name: Commit the changes
         id: commit
         if: ${{ steps.sync.outcome == 'success' }}
```
.gitignore (8 changes)

```diff
@@ -21,6 +21,14 @@ flamegraph.svg
 # `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
 /target*

+# samply profiles
+profile.json
+
+# tracing-flame traces
+tracing.folded
+tracing-flamechart.svg
+tracing-flamegraph.svg
+
 ###
 # Rust.gitignore
 ###
```
```diff
@@ -14,6 +14,9 @@ MD041: false
 # MD013/line-length
 MD013: false

+# MD014/commands-show-output
+MD014: false
+
 # MD024/no-duplicate-heading
 MD024:
   # Allow when nested under different parents e.g. CHANGELOG.md
```
```diff
@@ -2,9 +2,12 @@ fail_fast: true
 exclude: |
   (?x)^(
-    crates/red_knot_module_resolver/vendor/.*|
+    crates/red_knot_python_semantic/vendor/.*|
+    crates/red_knot_workspace/resources/.*|
     crates/ruff_linter/resources/.*|
     crates/ruff_linter/src/rules/.*/snapshots/.*|
     crates/ruff_notebook/resources/.*|
+    crates/ruff_server/resources/.*|
     crates/ruff/resources/.*|
     crates/ruff_python_formatter/resources/.*|
     crates/ruff_python_formatter/tests/snapshots/.*|
@@ -14,7 +17,7 @@ exclude: |

 repos:
   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.18
+    rev: v0.19
     hooks:
       - id: validate-pyproject

@@ -42,7 +45,7 @@ repos:
   )$

   - repo: https://github.com/crate-ci/typos
-    rev: v1.23.2
+    rev: v1.24.5
     hooks:
       - id: typos

@@ -56,18 +59,13 @@
     pass_filenames: false # This makes it a lot faster

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.2
+    rev: v0.6.5
     hooks:
       - id: ruff-format
       - id: ruff
         args: [--fix, --exit-non-zero-on-fix]
         types_or: [python, pyi]
         require_serial: true
-        exclude: |
-          (?x)^(
-            crates/ruff_linter/resources/.*|
-            crates/ruff_python_formatter/resources/.*
-          )$

   # Prettier
   - repo: https://github.com/pre-commit/mirrors-prettier
```
@@ -1,5 +1,43 @@

# Breaking Changes

## 0.6.0

- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).

You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):

```toml
[tool.ruff.lint.per-file-ignores]
"*.ipynb" = ["E501"] # disable line-too-long in notebooks
```

If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
section-specific `exclude` option to do so. For example, the following would only lint Jupyter
Notebook files and not format them:

```toml
[tool.ruff.format]
exclude = ["*.ipynb"]
```

And, conversely, the following would only format Jupyter Notebook files and not lint them:

```toml
[tool.ruff.lint]
exclude = ["*.ipynb"]
```

You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:

```toml
[tool.ruff]
extend-exclude = ["*.ipynb"]
```

## 0.5.0

- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
CHANGELOG.md (396 changes)

@@ -1,5 +1,401 @@

# Changelog

## 0.6.6

### Preview features

- \[`refurb`\] Skip `slice-to-remove-prefix-or-suffix` (`FURB188`) when non-trivial slice steps are present ([#13405](https://github.com/astral-sh/ruff/pull/13405))
- Add a subcommand to generate dependency graphs ([#13402](https://github.com/astral-sh/ruff/pull/13402))
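For illustration, here is a minimal sketch (our own snippet, not taken from the Ruff docs) of the slicing pattern `FURB188` rewrites to `str.removeprefix`, alongside a non-trivial slice step that the rule now skips because the rewrite would not be equivalent:

```python
def strip_crate_prefix(name: str) -> str:
    # FURB188: manual prefix removal via slicing; the suggested fix is
    # `return name.removeprefix("ruff_")` (Python 3.9+).
    if name.startswith("ruff_"):
        return name[len("ruff_"):]
    return name


def odd_chars_after_prefix(name: str) -> str:
    # Not flagged as of 0.6.6: the slice has a step of 2, so
    # `removeprefix` would not be an equivalent rewrite.
    if name.startswith("ruff_"):
        return name[len("ruff_")::2]
    return name
```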
### Formatter

- Fix placement of inline parameter comments ([#13379](https://github.com/astral-sh/ruff/pull/13379))

### Server

- Fix off-by-one error in the `LineIndex::offset` calculation ([#13407](https://github.com/astral-sh/ruff/pull/13407))

### Bug fixes

- \[`fastapi`\] Respect FastAPI aliases in route definitions ([#13394](https://github.com/astral-sh/ruff/pull/13394))
- \[`pydocstyle`\] Respect word boundaries when detecting function signature in docs ([#13388](https://github.com/astral-sh/ruff/pull/13388))

### Documentation

- Add backlinks to rule overview linter ([#13368](https://github.com/astral-sh/ruff/pull/13368))
- Fix documentation for editor vim plugin ALE ([#13348](https://github.com/astral-sh/ruff/pull/13348))
- Fix rendering of `FURB188` docs ([#13406](https://github.com/astral-sh/ruff/pull/13406))

## 0.6.5

### Preview features

- \[`pydoclint`\] Ignore `DOC201` when function name is `__new__` ([#13300](https://github.com/astral-sh/ruff/pull/13300))
- \[`refurb`\] Implement `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#13256](https://github.com/astral-sh/ruff/pull/13256))

### Rule changes

- \[`eradicate`\] Ignore script-comments with multiple end-tags (`ERA001`) ([#13283](https://github.com/astral-sh/ruff/pull/13283))
- \[`pyflakes`\] Improve error message for `UndefinedName` when a builtin was added in a newer version than specified in Ruff config (`F821`) ([#13293](https://github.com/astral-sh/ruff/pull/13293))

### Server

- Add support for extensionless Python files for server ([#13326](https://github.com/astral-sh/ruff/pull/13326))
- Fix configuration inheritance for configurations specified in the LSP settings ([#13285](https://github.com/astral-sh/ruff/pull/13285))

### Bug fixes

- \[`ruff`\] Handle unary operators in `decimal-from-float-literal` (`RUF032`) ([#13275](https://github.com/astral-sh/ruff/pull/13275))
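As a small sketch of our own (not from the rule docs), `RUF032` flags `Decimal` constructed from a float literal, and after this fix it also handles a unary minus in front of the literal:

```python
from decimal import Decimal

# Flagged by RUF032: the float literal loses precision before Decimal sees it.
a = Decimal(0.1)    # suggested fix: Decimal("0.1")

# Handled since this release: unary operators in front of the literal.
b = Decimal(-0.1)   # suggested fix: Decimal("-0.1")
```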
### CLI

- Only include rules with diagnostics in SARIF metadata ([#13268](https://github.com/astral-sh/ruff/pull/13268))

### Playground

- Add "Copy as pyproject.toml/ruff.toml" and "Paste from TOML" ([#13328](https://github.com/astral-sh/ruff/pull/13328))
- Fix errors not shown for restored snippet on page load ([#13262](https://github.com/astral-sh/ruff/pull/13262))

## 0.6.4

### Preview features

- \[`flake8-builtins`\] Use dynamic builtins list based on Python version ([#13172](https://github.com/astral-sh/ruff/pull/13172))
- \[`pydoclint`\] Permit yielding `None` in `DOC402` and `DOC403` ([#13148](https://github.com/astral-sh/ruff/pull/13148))
- \[`pylint`\] Update diagnostic message for `PLW3201` ([#13194](https://github.com/astral-sh/ruff/pull/13194))
- \[`ruff`\] Implement `post-init-default` (`RUF033`) ([#13192](https://github.com/astral-sh/ruff/pull/13192))
- \[`ruff`\] Implement useless if-else (`RUF034`) ([#13218](https://github.com/astral-sh/ruff/pull/13218))
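A minimal sketch of the pattern we understand `useless-if-else` (`RUF034`) to target, a ternary whose branches are identical (the snippet is our own, not from the rule docs):

```python
def normalize(value: int) -> int:
    # RUF034: both branches yield the same expression, so the
    # conditional is useless; this is equivalent to `result = value`.
    result = value if value > 0 else value
    return result
```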
### Rule changes

- \[`flake8-pyi`\] Respect `pep8_naming.classmethod-decorators` settings when determining if a method is a classmethod in `custom-type-var-return-type` (`PYI019`) ([#13162](https://github.com/astral-sh/ruff/pull/13162))
- \[`flake8-pyi`\] Teach various rules that annotations might be stringized ([#12951](https://github.com/astral-sh/ruff/pull/12951))
- \[`pylint`\] Avoid `no-self-use` for `attrs`-style validators ([#13166](https://github.com/astral-sh/ruff/pull/13166))
- \[`pylint`\] Recurse into subscript subexpressions when searching for list/dict lookups (`PLR1733`, `PLR1736`) ([#13186](https://github.com/astral-sh/ruff/pull/13186))
- \[`pyupgrade`\] Detect `aiofiles.open` calls in `UP015` ([#13173](https://github.com/astral-sh/ruff/pull/13173))
- \[`pyupgrade`\] Mark `sys.version_info[0] < 3` and similar comparisons as outdated (`UP036`) ([#13175](https://github.com/astral-sh/ruff/pull/13175))
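For example (our own sketch), with a modern `target-version` the following guard is always false, which is what `UP036` now reports for subscript-style comparisons as well:

```python
import sys

if sys.version_info[0] < 3:  # UP036: outdated version block, never true on Python 3
    print("running on Python 2")
else:
    print("running on Python 3")
```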
### CLI

- Enrich messages of SARIF results ([#13180](https://github.com/astral-sh/ruff/pull/13180))
- Handle singular case for incompatible rules warning in `ruff format` output ([#13212](https://github.com/astral-sh/ruff/pull/13212))

### Bug fixes

- \[`pydocstyle`\] Improve heuristics for detecting Google-style docstrings ([#13142](https://github.com/astral-sh/ruff/pull/13142))
- \[`refurb`\] Treat `sep` arguments with effects as unsafe removals (`FURB105`) ([#13165](https://github.com/astral-sh/ruff/pull/13165))

## 0.6.3

### Preview features

- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with `dbm.sqlite3` (`SIM115`) ([#13104](https://github.com/astral-sh/ruff/pull/13104))
- \[`pycodestyle`\] Disable `E741` in stub files (`.pyi`) ([#13119](https://github.com/astral-sh/ruff/pull/13119))
- \[`pydoclint`\] Avoid `DOC201` on explicit returns in functions that only return `None` ([#13064](https://github.com/astral-sh/ruff/pull/13064))
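The general shape `SIM115` looks for, sketched here with a plain `open()` call (our own example; the `dbm.sqlite3` extension applies the same check to that module's `open`):

```python
from pathlib import Path

Path("data.txt").write_text("hello")

# SIM115: resource opened without a context manager.
f = open("data.txt")
contents = f.read()
f.close()

# Preferred: the file is closed even if an exception is raised.
with open("data.txt") as f:
    contents = f.read()
```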
### Rule changes

- \[`flake8-async`\] Disable check for `asyncio` before Python 3.11 (`ASYNC109`) ([#13023](https://github.com/astral-sh/ruff/pull/13023))

### Bug fixes

- \[`FastAPI`\] Avoid introducing invalid syntax in fix for `fast-api-non-annotated-dependency` (`FAST002`) ([#13133](https://github.com/astral-sh/ruff/pull/13133))
- \[`flake8-implicit-str-concat`\] Normalize octals before merging concatenated strings in `single-line-implicit-string-concatenation` (`ISC001`) ([#13118](https://github.com/astral-sh/ruff/pull/13118))
- \[`flake8-pytest-style`\] Improve help message for `pytest-incorrect-mark-parentheses-style` (`PT023`) ([#13092](https://github.com/astral-sh/ruff/pull/13092))
- \[`pylint`\] Avoid autofix for calls that aren't `min` or `max` as starred expression (`PLW3301`) ([#13089](https://github.com/astral-sh/ruff/pull/13089))
- \[`ruff`\] Add `datetime.time`, `datetime.tzinfo`, and `datetime.timezone` as immutable function calls (`RUF009`) ([#13109](https://github.com/astral-sh/ruff/pull/13109))
- \[`ruff`\] Extend comment deletion for `RUF100` to include trailing text from `noqa` directives while preserving any following comments on the same line, if any ([#13105](https://github.com/astral-sh/ruff/pull/13105))
- Fix dark theme on initial page load for the Ruff playground ([#13077](https://github.com/astral-sh/ruff/pull/13077))

## 0.6.2

### Preview features

- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with other standard-library IO modules (`SIM115`) ([#12959](https://github.com/astral-sh/ruff/pull/12959))
- \[`ruff`\] Avoid `unused-async` for functions with FastAPI route decorator (`RUF029`) ([#12938](https://github.com/astral-sh/ruff/pull/12938))
- \[`ruff`\] Ignore `fstring-missing-syntax` (`RUF027`) for `fastAPI` paths ([#12939](https://github.com/astral-sh/ruff/pull/12939))
- \[`ruff`\] Implement check for Decimal called with a float literal (RUF032) ([#12909](https://github.com/astral-sh/ruff/pull/12909))

### Rule changes

- \[`flake8-bugbear`\] Update diagnostic message when expression is at the end of function (`B015`) ([#12944](https://github.com/astral-sh/ruff/pull/12944))
- \[`flake8-pyi`\] Skip type annotations in `string-or-bytes-too-long` (`PYI053`) ([#13002](https://github.com/astral-sh/ruff/pull/13002))
- \[`flake8-type-checking`\] Always recognise relative imports as first-party ([#12994](https://github.com/astral-sh/ruff/pull/12994))
- \[`flake8-unused-arguments`\] Ignore unused arguments on stub functions (`ARG001`) ([#12966](https://github.com/astral-sh/ruff/pull/12966))
- \[`pylint`\] Ignore augmented assignment for `self-cls-assignment` (`PLW0642`) ([#12957](https://github.com/astral-sh/ruff/pull/12957))

### Server

- Show full context in error log messages ([#13029](https://github.com/astral-sh/ruff/pull/13029))

### Bug fixes

- \[`pep8-naming`\] Don't flag `from` imports following conventional import names (`N817`) ([#12946](https://github.com/astral-sh/ruff/pull/12946))
- \[`pylint`\] Allow `__new__` methods to have `cls` as their first argument even if decorated with `@staticmethod` for `bad-staticmethod-argument` (`PLW0211`) ([#12958](https://github.com/astral-sh/ruff/pull/12958))

### Documentation

- Add `hyperfine` installation instructions; update `hyperfine` code samples ([#13034](https://github.com/astral-sh/ruff/pull/13034))
- Expand note to use Ruff with other language server in Kate ([#12806](https://github.com/astral-sh/ruff/pull/12806))
- Update example for `PT001` as per the new default behavior ([#13019](https://github.com/astral-sh/ruff/pull/13019))
- \[`perflint`\] Improve docs for `try-except-in-loop` (`PERF203`) ([#12947](https://github.com/astral-sh/ruff/pull/12947))
- \[`pydocstyle`\] Add reference to `lint.pydocstyle.ignore-decorators` setting to rule docs ([#12996](https://github.com/astral-sh/ruff/pull/12996))

## 0.6.1

This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6,
Ruff changed its behavior to lint and format Jupyter notebooks by default;
however, due to an oversight, these files were still excluded by default if
Ruff was run via pre-commit, leading to inconsistent behavior.
This has [now been fixed](https://github.com/astral-sh/ruff-pre-commit/pull/96).

### Preview features

- \[`fastapi`\] Implement `fast-api-unused-path-parameter` (`FAST003`) ([#12638](https://github.com/astral-sh/ruff/pull/12638))

### Rule changes

- \[`pylint`\] Rename `too-many-positional` to `too-many-positional-arguments` (`R0917`) ([#12905](https://github.com/astral-sh/ruff/pull/12905))

### Server

- Fix crash when applying "fix-all" code-action to notebook cells ([#12929](https://github.com/astral-sh/ruff/pull/12929))

### Other changes

- \[`flake8-naming`\]: Respect import conventions (`N817`) ([#12922](https://github.com/astral-sh/ruff/pull/12922))

## 0.6.0

Check out the [blog post](https://astral.sh/blog/ruff-v0.6.0) for a migration guide and overview of the changes!

### Breaking changes

See also, the "Remapped rules" section which may result in disabled rules.

- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
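Concretely, the new default for `PT001`/`PT023` prefers the bare decorator when no arguments are passed (a small sketch of ours; running it requires `pytest` to be installed):

```python
import pytest


@pytest.fixture()  # old default style: parentheses
def connection_old():
    ...


@pytest.fixture  # new default style (0.6+): no parentheses without arguments
def connection_new():
    ...
```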
### Deprecations

The following rules are now deprecated:

- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)

### Remapped rules

The following rules have been remapped to new rule codes:

- [`unnecessary-dict-comprehension-for-iterable`](https://docs.astral.sh/ruff/rules/unnecessary-dict-comprehension-for-iterable/): `RUF025` to `C420`
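As a sketch of the remapped rule's target pattern (our own example, not from the rule docs):

```python
names = ["alpha", "beta"]

# C420 (formerly RUF025): dict comprehension with a constant value.
flags = {name: None for name in names}

# Suggested replacement:
flags = dict.fromkeys(names)
```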
### Stabilization

The following rules have been stabilized and are no longer in preview:

- [`singledispatch-method`](https://docs.astral.sh/ruff/rules/singledispatch-method/) (`PLE1519`)
- [`singledispatchmethod-function`](https://docs.astral.sh/ruff/rules/singledispatchmethod-function/) (`PLE1520`)
- [`bad-staticmethod-argument`](https://docs.astral.sh/ruff/rules/bad-staticmethod-argument/) (`PLW0211`)
- [`if-stmt-min-max`](https://docs.astral.sh/ruff/rules/if-stmt-min-max/) (`PLR1730`)
- [`invalid-bytes-return-type`](https://docs.astral.sh/ruff/rules/invalid-bytes-return-type/) (`PLE0308`)
- [`invalid-hash-return-type`](https://docs.astral.sh/ruff/rules/invalid-hash-return-type/) (`PLE0309`)
- [`invalid-index-return-type`](https://docs.astral.sh/ruff/rules/invalid-index-return-type/) (`PLE0305`)
- [`invalid-length-return-type`](https://docs.astral.sh/ruff/rules/invalid-length-return-type/) (`PLE0303`)
- [`self-or-cls-assignment`](https://docs.astral.sh/ruff/rules/self-or-cls-assignment/) (`PLW0642`)
- [`byte-string-usage`](https://docs.astral.sh/ruff/rules/byte-string-usage/) (`PYI057`)
- [`duplicate-literal-member`](https://docs.astral.sh/ruff/rules/duplicate-literal-member/) (`PYI062`)
- [`redirected-noqa`](https://docs.astral.sh/ruff/rules/redirected-noqa/) (`RUF101`)
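For instance, `if-stmt-min-max` (`PLR1730`) targets assignments that re-implement `min`/`max` by hand (our own sketch):

```python
def clamp_to_floor(value: int, floor: int) -> int:
    # PLR1730: equivalent to `value = max(value, floor)`.
    if value < floor:
        value = floor
    return value
```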
The following behaviors have been stabilized:

- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context managers.
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context managers.
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context managers.
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context managers.
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context managers.

The following fixes have been stabilized:

- [`superfluous-else-return`](https://docs.astral.sh/ruff/rules/superfluous-else-return/) (`RET505`)
- [`superfluous-else-raise`](https://docs.astral.sh/ruff/rules/superfluous-else-raise/) (`RET506`)
- [`superfluous-else-continue`](https://docs.astral.sh/ruff/rules/superfluous-else-continue/) (`RET507`)
- [`superfluous-else-break`](https://docs.astral.sh/ruff/rules/superfluous-else-break/) (`RET508`)
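A small sketch of ours showing the `superfluous-else-*` family whose fixes are now stable; the `else` can be dropped because the `if` branch always returns:

```python
def sign(x: int) -> int:
    if x >= 0:
        return 1
    else:  # RET505: superfluous `else` after `return`
        return -1


def sign_fixed(x: int) -> int:
    if x >= 0:
        return 1
    return -1
```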
### Preview features

- \[`flake8-simplify`\] Further simplify to binary in preview for (`SIM108`) ([#12796](https://github.com/astral-sh/ruff/pull/12796))
- \[`pyupgrade`\] Show violations without auto-fix (`UP031`) ([#11229](https://github.com/astral-sh/ruff/pull/11229))

### Rule changes

- \[`flake8-import-conventions`\] Add `xml.etree.ElementTree` to default conventions ([#12455](https://github.com/astral-sh/ruff/pull/12455))
- \[`flake8-pytest-style`\] Add a space after comma in CSV output (`PT006`) ([#12853](https://github.com/astral-sh/ruff/pull/12853))

### Server

- Show a message for incorrect settings ([#12781](https://github.com/astral-sh/ruff/pull/12781))

### Bug fixes

- \[`flake8-async`\] Do not lint yield in context manager (`ASYNC100`) ([#12896](https://github.com/astral-sh/ruff/pull/12896))
- \[`flake8-comprehensions`\] Do not lint `async for` comprehensions (`C419`) ([#12895](https://github.com/astral-sh/ruff/pull/12895))
- \[`flake8-return`\] Only add return `None` at end of a function (`RET503`) ([#11074](https://github.com/astral-sh/ruff/pull/11074))
- \[`flake8-type-checking`\] Avoid treating `dataclasses.KW_ONLY` as typing-only (`TCH003`) ([#12863](https://github.com/astral-sh/ruff/pull/12863))
- \[`pep8-naming`\] Treat `type(Protocol)` et al as metaclass base (`N805`) ([#12770](https://github.com/astral-sh/ruff/pull/12770))
- \[`pydoclint`\] Don't enforce returns and yields in abstract methods (`DOC201`, `DOC202`) ([#12771](https://github.com/astral-sh/ruff/pull/12771))
- \[`ruff`\] Skip tuples with slice expressions in (`RUF031`) ([#12768](https://github.com/astral-sh/ruff/pull/12768))
- \[`ruff`\] Ignore unparenthesized tuples in subscripts when the subscript is a type annotation or type alias (`RUF031`) ([#12762](https://github.com/astral-sh/ruff/pull/12762))
- \[`ruff`\] Ignore template strings passed to logging and `builtins._()` calls (`RUF027`) ([#12889](https://github.com/astral-sh/ruff/pull/12889))
- \[`ruff`\] Do not remove parens for tuples with starred expressions in Python \<=3.10 (`RUF031`) ([#12784](https://github.com/astral-sh/ruff/pull/12784))
- Evaluate default parameter values for a function in that function's enclosing scope ([#12852](https://github.com/astral-sh/ruff/pull/12852))

### Other changes

- Respect VS Code cell metadata when detecting the language of Jupyter Notebook cells ([#12864](https://github.com/astral-sh/ruff/pull/12864))
- Respect `kernelspec` notebook metadata when detecting the preferred language for a Jupyter Notebook ([#12875](https://github.com/astral-sh/ruff/pull/12875))

## 0.5.7

### Preview features

- \[`flake8-comprehensions`\] Account for list and set comprehensions in `unnecessary-literal-within-tuple-call` (`C409`) ([#12657](https://github.com/astral-sh/ruff/pull/12657))
- \[`flake8-pyi`\] Add autofix for `future-annotations-in-stub` (`PYI044`) ([#12676](https://github.com/astral-sh/ruff/pull/12676))
- \[`flake8-return`\] Avoid syntax error when auto-fixing `RET505` with mixed indentation (space and tabs) ([#12740](https://github.com/astral-sh/ruff/pull/12740))
- \[`pydoclint`\] Add `docstring-missing-yields` (`DOC402`) and `docstring-extraneous-yields` (`DOC403`) ([#12538](https://github.com/astral-sh/ruff/pull/12538))
- \[`pydoclint`\] Avoid `DOC201` if docstring begins with "Return", "Returns", "Yield", or "Yields" ([#12675](https://github.com/astral-sh/ruff/pull/12675))
- \[`pydoclint`\] Deduplicate collected exceptions after traversing function bodies (`DOC501`) ([#12642](https://github.com/astral-sh/ruff/pull/12642))
- \[`pydoclint`\] Ignore `DOC` errors for stub functions ([#12651](https://github.com/astral-sh/ruff/pull/12651))
- \[`pydoclint`\] Teach rules to understand reraised exceptions as being explicitly raised (`DOC501`, `DOC502`) ([#12639](https://github.com/astral-sh/ruff/pull/12639))
- \[`ruff`\] Implement `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#12480](https://github.com/astral-sh/ruff/pull/12480))
- \[`ruff`\] Mark `RUF023` fix as unsafe if `__slots__` is not a set and the binding is used elsewhere ([#12692](https://github.com/astral-sh/ruff/pull/12692))

### Rule changes

- \[`refurb`\] Add autofix for `implicit-cwd` (`FURB177`) ([#12708](https://github.com/astral-sh/ruff/pull/12708))
- \[`ruff`\] Add autofix for `zip-instead-of-pairwise` (`RUF007`) ([#12663](https://github.com/astral-sh/ruff/pull/12663))
- \[`tryceratops`\] Add `BaseException` to `raise-vanilla-class` rule (`TRY002`) ([#12620](https://github.com/astral-sh/ruff/pull/12620))
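For instance, the `zip-instead-of-pairwise` (`RUF007`) autofix rewrites the manual sliding-window idiom (our own sketch; the fix targets Python 3.10+, where `itertools.pairwise` exists):

```python
import itertools

values = [1, 2, 3, 4]

pairs = list(zip(values, values[1:]))     # RUF007
pairs = list(itertools.pairwise(values))  # autofix result
```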
### Server

- Ignore non-file workspace URL; Ruff will display a warning notification in this case ([#12725](https://github.com/astral-sh/ruff/pull/12725))

### CLI

- Fix cache invalidation for nested `pyproject.toml` files ([#12727](https://github.com/astral-sh/ruff/pull/12727))

### Bug fixes

- \[`flake8-async`\] Fix false positives with multiple `async with` items (`ASYNC100`) ([#12643](https://github.com/astral-sh/ruff/pull/12643))
- \[`flake8-bandit`\] Avoid false-positives for list concatenations in SQL construction (`S608`) ([#12720](https://github.com/astral-sh/ruff/pull/12720))
- \[`flake8-bugbear`\] Treat `return` as equivalent to `break` (`B909`) ([#12646](https://github.com/astral-sh/ruff/pull/12646))
- \[`flake8-comprehensions`\] Set comprehensions not a violation for `sum` in `unnecessary-comprehension-in-call` (`C419`) ([#12691](https://github.com/astral-sh/ruff/pull/12691))
- \[`flake8-simplify`\] Parenthesize conditions based on precedence when merging if arms (`SIM114`) ([#12737](https://github.com/astral-sh/ruff/pull/12737))
- \[`pydoclint`\] Try both 'Raises' section styles when convention is unspecified (`DOC501`) ([#12649](https://github.com/astral-sh/ruff/pull/12649))

## 0.5.6

Ruff 0.5.6 automatically enables linting and formatting of notebooks in *preview mode*.
You can opt-out of this behavior by adding `*.ipynb` to the `extend-exclude` setting.

```toml
[tool.ruff]
extend-exclude = ["*.ipynb"]
```

### Preview features

- Enable notebooks by default in preview mode ([#12621](https://github.com/astral-sh/ruff/pull/12621))
- \[`flake8-builtins`\] Implement import, lambda, and module shadowing ([#12546](https://github.com/astral-sh/ruff/pull/12546))
- \[`pydoclint`\] Add `docstring-missing-returns` (`DOC201`) and `docstring-extraneous-returns` (`DOC202`) ([#12485](https://github.com/astral-sh/ruff/pull/12485))

### Rule changes

- \[`flake8-return`\] Exempt cached properties and other property-like decorators from explicit return rule (`RET501`) ([#12563](https://github.com/astral-sh/ruff/pull/12563))

### Server

- Make server panic hook more error resilient ([#12610](https://github.com/astral-sh/ruff/pull/12610))
- Use `$/logTrace` for server trace logs in Zed and VS Code ([#12564](https://github.com/astral-sh/ruff/pull/12564))
- Keep track of deleted cells for reorder change request ([#12575](https://github.com/astral-sh/ruff/pull/12575))

### Configuration

- \[`flake8-implicit-str-concat`\] Always allow explicit multi-line concatenations when implicit concatenations are banned ([#12532](https://github.com/astral-sh/ruff/pull/12532))

### Bug fixes

- \[`flake8-async`\] Avoid flagging `asyncio.timeout`s as unused when the context manager includes `asyncio.TaskGroup` ([#12605](https://github.com/astral-sh/ruff/pull/12605))
- \[`flake8-slots`\] Avoid recommending `__slots__` for classes that inherit from more than `namedtuple` ([#12531](https://github.com/astral-sh/ruff/pull/12531))
- \[`isort`\] Avoid marking required imports as unused ([#12537](https://github.com/astral-sh/ruff/pull/12537))
- \[`isort`\] Preserve trailing inline comments on import-from statements ([#12498](https://github.com/astral-sh/ruff/pull/12498))
- \[`pycodestyle`\] Add newlines before comments (`E305`) ([#12606](https://github.com/astral-sh/ruff/pull/12606))
- \[`pycodestyle`\] Don't attach comments with mismatched indents ([#12604](https://github.com/astral-sh/ruff/pull/12604))
- \[`pyflakes`\] Fix preview-mode bugs in `F401` when attempting to autofix unused first-party submodule imports in an `__init__.py` file ([#12569](https://github.com/astral-sh/ruff/pull/12569))
- \[`pylint`\] Respect start index in `unnecessary-list-index-lookup` ([#12603](https://github.com/astral-sh/ruff/pull/12603))
- \[`pyupgrade`\] Avoid recommending no-argument super in `slots=True` dataclasses ([#12530](https://github.com/astral-sh/ruff/pull/12530))
- \[`pyupgrade`\] Use colon rather than dot formatting for integer-only types ([#12534](https://github.com/astral-sh/ruff/pull/12534))
- Fix NFKC normalization bug when removing unused imports ([#12571](https://github.com/astral-sh/ruff/pull/12571))

### Other changes

- Consider more stdlib decorators to be property-like ([#12583](https://github.com/astral-sh/ruff/pull/12583))
- Improve handling of metaclasses in various linter rules ([#12579](https://github.com/astral-sh/ruff/pull/12579))
- Improve consistency between linter rules in determining whether a function is a property ([#12581](https://github.com/astral-sh/ruff/pull/12581))

## 0.5.5

### Preview features

- \[`fastapi`\] Implement `fastapi-redundant-response-model` (`FAST001`) and `fastapi-non-annotated-dependency` (`FAST002`) ([#11579](https://github.com/astral-sh/ruff/pull/11579))
- \[`pydoclint`\] Implement `docstring-missing-exception` (`DOC501`) and `docstring-extraneous-exception` (`DOC502`) ([#11471](https://github.com/astral-sh/ruff/pull/11471))

### Rule changes

- \[`numpy`\] Fix NumPy 2.0 rule for `np.alltrue` and `np.sometrue` ([#12473](https://github.com/astral-sh/ruff/pull/12473))
- \[`numpy`\] Ignore `NPY201` inside `except` blocks for compatibility with older numpy versions ([#12490](https://github.com/astral-sh/ruff/pull/12490))
- \[`pep8-naming`\] Avoid applying `ignore-names` to `self` and `cls` function names (`N804`, `N805`) ([#12497](https://github.com/astral-sh/ruff/pull/12497))

### Formatter

- Fix incorrect placement of leading function comment with type params ([#12447](https://github.com/astral-sh/ruff/pull/12447))

### Server

- Do not bail code action resolution when a quick fix is requested ([#12462](https://github.com/astral-sh/ruff/pull/12462))

### Bug fixes

- Fix `Ord` implementation of `cmp_fix` ([#12471](https://github.com/astral-sh/ruff/pull/12471))
- Raise syntax error for unparenthesized generator expression in multi-argument call ([#12445](https://github.com/astral-sh/ruff/pull/12445))
- \[`pydoclint`\] Fix panic in `DOC501` reported in [#12428](https://github.com/astral-sh/ruff/pull/12428) ([#12435](https://github.com/astral-sh/ruff/pull/12435))
- \[`flake8-bugbear`\] Allow singleton tuples with starred expressions in `B013` ([#12484](https://github.com/astral-sh/ruff/pull/12484))

### Documentation

- Add Eglot setup guide for Emacs editor ([#12426](https://github.com/astral-sh/ruff/pull/12426))
- Add note about the breaking change in `nvim-lspconfig` ([#12507](https://github.com/astral-sh/ruff/pull/12507))
- Add note to include notebook files for native server ([#12449](https://github.com/astral-sh/ruff/pull/12449))
- Add setup docs for Zed editor ([#12501](https://github.com/astral-sh/ruff/pull/12501))

## 0.5.4

### Rule changes

- \[`ruff`\] Rename `RUF007` to `zip-instead-of-pairwise` ([#12399](https://github.com/astral-sh/ruff/pull/12399))

### Bug fixes

- \[`flake8-builtins`\] Avoid shadowing diagnostics for `@override` methods ([#12415](https://github.com/astral-sh/ruff/pull/12415))
- \[`flake8-comprehensions`\] Insert parentheses for multi-argument generators ([#12422](https://github.com/astral-sh/ruff/pull/12422))
- \[`pydocstyle`\] Handle escaped docstrings within docstring (`D301`) ([#12192](https://github.com/astral-sh/ruff/pull/12192))

### Documentation

- Fix GitHub link to Neovim setup ([#12410](https://github.com/astral-sh/ruff/pull/12410))
- Fix `output-format` default in settings reference ([#12409](https://github.com/astral-sh/ruff/pull/12409))

## 0.5.3

**Ruff 0.5.3 marks the stable release of the Ruff language server and introduces revamped
@@ -2,35 +2,6 @@
|
||||
|
||||
Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.
|
||||
|
||||
- [The Basics](#the-basics)
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [Development](#development)
|
||||
- [Project Structure](#project-structure)
|
||||
- [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
|
||||
- [Rule naming convention](#rule-naming-convention)
|
||||
- [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
|
||||
- [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
|
||||
- [MkDocs](#mkdocs)
|
||||
- [Release Process](#release-process)
|
||||
- [Creating a new release](#creating-a-new-release)
|
||||
- [Ecosystem CI](#ecosystem-ci)
|
||||
- [Benchmarking and Profiling](#benchmarking-and-profiling)
|
||||
- [CPython Benchmark](#cpython-benchmark)
|
||||
- [Microbenchmarks](#microbenchmarks)
|
||||
- [Benchmark-driven Development](#benchmark-driven-development)
|
||||
- [PR Summary](#pr-summary)
|
||||
- [Tips](#tips)
|
||||
- [Profiling Projects](#profiling-projects)
|
||||
- [Linux](#linux)
|
||||
- [Mac](#mac)
|
||||
- [`cargo dev`](#cargo-dev)
|
||||
- [Subsystems](#subsystems)
|
||||
- [Compilation Pipeline](#compilation-pipeline)
|
||||
- [Import Categorization](#import-categorization)
|
||||
- [Project root](#project-root)
|
||||
- [Package root](#package-root)
|
||||
- [Import categorization](#import-categorization-1)
|
||||
|
||||
## The Basics
|
||||
|
||||
Ruff welcomes contributions in the form of pull requests.
|
||||
@@ -333,22 +304,34 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
||||
### Creating a new release
|
||||
|
||||
1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
|
||||
|
||||
1. Run `./scripts/release.sh`; this command will:
|
||||
|
||||
- Generate a temporary virtual environment with `rooster`
|
||||
- Generate a changelog entry in `CHANGELOG.md`
|
||||
- Update versions in `pyproject.toml` and `Cargo.toml`
|
||||
- Update references to versions in the `README.md` and documentation
|
||||
- Display contributors for the release
|
||||
|
||||
1. The changelog should then be editorialized for consistency
|
||||
|
||||
- Often labels will be missing from pull requests they will need to be manually organized into the proper section
|
||||
- Changes should be edited to be user-facing descriptions, avoiding internal details
|
||||
|
||||
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
|
||||
|
||||
1. Run `cargo check`. This should update the lock file with new versions.
|
||||
|
||||
1. Create a pull request with the changelog and version updates
|
||||
|
||||
1. Merge the PR
|
||||
|
||||
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:
|
||||
|
||||
- The new version number (without starting `v`)
|
||||
|
||||
1. The release workflow will do the following:
|
||||
|
||||
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
|
||||
uploaded anything, you can restart after pushing a fix. If you just need to rerun the build,
|
||||
make sure you're [re-running all the failed
|
||||
@@ -359,14 +342,25 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
|
||||
1. Attach artifacts to draft GitHub release
|
||||
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
|
||||
downstream jobs manually if needed.
|
||||
|
||||
1. Verify the GitHub release:
|
||||
|
||||
1. The Changelog should match the content of `CHANGELOG.md`
|
||||
1. Append the contributors from the `bump.sh` script
|
||||
1. Append the contributors from the `scripts/release.sh` script
|
||||
|
||||
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
|
||||
|
||||
1. One can determine if an update is needed when
|
||||
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
|
||||
1. Once run successfully, you should follow the link in the output to create a PR.
|
||||
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
|
||||
|
||||
1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
|
||||
[`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
|
||||
the release instructions in those repositories. `ruff-lsp` should always be updated
|
||||
before `ruff-vscode`.
|
||||
|
||||
This step is generally not required for a patch release, but should always be done
|
||||
for a minor release.
|
||||
|
||||
## Ecosystem CI
|
||||
|
||||
@@ -389,7 +383,7 @@ We have several ways of benchmarking and profiling Ruff:

- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects

> [!NOTE]
> **Note**
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
> applications, like web browsers). You may also want to switch your CPU to a "performance"
> mode, if it exists, especially when benchmarking short-lived processes.

@@ -403,12 +397,18 @@ which makes it a good target for benchmarking.

git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
```

Install `hyperfine`:

```shell
cargo install hyperfine
```

To benchmark the release build:

```shell
cargo build --release && hyperfine --warmup 10 \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"

Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
  Time (mean ± σ):     293.8 ms ±  3.2 ms    [User: 2384.6 ms, System: 90.3 ms]

@@ -427,7 +427,7 @@ To benchmark against the ecosystem's existing tools:

```shell
hyperfine --ignore-failure --warmup 5 \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
  "pyflakes crates/ruff_linter/resources/test/cpython" \
  "autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
  "pycodestyle crates/ruff_linter/resources/test/cpython" \

@@ -473,7 +473,7 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:

```shell
cargo build --release && hyperfine --warmup 10 \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
```

You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the

@@ -530,6 +530,8 @@ You can run the benchmarks with

cargo benchmark
```

`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`

#### Benchmark-driven Development

Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use

@@ -568,7 +570,7 @@ cargo install critcmp

#### Tips

- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
  to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for cleaner output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
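
Picking up the Criterion.rs mention above: for readers who haven't written one before, a Criterion microbenchmark has roughly the following shape. This is a minimal sketch for orientation only, not Ruff's actual benchmark code; `tokenize_source` and its workload are hypothetical stand-ins for the function you want to measure.

```rust
// Minimal Criterion.rs microbenchmark sketch (hypothetical workload).
use criterion::{black_box, criterion_group, criterion_main, Criterion};

// Placeholder workload; imagine the lexer running over `source` here.
fn tokenize_source(source: &str) -> usize {
    source.split_whitespace().count()
}

fn lexer_benchmark(c: &mut Criterion) {
    let source = "def f(x):\n    return x + 1\n".repeat(1_000);
    c.bench_function("lexer/sample", |b| {
        // `black_box` prevents the compiler from optimizing the work away.
        b.iter(|| tokenize_source(black_box(&source)));
    });
}

criterion_group!(benches, lexer_benchmark);
criterion_main!(benches);
```

Registered under `[[bench]]` in a crate's `Cargo.toml`, a benchmark like this is what the `cargo bench -p ruff_benchmark <filter>` invocations above select by name.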
@@ -905,15 +907,11 @@ There are three ways in which an import can be categorized as "first-party":

   package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
   automatically. This check is as simple as comparing the first segment of the current file's
   module path to the first segment of the import.
1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
   sets the directories to scan when identifying first-party imports. The algorithm is
   straightforward: given an import, like `import foo`, iterate over the directories enumerated in
   the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
   file `foo.py` (a sketch of this check follows below).

   By default, `src` is set to the project root. In the above example, we'd want to set
   `src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
   as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
   unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
   the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
   explicitly.
   By default, `src` is set to the project root, along with the `"src"` subdirectory in the project root.
   This ensures that Ruff supports both flat and "src" layouts out of the box.
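
To make the source-roots check concrete, here is a minimal sketch of the directory probe described above, using only the standard library. `is_first_party` is a hypothetical helper for illustration, not Ruff's actual implementation:

```rust
use std::path::PathBuf;

// Given the first segment of an import (e.g. `foo` for `import foo`),
// report whether any configured source root contains a matching package
// directory (`foo/`) or module file (`foo.py`).
fn is_first_party(import_root: &str, src_roots: &[PathBuf]) -> bool {
    src_roots.iter().any(|root| {
        root.join(import_root).is_dir() || root.join(format!("{import_root}.py")).is_file()
    })
}

fn main() {
    // With `src = ["./src"]`, `import foo` counts as first-party
    // if `./src/foo/` or `./src/foo.py` exists.
    let roots = vec![PathBuf::from("./src")];
    println!("first-party: {}", is_first_party("foo", &roots));
}
```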
735 changes: Cargo.lock (generated; file diff suppressed because it is too large)

21 changes: Cargo.toml
@@ -4,7 +4,7 @@ resolver = "2"

[workspace.package]
edition = "2021"
rust-version = "1.75"
rust-version = "1.76"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"

@@ -17,6 +17,7 @@ ruff_cache = { path = "crates/ruff_cache" }

ruff_db = { path = "crates/ruff_db" }
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
ruff_formatter = { path = "crates/ruff_formatter" }
ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }

@@ -35,13 +36,14 @@ ruff_source_file = { path = "crates/ruff_source_file" }

ruff_text_size = { path = "crates/ruff_text_size" }
ruff_workspace = { path = "crates/ruff_workspace" }

red_knot = { path = "crates/red_knot" }
red_knot_module_resolver = { path = "crates/red_knot_module_resolver" }
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
red_knot_workspace = { path = "crates/red_knot_workspace" }

aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }

@@ -68,6 +70,7 @@ fern = { version = "0.6.1" }

filetime = { version = "0.2.23" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
hashbrown = "0.14.3"
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }

@@ -102,13 +105,13 @@ pep440_rs = { version = "0.6.0", features = ["serde"] }

pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.4.0" }
quick-junit = { version = "0.5.0" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "a1bf3a613f451af7fc0a59411c56abc47fe8e8e1" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }

@@ -131,9 +134,10 @@ thiserror = { version = "1.0.58" }

tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.3.0" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt"] }
tracing-tree = { version = "0.4.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }

@@ -152,11 +156,12 @@ walkdir = { version = "2.3.2" }

wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false, features = ["zstd"] }
zip = { version = "0.6.6", default-features = false }

[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
unexpected_cfgs = { level = "warn", check-cfg = ["cfg(fuzzing)", "cfg(codspeed)"] }

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
25 changes: LICENSE

@@ -1371,3 +1371,28 @@ are:

  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  SOFTWARE.
  """

- pydoclint, licensed as follows:
  """
  MIT License

  Copyright (c) 2023 jsh9

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
  in the Software without restriction, including without limitation the rights
  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  copies of the Software, and to permit persons to whom the Software is
  furnished to do so, subject to the following conditions:

  The above copyright notice and this permission notice shall be included in all
  copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  SOFTWARE.
  """
33 changes: README.md

@@ -29,14 +29,14 @@ An extremely fast Python linter and code formatter, written in Rust.

- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.13 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
  of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)

Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.

@@ -110,7 +110,7 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).

1. [Who's Using Ruff?](#whos-using-ruff)
1. [License](#license)

## Getting Started
## Getting Started<a id="getting-started"></a>

For more, see the [documentation](https://docs.astral.sh/ruff/).

@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh

powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.5.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.5.3/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.6.6/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.6.6/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),

@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff

```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.5.3
  rev: v0.6.6
  hooks:
    # Run the linter.
    - id: ruff

@@ -179,8 +179,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff

    - id: ruff-format
```

Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).

Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/chartboost/ruff-action):

@@ -196,7 +195,7 @@ jobs:

      - uses: chartboost/ruff-action@v1
```

### Configuration
### Configuration<a id="configuration"></a>

Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)

@@ -292,7 +291,7 @@ features that may change prior to stabilization.

See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.

## Rules
## Rules<a id="rules"></a>

<!-- Begin section: Rules -->

@@ -368,21 +367,21 @@ quality tools, including:

For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).

## Contributing
## Contributing<a id="contributing"></a>

Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).

You can also join us on [**Discord**](https://discord.com/invite/astral-sh).

## Support
## Support<a id="support"></a>

Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).

You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).

## Acknowledgements
## Acknowledgements<a id="acknowledgements"></a>

Ruff's linter draws on both the APIs and implementation details of many other
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),

@@ -406,7 +405,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a

Ruff is released under the MIT license.

## Who's Using Ruff?
## Who's Using Ruff?<a id="whos-using-ruff"></a>

Ruff is used by a number of major open-source projects and companies, including:

@@ -424,6 +423,7 @@ Ruff is used by a number of major open-source projects and companies, including:

- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
- [Dify](https://github.com/langgenius/dify)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Godot](https://github.com/godotengine/godot)
- [Gradio](https://github.com/gradio-app/gradio)

@@ -434,6 +434,7 @@ Ruff is used by a number of major open-source projects and companies, including:

- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
  [Datasets](https://github.com/huggingface/datasets),
  [Diffusers](https://github.com/huggingface/diffusers))
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)

@@ -523,7 +524,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m

<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```

## License
## License<a id="license"></a>

This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
@@ -1,6 +1,6 @@

[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["crates/red_knot_module_resolver/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = ["crates/red_knot_python_semantic/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`

@@ -10,4 +10,12 @@ doc-valid-idents = [

    "SCREAMING_SNAKE_CASE",
    "SQLAlchemy",
    "StackOverflow",
    "PyCharm",
]

ignore-interior-mutability = [
    # Interned is read-only. The wrapped `Rc` never gets updated.
    "ruff_formatter::format_element::Interned",
    # The expression is read-only.
    "ruff_python_ast::hashable::HashableExpr",
]
@@ -12,25 +12,29 @@ license.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
red_knot_module_resolver = { workspace = true }
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true }
red_knot_server = { workspace = true }

ruff_db = { workspace = true, features = ["os", "cache"] }
ruff_python_ast = { workspace = true }

anyhow = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
colored = { workspace = true }
countme = { workspace = true, features = ["enable"] }
crossbeam = { workspace = true }
ctrlc = { version = "3.4.4" }
notify = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
tracing = { workspace = true, features = ["release_max_level_debug"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
tracing-flame = { workspace = true }
tracing-tree = { workspace = true }

[dev-dependencies]
filetime = { workspace = true }
tempfile = { workspace = true }

[lints]
workspace = true
BIN crates/red_knot/docs/tracing-flamegraph.png (new file; binary file not shown; size after: 40 KiB)

128 changes: crates/red_knot/docs/tracing.md (new file)
@@ -0,0 +1,128 @@

# Tracing

Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
Note that tracing messages with severity `debug` or greater are user-facing; they should be phrased accordingly.
Tracing spans are only shown when using `-vvv`.

## Verbosity levels

The CLI supports different verbosity levels.

- default: Only show errors and warnings.
- `-v` activates `info!`: Show generally useful information such as paths of configuration files, the detected platform, etc. This is not a lot of messages; it's a level you might activate in CI by default (`cargo build`, e.g., shows you which packages are fresh).
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, you're logging everything. Most of this is wasted, it's really slow, and we dump e.g. the entire resolution graph. It is only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.

## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`

By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
but this can result in a confused logging output where messages from different threads are intertwined.
To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.

### Examples

#### Show all debug messages

Shows debug messages from all crates.

```bash
RED_KNOT_LOG=debug
```

#### Show salsa query execution messages

Show the salsa `execute: my_query` messages in addition to all red knot messages.

```bash
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
```

#### Show typing traces

Only show traces for the `red_knot_python_semantic::types` module.

```bash
RED_KNOT_LOG="red_knot_python_semantic::types"
```

Note: Ensure that you use `-vvv` to see tracing spans.

#### Show messages for a single file

Shows all messages that are inside of a span for a specific file.

```bash
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
```

**Note**: Tracing still shows all spans because tracing can't know at the time of entering the span
whether one of its children has the file `x.py`.

**Note**: Salsa currently logs entire memoized values; in our case, the source text and parsed AST.
This very quickly leads to extremely long outputs.

## Tracing and Salsa

Be mindful about using `tracing` in Salsa queries, especially when using `warn` or `error`, because it isn't guaranteed
that the query will execute after restoring from a persistent cache, in which case the user won't see the message.

For example, don't use `tracing` to show the user a message when generating a lint violation failed,
because the message would only be shown when linting the file the first time, but not on subsequent analysis
runs or when restoring from a persistent cache. This can be confusing for users because they
don't understand why a specific lint violation isn't raised. Instead, change your
query to return the failure as part of the query's result or use a Salsa accumulator, as in the sketch below.
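To make that concrete, here is a minimal sketch of the "return the failure as part of the result" pattern. The types and names (`LintResult`, `lint_file`, `run_rule`) are hypothetical stand-ins, not red-knot's actual API; the point is only that the failure travels in the memoized value instead of through `tracing`:

```rust
// Hypothetical sketch: the failure is part of the (cached) query result,
// so it is surfaced every time the result is used, not only on the first run.
#[derive(Clone, Debug, PartialEq, Eq)]
struct LintResult {
    violations: Vec<String>,
    // Rules that failed to run, carried alongside the violations.
    failed_rules: Vec<String>,
}

fn lint_file(source: &str) -> LintResult {
    let mut result = LintResult { violations: Vec::new(), failed_rules: Vec::new() };

    match run_rule(source) {
        Ok(mut violations) => result.violations.append(&mut violations),
        // Don't log here; record the failure in the result instead.
        Err(rule) => result.failed_rules.push(rule),
    }

    result
}

// Stand-in for a lint rule that can fail.
fn run_rule(source: &str) -> Result<Vec<String>, String> {
    if source.is_empty() {
        Err("example-rule".to_string())
    } else {
        Ok(vec![])
    }
}

fn main() {
    let result = lint_file("");
    // The caller, not the query, decides how to report failures.
    for rule in &result.failed_rules {
        eprintln!("warning: failed to run rule `{rule}`");
    }
}
```
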
## Tracing in tests

You can use `ruff_db::testing::setup_logging` or `ruff_db::testing::setup_logging_with_filter` to set up logging in tests.

```rust
use ruff_db::testing::setup_logging;

#[test]
fn test() {
    let _logging = setup_logging();

    tracing::info!("This message will be printed to stderr");
}
```

Note: Most test runners capture stderr and only show its output when a test fails.

Note also that `setup_logging` only sets up logging for the current thread, because [`set_global_default`](https://docs.rs/tracing/latest/tracing/subscriber/fn.set_global_default.html) can only be
called **once**.

## Release builds

`trace!` events are removed in release builds (via the `release_max_level_debug` feature on the `tracing` crate).

## Profiling

Red Knot generates a folded stack trace to the current directory named `tracing.folded` when setting the environment variable `RED_KNOT_LOG_PROFILE` to `1` or `true`.

```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
```

You can convert the textual representation into a visual one using `inferno`.

```shell
cargo install inferno
```

```shell
# flamegraph
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg

# flamechart
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
```

![Example flamegraph](tracing-flamegraph.png)

See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.
@@ -1,2 +0,0 @@

pub(crate) mod target_version;
pub(crate) mod verbosity;

@@ -1,34 +0,0 @@

/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl std::fmt::Display for TargetVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        ruff_db::program::TargetVersion::from(*self).fmt(f)
    }
}

impl From<TargetVersion> for ruff_db::program::TargetVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => Self::Py37,
            TargetVersion::Py38 => Self::Py38,
            TargetVersion::Py39 => Self::Py39,
            TargetVersion::Py310 => Self::Py310,
            TargetVersion::Py311 => Self::Py311,
            TargetVersion::Py312 => Self::Py312,
            TargetVersion::Py313 => Self::Py313,
        }
    }
}

@@ -1,34 +0,0 @@

#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
    Info,
    Debug,
    Trace,
}

/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
    #[arg(
        long,
        short = 'v',
        help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
        action = clap::ArgAction::Count,
        global = true,
    )]
    verbose: u8,
}

impl Verbosity {
    /// Returns the verbosity level based on the number of `-v` flags.
    ///
    /// Returns `None` if the user did not specify any verbosity flags.
    pub(crate) fn level(&self) -> Option<VerbosityLevel> {
        match self.verbose {
            0 => None,
            1 => Some(VerbosityLevel::Info),
            2 => Some(VerbosityLevel::Debug),
            _ => Some(VerbosityLevel::Trace),
        }
    }
}
@@ -1,200 +0,0 @@

use std::panic::{AssertUnwindSafe, RefUnwindSafe};
use std::sync::Arc;

use salsa::{Cancelled, Database, DbWithJar};

use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::program::{Program, ProgramSettings};
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};

use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled, Diagnostics};
use crate::watch::{FileChangeKind, FileWatcherChange};
use crate::workspace::{check_file, Package, Workspace, WorkspaceMetadata};

pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}

#[salsa::jar(db=Db)]
pub struct Jar(
    Workspace,
    Package,
    lint_syntax,
    lint_semantic,
    unwind_if_cancelled,
);

#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
pub struct RootDatabase {
    workspace: Option<Workspace>,
    storage: salsa::Storage<RootDatabase>,
    files: Files,
    system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
}

impl RootDatabase {
    pub fn new<S>(workspace: WorkspaceMetadata, settings: ProgramSettings, system: S) -> Self
    where
        S: System + 'static + Send + Sync + RefUnwindSafe,
    {
        let mut db = Self {
            workspace: None,
            storage: salsa::Storage::default(),
            files: Files::default(),
            system: Arc::new(system),
        };

        let workspace = Workspace::from_metadata(&db, workspace);
        // Initialize the `Program` singleton
        Program::from_settings(&db, settings);

        db.workspace = Some(workspace);
        db
    }

    pub fn workspace(&self) -> Workspace {
        // SAFETY: The workspace is always initialized in `new`.
        self.workspace.unwrap()
    }

    #[tracing::instrument(level = "debug", skip(self, changes))]
    pub fn apply_changes(&mut self, changes: Vec<FileWatcherChange>) {
        let workspace = self.workspace();
        let workspace_path = workspace.root(self).to_path_buf();

        // TODO: Optimize change tracking by only reloading a package if a file that is part of the package was changed.
        let mut structural_change = false;
        for change in changes {
            if matches!(
                change.path.file_name(),
                Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
            ) {
                // Changes to ignore files or settings can change the workspace structure or add/remove files
                // from packages.
                structural_change = true;
            } else {
                match change.kind {
                    FileChangeKind::Created => {
                        // Reload the package when a new file was added. This is necessary because the file might be excluded
                        // by a gitignore.
                        if workspace.package(self, &change.path).is_some() {
                            structural_change = true;
                        }
                    }
                    FileChangeKind::Modified => {}
                    FileChangeKind::Deleted => {
                        if let Some(package) = workspace.package(self, &change.path) {
                            if let Some(file) = system_path_to_file(self, &change.path) {
                                package.remove_file(self, file);
                            }
                        }
                    }
                }
            }

            File::touch_path(self, &change.path);
        }

        if structural_change {
            match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
                Ok(metadata) => {
                    tracing::debug!("Reload workspace after structural change.");
                    // TODO: Handle changes in the program settings.
                    workspace.reload(self, metadata);
                }
                Err(error) => {
                    tracing::error!("Failed to load workspace, keep old workspace: {error}");
                }
            }
        }
    }

    /// Checks all open files in the workspace and its dependencies.
    pub fn check(&self) -> Result<Vec<String>, Cancelled> {
        self.with_db(|db| db.workspace().check(db))
    }

    pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
        self.with_db(|db| check_file(db, file))
    }

    pub(crate) fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
    where
        F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
    {
        // The `AssertUnwindSafe` here looks scary, but is a consequence of Salsa's design.
        // Salsa uses panics to implement cancellation and to recover from cycles. However, the Salsa
        // storage isn't `UnwindSafe` or `RefUnwindSafe` because its dependencies `DashMap` and `parking_lot::*` aren't
        // unwind safe.
        //
        // Having to use `AssertUnwindSafe` isn't as big a deal as it might seem because
        // the `UnwindSafe` and `RefUnwindSafe` traits are designed to catch logical bugs.
        // They don't protect against [UB](https://internals.rust-lang.org/t/pre-rfc-deprecating-unwindsafe/15974).
        // On top of that, `Cancelled` only catches specific Salsa-panics and propagates all other panics.
        //
        // That still leaves us with possible logical bugs in two sources:
        // * In Salsa itself: This must be considered a bug in Salsa and needs fixing upstream.
        //   Reviewing Salsa code specifically around unwind safety seems doable.
        // * Our code: This is the main concern. Luckily, it only involves code that uses internal mutability
        //   and calls into Salsa queries when mutating the internal state. Using `AssertUnwindSafe`
        //   certainly makes it harder to catch these issues in our user code.
        //
        // For now, this is the only solution at hand unless Salsa decides to change its design.
        // [Zulip support thread](https://salsa.zulipchat.com/#narrow/stream/145099-general/topic/How.20to.20use.20.60Cancelled.3A.3Acatch.60)
        let db = &AssertUnwindSafe(self);
        Cancelled::catch(|| f(db))
    }
}

impl Upcast<dyn SemanticDb> for RootDatabase {
    fn upcast(&self) -> &(dyn SemanticDb + 'static) {
        self
    }
}

impl Upcast<dyn SourceDb> for RootDatabase {
    fn upcast(&self) -> &(dyn SourceDb + 'static) {
        self
    }
}

impl Upcast<dyn ResolverDb> for RootDatabase {
    fn upcast(&self) -> &(dyn ResolverDb + 'static) {
        self
    }
}

impl ResolverDb for RootDatabase {}

impl SemanticDb for RootDatabase {}

impl SourceDb for RootDatabase {
    fn vendored(&self) -> &VendoredFileSystem {
        vendored_typeshed_stubs()
    }

    fn system(&self) -> &dyn System {
        &*self.system
    }

    fn files(&self) -> &Files {
        &self.files
    }
}

impl Database for RootDatabase {}

impl Db for RootDatabase {}

impl salsa::ParallelDatabase for RootDatabase {
    fn snapshot(&self) -> salsa::Snapshot<Self> {
        salsa::Snapshot::new(Self {
            workspace: self.workspace,
            storage: self.storage.snapshot(),
            files: self.files.snapshot(),
            system: self.system.clone(),
        })
    }
}
254 changes: crates/red_knot/src/logging.rs (new file)
@@ -0,0 +1,254 @@

//! Sets up logging for Red Knot

use anyhow::Context;
use colored::Colorize;
use std::fmt;
use std::fs::File;
use std::io::BufWriter;
use tracing::{Event, Subscriber};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::fmt::format::Writer;
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::registry::LookupSpan;
use tracing_subscriber::EnvFilter;

/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
    #[arg(
        long,
        short = 'v',
        help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
        action = clap::ArgAction::Count,
        global = true,
    )]
    verbose: u8,
}

impl Verbosity {
    /// Returns the verbosity level based on the number of `-v` flags.
    ///
    /// Returns `VerbosityLevel::Default` if the user did not specify any verbosity flags.
    pub(crate) fn level(&self) -> VerbosityLevel {
        match self.verbose {
            0 => VerbosityLevel::Default,
            1 => VerbosityLevel::Verbose,
            2 => VerbosityLevel::ExtraVerbose,
            _ => VerbosityLevel::Trace,
        }
    }
}

#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
    /// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN) level.
    Default,

    /// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO) level.
    /// Corresponds to `-v`.
    Verbose,

    /// Enables a more verbose tracing format and emits Ruff and Red Knot events up to the [`DEBUG`](tracing::Level::DEBUG) level.
    /// Corresponds to `-vv`.
    ExtraVerbose,

    /// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
    Trace,
}

impl VerbosityLevel {
    const fn level_filter(self) -> LevelFilter {
        match self {
            VerbosityLevel::Default => LevelFilter::WARN,
            VerbosityLevel::Verbose => LevelFilter::INFO,
            VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
            VerbosityLevel::Trace => LevelFilter::TRACE,
        }
    }

    pub(crate) const fn is_trace(self) -> bool {
        matches!(self, VerbosityLevel::Trace)
    }

    pub(crate) const fn is_extra_verbose(self) -> bool {
        matches!(self, VerbosityLevel::ExtraVerbose)
    }
}

pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
    use tracing_subscriber::prelude::*;

    // The `RED_KNOT_LOG` environment variable overrides the default log level.
    let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
        EnvFilter::builder()
            .parse(log_env_variable)
            .context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
    } else {
        match level {
            VerbosityLevel::Default => {
                // Show warning traces
                EnvFilter::default().add_directive(LevelFilter::WARN.into())
            }
            level => {
                let level_filter = level.level_filter();

                // Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
                let filter = EnvFilter::default().add_directive(
                    format!("red_knot={level_filter}")
                        .parse()
                        .expect("Hardcoded directive to be valid"),
                );

                filter.add_directive(
                    format!("ruff={level_filter}")
                        .parse()
                        .expect("Hardcoded directive to be valid"),
                )
            }
        }
    };

    let (profiling_layer, guard) = setup_profile();

    let registry = tracing_subscriber::registry()
        .with(filter)
        .with(profiling_layer);

    if level.is_trace() {
        let subscriber = registry.with(
            tracing_tree::HierarchicalLayer::default()
                .with_indent_lines(true)
                .with_indent_amount(2)
                .with_bracketed_fields(true)
                .with_thread_ids(true)
                .with_targets(true)
                .with_writer(std::io::stderr)
                .with_timer(tracing_tree::time::Uptime::default()),
        );

        subscriber.init();
    } else {
        let subscriber = registry.with(
            tracing_subscriber::fmt::layer()
                .event_format(RedKnotFormat {
                    display_level: true,
                    display_timestamp: level.is_extra_verbose(),
                    show_spans: false,
                })
                .with_writer(std::io::stderr),
        );

        subscriber.init();
    }

    Ok(TracingGuard {
        _flame_guard: guard,
    })
}

#[allow(clippy::type_complexity)]
fn setup_profile<S>() -> (
    Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
    Option<tracing_flame::FlushGuard<BufWriter<File>>>,
)
where
    S: Subscriber + for<'span> LookupSpan<'span>,
{
    if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
        let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
            .expect("Flame layer to be created");
        (Some(layer), Some(guard))
    } else {
        (None, None)
    }
}

pub(crate) struct TracingGuard {
    _flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
}

struct RedKnotFormat {
    display_timestamp: bool,
    display_level: bool,
    show_spans: bool,
}

/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
impl<S, N> FormatEvent<S, N> for RedKnotFormat
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    fn format_event(
        &self,
        ctx: &FmtContext<'_, S, N>,
        mut writer: Writer<'_>,
        event: &Event<'_>,
    ) -> fmt::Result {
        let meta = event.metadata();
        let ansi = writer.has_ansi_escapes();

        if self.display_timestamp {
            let timestamp = chrono::Local::now()
                .format("%Y-%m-%d %H:%M:%S.%f")
                .to_string();
            if ansi {
                write!(writer, "{} ", timestamp.dimmed())?;
            } else {
                write!(
                    writer,
                    "{} ",
                    chrono::Local::now().format("%Y-%m-%d %H:%M:%S.%f")
                )?;
            }
        }

        if self.display_level {
            let level = meta.level();
            // Same colors as tracing
            if ansi {
                let formatted_level = level.to_string();
                match *level {
                    tracing::Level::TRACE => {
                        write!(writer, "{} ", formatted_level.purple().bold())?;
                    }
                    tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
                    tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
                    tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
                    tracing::Level::ERROR => write!(writer, "{} ", level.to_string().red().bold())?,
                }
            } else {
                write!(writer, "{level} ")?;
            }
        }

        if self.show_spans {
            let span = event.parent();
            let mut seen = false;

            let span = span
                .and_then(|id| ctx.span(id))
                .or_else(|| ctx.lookup_current());

            let scope = span.into_iter().flat_map(|span| span.scope().from_root());

            for span in scope {
                seen = true;
                if ansi {
                    write!(writer, "{}:", span.metadata().name().bold())?;
                } else {
                    write!(writer, "{}:", span.metadata().name())?;
                }
            }

            if seen {
                writer.write_char(' ')?;
            }
        }

        ctx.field_format().format_fields(writer.by_ref(), event)?;

        writeln!(writer)
    }
}
@@ -1,35 +1,39 @@

use std::process::{ExitCode, Termination};
use std::sync::Mutex;

use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use salsa::ParallelDatabase;
use tracing::subscriber::Interest;
use tracing::{Level, Metadata};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
use tracing_subscriber::{Layer, Registry};
use tracing_tree::time::Uptime;
use salsa::plumbing::ZalsaDatabase;

use red_knot::db::RootDatabase;
use red_knot::watch::FileWatcher;
use red_knot::watch::FileWatcherChange;
use red_knot::workspace::WorkspaceMetadata;
use ruff_db::program::{ProgramSettings, SearchPathSettings};
use ruff_db::system::{OsSystem, System, SystemPathBuf};
use red_knot_python_semantic::SitePackages;
use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use target_version::TargetVersion;

use cli::target_version::TargetVersion;
use cli::verbosity::{Verbosity, VerbosityLevel};
use crate::logging::{setup_tracing, Verbosity};

mod cli;
mod logging;
mod target_version;
mod verbosity;

#[derive(Debug, Parser)]
#[command(
    author,
    name = "red-knot",
    about = "An experimental multifile analysis backend for Ruff"
    about = "An extremely fast Python type checker."
)]
#[command(version)]
struct Args {
    #[command(subcommand)]
    pub(crate) command: Option<Command>,

    #[arg(
        long,
        help = "Changes the current working directory.",

@@ -38,6 +42,17 @@ struct Args {

    )]
    current_directory: Option<SystemPathBuf>,

    #[arg(
        long,
        help = "Path to the virtual environment the project uses",
        long_help = "\
Path to the virtual environment the project uses. \
If provided, red-knot will use the `site-packages` directory of this virtual environment \
to resolve type information for the project's third-party dependencies.",
        value_name = "PATH"
    )]
    venv_path: Option<SystemPathBuf>,

    #[arg(
        long,
        value_name = "DIRECTORY",

@@ -50,62 +65,135 @@ struct Args {

        value_name = "PATH",
        help = "Additional path to use as a module-resolution source (can be passed multiple times)"
    )]
    extra_search_path: Vec<SystemPathBuf>,
    extra_search_path: Option<Vec<SystemPathBuf>>,

    #[arg(long, help = "Python version to assume when resolving types", default_value_t = TargetVersion::default(), value_name="VERSION")]
    target_version: TargetVersion,
    #[arg(
        long,
        help = "Python version to assume when resolving types",
        value_name = "VERSION"
    )]
    target_version: Option<TargetVersion>,

    #[clap(flatten)]
    verbosity: Verbosity,

    #[arg(
        long,
        help = "Run in watch mode by re-running whenever files change",
        short = 'W'
    )]
    watch: bool,
}

#[allow(
    clippy::print_stdout,
    clippy::unnecessary_wraps,
    clippy::print_stderr,
    clippy::dbg_macro
)]
pub fn main() -> anyhow::Result<()> {
    let Args {
        current_directory,
        custom_typeshed_dir,
        extra_search_path: extra_paths,
        target_version,
        verbosity,
    } = Args::parse_from(std::env::args().collect::<Vec<_>>());
impl Args {
    fn to_configuration(&self, cli_cwd: &SystemPath) -> Configuration {
        let mut configuration = Configuration::default();

    let verbosity = verbosity.level();
    countme::enable(verbosity == Some(VerbosityLevel::Trace));
    setup_tracing(verbosity);
        if let Some(target_version) = self.target_version {
            configuration.target_version = Some(target_version.into());
        }

    let cwd = if let Some(cwd) = current_directory {
        let canonicalized = cwd.as_utf8_path().canonicalize_utf8().unwrap();
        SystemPathBuf::from_utf8_path_buf(canonicalized)
    } else {
        let cwd = std::env::current_dir().unwrap();
        SystemPathBuf::from_path_buf(cwd).unwrap()
        if let Some(venv_path) = &self.venv_path {
            configuration.search_paths.site_packages = Some(SitePackages::Derived {
                venv_path: SystemPath::absolute(venv_path, cli_cwd),
            });
        }

        if let Some(custom_typeshed_dir) = &self.custom_typeshed_dir {
            configuration.search_paths.custom_typeshed =
                Some(SystemPath::absolute(custom_typeshed_dir, cli_cwd));
        }

        if let Some(extra_search_paths) = &self.extra_search_path {
            configuration.search_paths.extra_paths = extra_search_paths
                .iter()
                .map(|path| Some(SystemPath::absolute(path, cli_cwd)))
                .collect();
        }

        configuration
    }
}

#[derive(Debug, clap::Subcommand)]
pub enum Command {
    /// Start the language server
    Server,
}

#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
pub fn main() -> ExitStatus {
    run().unwrap_or_else(|error| {
        use std::io::Write;

        // Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
        let mut stderr = std::io::stderr().lock();

        // This communicates that this isn't a linter error but Red Knot itself hard-errored for
        // some reason (e.g. failed to resolve the configuration)
        writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
        // Currently we generally only see one error, but e.g. with io errors when resolving
        // the configuration it is helpful to chain errors ("resolving configuration failed" ->
        // "failed to read file: subdir/pyproject.toml")
        for cause in error.chain() {
            writeln!(stderr, "  {} {cause}", "Cause:".bold()).ok();
        }

        ExitStatus::Error
    })
}

fn run() -> anyhow::Result<ExitStatus> {
    let args = Args::parse_from(std::env::args().collect::<Vec<_>>());

    if matches!(args.command, Some(Command::Server)) {
        return run_server().map(|()| ExitStatus::Success);
    }

    let verbosity = args.verbosity.level();
    countme::enable(verbosity.is_trace());
    let _guard = setup_tracing(verbosity)?;

    // The base path that all CLI arguments are relative to.
    let cli_base_path = {
        let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
        SystemPathBuf::from_path_buf(cwd)
            .map_err(|path| {
                anyhow!(
                    "The current working directory '{}' contains non-unicode characters. Red Knot only supports unicode paths.",
                    path.display()
                )
            })?
    };

    let cwd = args
        .current_directory
        .as_ref()
        .map(|cwd| {
            if cwd.as_std_path().is_dir() {
                Ok(SystemPath::absolute(cwd, &cli_base_path))
            } else {
                Err(anyhow!(
                    "Provided current-directory path '{cwd}' is not a directory."
                ))
            }
        })
        .transpose()?
        .unwrap_or_else(|| cli_base_path.clone());

    let system = OsSystem::new(cwd.clone());
    let workspace_metadata =
        WorkspaceMetadata::from_path(system.current_directory(), &system).unwrap();

    // TODO: Respect the settings from the workspace metadata when resolving the program settings.
    let program_settings = ProgramSettings {
        target_version: target_version.into(),
        search_paths: SearchPathSettings {
            extra_paths,
            workspace_root: workspace_metadata.root().to_path_buf(),
            custom_typeshed: custom_typeshed_dir,
            site_packages: None,
        },
    };
    let cli_configuration = args.to_configuration(&cwd);
    let workspace_metadata = WorkspaceMetadata::from_path(
        system.current_directory(),
        &system,
        Some(cli_configuration.clone()),
    )?;

    // TODO: Use the `program_settings` to compute the key for the database's persistent
    // cache and load the cache if it exists.
    let mut db = RootDatabase::new(workspace_metadata, program_settings, system);
    let mut db = RootDatabase::new(workspace_metadata, system)?;

    let (main_loop, main_loop_cancellation_token) = MainLoop::new(verbosity);
    let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration);

    // Listen to Ctrl+C and abort the watch mode.
    let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));

@@ -117,125 +205,161 @@ pub fn main() -> anyhow::Result<()> {

        }
    })?;

    let file_changes_notifier = main_loop.file_changes_notifier();
    let exit_status = if args.watch {
        main_loop.watch(&mut db)?
    } else {
        main_loop.run(&mut db)
    };

    // Watch for file changes and re-trigger the analysis.
    let mut file_watcher = FileWatcher::new(move |changes| {
        file_changes_notifier.notify(changes);
    })?;
    tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());

    file_watcher.watch_folder(db.workspace().root(&db).as_std_path())?;
    std::mem::forget(db);

    main_loop.run(&mut db);
    Ok(exit_status)
}

    println!("{}", countme::get_all());
#[derive(Copy, Clone)]
pub enum ExitStatus {
    /// Checking was successful and there were no errors.
    Success = 0,

    Ok(())
    /// Checking was successful but there were errors.
    Failure = 1,

    /// Checking failed.
    Error = 2,
}

impl Termination for ExitStatus {
    fn report(self) -> ExitCode {
        ExitCode::from(self as u8)
    }
}

struct MainLoop {
    verbosity: Option<VerbosityLevel>,
    orchestrator: crossbeam_channel::Sender<OrchestratorMessage>,
    /// Sender that can be used to send messages to the main loop.
    sender: crossbeam_channel::Sender<MainLoopMessage>,

    /// Receiver for the messages sent **to** the main loop.
    receiver: crossbeam_channel::Receiver<MainLoopMessage>,

    /// The file system watcher, if running in watch mode.
    watcher: Option<WorkspaceWatcher>,

    cli_configuration: Configuration,
}

impl MainLoop {
    fn new(verbosity: Option<VerbosityLevel>) -> (Self, MainLoopCancellationToken) {
        let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
        let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);

        let mut orchestrator = Orchestrator {
            receiver: orchestrator_receiver,
            main_loop: main_loop_sender.clone(),
            revision: 0,
        };

        std::thread::spawn(move || {
            orchestrator.run();
        });
    fn new(cli_configuration: Configuration) -> (Self, MainLoopCancellationToken) {
        let (sender, receiver) = crossbeam_channel::bounded(10);

        (
            Self {
                verbosity,
                orchestrator: orchestrator_sender,
                receiver: main_loop_receiver,
            },
            MainLoopCancellationToken {
                sender: main_loop_sender,
                sender: sender.clone(),
                receiver,
                watcher: None,
                cli_configuration,
            },
            MainLoopCancellationToken { sender },
        )
    }

    fn file_changes_notifier(&self) -> FileChangesNotifier {
        FileChangesNotifier {
            sender: self.orchestrator.clone(),
        }
    fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> {
        tracing::debug!("Starting watch mode");
        let sender = self.sender.clone();
        let watcher = watch::directory_watcher(move |event| {
            sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
        })?;

        self.watcher = Some(WorkspaceWatcher::new(watcher, db));

        self.run(db);

        Ok(ExitStatus::Success)
    }

    #[allow(clippy::print_stderr)]
    fn run(self, db: &mut RootDatabase) {
        self.orchestrator.send(OrchestratorMessage::Run).unwrap();
    fn run(mut self, db: &mut RootDatabase) -> ExitStatus {
        self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();

        for message in &self.receiver {
            tracing::trace!("Main Loop: Tick");
        let result = self.main_loop(db);

        tracing::debug!("Exiting main loop");

        result
    }

    fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus {
        // Schedule the first check.
        tracing::debug!("Starting main loop");

        let mut revision = 0u64;

        while let Ok(message) = self.receiver.recv() {
            match message {
                MainLoopMessage::CheckWorkspace { revision } => {
                MainLoopMessage::CheckWorkspace => {
                    let db = db.snapshot();
                    let orchestrator = self.orchestrator.clone();
                    let sender = self.sender.clone();

                    // Spawn a new task that checks the workspace. This needs to be done in a separate thread
                    // to prevent blocking the main loop here.
                    rayon::spawn(move || {
                        if let Ok(result) = db.check() {
                            orchestrator
                                .send(OrchestratorMessage::CheckCompleted {
                                    diagnostics: result,
                                    revision,
                                })
                            // Send the result back to the main loop for printing.
                            sender
                                .send(MainLoopMessage::CheckCompleted { result, revision })
                                .unwrap();
                        }
                    });
                }
                MainLoopMessage::ApplyChanges(changes) => {
                    // Automatically cancels any pending queries and waits for them to complete.
                    db.apply_changes(changes);
                }
                MainLoopMessage::CheckCompleted(diagnostics) => {
                    eprintln!("{}", diagnostics.join("\n"));
                    if self.verbosity == Some(VerbosityLevel::Trace) {
                        eprintln!("{}", countme::get_all());

                MainLoopMessage::CheckCompleted {
                    result,
                    revision: check_revision,
                } => {
                    let has_diagnostics = !result.is_empty();
                    if check_revision == revision {
                        for diagnostic in result {
                            tracing::error!("{}", diagnostic);
                        }
                    } else {
                        tracing::debug!(
                            "Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
                        );
                    }

                    if self.watcher.is_none() {
                        return if has_diagnostics {
                            ExitStatus::Failure
                        } else {
                            ExitStatus::Success
                        };
                    }

                    tracing::trace!("Counts after last check:\n{}", countme::get_all());
                }

                MainLoopMessage::ApplyChanges(changes) => {
                    revision += 1;
                    // Automatically cancels any pending queries and waits for them to complete.
                    db.apply_changes(changes, Some(&self.cli_configuration));
                    if let Some(watcher) = self.watcher.as_mut() {
                        watcher.update(db);
                    }
                    self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
                }
                MainLoopMessage::Exit => {
                    if self.verbosity == Some(VerbosityLevel::Trace) {
                        eprintln!("{}", countme::get_all());
                    }
                    return;
                    // Cancel any pending queries and wait for them to complete.
                    // TODO: Don't use Salsa internal APIs
                    // [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
|
||||
let _ = db.zalsa_mut();
|
||||
return ExitStatus::Success;
|
||||
}
|
||||
}
|
||||
|
||||
tracing::debug!("Waiting for next main loop message.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for MainLoop {
|
||||
fn drop(&mut self) {
|
||||
self.orchestrator
|
||||
.send(OrchestratorMessage::Shutdown)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct FileChangesNotifier {
|
||||
sender: crossbeam_channel::Sender<OrchestratorMessage>,
|
||||
}
|
||||
|
||||
impl FileChangesNotifier {
|
||||
fn notify(&self, changes: Vec<FileWatcherChange>) {
|
||||
self.sender
|
||||
.send(OrchestratorMessage::FileChanges(changes))
|
||||
.unwrap();
|
||||
ExitStatus::Success
|
||||
}
|
||||
}
|
||||
|
||||
@@ -250,170 +374,11 @@ impl MainLoopCancellationToken {
|
||||
}
|
||||
}
|
||||
|
||||
struct Orchestrator {
|
||||
/// Sends messages to the main loop.
|
||||
main_loop: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
/// Receives messages from the main loop.
|
||||
receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
|
||||
revision: usize,
|
||||
}
|
||||
|
||||
impl Orchestrator {
|
||||
#[allow(clippy::print_stderr)]
|
||||
fn run(&mut self) {
|
||||
while let Ok(message) = self.receiver.recv() {
|
||||
match message {
|
||||
OrchestratorMessage::Run => {
|
||||
self.main_loop
|
||||
.send(MainLoopMessage::CheckWorkspace {
|
||||
revision: self.revision,
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
OrchestratorMessage::CheckCompleted {
|
||||
diagnostics,
|
||||
revision,
|
||||
} => {
|
||||
// Only take the diagnostics if they are for the latest revision.
|
||||
if self.revision == revision {
|
||||
self.main_loop
|
||||
.send(MainLoopMessage::CheckCompleted(diagnostics))
|
||||
.unwrap();
|
||||
} else {
|
||||
tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
|
||||
}
|
||||
}
|
||||
|
||||
OrchestratorMessage::FileChanges(changes) => {
|
||||
// Request cancellation, but wait until all analysis tasks have completed to
|
||||
// avoid stale messages in the next main loop.
|
||||
|
||||
self.revision += 1;
|
||||
self.debounce_changes(changes);
|
||||
}
|
||||
OrchestratorMessage::Shutdown => {
|
||||
return self.shutdown();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
|
||||
loop {
|
||||
// Consume possibly incoming file change messages before running a new analysis, but don't wait for more than 100ms.
|
||||
crossbeam_channel::select! {
|
||||
recv(self.receiver) -> message => {
|
||||
match message {
|
||||
Ok(OrchestratorMessage::Shutdown) => {
|
||||
return self.shutdown();
|
||||
}
|
||||
Ok(OrchestratorMessage::FileChanges(file_changes)) => {
|
||||
changes.extend(file_changes);
|
||||
}
|
||||
|
||||
Ok(OrchestratorMessage::CheckCompleted { .. })=> {
|
||||
// disregard any outdated completion message.
|
||||
}
|
||||
Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),
|
||||
|
||||
Err(_) => {
|
||||
// There are no more senders, no point in waiting for more messages
|
||||
return;
|
||||
}
|
||||
}
|
||||
},
|
||||
default(std::time::Duration::from_millis(10)) => {
|
||||
// No more file changes after 10 ms, send the changes and schedule a new analysis
|
||||
self.main_loop.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
|
||||
self.main_loop.send(MainLoopMessage::CheckWorkspace { revision: self.revision}).unwrap();
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::unused_self)]
|
||||
fn shutdown(&self) {
|
||||
tracing::trace!("Shutting down orchestrator.");
|
||||
}
|
||||
}
|
||||
|
||||
/// Message sent from the orchestrator to the main loop.
|
||||
#[derive(Debug)]
|
||||
enum MainLoopMessage {
|
||||
CheckWorkspace { revision: usize },
|
||||
CheckCompleted(Vec<String>),
|
||||
ApplyChanges(Vec<FileWatcherChange>),
|
||||
CheckWorkspace,
|
||||
CheckCompleted { result: Vec<String>, revision: u64 },
|
||||
ApplyChanges(Vec<watch::ChangeEvent>),
|
||||
Exit,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum OrchestratorMessage {
|
||||
Run,
|
||||
Shutdown,
|
||||
|
||||
CheckCompleted {
|
||||
diagnostics: Vec<String>,
|
||||
revision: usize,
|
||||
},
|
||||
|
||||
FileChanges(Vec<FileWatcherChange>),
|
||||
}
|
||||
|
||||
fn setup_tracing(verbosity: Option<VerbosityLevel>) {
|
||||
let trace_level = match verbosity {
|
||||
None => Level::WARN,
|
||||
Some(VerbosityLevel::Info) => Level::INFO,
|
||||
Some(VerbosityLevel::Debug) => Level::DEBUG,
|
||||
Some(VerbosityLevel::Trace) => Level::TRACE,
|
||||
};
|
||||
|
||||
let subscriber = Registry::default().with(
|
||||
tracing_tree::HierarchicalLayer::default()
|
||||
.with_indent_lines(true)
|
||||
.with_indent_amount(2)
|
||||
.with_bracketed_fields(true)
|
||||
.with_thread_ids(true)
|
||||
.with_targets(true)
|
||||
.with_writer(|| Box::new(std::io::stderr()))
|
||||
.with_timer(Uptime::default())
|
||||
.with_filter(LoggingFilter { trace_level }),
|
||||
);
|
||||
|
||||
tracing::subscriber::set_global_default(subscriber).unwrap();
|
||||
}
|
||||
|
||||
struct LoggingFilter {
|
||||
trace_level: Level,
|
||||
}
|
||||
|
||||
impl LoggingFilter {
|
||||
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
|
||||
let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
|
||||
self.trace_level
|
||||
} else {
|
||||
Level::INFO
|
||||
};
|
||||
|
||||
meta.level() <= &filter
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> Filter<S> for LoggingFilter {
|
||||
fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
|
||||
self.is_enabled(meta)
|
||||
}
|
||||
|
||||
fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
|
||||
if self.is_enabled(meta) {
|
||||
Interest::always()
|
||||
} else {
|
||||
Interest::never()
|
||||
}
|
||||
}
|
||||
|
||||
fn max_level_hint(&self) -> Option<LevelFilter> {
|
||||
Some(LevelFilter::from_level(self.trace_level))
|
||||
}
|
||||
}
|
||||
|
||||
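The hunk above interleaves the removed orchestrator-based loop with its replacement: a single main loop that owns a `u64` revision counter, bumps it on every `ApplyChanges`, and drops any `CheckCompleted` whose revision no longer matches. A minimal, self-contained sketch of that stale-result pattern (the `Message` enum, channel capacity, and the spawned closure are illustrative stand-ins, not the crate's API):

    use std::thread;

    #[derive(Debug)]
    enum Message {
        Check,
        Changed,
        Completed { result: Vec<String>, revision: u64 },
    }

    fn main() {
        let (sender, receiver) = crossbeam_channel::bounded::<Message>(10);
        let mut revision = 0u64;

        sender.send(Message::Check).unwrap();
        // Simulate a file change arriving while the first check is in flight.
        sender.send(Message::Changed).unwrap();

        while let Ok(message) = receiver.recv() {
            match message {
                Message::Check => {
                    let sender = sender.clone();
                    let current = revision;
                    // Stand-in for `rayon::spawn` running `db.check()` on a snapshot.
                    thread::spawn(move || {
                        let result = vec![format!("diagnostic from revision {current}")];
                        // Ignore the send error if the main loop already exited.
                        let _ = sender.send(Message::Completed { result, revision: current });
                    });
                }
                Message::Changed => {
                    // Any in-flight result now carries a stale revision.
                    revision += 1;
                    sender.send(Message::Check).unwrap();
                }
                Message::Completed { result, revision: check_revision } => {
                    if check_revision == revision {
                        println!("{}", result.join("\n"));
                        break;
                    }
                    // Stale: a change arrived after this check started; discard it.
                }
            }
        }
    }

The bump in `Changed` is what invalidates the in-flight check: the worker still finishes, but its result carries the old revision and is discarded instead of being printed against already-edited files.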
48
crates/red_knot/src/target_version.rs
Normal file
@@ -0,0 +1,48 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl TargetVersion {
    const fn as_str(self) -> &'static str {
        match self {
            Self::Py37 => "py37",
            Self::Py38 => "py38",
            Self::Py39 => "py39",
            Self::Py310 => "py310",
            Self::Py311 => "py311",
            Self::Py312 => "py312",
            Self::Py313 => "py313",
        }
    }
}

impl std::fmt::Display for TargetVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}

impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => Self::PY37,
            TargetVersion::Py38 => Self::PY38,
            TargetVersion::Py39 => Self::PY39,
            TargetVersion::Py310 => Self::PY310,
            TargetVersion::Py311 => Self::PY311,
            TargetVersion::Py312 => Self::PY312,
            TargetVersion::Py313 => Self::PY313,
        }
    }
}
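`TargetVersion` is deliberately just the CLI-facing spelling of a version; the semantic layer consumes `red_knot_python_semantic::PythonVersion` via the `From` impl above. A hedged sketch of the round trip inside this crate (the `configure` helper is hypothetical; `Display` and `From` are the impls shown in the hunk):

    use red_knot_python_semantic::PythonVersion;

    // Hypothetical helper: log the CLI spelling, hand the semantic layer its own type.
    fn configure(target: TargetVersion) -> PythonVersion {
        // `Display` prints the clap value spelling, e.g. "py38".
        tracing::debug!("target-version: {target}");
        PythonVersion::from(target)
    }

With the `#[default]` attribute above, `configure(TargetVersion::default())` yields `PythonVersion::PY38`.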
1
crates/red_knot/src/verbosity.rs
Normal file
@@ -0,0 +1 @@
@@ -1,111 +0,0 @@
use std::path::Path;

use anyhow::Context;
use notify::event::{CreateKind, ModifyKind, RemoveKind};
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};

use ruff_db::system::{SystemPath, SystemPathBuf};

pub struct FileWatcher {
    watcher: RecommendedWatcher,
}

pub trait EventHandler: Send + 'static {
    fn handle(&self, changes: Vec<FileWatcherChange>);
}

impl<F> EventHandler for F
where
    F: Fn(Vec<FileWatcherChange>) + Send + 'static,
{
    fn handle(&self, changes: Vec<FileWatcherChange>) {
        let f = self;
        f(changes);
    }
}

impl FileWatcher {
    pub fn new<E>(handler: E) -> anyhow::Result<Self>
    where
        E: EventHandler,
    {
        Self::from_handler(Box::new(handler))
    }

    fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
        let watcher = recommended_watcher(move |event: notify::Result<Event>| {
            match event {
                Ok(event) => {
                    // TODO verify that this handles all events correctly
                    let change_kind = match event.kind {
                        EventKind::Create(CreateKind::File) => FileChangeKind::Created,
                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::From)) => {
                            FileChangeKind::Deleted
                        }
                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::To)) => {
                            FileChangeKind::Created
                        }
                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Any)) => {
                            // TODO Introduce a better catch all event for cases that we don't understand.
                            FileChangeKind::Created
                        }
                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Both)) => {
                            todo!("Handle both create and delete event.");
                        }
                        EventKind::Modify(_) => FileChangeKind::Modified,
                        EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
                        _ => {
                            return;
                        }
                    };

                    let mut changes = Vec::new();

                    for path in event.paths {
                        if let Some(fs_path) = SystemPath::from_std_path(&path) {
                            changes
                                .push(FileWatcherChange::new(fs_path.to_path_buf(), change_kind));
                        }
                    }

                    if !changes.is_empty() {
                        handler.handle(changes);
                    }
                }
                // TODO proper error handling
                Err(err) => {
                    panic!("Error: {err}");
                }
            }
        })
        .context("Failed to create file watcher.")?;

        Ok(Self { watcher })
    }

    pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
        self.watcher.watch(path, RecursiveMode::Recursive)?;

        Ok(())
    }
}

#[derive(Clone, Debug)]
pub struct FileWatcherChange {
    pub path: SystemPathBuf,
    #[allow(unused)]
    pub kind: FileChangeKind,
}

impl FileWatcherChange {
    pub fn new(path: SystemPathBuf, kind: FileChangeKind) -> Self {
        Self { path, kind }
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileChangeKind {
    Created,
    Modified,
    Deleted,
}
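The blanket impl above is what let the old `main` pass a plain closure to `FileWatcher::new`. A minimal sketch against this now-deleted API (the watched directory is illustrative):

    fn watch_cwd() -> anyhow::Result<()> {
        let mut watcher = FileWatcher::new(|changes: Vec<FileWatcherChange>| {
            // The handler runs on notify's watcher thread, hence `Send + 'static`.
            for change in &changes {
                eprintln!("{:?}: {:?}", change.kind, change.path);
            }
        })?;
        watcher.watch_folder(std::path::Path::new("."))?;
        Ok(())
    }

Note that dropping the returned `FileWatcher` stops the watch, so callers had to keep it alive for as long as events were wanted.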
@@ -1,344 +0,0 @@
// TODO: Fix clippy warnings created by salsa macros
#![allow(clippy::used_underscore_binding)]

use std::{collections::BTreeMap, sync::Arc};

use rustc_hash::{FxBuildHasher, FxHashSet};

pub use metadata::{PackageMetadata, WorkspaceMetadata};
use ruff_db::{
    files::{system_path_to_file, File},
    system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
};
use ruff_python_ast::{name::Name, PySourceType};

use crate::{
    db::Db,
    lint::{lint_semantic, lint_syntax, Diagnostics},
};

mod metadata;

/// The project workspace as a Salsa ingredient.
///
/// A workspace consists of one or multiple packages. Packages can be nested. A file in a workspace
/// belongs to no or exactly one package (files can't belong to multiple packages).
///
/// How workspaces and packages are discovered is TBD. For now, a workspace can be any directory,
/// and it always contains a single package which has the same root as the workspace.
///
/// ## Examples
///
/// ```text
/// app-1/
///   pyproject.toml
///   src/
///     ... python files
///
/// app-2/
///   pyproject.toml
///   src/
///     ... python files
///
/// shared/
///   pyproject.toml
///   src/
///     ... python files
///
/// pyproject.toml
/// ```
///
/// The above project structure has three packages: `app-1`, `app-2`, and `shared`.
/// Each of the packages can define their own settings in their `pyproject.toml` file, but
/// they must be compatible. For example, each package can define a different `requires-python` range,
/// but the ranges must overlap.
///
/// ## How is a workspace different from a program?
/// There are two (related) motivations:
///
/// 1. Program is defined in `ruff_db` and it can't reference the settings types for the linter and formatter
///    without introducing a cyclic dependency. The workspace is defined in a higher level crate
///    where it can reference these setting types.
/// 2. Running `ruff check` with different target versions results in different programs (settings) but
///    it remains the same workspace. That's why program is a narrowed view of the workspace only
///    holding on to the most fundamental settings required for checking.
#[salsa::input]
pub struct Workspace {
    #[id]
    #[return_ref]
    root_buf: SystemPathBuf,

    /// The files that are open in the workspace.
    ///
    /// Setting the open files to a non-`None` value changes `check` to only check the
    /// open files rather than all files in the workspace.
    #[return_ref]
    open_file_set: Option<Arc<FxHashSet<File>>>,

    /// The (first-party) packages in this workspace.
    #[return_ref]
    package_tree: BTreeMap<SystemPathBuf, Package>,
}

/// A first-party package in a workspace.
#[salsa::input]
pub struct Package {
    #[return_ref]
    pub name: Name,

    /// The path to the root directory of the package.
    #[id]
    #[return_ref]
    root_buf: SystemPathBuf,

    /// The files that are part of this package.
    #[return_ref]
    file_set: Arc<FxHashSet<File>>,
    // TODO: Add the loaded settings.
}

impl Workspace {
    /// Discovers the closest workspace at `path` and returns its metadata.
    pub fn from_metadata(db: &dyn Db, metadata: WorkspaceMetadata) -> Self {
        let mut packages = BTreeMap::new();

        for package in metadata.packages {
            packages.insert(package.root.clone(), Package::from_metadata(db, package));
        }

        Workspace::new(db, metadata.root, None, packages)
    }

    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.root_buf(db)
    }

    pub fn packages(self, db: &dyn Db) -> impl Iterator<Item = Package> + '_ {
        self.package_tree(db).values().copied()
    }

    pub fn reload(self, db: &mut dyn Db, metadata: WorkspaceMetadata) {
        assert_eq!(self.root(db), metadata.root());

        let mut old_packages = self.package_tree(db).clone();
        let mut new_packages = BTreeMap::new();

        for package_metadata in metadata.packages {
            let path = package_metadata.root().to_path_buf();

            let package = if let Some(old_package) = old_packages.remove(&path) {
                old_package.update(db, package_metadata);
                old_package
            } else {
                Package::from_metadata(db, package_metadata)
            };

            new_packages.insert(path, package);
        }

        self.set_package_tree(db).to(new_packages);
    }

    pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {
        let path = metadata.root().to_path_buf();

        if let Some(package) = self.package_tree(db).get(&path).copied() {
            package.update(db, metadata);
            Ok(())
        } else {
            Err(anyhow::anyhow!("Package {path} not found"))
        }
    }

    /// Returns the closest package to which the first-party `path` belongs.
    ///
    /// Returns `None` if the `path` is outside of any package or if `file` isn't a first-party file
    /// (e.g. third-party dependencies or `excluded`).
    pub fn package(self, db: &dyn Db, path: &SystemPath) -> Option<Package> {
        let packages = self.package_tree(db);

        let (package_path, package) = packages.range(..path.to_path_buf()).next_back()?;

        if path.starts_with(package_path) {
            Some(*package)
        } else {
            None
        }
    }

    /// Checks all open files in the workspace and its dependencies.
    #[tracing::instrument(level = "debug", skip_all)]
    pub fn check(self, db: &dyn Db) -> Vec<String> {
        let mut result = Vec::new();

        if let Some(open_files) = self.open_files(db) {
            for file in open_files {
                result.extend_from_slice(&check_file(db, *file));
            }
        } else {
            for package in self.packages(db) {
                result.extend(package.check(db));
            }
        }

        result
    }

    /// Opens a file in the workspace.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn open_file(self, db: &mut dyn Db, file: File) {
        let mut open_files = self.take_open_files(db);
        open_files.insert(file);
        self.set_open_files(db, open_files);
    }

    /// Closes a file in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
        let mut open_files = self.take_open_files(db);
        let removed = open_files.remove(&file);

        if removed {
            self.set_open_files(db, open_files);
        }

        removed
    }

    /// Returns the open files in the workspace or `None` if the entire workspace should be checked.
    pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
        self.open_file_set(db).as_deref()
    }

    /// Sets the open files in the workspace.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
        self.set_open_file_set(db).to(Some(Arc::new(open_files)));
    }

    /// This takes the open files from the workspace and returns them.
    ///
    /// This changes the behavior of `check` to check all files in the workspace instead of just the open files.
    pub fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
        let open_files = self.open_file_set(db).clone();

        if let Some(open_files) = open_files {
            // Salsa will cancel any pending queries and remove its own reference to `open_files`
            // so that the reference counter to `open_files` now drops to 1.
            self.set_open_file_set(db).to(None);

            Arc::try_unwrap(open_files).unwrap()
        } else {
            FxHashSet::default()
        }
    }
}

impl Package {
    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.root_buf(db)
    }

    /// Returns `true` if `file` is a first-party file part of this package.
    pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
        self.files(db).contains(&file)
    }

    pub fn files(self, db: &dyn Db) -> &FxHashSet<File> {
        self.file_set(db)
    }

    pub fn remove_file(self, db: &mut dyn Db, file: File) -> bool {
        let mut files_arc = self.file_set(db).clone();

        // Set a dummy value. Salsa will cancel any pending queries and remove its own reference to `files`
        // so that the reference counter to `files` now drops to 1.
        self.set_file_set(db).to(Arc::new(FxHashSet::default()));

        let files = Arc::get_mut(&mut files_arc).unwrap();
        let removed = files.remove(&file);
        self.set_file_set(db).to(files_arc);

        removed
    }

    pub(crate) fn check(self, db: &dyn Db) -> Vec<String> {
        let mut result = Vec::new();
        for file in self.files(db) {
            let diagnostics = check_file(db, *file);
            result.extend_from_slice(&diagnostics);
        }

        result
    }

    fn from_metadata(db: &dyn Db, metadata: PackageMetadata) -> Self {
        let files = discover_package_files(db, metadata.root());

        Self::new(db, metadata.name, metadata.root, Arc::new(files))
    }

    fn update(self, db: &mut dyn Db, metadata: PackageMetadata) {
        let root = self.root(db);
        assert_eq!(root, metadata.root());

        let files = discover_package_files(db, root);

        self.set_name(db).to(metadata.name);
        self.set_file_set(db).to(Arc::new(files));
    }
}

pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
    let mut diagnostics = Vec::new();
    diagnostics.extend_from_slice(lint_syntax(db, file));
    diagnostics.extend_from_slice(lint_semantic(db, file));
    Diagnostics::from(diagnostics)
}

fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
    let paths = std::sync::Mutex::new(Vec::new());

    db.system().walk_directory(path).run(|| {
        Box::new(|entry| {
            match entry {
                Ok(entry) => {
                    // Skip over any non python files to avoid creating too many entries in `Files`.
                    if entry.file_type().is_file()
                        && entry
                            .path()
                            .extension()
                            .and_then(PySourceType::try_from_extension)
                            .is_some()
                    {
                        let mut paths = paths.lock().unwrap();
                        paths.push(entry.into_path());
                    }
                }
                Err(error) => {
                    // TODO Handle error
                    tracing::error!("Failed to walk path: {error}");
                }
            }

            WalkState::Continue
        })
    });

    let paths = paths.into_inner().unwrap();
    let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);

    for path in paths {
        // If this returns `None`, then the file was deleted between the `walk_directory` call and now.
        // We can ignore this.
        if let Some(file) = system_path_to_file(db.upcast(), &path) {
            files.insert(file);
        }
    }

    files
}
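The `open_file_set` input is the switch between whole-workspace and open-files-only checking. A hedged sketch of narrowing a check to one file and widening again, assuming a `db: &mut dyn Db` and a `File` obtained elsewhere (e.g. via `system_path_to_file`):

    fn check_single_file(db: &mut dyn Db, workspace: Workspace, file: File) -> Vec<String> {
        // Narrow `check` to just this file...
        workspace.open_file(db, file);
        let diagnostics = workspace.check(db);
        // ...then restore whole-workspace checking.
        workspace.take_open_files(db);
        diagnostics
    }

Because `open_file_set` is a Salsa input, both mutations cancel pending queries before they take effect, which is what the comments in `take_open_files` and `remove_file` rely on.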
1282
crates/red_knot/tests/file_watching.rs
Normal file
File diff suppressed because it is too large
@@ -1,39 +0,0 @@
[package]
name = "red_knot_module_resolver"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff_db = { workspace = true }
ruff_python_stdlib = { workspace = true }

compact_str = { workspace = true }
camino = { workspace = true }
once_cell = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true }
zip = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }

[dev-dependencies]
ruff_db = { workspace = true, features = ["os"] }

anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }

[lints]
workspace = true
@@ -1,126 +0,0 @@
use ruff_db::Upcast;

use crate::resolver::{
    editable_install_resolution_paths, file_to_module, internal::ModuleNameIngredient,
    module_resolution_settings, resolve_module_query,
};
use crate::typeshed::parse_typeshed_versions;

#[salsa::jar(db=Db)]
pub struct Jar(
    ModuleNameIngredient<'_>,
    module_resolution_settings,
    editable_install_resolution_paths,
    resolve_module_query,
    file_to_module,
    parse_typeshed_versions,
);

pub trait Db: salsa::DbWithJar<Jar> + ruff_db::Db + Upcast<dyn ruff_db::Db> {}

#[cfg(test)]
pub(crate) mod tests {
    use std::sync;

    use salsa::DebugWithDb;

    use ruff_db::files::Files;
    use ruff_db::system::{DbWithTestSystem, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;

    use crate::vendored_typeshed_stubs;

    use super::*;

    #[salsa::db(Jar, ruff_db::Jar)]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        system: TestSystem,
        vendored: VendoredFileSystem,
        files: Files,
        events: sync::Arc<sync::Mutex<Vec<salsa::Event>>>,
    }

    impl TestDb {
        pub(crate) fn new() -> Self {
            Self {
                storage: salsa::Storage::default(),
                system: TestSystem::default(),
                vendored: vendored_typeshed_stubs().snapshot(),
                events: sync::Arc::default(),
                files: Files::default(),
            }
        }

        /// Takes the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
            let inner = sync::Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");

            let events = inner.get_mut().unwrap();
            std::mem::take(&mut *events)
        }

        /// Clears the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        pub(crate) fn clear_salsa_events(&mut self) {
            self.take_salsa_events();
        }
    }

    impl Upcast<dyn ruff_db::Db> for TestDb {
        fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
            self
        }
    }

    impl ruff_db::Db for TestDb {
        fn vendored(&self) -> &VendoredFileSystem {
            &self.vendored
        }

        fn system(&self) -> &dyn ruff_db::system::System {
            &self.system
        }

        fn files(&self) -> &Files {
            &self.files
        }
    }

    impl Db for TestDb {}

    impl DbWithTestSystem for TestDb {
        fn test_system(&self) -> &TestSystem {
            &self.system
        }

        fn test_system_mut(&mut self) -> &mut TestSystem {
            &mut self.system
        }
    }

    impl salsa::Database for TestDb {
        fn salsa_event(&self, event: salsa::Event) {
            tracing::trace!("event: {:?}", event.debug(self));
            let mut events = self.events.lock().unwrap();
            events.push(event);
        }
    }

    impl salsa::ParallelDatabase for TestDb {
        fn snapshot(&self) -> salsa::Snapshot<Self> {
            salsa::Snapshot::new(Self {
                storage: self.storage.snapshot(),
                system: self.system.snapshot(),
                vendored: self.vendored.snapshot(),
                files: self.files.snapshot(),
                events: self.events.clone(),
            })
        }
    }
}
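`TestDb` records every Salsa event so tests can assert on cache hits and invalidations. A hedged test sketch, assuming `DbWithTestSystem::write_file` from `ruff_db` (its exact signature is not shown in this diff):

    #[test]
    fn take_salsa_events_drains_the_log() -> anyhow::Result<()> {
        let mut db = TestDb::new();
        db.write_file("/src/foo.py", "x = 1")?;

        let _events = db.take_salsa_events();
        // The log was drained, so a second take is empty.
        assert!(db.take_salsa_events().is_empty());
        Ok(())
    }

The `Arc::get_mut` in `take_salsa_events` is also why the method panics while snapshots are alive: a pending snapshot still holds a clone of the event log.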
@@ -1,18 +0,0 @@
mod db;
mod module;
mod module_name;
mod path;
mod resolver;
mod state;
mod typeshed;

#[cfg(test)]
mod testing;

pub use db::{Db, Jar};
pub use module::{Module, ModuleKind};
pub use module_name::ModuleName;
pub use resolver::resolve_module;
pub use typeshed::{
    vendored_typeshed_stubs, TypeshedVersionsParseError, TypeshedVersionsParseErrorKind,
};
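The crate's public surface is small: construct a `ModuleName`, resolve it, get a `Module` back. A hedged sketch (that `ModuleName::new` returns `Option` for invalid names is an assumption from this era of the crate):

    fn resolve_os(db: &dyn Db) -> Option<Module> {
        let name = ModuleName::new("os")?;
        resolve_module(db, name)
    }

Resolution is a Salsa query, so repeated calls with an unchanged search path hit the cache.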
File diff suppressed because it is too large
@@ -1,30 +0,0 @@
use ruff_db::program::TargetVersion;
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;

use crate::db::Db;
use crate::typeshed::LazyTypeshedVersions;

pub(crate) struct ResolverState<'db> {
    pub(crate) db: &'db dyn Db,
    pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
    pub(crate) target_version: TargetVersion,
}

impl<'db> ResolverState<'db> {
    pub(crate) fn new(db: &'db dyn Db, target_version: TargetVersion) -> Self {
        Self {
            db,
            typeshed_versions: LazyTypeshedVersions::new(),
            target_version,
        }
    }

    pub(crate) fn system(&self) -> &dyn System {
        self.db.system()
    }

    pub(crate) fn vendored(&self) -> &VendoredFileSystem {
        self.db.vendored()
    }
}
@@ -1,8 +0,0 @@
pub use self::vendored::vendored_typeshed_stubs;
pub(crate) use self::versions::{
    parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsQueryResult,
};
pub use self::versions::{TypeshedVersionsParseError, TypeshedVersionsParseErrorKind};

mod vendored;
mod versions;
@@ -1 +0,0 @@
f863db6bc5242348ceaa6a3bca4e59aa9e62faaa
@@ -1,100 +0,0 @@
import sys
from _typeshed import StrPath
from collections.abc import Mapping

LC_CTYPE: int
LC_COLLATE: int
LC_TIME: int
LC_MONETARY: int
LC_NUMERIC: int
LC_ALL: int
CHAR_MAX: int

def setlocale(category: int, locale: str | None = None, /) -> str: ...
def localeconv() -> Mapping[str, int | str | list[int]]: ...

if sys.version_info >= (3, 11):
    def getencoding() -> str: ...

def strcoll(os1: str, os2: str, /) -> int: ...
def strxfrm(string: str, /) -> str: ...

# native gettext functions
# https://docs.python.org/3/library/locale.html#access-to-message-catalogs
# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626
if sys.platform != "win32":
    LC_MESSAGES: int

    ABDAY_1: int
    ABDAY_2: int
    ABDAY_3: int
    ABDAY_4: int
    ABDAY_5: int
    ABDAY_6: int
    ABDAY_7: int

    ABMON_1: int
    ABMON_2: int
    ABMON_3: int
    ABMON_4: int
    ABMON_5: int
    ABMON_6: int
    ABMON_7: int
    ABMON_8: int
    ABMON_9: int
    ABMON_10: int
    ABMON_11: int
    ABMON_12: int

    DAY_1: int
    DAY_2: int
    DAY_3: int
    DAY_4: int
    DAY_5: int
    DAY_6: int
    DAY_7: int

    ERA: int
    ERA_D_T_FMT: int
    ERA_D_FMT: int
    ERA_T_FMT: int

    MON_1: int
    MON_2: int
    MON_3: int
    MON_4: int
    MON_5: int
    MON_6: int
    MON_7: int
    MON_8: int
    MON_9: int
    MON_10: int
    MON_11: int
    MON_12: int

    CODESET: int
    D_T_FMT: int
    D_FMT: int
    T_FMT: int
    T_FMT_AMPM: int
    AM_STR: int
    PM_STR: int

    RADIXCHAR: int
    THOUSEP: int
    YESEXPR: int
    NOEXPR: int
    CRNCYSTR: int
    ALT_DIGITS: int

    def nl_langinfo(key: int, /) -> str: ...

    # This is dependent on `libintl.h` which is a part of `gettext`
    # system dependency. These functions might be missing.
    # But, we always say that they are present.
    def gettext(msg: str, /) -> str: ...
    def dgettext(domain: str | None, msg: str, /) -> str: ...
    def dcgettext(domain: str | None, msg: str, category: int, /) -> str: ...
    def textdomain(domain: str | None, /) -> str: ...
    def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: ...
    def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: ...
@@ -1,117 +0,0 @@
import sys
from typing import Literal

SF_APPEND: Literal[0x00040000]
SF_ARCHIVED: Literal[0x00010000]
SF_IMMUTABLE: Literal[0x00020000]
SF_NOUNLINK: Literal[0x00100000]
SF_SNAPSHOT: Literal[0x00200000]

ST_MODE: Literal[0]
ST_INO: Literal[1]
ST_DEV: Literal[2]
ST_NLINK: Literal[3]
ST_UID: Literal[4]
ST_GID: Literal[5]
ST_SIZE: Literal[6]
ST_ATIME: Literal[7]
ST_MTIME: Literal[8]
ST_CTIME: Literal[9]

S_IFIFO: Literal[0o010000]
S_IFLNK: Literal[0o120000]
S_IFREG: Literal[0o100000]
S_IFSOCK: Literal[0o140000]
S_IFBLK: Literal[0o060000]
S_IFCHR: Literal[0o020000]
S_IFDIR: Literal[0o040000]

# These are 0 on systems that don't support the specific kind of file.
# Example: Linux doesn't support door files, so S_IFDOOR is 0 on linux.
S_IFDOOR: int
S_IFPORT: int
S_IFWHT: int

S_ISUID: Literal[0o4000]
S_ISGID: Literal[0o2000]
S_ISVTX: Literal[0o1000]

S_IRWXU: Literal[0o0700]
S_IRUSR: Literal[0o0400]
S_IWUSR: Literal[0o0200]
S_IXUSR: Literal[0o0100]

S_IRWXG: Literal[0o0070]
S_IRGRP: Literal[0o0040]
S_IWGRP: Literal[0o0020]
S_IXGRP: Literal[0o0010]

S_IRWXO: Literal[0o0007]
S_IROTH: Literal[0o0004]
S_IWOTH: Literal[0o0002]
S_IXOTH: Literal[0o0001]

S_ENFMT: Literal[0o2000]
S_IREAD: Literal[0o0400]
S_IWRITE: Literal[0o0200]
S_IEXEC: Literal[0o0100]

UF_APPEND: Literal[0x00000004]
UF_COMPRESSED: Literal[0x00000020]  # OS X 10.6+ only
UF_HIDDEN: Literal[0x00008000]  # OX X 10.5+ only
UF_IMMUTABLE: Literal[0x00000002]
UF_NODUMP: Literal[0x00000001]
UF_NOUNLINK: Literal[0x00000010]
UF_OPAQUE: Literal[0x00000008]

def S_IMODE(mode: int, /) -> int: ...
def S_IFMT(mode: int, /) -> int: ...
def S_ISBLK(mode: int, /) -> bool: ...
def S_ISCHR(mode: int, /) -> bool: ...
def S_ISDIR(mode: int, /) -> bool: ...
def S_ISDOOR(mode: int, /) -> bool: ...
def S_ISFIFO(mode: int, /) -> bool: ...
def S_ISLNK(mode: int, /) -> bool: ...
def S_ISPORT(mode: int, /) -> bool: ...
def S_ISREG(mode: int, /) -> bool: ...
def S_ISSOCK(mode: int, /) -> bool: ...
def S_ISWHT(mode: int, /) -> bool: ...
def filemode(mode: int, /) -> str: ...

if sys.platform == "win32":
    IO_REPARSE_TAG_SYMLINK: int
    IO_REPARSE_TAG_MOUNT_POINT: int
    IO_REPARSE_TAG_APPEXECLINK: int

if sys.platform == "win32":
    FILE_ATTRIBUTE_ARCHIVE: Literal[32]
    FILE_ATTRIBUTE_COMPRESSED: Literal[2048]
    FILE_ATTRIBUTE_DEVICE: Literal[64]
    FILE_ATTRIBUTE_DIRECTORY: Literal[16]
    FILE_ATTRIBUTE_ENCRYPTED: Literal[16384]
    FILE_ATTRIBUTE_HIDDEN: Literal[2]
    FILE_ATTRIBUTE_INTEGRITY_STREAM: Literal[32768]
    FILE_ATTRIBUTE_NORMAL: Literal[128]
    FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Literal[8192]
    FILE_ATTRIBUTE_NO_SCRUB_DATA: Literal[131072]
    FILE_ATTRIBUTE_OFFLINE: Literal[4096]
    FILE_ATTRIBUTE_READONLY: Literal[1]
    FILE_ATTRIBUTE_REPARSE_POINT: Literal[1024]
    FILE_ATTRIBUTE_SPARSE_FILE: Literal[512]
    FILE_ATTRIBUTE_SYSTEM: Literal[4]
    FILE_ATTRIBUTE_TEMPORARY: Literal[256]
    FILE_ATTRIBUTE_VIRTUAL: Literal[65536]

if sys.version_info >= (3, 13):
    SF_SETTABLE: Literal[0x3FFF0000]
    # https://github.com/python/cpython/issues/114081#issuecomment-2119017790
    # SF_RESTRICTED: Literal[0x00080000]
    SF_FIRMLINK: Literal[0x00800000]
    SF_DATALESS: Literal[0x40000000]

    SF_SUPPORTED: Literal[0x9F0000]
    SF_SYNTHETIC: Literal[0xC0000000]

    UF_TRACKED: Literal[0x00000040]
    UF_DATAVAULT: Literal[0x00000080]
    UF_SETTABLE: Literal[0x0000FFFF]
@@ -1,255 +0,0 @@
import sys
from _typeshed import ReadableBuffer
from collections.abc import Sequence
from typing import Any, Literal, NoReturn, final, overload

if sys.platform == "win32":
    ABOVE_NORMAL_PRIORITY_CLASS: Literal[0x8000]
    BELOW_NORMAL_PRIORITY_CLASS: Literal[0x4000]

    CREATE_BREAKAWAY_FROM_JOB: Literal[0x1000000]
    CREATE_DEFAULT_ERROR_MODE: Literal[0x4000000]
    CREATE_NO_WINDOW: Literal[0x8000000]
    CREATE_NEW_CONSOLE: Literal[0x10]
    CREATE_NEW_PROCESS_GROUP: Literal[0x200]

    DETACHED_PROCESS: Literal[8]
    DUPLICATE_CLOSE_SOURCE: Literal[1]
    DUPLICATE_SAME_ACCESS: Literal[2]

    ERROR_ALREADY_EXISTS: Literal[183]
    ERROR_BROKEN_PIPE: Literal[109]
    ERROR_IO_PENDING: Literal[997]
    ERROR_MORE_DATA: Literal[234]
    ERROR_NETNAME_DELETED: Literal[64]
    ERROR_NO_DATA: Literal[232]
    ERROR_NO_SYSTEM_RESOURCES: Literal[1450]
    ERROR_OPERATION_ABORTED: Literal[995]
    ERROR_PIPE_BUSY: Literal[231]
    ERROR_PIPE_CONNECTED: Literal[535]
    ERROR_SEM_TIMEOUT: Literal[121]

    FILE_FLAG_FIRST_PIPE_INSTANCE: Literal[0x80000]
    FILE_FLAG_OVERLAPPED: Literal[0x40000000]

    FILE_GENERIC_READ: Literal[1179785]
    FILE_GENERIC_WRITE: Literal[1179926]

    FILE_MAP_ALL_ACCESS: Literal[983071]
    FILE_MAP_COPY: Literal[1]
    FILE_MAP_EXECUTE: Literal[32]
    FILE_MAP_READ: Literal[4]
    FILE_MAP_WRITE: Literal[2]

    FILE_TYPE_CHAR: Literal[2]
    FILE_TYPE_DISK: Literal[1]
    FILE_TYPE_PIPE: Literal[3]
    FILE_TYPE_REMOTE: Literal[32768]
    FILE_TYPE_UNKNOWN: Literal[0]

    GENERIC_READ: Literal[0x80000000]
    GENERIC_WRITE: Literal[0x40000000]
    HIGH_PRIORITY_CLASS: Literal[0x80]
    INFINITE: Literal[0xFFFFFFFF]
    # Ignore the Flake8 error -- flake8-pyi assumes
    # most numbers this long will be implementation details,
    # but here we can see that it's a power of 2
    INVALID_HANDLE_VALUE: Literal[0xFFFFFFFFFFFFFFFF]  # noqa: Y054
    IDLE_PRIORITY_CLASS: Literal[0x40]
    NORMAL_PRIORITY_CLASS: Literal[0x20]
    REALTIME_PRIORITY_CLASS: Literal[0x100]
    NMPWAIT_WAIT_FOREVER: Literal[0xFFFFFFFF]

    MEM_COMMIT: Literal[0x1000]
    MEM_FREE: Literal[0x10000]
    MEM_IMAGE: Literal[0x1000000]
    MEM_MAPPED: Literal[0x40000]
    MEM_PRIVATE: Literal[0x20000]
    MEM_RESERVE: Literal[0x2000]

    NULL: Literal[0]
    OPEN_EXISTING: Literal[3]

    PIPE_ACCESS_DUPLEX: Literal[3]
    PIPE_ACCESS_INBOUND: Literal[1]
    PIPE_READMODE_MESSAGE: Literal[2]
    PIPE_TYPE_MESSAGE: Literal[4]
    PIPE_UNLIMITED_INSTANCES: Literal[255]
    PIPE_WAIT: Literal[0]

    PAGE_EXECUTE: Literal[0x10]
    PAGE_EXECUTE_READ: Literal[0x20]
    PAGE_EXECUTE_READWRITE: Literal[0x40]
    PAGE_EXECUTE_WRITECOPY: Literal[0x80]
    PAGE_GUARD: Literal[0x100]
    PAGE_NOACCESS: Literal[0x1]
    PAGE_NOCACHE: Literal[0x200]
    PAGE_READONLY: Literal[0x2]
    PAGE_READWRITE: Literal[0x4]
    PAGE_WRITECOMBINE: Literal[0x400]
    PAGE_WRITECOPY: Literal[0x8]

    PROCESS_ALL_ACCESS: Literal[0x1FFFFF]
    PROCESS_DUP_HANDLE: Literal[0x40]

    SEC_COMMIT: Literal[0x8000000]
    SEC_IMAGE: Literal[0x1000000]
    SEC_LARGE_PAGES: Literal[0x80000000]
    SEC_NOCACHE: Literal[0x10000000]
    SEC_RESERVE: Literal[0x4000000]
    SEC_WRITECOMBINE: Literal[0x40000000]

    STARTF_USESHOWWINDOW: Literal[0x1]
    STARTF_USESTDHANDLES: Literal[0x100]

    STD_ERROR_HANDLE: Literal[0xFFFFFFF4]
    STD_OUTPUT_HANDLE: Literal[0xFFFFFFF5]
    STD_INPUT_HANDLE: Literal[0xFFFFFFF6]

    STILL_ACTIVE: Literal[259]
    SW_HIDE: Literal[0]
    SYNCHRONIZE: Literal[0x100000]
    WAIT_ABANDONED_0: Literal[128]
    WAIT_OBJECT_0: Literal[0]
    WAIT_TIMEOUT: Literal[258]

    if sys.version_info >= (3, 10):
        LOCALE_NAME_INVARIANT: str
        LOCALE_NAME_MAX_LENGTH: int
        LOCALE_NAME_SYSTEM_DEFAULT: str
        LOCALE_NAME_USER_DEFAULT: str | None

        LCMAP_FULLWIDTH: int
        LCMAP_HALFWIDTH: int
        LCMAP_HIRAGANA: int
        LCMAP_KATAKANA: int
        LCMAP_LINGUISTIC_CASING: int
        LCMAP_LOWERCASE: int
        LCMAP_SIMPLIFIED_CHINESE: int
        LCMAP_TITLECASE: int
        LCMAP_TRADITIONAL_CHINESE: int
        LCMAP_UPPERCASE: int

    if sys.version_info >= (3, 12):
        COPYFILE2_CALLBACK_CHUNK_STARTED: Literal[1]
        COPYFILE2_CALLBACK_CHUNK_FINISHED: Literal[2]
        COPYFILE2_CALLBACK_STREAM_STARTED: Literal[3]
        COPYFILE2_CALLBACK_STREAM_FINISHED: Literal[4]
        COPYFILE2_CALLBACK_POLL_CONTINUE: Literal[5]
        COPYFILE2_CALLBACK_ERROR: Literal[6]

        COPYFILE2_PROGRESS_CONTINUE: Literal[0]
        COPYFILE2_PROGRESS_CANCEL: Literal[1]
        COPYFILE2_PROGRESS_STOP: Literal[2]
        COPYFILE2_PROGRESS_QUIET: Literal[3]
        COPYFILE2_PROGRESS_PAUSE: Literal[4]

        COPY_FILE_FAIL_IF_EXISTS: Literal[0x1]
        COPY_FILE_RESTARTABLE: Literal[0x2]
        COPY_FILE_OPEN_SOURCE_FOR_WRITE: Literal[0x4]
        COPY_FILE_ALLOW_DECRYPTED_DESTINATION: Literal[0x8]
        COPY_FILE_COPY_SYMLINK: Literal[0x800]
        COPY_FILE_NO_BUFFERING: Literal[0x1000]
        COPY_FILE_REQUEST_SECURITY_PRIVILEGES: Literal[0x2000]
        COPY_FILE_RESUME_FROM_PAUSE: Literal[0x4000]
        COPY_FILE_NO_OFFLOAD: Literal[0x40000]
        COPY_FILE_REQUEST_COMPRESSED_TRAFFIC: Literal[0x10000000]

        ERROR_ACCESS_DENIED: Literal[5]
        ERROR_PRIVILEGE_NOT_HELD: Literal[1314]

    def CloseHandle(handle: int, /) -> None: ...
    @overload
    def ConnectNamedPipe(handle: int, overlapped: Literal[True]) -> Overlapped: ...
    @overload
    def ConnectNamedPipe(handle: int, overlapped: Literal[False] = False) -> None: ...
    @overload
    def ConnectNamedPipe(handle: int, overlapped: bool) -> Overlapped | None: ...
    def CreateFile(
        file_name: str,
        desired_access: int,
        share_mode: int,
        security_attributes: int,
        creation_disposition: int,
        flags_and_attributes: int,
        template_file: int,
        /,
    ) -> int: ...
    def CreateJunction(src_path: str, dst_path: str, /) -> None: ...
    def CreateNamedPipe(
        name: str,
        open_mode: int,
        pipe_mode: int,
        max_instances: int,
        out_buffer_size: int,
        in_buffer_size: int,
        default_timeout: int,
        security_attributes: int,
        /,
    ) -> int: ...
    def CreatePipe(pipe_attrs: Any, size: int, /) -> tuple[int, int]: ...
    def CreateProcess(
        application_name: str | None,
        command_line: str | None,
        proc_attrs: Any,
        thread_attrs: Any,
        inherit_handles: bool,
        creation_flags: int,
        env_mapping: dict[str, str],
        current_directory: str | None,
        startup_info: Any,
        /,
    ) -> tuple[int, int, int, int]: ...
    def DuplicateHandle(
        source_process_handle: int,
        source_handle: int,
        target_process_handle: int,
        desired_access: int,
        inherit_handle: bool,
        options: int = 0,
        /,
    ) -> int: ...
    def ExitProcess(ExitCode: int, /) -> NoReturn: ...
    def GetACP() -> int: ...
    def GetFileType(handle: int) -> int: ...
    def GetCurrentProcess() -> int: ...
    def GetExitCodeProcess(process: int, /) -> int: ...
    def GetLastError() -> int: ...
    def GetModuleFileName(module_handle: int, /) -> str: ...
    def GetStdHandle(std_handle: int, /) -> int: ...
    def GetVersion() -> int: ...
    def OpenProcess(desired_access: int, inherit_handle: bool, process_id: int, /) -> int: ...
    def PeekNamedPipe(handle: int, size: int = 0, /) -> tuple[int, int] | tuple[bytes, int, int]: ...
    if sys.version_info >= (3, 10):
        def LCMapStringEx(locale: str, flags: int, src: str) -> str: ...
        def UnmapViewOfFile(address: int, /) -> None: ...

    @overload
    def ReadFile(handle: int, size: int, overlapped: Literal[True]) -> tuple[Overlapped, int]: ...
    @overload
    def ReadFile(handle: int, size: int, overlapped: Literal[False] = False) -> tuple[bytes, int]: ...
    @overload
    def ReadFile(handle: int, size: int, overlapped: int | bool) -> tuple[Any, int]: ...
    def SetNamedPipeHandleState(
        named_pipe: int, mode: int | None, max_collection_count: int | None, collect_data_timeout: int | None, /
    ) -> None: ...
    def TerminateProcess(handle: int, exit_code: int, /) -> None: ...
    def WaitForMultipleObjects(handle_seq: Sequence[int], wait_flag: bool, milliseconds: int = 0xFFFFFFFF, /) -> int: ...
    def WaitForSingleObject(handle: int, milliseconds: int, /) -> int: ...
    def WaitNamedPipe(name: str, timeout: int, /) -> None: ...
    @overload
    def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[True]) -> tuple[Overlapped, int]: ...
    @overload
    def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[False] = False) -> tuple[int, int]: ...
    @overload
    def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: int | bool) -> tuple[Any, int]: ...
    @final
    class Overlapped:
        event: int
        def GetOverlappedResult(self, wait: bool, /) -> tuple[int, int]: ...
        def cancel(self) -> None: ...
        def getbuffer(self) -> bytes | None: ...

    if sys.version_info >= (3, 12):
        def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: ...

    def NeedCurrentDirectoryForExePath(exe_name: str, /) -> bool: ...
@@ -1,20 +0,0 @@
import enum
import sys
from typing import Literal

LOG_THRESHOLD_FOR_CONNLOST_WRITES: Literal[5]
ACCEPT_RETRY_DELAY: Literal[1]
DEBUG_STACK_DEPTH: Literal[10]
SSL_HANDSHAKE_TIMEOUT: float
SENDFILE_FALLBACK_READBUFFER_SIZE: Literal[262144]
if sys.version_info >= (3, 11):
    SSL_SHUTDOWN_TIMEOUT: float
    FLOW_CONTROL_HIGH_WATER_SSL_READ: Literal[256]
    FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Literal[512]
if sys.version_info >= (3, 12):
    THREAD_JOIN_TIMEOUT: Literal[300]

class _SendfileMode(enum.Enum):
    UNSUPPORTED = 1
    TRY_NATIVE = 2
    FALLBACK = 3
@@ -1,16 +0,0 @@
from typing import Any, TypeVar

__all__ = ["Error", "copy", "deepcopy"]

_T = TypeVar("_T")

# None in CPython but non-None in Jython
PyStringMap: Any

# Note: memo and _nil are internal kwargs.
def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = []) -> _T: ...
def copy(x: _T) -> _T: ...

class Error(Exception): ...

error = Error
@@ -1,99 +0,0 @@
from _typeshed import BytesPath, Incomplete, StrOrBytesPath, StrPath, Unused
from abc import abstractmethod
from collections.abc import Callable, Iterable
from distutils.dist import Distribution
from distutils.file_util import _BytesPathT, _StrPathT
from typing import Any, ClassVar, Literal, overload

class Command:
    distribution: Distribution
    # Any to work around variance issues
    sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]]
    def __init__(self, dist: Distribution) -> None: ...
    @abstractmethod
    def initialize_options(self) -> None: ...
    @abstractmethod
    def finalize_options(self) -> None: ...
    @abstractmethod
    def run(self) -> None: ...
    def announce(self, msg: str, level: int = 1) -> None: ...
    def debug_print(self, msg: str) -> None: ...
    def ensure_string(self, option: str, default: str | None = None) -> None: ...
    def ensure_string_list(self, option: str | list[str]) -> None: ...
    def ensure_filename(self, option: str) -> None: ...
    def ensure_dirname(self, option: str) -> None: ...
    def get_command_name(self) -> str: ...
    def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ...
    def get_finalized_command(self, command: str, create: bool | Literal[0, 1] = 1) -> Command: ...
    def reinitialize_command(self, command: Command | str, reinit_subcommands: bool | Literal[0, 1] = 0) -> Command: ...
    def run_command(self, command: str) -> None: ...
    def get_sub_commands(self) -> list[str]: ...
    def warn(self, msg: str) -> None: ...
    def execute(self, func: Callable[..., object], args: Iterable[Any], msg: str | None = None, level: int = 1) -> None: ...
    def mkpath(self, name: str, mode: int = 0o777) -> None: ...
    @overload
    def copy_file(
        self,
        infile: StrPath,
        outfile: _StrPathT,
        preserve_mode: bool | Literal[0, 1] = 1,
        preserve_times: bool | Literal[0, 1] = 1,
        link: str | None = None,
        level: Unused = 1,
    ) -> tuple[_StrPathT | str, bool]: ...
    @overload
    def copy_file(
        self,
        infile: BytesPath,
        outfile: _BytesPathT,
        preserve_mode: bool | Literal[0, 1] = 1,
        preserve_times: bool | Literal[0, 1] = 1,
        link: str | None = None,
        level: Unused = 1,
    ) -> tuple[_BytesPathT | bytes, bool]: ...
    def copy_tree(
        self,
        infile: StrPath,
        outfile: str,
        preserve_mode: bool | Literal[0, 1] = 1,
        preserve_times: bool | Literal[0, 1] = 1,
        preserve_symlinks: bool | Literal[0, 1] = 0,
        level: Unused = 1,
    ) -> list[str]: ...
    @overload
    def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: ...
    @overload
    def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes: ...
    def spawn(self, cmd: Iterable[str], search_path: bool | Literal[0, 1] = 1, level: Unused = 1) -> None: ...
    @overload
    def make_archive(
        self,
        base_name: str,
        format: str,
        root_dir: StrOrBytesPath | None = None,
        base_dir: str | None = None,
        owner: str | None = None,
        group: str | None = None,
    ) -> str: ...
    @overload
    def make_archive(
        self,
        base_name: StrPath,
        format: str,
        root_dir: StrOrBytesPath,
        base_dir: str | None = None,
        owner: str | None = None,
        group: str | None = None,
    ) -> str: ...
    def make_file(
        self,
        infiles: str | list[str] | tuple[str, ...],
        outfile: StrOrBytesPath,
        func: Callable[..., object],
        args: list[Any],
        exec_msg: str | None = None,
        skip_msg: str | None = None,
        level: Unused = 1,
    ) -> None: ...
    def ensure_finalized(self) -> None: ...
    def dump_options(self, header: Incomplete | None = None, indent: str = "") -> None: ...
@@ -1,25 +0,0 @@
from typing import Any

from ..cmd import Command

def show_formats() -> None: ...

class bdist(Command):
    description: str
    user_options: Any
    boolean_options: Any
    help_options: Any
    no_format_option: Any
    default_format: Any
    format_commands: Any
    format_command: Any
    bdist_base: Any
    plat_name: Any
    formats: Any
    dist_dir: Any
    skip_build: int
    group: Any
    owner: Any
    def initialize_options(self) -> None: ...
    def finalize_options(self) -> None: ...
    def run(self) -> None: ...
@@ -1 +0,0 @@
DEBUG: bool | None
@@ -1,149 +0,0 @@
from _typeshed import Incomplete, StrOrBytesPath, StrPath, SupportsWrite
from collections.abc import Iterable, Mapping
from distutils.cmd import Command
from re import Pattern
from typing import IO, Any, ClassVar, Literal, TypeVar, overload
from typing_extensions import TypeAlias

command_re: Pattern[str]

_OptionsList: TypeAlias = list[tuple[str, str | None, str, int] | tuple[str, str | None, str]]
_CommandT = TypeVar("_CommandT", bound=Command)

class DistributionMetadata:
    def __init__(self, path: StrOrBytesPath | None = None) -> None: ...
    name: str | None
    version: str | None
    author: str | None
    author_email: str | None
    maintainer: str | None
    maintainer_email: str | None
    url: str | None
    license: str | None
    description: str | None
    long_description: str | None
    keywords: str | list[str] | None
    platforms: str | list[str] | None
    classifiers: str | list[str] | None
    download_url: str | None
    provides: list[str] | None
    requires: list[str] | None
    obsoletes: list[str] | None
    def read_pkg_file(self, file: IO[str]) -> None: ...
    def write_pkg_info(self, base_dir: StrPath) -> None: ...
    def write_pkg_file(self, file: SupportsWrite[str]) -> None: ...
    def get_name(self) -> str: ...
    def get_version(self) -> str: ...
    def get_fullname(self) -> str: ...
    def get_author(self) -> str: ...
    def get_author_email(self) -> str: ...
    def get_maintainer(self) -> str: ...
    def get_maintainer_email(self) -> str: ...
    def get_contact(self) -> str: ...
    def get_contact_email(self) -> str: ...
    def get_url(self) -> str: ...
    def get_license(self) -> str: ...
    def get_licence(self) -> str: ...
    def get_description(self) -> str: ...
    def get_long_description(self) -> str: ...
    def get_keywords(self) -> str | list[str]: ...
    def get_platforms(self) -> str | list[str]: ...
    def get_classifiers(self) -> str | list[str]: ...
    def get_download_url(self) -> str: ...
    def get_requires(self) -> list[str]: ...
    def set_requires(self, value: Iterable[str]) -> None: ...
    def get_provides(self) -> list[str]: ...
    def set_provides(self, value: Iterable[str]) -> None: ...
    def get_obsoletes(self) -> list[str]: ...
    def set_obsoletes(self, value: Iterable[str]) -> None: ...

class Distribution:
    cmdclass: dict[str, type[Command]]
    metadata: DistributionMetadata
    def __init__(self, attrs: Mapping[str, Any] | None = None) -> None: ...
    def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ...
    def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ...
    @overload
    def get_command_obj(self, command: str, create: Literal[1, True] = 1) -> Command: ...
    @overload
    def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ...
    global_options: ClassVar[_OptionsList]
    common_usage: ClassVar[str]
    display_options: ClassVar[_OptionsList]
    display_option_names: ClassVar[list[str]]
    negative_opt: ClassVar[dict[str, str]]
    verbose: int
    dry_run: int
    help: int
    command_packages: list[str] | None
    script_name: str | None
    script_args: list[str] | None
    command_options: dict[str, dict[str, tuple[str, str]]]
    dist_files: list[tuple[str, str, str]]
    packages: Incomplete
    package_data: dict[str, list[str]]
    package_dir: Incomplete
    py_modules: Incomplete
    libraries: Incomplete
    headers: Incomplete
    ext_modules: Incomplete
    ext_package: Incomplete
    include_dirs: Incomplete
    extra_path: Incomplete
    scripts: Incomplete
    data_files: Incomplete
    password: str
    command_obj: Incomplete
    have_run: Incomplete
    want_user_cfg: bool
    def dump_option_dicts(
        self, header: Incomplete | None = None, commands: Incomplete | None = None, indent: str = ""
    ) -> None: ...
    def find_config_files(self): ...
    commands: Incomplete
    def parse_command_line(self): ...
    def finalize_options(self) -> None: ...
    def handle_display_options(self, option_order): ...
    def print_command_list(self, commands, header, max_length) -> None: ...
    def print_commands(self) -> None: ...
    def get_command_list(self): ...
    def get_command_packages(self): ...
    def get_command_class(self, command: str) -> type[Command]: ...
    @overload
    def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: ...
    @overload
    def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ...
    def announce(self, msg, level: int = 2) -> None: ...
    def run_commands(self) -> None: ...
    def run_command(self, command: str) -> None: ...
    def has_pure_modules(self) -> bool: ...
    def has_ext_modules(self) -> bool: ...
    def has_c_libraries(self) -> bool: ...
    def has_modules(self) -> bool: ...
    def has_headers(self) -> bool: ...
    def has_scripts(self) -> bool: ...
    def has_data_files(self) -> bool: ...
    def is_pure(self) -> bool: ...

    # Getter methods generated in __init__
    def get_name(self) -> str: ...
    def get_version(self) -> str: ...
    def get_fullname(self) -> str: ...
    def get_author(self) -> str: ...
    def get_author_email(self) -> str: ...
    def get_maintainer(self) -> str: ...
    def get_maintainer_email(self) -> str: ...
    def get_contact(self) -> str: ...
    def get_contact_email(self) -> str: ...
    def get_url(self) -> str: ...
    def get_license(self) -> str: ...
    def get_licence(self) -> str: ...
    def get_description(self) -> str: ...
    def get_long_description(self) -> str: ...
    def get_keywords(self) -> str | list[str]: ...
    def get_platforms(self) -> str | list[str]: ...
    def get_classifiers(self) -> str | list[str]: ...
    def get_download_url(self) -> str: ...
    def get_requires(self) -> list[str]: ...
    def get_provides(self) -> list[str]: ...
    def get_obsoletes(self) -> list[str]: ...
@@ -1,67 +0,0 @@
ENDMARKER: int
NAME: int
NUMBER: int
STRING: int
NEWLINE: int
INDENT: int
DEDENT: int
LPAR: int
RPAR: int
LSQB: int
RSQB: int
COLON: int
COMMA: int
SEMI: int
PLUS: int
MINUS: int
STAR: int
SLASH: int
VBAR: int
AMPER: int
LESS: int
GREATER: int
EQUAL: int
DOT: int
PERCENT: int
BACKQUOTE: int
LBRACE: int
RBRACE: int
EQEQUAL: int
NOTEQUAL: int
LESSEQUAL: int
GREATEREQUAL: int
TILDE: int
CIRCUMFLEX: int
LEFTSHIFT: int
RIGHTSHIFT: int
DOUBLESTAR: int
PLUSEQUAL: int
MINEQUAL: int
STAREQUAL: int
SLASHEQUAL: int
PERCENTEQUAL: int
AMPEREQUAL: int
VBAREQUAL: int
CIRCUMFLEXEQUAL: int
LEFTSHIFTEQUAL: int
RIGHTSHIFTEQUAL: int
DOUBLESTAREQUAL: int
DOUBLESLASH: int
DOUBLESLASHEQUAL: int
OP: int
COMMENT: int
NL: int
RARROW: int
AT: int
ATEQUAL: int
AWAIT: int
ASYNC: int
ERRORTOKEN: int
COLONEQUAL: int
N_TOKENS: int
NT_OFFSET: int
tok_name: dict[int, str]

def ISTERMINAL(x: int) -> bool: ...
def ISNONTERMINAL(x: int) -> bool: ...
def ISEOF(x: int) -> bool: ...
@@ -1,19 +0,0 @@
import sys
from collections.abc import Callable, Iterable
from typing import Literal
from typing_extensions import TypeAlias

if sys.platform != "win32":
    __all__ = ["openpty", "fork", "spawn"]
    _Reader: TypeAlias = Callable[[int], bytes]

    STDIN_FILENO: Literal[0]
    STDOUT_FILENO: Literal[1]
    STDERR_FILENO: Literal[2]

    CHILD: Literal[0]
    def openpty() -> tuple[int, int]: ...
    def master_open() -> tuple[int, str]: ...  # deprecated, use openpty()
    def slave_open(tty_name: str) -> int: ...  # deprecated, use openpty()
    def fork() -> tuple[int, int]: ...
    def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ...
@@ -1,49 +0,0 @@
import sys

codes: dict[str, int]
messages: dict[int, str]

XML_ERROR_ABORTED: str
XML_ERROR_ASYNC_ENTITY: str
XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF: str
XML_ERROR_BAD_CHAR_REF: str
XML_ERROR_BINARY_ENTITY_REF: str
XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING: str
XML_ERROR_DUPLICATE_ATTRIBUTE: str
XML_ERROR_ENTITY_DECLARED_IN_PE: str
XML_ERROR_EXTERNAL_ENTITY_HANDLING: str
XML_ERROR_FEATURE_REQUIRES_XML_DTD: str
XML_ERROR_FINISHED: str
XML_ERROR_INCOMPLETE_PE: str
XML_ERROR_INCORRECT_ENCODING: str
XML_ERROR_INVALID_TOKEN: str
XML_ERROR_JUNK_AFTER_DOC_ELEMENT: str
XML_ERROR_MISPLACED_XML_PI: str
XML_ERROR_NOT_STANDALONE: str
XML_ERROR_NOT_SUSPENDED: str
XML_ERROR_NO_ELEMENTS: str
XML_ERROR_NO_MEMORY: str
XML_ERROR_PARAM_ENTITY_REF: str
XML_ERROR_PARTIAL_CHAR: str
XML_ERROR_PUBLICID: str
XML_ERROR_RECURSIVE_ENTITY_REF: str
XML_ERROR_SUSPENDED: str
XML_ERROR_SUSPEND_PE: str
XML_ERROR_SYNTAX: str
XML_ERROR_TAG_MISMATCH: str
XML_ERROR_TEXT_DECL: str
XML_ERROR_UNBOUND_PREFIX: str
XML_ERROR_UNCLOSED_CDATA_SECTION: str
XML_ERROR_UNCLOSED_TOKEN: str
XML_ERROR_UNDECLARING_PREFIX: str
XML_ERROR_UNDEFINED_ENTITY: str
XML_ERROR_UNEXPECTED_STATE: str
XML_ERROR_UNKNOWN_ENCODING: str
XML_ERROR_XML_DECL: str
if sys.version_info >= (3, 11):
    XML_ERROR_RESERVED_PREFIX_XML: str
    XML_ERROR_RESERVED_PREFIX_XMLNS: str
    XML_ERROR_RESERVED_NAMESPACE_URI: str
    XML_ERROR_INVALID_ARGUMENT: str
    XML_ERROR_NO_BUFFER: str
    XML_ERROR_AMPLIFICATION_LIMIT_BREACH: str
@@ -1,11 +0,0 @@
XML_CTYPE_ANY: int
XML_CTYPE_CHOICE: int
XML_CTYPE_EMPTY: int
XML_CTYPE_MIXED: int
XML_CTYPE_NAME: int
XML_CTYPE_SEQ: int

XML_CQUANT_NONE: int
XML_CQUANT_OPT: int
XML_CQUANT_PLUS: int
XML_CQUANT_REP: int
@@ -1,55 +0,0 @@
import sys
from typing import Literal, overload

if sys.platform != "win32":
    LOG_ALERT: Literal[1]
    LOG_AUTH: Literal[32]
    LOG_AUTHPRIV: Literal[80]
    LOG_CONS: Literal[2]
    LOG_CRIT: Literal[2]
    LOG_CRON: Literal[72]
    LOG_DAEMON: Literal[24]
    LOG_DEBUG: Literal[7]
    LOG_EMERG: Literal[0]
    LOG_ERR: Literal[3]
    LOG_INFO: Literal[6]
    LOG_KERN: Literal[0]
    LOG_LOCAL0: Literal[128]
    LOG_LOCAL1: Literal[136]
    LOG_LOCAL2: Literal[144]
    LOG_LOCAL3: Literal[152]
    LOG_LOCAL4: Literal[160]
    LOG_LOCAL5: Literal[168]
    LOG_LOCAL6: Literal[176]
    LOG_LOCAL7: Literal[184]
    LOG_LPR: Literal[48]
    LOG_MAIL: Literal[16]
    LOG_NDELAY: Literal[8]
    LOG_NEWS: Literal[56]
    LOG_NOTICE: Literal[5]
    LOG_NOWAIT: Literal[16]
    LOG_ODELAY: Literal[4]
    LOG_PERROR: Literal[32]
    LOG_PID: Literal[1]
    LOG_SYSLOG: Literal[40]
    LOG_USER: Literal[8]
    LOG_UUCP: Literal[64]
    LOG_WARNING: Literal[4]

    if sys.version_info >= (3, 13):
        LOG_FTP: Literal[88]
        LOG_INSTALL: Literal[112]
        LOG_LAUNCHD: Literal[192]
        LOG_NETINFO: Literal[96]
        LOG_RAS: Literal[120]
        LOG_REMOTEAUTH: Literal[104]

    def LOG_MASK(pri: int, /) -> int: ...
    def LOG_UPTO(pri: int, /) -> int: ...
    def closelog() -> None: ...
    def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ...
    def setlogmask(maskpri: int, /) -> int: ...
    @overload
    def syslog(priority: int, message: str) -> None: ...
    @overload
    def syslog(message: str) -> None: ...
@@ -1,80 +0,0 @@
from typing import Literal

# These are not actually bools. See #4669
NO: bool
YES: bool
TRUE: bool
FALSE: bool
ON: bool
OFF: bool
N: Literal["n"]
S: Literal["s"]
W: Literal["w"]
E: Literal["e"]
NW: Literal["nw"]
SW: Literal["sw"]
NE: Literal["ne"]
SE: Literal["se"]
NS: Literal["ns"]
EW: Literal["ew"]
NSEW: Literal["nsew"]
CENTER: Literal["center"]
NONE: Literal["none"]
X: Literal["x"]
Y: Literal["y"]
BOTH: Literal["both"]
LEFT: Literal["left"]
TOP: Literal["top"]
RIGHT: Literal["right"]
BOTTOM: Literal["bottom"]
RAISED: Literal["raised"]
SUNKEN: Literal["sunken"]
FLAT: Literal["flat"]
RIDGE: Literal["ridge"]
GROOVE: Literal["groove"]
SOLID: Literal["solid"]
HORIZONTAL: Literal["horizontal"]
VERTICAL: Literal["vertical"]
NUMERIC: Literal["numeric"]
CHAR: Literal["char"]
WORD: Literal["word"]
BASELINE: Literal["baseline"]
INSIDE: Literal["inside"]
OUTSIDE: Literal["outside"]
SEL: Literal["sel"]
SEL_FIRST: Literal["sel.first"]
SEL_LAST: Literal["sel.last"]
END: Literal["end"]
INSERT: Literal["insert"]
CURRENT: Literal["current"]
ANCHOR: Literal["anchor"]
ALL: Literal["all"]
NORMAL: Literal["normal"]
DISABLED: Literal["disabled"]
ACTIVE: Literal["active"]
HIDDEN: Literal["hidden"]
CASCADE: Literal["cascade"]
CHECKBUTTON: Literal["checkbutton"]
COMMAND: Literal["command"]
RADIOBUTTON: Literal["radiobutton"]
SEPARATOR: Literal["separator"]
SINGLE: Literal["single"]
BROWSE: Literal["browse"]
MULTIPLE: Literal["multiple"]
EXTENDED: Literal["extended"]
DOTBOX: Literal["dotbox"]
UNDERLINE: Literal["underline"]
PIESLICE: Literal["pieslice"]
CHORD: Literal["chord"]
ARC: Literal["arc"]
FIRST: Literal["first"]
LAST: Literal["last"]
BUTT: Literal["butt"]
PROJECTING: Literal["projecting"]
ROUND: Literal["round"]
BEVEL: Literal["bevel"]
MITER: Literal["miter"]
MOVETO: Literal["moveto"]
SCROLL: Literal["scroll"]
UNITS: Literal["units"]
PAGES: Literal["pages"]
@@ -1,28 +0,0 @@
import sys
from _typeshed import ReadableBuffer
from typing import Literal, overload

if sys.platform == "win32":
    SND_APPLICATION: Literal[128]
    SND_FILENAME: Literal[131072]
    SND_ALIAS: Literal[65536]
    SND_LOOP: Literal[8]
    SND_MEMORY: Literal[4]
    SND_PURGE: Literal[64]
    SND_ASYNC: Literal[1]
    SND_NODEFAULT: Literal[2]
    SND_NOSTOP: Literal[16]
    SND_NOWAIT: Literal[8192]

    MB_ICONASTERISK: Literal[64]
    MB_ICONEXCLAMATION: Literal[48]
    MB_ICONHAND: Literal[16]
    MB_ICONQUESTION: Literal[32]
    MB_OK: Literal[0]
    def Beep(frequency: int, duration: int) -> None: ...
    # Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible
    @overload
    def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ...
    @overload
    def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ...
    def MessageBeep(type: int = 0) -> None: ...
@@ -11,24 +11,45 @@ repository = { workspace = true }
license = { workspace = true }

[dependencies]
red_knot_module_resolver = { workspace = true }
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
ruff_python_literal = { workspace = true }

anyhow = { workspace = true }
bitflags = { workspace = true }
camino = { workspace = true }
compact_str = { workspace = true }
countme = { workspace = true }
once_cell = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }
test-case = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true, features = ["zstd", "deflate"] }

[dev-dependencies]
anyhow = { workspace = true }
ruff_db = { workspace = true, features = ["os", "testing"] }
ruff_python_parser = { workspace = true }

anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }

[lints]
workspace = true
@@ -1,9 +1,9 @@
# Red Knot

A work-in-progress multifile module resolver for Ruff.
Semantic analysis for the red-knot project.

## Vendored types for the stdlib

This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_module_resolver/vendor/typeshed`. The file `crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_python_semantic/vendor/typeshed`. The file `crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.

The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
@@ -3,7 +3,7 @@
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_module_resolver/vendor/typeshed` change.
//! in `crates/red_knot_python_semantic/vendor/typeshed` change.

use std::fs::File;
use std::path::Path;
@@ -23,8 +23,21 @@ const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
    let mut zip = ZipWriter::new(writer);

    // Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
    // [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM) which complicates the build
    // by a lot. Deflated compression is slower but it shouldn't matter much for the WASM use case
    // (WASM itself is already slower than a native build for a specific platform).
    // We can't use `#[cfg(...)]` here because the target-arch in a build script is the
    // architecture of the system running the build script and not the architecture of the build-target.
    // That's why we use the `TARGET` environment variable here.
    let method = if std::env::var("TARGET").unwrap().contains("wasm32") {
        CompressionMethod::Deflated
    } else {
        CompressionMethod::Zstd
    };

    let options = FileOptions::default()
        .compression_method(CompressionMethod::Zstd)
        .compression_method(method)
        .unix_permissions(0o644);

    for entry in walkdir::WalkDir::new(directory_path) {
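The hunk above picks the zip compression method from Cargo's `TARGET` environment variable rather than from `#[cfg(...)]`, because a build script is compiled for the host, not the build target. A minimal, self-contained sketch of just that selection logic (assuming the `zip` build-dependency from the Cargo.toml above; this is not the full build script):

```rust
use zip::{write::FileOptions, CompressionMethod};

fn compression_options() -> FileOptions {
    // Cargo exposes the *target* triple to build scripts via `TARGET`;
    // `#[cfg(target_arch)]` would describe the host running the script.
    let target = std::env::var("TARGET").unwrap_or_default();
    let method = if target.contains("wasm32") {
        // `zstd-sys` needs clang when cross-compiling to WASM, so fall
        // back to the slower but dependency-free Deflate there.
        CompressionMethod::Deflated
    } else {
        CompressionMethod::Zstd
    };
    FileOptions::default()
        .compression_method(method)
        .unix_permissions(0o644)
}
```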
@@ -31,10 +31,10 @@ impl<T> AstNodeRef<T> {
    /// which the `AstNodeRef` belongs.
    ///
    /// ## Safety
    ///
    /// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
    /// [`ParsedModule`] to which `node` belongs. It's the caller's responsibility to ensure that
    /// the invariant `node belongs to parsed` is upheld.

    pub(super) unsafe fn new(parsed: ParsedModule, node: &T) -> Self {
        Self {
            _parsed: parsed,
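The safety contract documented above is the usual owner-plus-pointer pattern: holding the `ParsedModule` keeps the pointed-to node alive. A hedged, self-contained sketch of the same idea with hypothetical stand-in types (this is not the real `AstNodeRef` implementation):

```rust
use std::sync::Arc;

// Hypothetical stand-ins for `ParsedModule` and an AST node type.
struct Owner(Vec<String>);

/// Keeps the owner alive so the raw pointer into it stays valid.
struct NodeRef {
    _owner: Arc<Owner>,
    node: *const String,
}

impl NodeRef {
    /// # Safety
    /// `node` must point into `owner`'s data; the caller upholds the
    /// "node belongs to owner" invariant, as with `AstNodeRef::new`.
    unsafe fn new(owner: Arc<Owner>, node: &String) -> Self {
        Self { node: node as *const String, _owner: owner }
    }

    fn get(&self) -> &String {
        // SAFETY: `_owner` is still alive, so the pointee is too.
        unsafe { &*self.node }
    }
}

fn main() {
    let owner = Arc::new(Owner(vec!["x".into()]));
    let r = unsafe { NodeRef::new(owner.clone(), &owner.0[0]) };
    assert_eq!(r.get(), "x");
}
```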
@@ -1,57 +1,25 @@
use salsa::DbWithJar;

use red_knot_module_resolver::Db as ResolverDb;
use ruff_db::files::File;
use ruff_db::{Db as SourceDb, Upcast};

use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::ScopeId;
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
use crate::types::{
    infer_definition_types, infer_expression_types, infer_scope_types, ClassType, FunctionType,
    IntersectionType, UnionType,
};

#[salsa::jar(db=Db)]
pub struct Jar(
    ScopeId<'_>,
    Definition<'_>,
    Expression<'_>,
    FunctionType<'_>,
    ClassType<'_>,
    UnionType<'_>,
    IntersectionType<'_>,
    symbol_table,
    use_def_map,
    global_scope,
    semantic_index,
    infer_definition_types,
    infer_expression_types,
    infer_scope_types,
);

/// Database giving access to semantic information about a Python program.
pub trait Db:
    SourceDb + ResolverDb + DbWithJar<Jar> + Upcast<dyn SourceDb> + Upcast<dyn ResolverDb>
{
#[salsa::db]
pub trait Db: SourceDb + Upcast<dyn SourceDb> {
    fn is_file_open(&self, file: File) -> bool;
}

#[cfg(test)]
pub(crate) mod tests {
    use std::sync::Arc;

    use salsa::DebugWithDb;

    use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
    use ruff_db::files::Files;
    use crate::module_resolver::vendored_typeshed_stubs;
    use ruff_db::files::{File, Files};
    use ruff_db::system::{DbWithTestSystem, System, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;
    use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
    use ruff_python_trivia::textwrap;
    use ruff_db::{Db as SourceDb, Upcast};

    use super::{Db, Jar};
    use super::Db;

    #[salsa::db(Jar, ResolverJar, SourceJar)]
    #[salsa::db]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        files: Files,
@@ -65,7 +33,7 @@ pub(crate) mod tests {
            Self {
                storage: salsa::Storage::default(),
                system: TestSystem::default(),
                vendored: vendored_typeshed_stubs().snapshot(),
                vendored: vendored_typeshed_stubs().clone(),
                events: std::sync::Arc::default(),
                files: Files::default(),
            }
@@ -89,12 +57,6 @@ pub(crate) mod tests {
        pub(crate) fn clear_salsa_events(&mut self) {
            self.take_salsa_events();
        }

        /// Write auto-dedented text to a file.
        pub(crate) fn write_dedented(&mut self, path: &str, content: &str) -> anyhow::Result<()> {
            self.write_file(path, textwrap::dedent(content))?;
            Ok(())
        }
    }

    impl DbWithTestSystem for TestDb {
@@ -107,6 +69,7 @@ pub(crate) mod tests {
        }
    }

    #[salsa::db]
    impl SourceDb for TestDb {
        fn vendored(&self) -> &VendoredFileSystem {
            &self.vendored
@@ -125,34 +88,25 @@ pub(crate) mod tests {
        fn upcast(&self) -> &(dyn SourceDb + 'static) {
            self
        }
    }

    impl Upcast<dyn ResolverDb> for TestDb {
        fn upcast(&self) -> &(dyn ResolverDb + 'static) {
        fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
            self
        }
    }

    impl red_knot_module_resolver::Db for TestDb {}
    impl Db for TestDb {}
    #[salsa::db]
    impl Db for TestDb {
        fn is_file_open(&self, file: File) -> bool {
            !file.path(self).is_vendored_path()
        }
    }

    #[salsa::db]
    impl salsa::Database for TestDb {
        fn salsa_event(&self, event: salsa::Event) {
            tracing::trace!("event: {:?}", event.debug(self));
        fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
            let event = event();
            tracing::trace!("event: {event:?}");
            let mut events = self.events.lock().unwrap();
            events.push(event);
        }
    }

    impl salsa::ParallelDatabase for TestDb {
        fn snapshot(&self) -> salsa::Snapshot<Self> {
            salsa::Snapshot::new(Self {
                storage: self.storage.snapshot(),
                files: self.files.snapshot(),
                system: self.system.snapshot(),
                vendored: self.vendored.snapshot(),
                events: self.events.clone(),
            })
        }
    }
}
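The reworked `Db` trait keeps its `Upcast<dyn SourceDb>` bound so code that only knows the source-level database is still reachable from the semantic one. A minimal sketch of that pattern with illustrative traits (the real `Upcast` lives in `ruff_db`; this is not its exact definition):

```rust
trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
    fn upcast_mut(&mut self) -> &mut T;
}

trait SourceDb {
    fn file_count(&self) -> usize;
}

// The semantic `Db` extends the source-level one and can be upcast to it.
trait Db: SourceDb + Upcast<dyn SourceDb> {}

// A lower layer that only knows `dyn SourceDb`:
fn report(db: &dyn SourceDb) -> usize {
    db.file_count()
}

fn from_semantic(db: &dyn Db) -> usize {
    report(db.upcast())
}

struct TestDb { files: usize }
impl SourceDb for TestDb { fn file_count(&self) -> usize { self.files } }
impl Upcast<dyn SourceDb> for TestDb {
    fn upcast(&self) -> &(dyn SourceDb + 'static) { self }
    fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) { self }
}
impl Db for TestDb {}

fn main() {
    assert_eq!(from_semantic(&TestDb { files: 3 }), 3);
}
```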
@@ -2,14 +2,26 @@ use std::hash::BuildHasherDefault;

use rustc_hash::FxHasher;

pub use db::{Db, Jar};
pub use db::Db;
pub use module_name::ModuleName;
pub use module_resolver::{
    resolve_module, system_module_search_paths, vendored_typeshed_stubs, Module,
};
pub use program::{Program, ProgramSettings, SearchPathSettings, SitePackages};
pub use python_version::PythonVersion;
pub use semantic_model::{HasTy, SemanticModel};

pub mod ast_node_ref;
mod db;
mod module_name;
mod module_resolver;
mod node_key;
mod program;
mod python_version;
pub mod semantic_index;
mod semantic_model;
pub(crate) mod site_packages;
mod stdlib;
pub mod types;

type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;
@@ -42,7 +42,7 @@ impl ModuleName {
    /// ## Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
    /// assert_eq!(ModuleName::new_static(""), None);
@@ -68,7 +68,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
    /// ```
@@ -82,7 +82,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
@@ -101,7 +101,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    ///
@@ -133,7 +133,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(&*ModuleName::from_components(["a"]).unwrap(), "a");
    /// assert_eq!(&*ModuleName::from_components(["a", "b"]).unwrap(), "a.b");
@@ -168,6 +168,24 @@ impl ModuleName {
        };
        Some(Self(name))
    }

    /// Extend `self` with the components of `other`
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// let mut module_name = ModuleName::new_static("foo").unwrap();
    /// module_name.extend(&ModuleName::new_static("bar").unwrap());
    /// assert_eq!(&module_name, "foo.bar");
    /// module_name.extend(&ModuleName::new_static("baz.eggs.ham").unwrap());
    /// assert_eq!(&module_name, "foo.bar.baz.eggs.ham");
    /// ```
    pub fn extend(&mut self, other: &ModuleName) {
        self.0.push('.');
        self.0.push_str(other);
    }
}

impl Deref for ModuleName {
46 crates/red_knot_python_semantic/src/module_resolver/mod.rs (new file)
@@ -0,0 +1,46 @@
use std::iter::FusedIterator;

pub use module::Module;
pub use resolver::resolve_module;
pub(crate) use resolver::{file_to_module, SearchPaths};
use ruff_db::system::SystemPath;
pub use typeshed::vendored_typeshed_stubs;

use crate::module_resolver::resolver::search_paths;
use crate::Db;
use resolver::SearchPathIterator;

mod module;
mod path;
mod resolver;
mod typeshed;

#[cfg(test)]
mod testing;

/// Returns an iterator over all search paths pointing to a system path
pub fn system_module_search_paths(db: &dyn Db) -> SystemModuleSearchPathsIter {
    SystemModuleSearchPathsIter {
        inner: search_paths(db),
    }
}

pub struct SystemModuleSearchPathsIter<'db> {
    inner: SearchPathIterator<'db>,
}

impl<'db> Iterator for SystemModuleSearchPathsIter<'db> {
    type Item = &'db SystemPath;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let next = self.inner.next()?;

            if let Some(system_path) = next.as_system_path() {
                return Some(system_path);
            }
        }
    }
}

impl FusedIterator for SystemModuleSearchPathsIter<'_> {}
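`SystemModuleSearchPathsIter` is a small filtering adapter: it loops over the inner iterator and yields only entries backed by a system path. The same shape in a self-contained sketch (illustrative types, not the crate's real `SearchPath`):

```rust
use std::iter::FusedIterator;

enum SearchPath {
    System(String),
    Vendored(String),
}

impl SearchPath {
    fn as_system_path(&self) -> Option<&String> {
        match self {
            SearchPath::System(p) => Some(p),
            SearchPath::Vendored(_) => None,
        }
    }
}

/// Loop over the inner iterator, skipping entries with no system path.
struct SystemPathsIter<'a> {
    inner: std::slice::Iter<'a, SearchPath>,
}

impl<'a> Iterator for SystemPathsIter<'a> {
    type Item = &'a String;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let next = self.inner.next()?;
            if let Some(p) = next.as_system_path() {
                return Some(p);
            }
        }
    }
}

// Sound because `slice::Iter` is itself fused.
impl FusedIterator for SystemPathsIter<'_> {}
```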
@@ -3,9 +3,8 @@ use std::sync::Arc;

use ruff_db::files::File;

use crate::db::Db;
use super::path::SearchPath;
use crate::module_name::ModuleName;
use crate::path::{ModuleResolutionPathBuf, ModuleResolutionPathRef};

/// Representation of a Python module.
#[derive(Clone, PartialEq, Eq)]
@@ -17,7 +16,7 @@ impl Module {
    pub(crate) fn new(
        name: ModuleName,
        kind: ModuleKind,
        search_path: Arc<ModuleResolutionPathBuf>,
        search_path: SearchPath,
        file: File,
    ) -> Self {
        Self {
@@ -41,8 +40,8 @@ impl Module {
    }

    /// The search path from which the module was resolved.
    pub(crate) fn search_path(&self) -> ModuleResolutionPathRef {
        ModuleResolutionPathRef::from(&*self.inner.search_path)
    pub(crate) fn search_path(&self) -> &SearchPath {
        &self.inner.search_path
    }

    /// Determine whether this module is a single-file module or a package
@@ -62,22 +61,11 @@ impl std::fmt::Debug for Module {
    }
}

impl salsa::DebugWithDb<dyn Db> for Module {
    fn fmt(&self, f: &mut Formatter<'_>, db: &dyn Db) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("name", &self.name())
            .field("kind", &self.kind())
            .field("file", &self.file().debug(db.upcast()))
            .field("search_path", &self.search_path())
            .finish()
    }
}

#[derive(PartialEq, Eq)]
struct ModuleInner {
    name: ModuleName,
    kind: ModuleKind,
    search_path: Arc<ModuleResolutionPathBuf>,
    search_path: SearchPath,
    file: File,
}

@@ -89,3 +77,9 @@ pub enum ModuleKind {
    /// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
    Package,
}

impl ModuleKind {
    pub const fn is_package(self) -> bool {
        matches!(self, ModuleKind::Package)
    }
}
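`Module` stays cheap to clone because all of its data sits in `ModuleInner` behind shared ownership. A stripped-down sketch of that handle pattern (hypothetical fields, not the real struct):

```rust
use std::sync::Arc;

// The public handle derives `Clone` cheaply: cloning bumps a refcount.
#[derive(Clone, PartialEq, Eq)]
struct Module {
    inner: Arc<ModuleInner>,
}

#[derive(PartialEq, Eq)]
struct ModuleInner {
    name: String,
    is_package: bool,
}

impl Module {
    fn name(&self) -> &str {
        &self.inner.name
    }
}

fn main() {
    let m = Module {
        inner: Arc::new(ModuleInner { name: "foo.bar".into(), is_package: false }),
    };
    let m2 = m.clone(); // no deep copy of the inner data
    assert_eq!(m2.name(), "foo.bar");
}
```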
1098 crates/red_knot_python_semantic/src/module_resolver/path.rs (new file)
File diff suppressed because it is too large
@@ -1,8 +1,10 @@
use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredPathBuf;

use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::{ProgramSettings, SitePackages};

/// A test case for the module resolver.
///
@@ -12,8 +14,11 @@ pub(crate) struct TestCase<T> {
    pub(crate) db: TestDb,
    pub(crate) src: SystemPathBuf,
    pub(crate) stdlib: T,
    // Most test cases only ever need a single `site-packages` directory,
    // so this is a single directory instead of a `Vec` of directories,
    // like it is in `ruff_db::Program`.
    pub(crate) site_packages: SystemPathBuf,
    pub(crate) target_version: TargetVersion,
    pub(crate) target_version: PythonVersion,
}

/// A `(file_name, file_contents)` tuple
@@ -95,7 +100,7 @@ pub(crate) struct UnspecifiedTypeshed;
/// to `()`.
pub(crate) struct TestCaseBuilder<T> {
    typeshed_option: T,
    target_version: TargetVersion,
    target_version: PythonVersion,
    first_party_files: Vec<FileSpec>,
    site_packages_files: Vec<FileSpec>,
}
@@ -114,7 +119,7 @@ impl<T> TestCaseBuilder<T> {
    }

    /// Specify the target Python version the module resolver should assume
    pub(crate) fn with_target_version(mut self, target_version: TargetVersion) -> Self {
    pub(crate) fn with_target_version(mut self, target_version: PythonVersion) -> Self {
        self.target_version = target_version;
        self
    }
@@ -125,6 +130,8 @@ impl<T> TestCaseBuilder<T> {
        files: impl IntoIterator<Item = FileSpec>,
    ) -> SystemPathBuf {
        let root = location.as_ref().to_path_buf();
        // Make sure to create the directory even if the list of files is empty:
        db.memory_file_system().create_directory_all(&root).unwrap();
        db.write_files(
            files
                .into_iter()
@@ -139,7 +146,7 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
    pub(crate) fn new() -> TestCaseBuilder<UnspecifiedTypeshed> {
        Self {
            typeshed_option: UnspecifiedTypeshed,
            target_version: TargetVersion::default(),
            target_version: PythonVersion::default(),
            first_party_files: vec![],
            site_packages_files: vec![],
        }
@@ -172,6 +179,7 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
            first_party_files,
            site_packages_files,
        } = self;

        TestCaseBuilder {
            typeshed_option: typeshed,
            target_version,
@@ -188,6 +196,7 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
            site_packages,
            target_version,
        } = self.with_custom_typeshed(MockedTypeshed::default()).build();

        TestCase {
            db,
            src,
@@ -214,16 +223,19 @@ impl TestCaseBuilder<MockedTypeshed> {
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
        let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);

        Program::new(
        Program::from_settings(
            &db,
            target_version,
            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: src.clone(),
                custom_typeshed: Some(typeshed.clone()),
                site_packages: Some(site_packages.clone()),
            &ProgramSettings {
                target_version,
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: src.clone(),
                    custom_typeshed: Some(typeshed.clone()),
                    site_packages: SitePackages::Known(vec![site_packages.clone()]),
                },
            },
        );
        )
        .expect("Valid program settings");

        TestCase {
            db,
@@ -267,16 +279,17 @@ impl TestCaseBuilder<VendoredTypeshed> {
        Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);

        Program::new(
        Program::from_settings(
            &db,
            target_version,
            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: src.clone(),
                custom_typeshed: None,
                site_packages: Some(site_packages.clone()),
            &ProgramSettings {
                target_version,
                search_paths: SearchPathSettings {
                    site_packages: SitePackages::Known(vec![site_packages.clone()]),
                    ..SearchPathSettings::new(src.clone())
                },
            },
        );
        )
        .expect("Valid search path settings");

        TestCase {
            db,
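`TestCaseBuilder<T>` encodes the typeshed choice in its type parameter, so each state only offers the methods that make sense for it. A reduced sketch of that typestate-builder shape (illustrative names and payloads, not the real builder):

```rust
struct UnspecifiedTypeshed;
struct MockedTypeshed(&'static str);

struct Builder<T> {
    typeshed: T,
    target: (u8, u8),
}

impl Builder<UnspecifiedTypeshed> {
    fn new() -> Self {
        Builder { typeshed: UnspecifiedTypeshed, target: (3, 8) }
    }

    // Transitions to a new state by changing the type parameter.
    fn with_custom_typeshed(self, ts: MockedTypeshed) -> Builder<MockedTypeshed> {
        Builder { typeshed: ts, target: self.target }
    }
}

impl Builder<MockedTypeshed> {
    // `build` is only available once a typeshed has been chosen.
    fn build(self) -> String {
        format!("typeshed={} target={}.{}", self.typeshed.0, self.target.0, self.target.1)
    }
}

fn main() {
    let case = Builder::new()
        .with_custom_typeshed(MockedTypeshed("VERSIONS"))
        .build();
    println!("{case}");
}
```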
@@ -0,0 +1,8 @@
pub use self::vendored::vendored_typeshed_stubs;
pub(super) use self::versions::{
    typeshed_versions, vendored_typeshed_versions, TypeshedVersions, TypeshedVersionsParseError,
    TypeshedVersionsQueryResult,
};

mod vendored;
mod versions;
@@ -1,4 +1,3 @@
use std::cell::OnceCell;
use std::collections::BTreeMap;
use std::fmt;
use std::num::{NonZeroU16, NonZeroUsize};
@@ -6,79 +5,12 @@ use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;

use once_cell::sync::Lazy;
use ruff_db::program::TargetVersion;
use ruff_db::system::SystemPath;
use rustc_hash::FxHashMap;

use ruff_db::files::{system_path_to_file, File};

use super::vendored::vendored_typeshed_stubs;
use crate::db::Db;
use crate::module_name::ModuleName;

use super::vendored::vendored_typeshed_stubs;

#[derive(Debug)]
pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);

impl<'db> LazyTypeshedVersions<'db> {
    #[must_use]
    pub(crate) fn new() -> Self {
        Self(OnceCell::new())
    }

    /// Query whether a module exists at runtime in the stdlib on a certain Python version.
    ///
    /// Simply probing whether a file exists in typeshed is insufficient for this question,
    /// as a module in the stdlib may have been added in Python 3.10, but the typeshed stub
    /// will still be available (either in a custom typeshed dir or in our vendored copy)
    /// even if the user specified Python 3.8 as the target version.
    ///
    /// For top-level modules and packages, the VERSIONS file can always provide an unambiguous answer
    /// as to whether the module exists on the specified target version. However, VERSIONS does not
    /// provide comprehensive information on all submodules, meaning that this method sometimes
    /// returns [`TypeshedVersionsQueryResult::MaybeExists`].
    /// See [`TypeshedVersionsQueryResult`] for more details.
    #[must_use]
    pub(crate) fn query_module(
        &self,
        db: &'db dyn Db,
        module: &ModuleName,
        stdlib_root: Option<&SystemPath>,
        target_version: TargetVersion,
    ) -> TypeshedVersionsQueryResult {
        let versions = self.0.get_or_init(|| {
            let versions_path = if let Some(system_path) = stdlib_root {
                system_path.join("VERSIONS")
            } else {
                return &VENDORED_VERSIONS;
            };
            let Some(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
                todo!(
                    "Still need to figure out how to handle VERSIONS files being deleted \
                    from custom typeshed directories! Expected a file to exist at {versions_path}"
                )
            };
            // TODO(Alex/Micha): If VERSIONS is invalid,
            // this should invalidate not just the specific module resolution we're currently attempting,
            // but all type inference that depends on any standard-library types.
            // Unwrapping here is not correct...
            parse_typeshed_versions(db, versions_file).as_ref().unwrap()
        });
        versions.query_module(module, PyVersion::from(target_version))
    }
}

#[salsa::tracked(return_ref)]
pub(crate) fn parse_typeshed_versions(
    db: &dyn Db,
    versions_file: File,
) -> Result<TypeshedVersions, TypeshedVersionsParseError> {
    // TODO: Handle IO errors
    let file_content = versions_file
        .read_to_string(db.upcast())
        .unwrap_or_default();
    file_content.parse()
}
use crate::{Program, PythonVersion};

static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
    TypeshedVersions::from_str(
@@ -89,8 +21,16 @@ static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
    .unwrap()
});

pub(crate) fn vendored_typeshed_versions() -> &'static TypeshedVersions {
    &VENDORED_VERSIONS
}

pub(crate) fn typeshed_versions(db: &dyn Db) -> &TypeshedVersions {
    Program::get(db).search_paths(db).typeshed_versions()
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub struct TypeshedVersionsParseError {
pub(crate) struct TypeshedVersionsParseError {
    line_number: Option<NonZeroU16>,
    reason: TypeshedVersionsParseErrorKind,
}
@@ -123,7 +63,7 @@ impl std::error::Error for TypeshedVersionsParseError {
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TypeshedVersionsParseErrorKind {
pub(super) enum TypeshedVersionsParseErrorKind {
    TooManyLines(NonZeroUsize),
    UnexpectedNumberOfColons,
    InvalidModuleName(String),
@@ -175,10 +115,10 @@ impl TypeshedVersions {
    }

    #[must_use]
    fn query_module(
    pub(in crate::module_resolver) fn query_module(
        &self,
        module: &ModuleName,
        target_version: PyVersion,
        target_version: PythonVersion,
    ) -> TypeshedVersionsQueryResult {
        if let Some(range) = self.exact(module) {
            if range.contains(target_version) {
@@ -205,7 +145,7 @@ impl TypeshedVersions {
    }
}

/// Possible answers [`LazyTypeshedVersions::query_module()`] could give to the question:
/// Possible answers [`TypeshedVersions::query_module()`] could give to the question:
/// "Does this module exist in the stdlib at runtime on a certain target version?"
#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
pub(crate) enum TypeshedVersionsQueryResult {
@@ -323,13 +263,13 @@ impl fmt::Display for TypeshedVersions {

#[derive(Debug, Clone, Eq, PartialEq, Hash)]
enum PyVersionRange {
    AvailableFrom(RangeFrom<PyVersion>),
    AvailableWithin(RangeInclusive<PyVersion>),
    AvailableFrom(RangeFrom<PythonVersion>),
    AvailableWithin(RangeInclusive<PythonVersion>),
}

impl PyVersionRange {
    #[must_use]
    fn contains(&self, version: PyVersion) -> bool {
    fn contains(&self, version: PythonVersion) -> bool {
        match self {
            Self::AvailableFrom(inner) => inner.contains(&version),
            Self::AvailableWithin(inner) => inner.contains(&version),
@@ -343,9 +283,14 @@ impl FromStr for PyVersionRange {
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split('-').map(str::trim);
        match (parts.next(), parts.next(), parts.next()) {
            (Some(lower), Some(""), None) => Ok(Self::AvailableFrom((lower.parse()?)..)),
            (Some(lower), Some(""), None) => {
                let lower = PythonVersion::from_versions_file_string(lower)?;
                Ok(Self::AvailableFrom(lower..))
            }
            (Some(lower), Some(upper), None) => {
                Ok(Self::AvailableWithin((lower.parse()?)..=(upper.parse()?)))
                let lower = PythonVersion::from_versions_file_string(lower)?;
                let upper = PythonVersion::from_versions_file_string(upper)?;
                Ok(Self::AvailableWithin(lower..=upper))
            }
            _ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
        }
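For reference, the `from_str` logic above accepts two shapes of VERSIONS entry: `X.Y-` ("available from X.Y on", which splits into a lower bound and an empty upper part) and `X.Y-Z.W` (an inclusive range). A standalone sketch of the same parsing with a simplified error type (not the crate's actual types):

```rust
#[derive(Debug, PartialEq)]
enum VersionRange {
    From((u8, u8)),
    Within((u8, u8), (u8, u8)),
}

fn parse_version(s: &str) -> Result<(u8, u8), String> {
    let mut parts = s.trim().split('.');
    match (parts.next(), parts.next(), parts.next()) {
        (Some(major), Some(minor), None) => Ok((
            major.parse().map_err(|e| format!("{e}"))?,
            minor.parse().map_err(|e| format!("{e}"))?,
        )),
        _ => Err(format!("unexpected number of periods in {s:?}")),
    }
}

fn parse_range(s: &str) -> Result<VersionRange, String> {
    // "3.8-" splits into ["3.8", ""]; "3.0-3.9" into ["3.0", "3.9"].
    let mut parts = s.split('-').map(str::trim);
    match (parts.next(), parts.next(), parts.next()) {
        (Some(lower), Some(""), None) => Ok(VersionRange::From(parse_version(lower)?)),
        (Some(lower), Some(upper), None) => {
            Ok(VersionRange::Within(parse_version(lower)?, parse_version(upper)?))
        }
        _ => Err("unexpected number of hyphens".to_string()),
    }
}

fn main() {
    assert_eq!(parse_range("3.8-"), Ok(VersionRange::From((3, 8))));
    assert_eq!(parse_range("3.0-3.9"), Ok(VersionRange::Within((3, 0), (3, 9))));
}
```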
@@ -363,74 +308,20 @@ impl fmt::Display for PyVersionRange {
    }
}

#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
struct PyVersion {
    major: u8,
    minor: u8,
}

impl FromStr for PyVersion {
    type Err = TypeshedVersionsParseErrorKind;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
impl PythonVersion {
    fn from_versions_file_string(s: &str) -> Result<Self, TypeshedVersionsParseErrorKind> {
        let mut parts = s.split('.').map(str::trim);
        let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
            return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
                s.to_string(),
            ));
        };
        let major = match u8::from_str(major) {
            Ok(major) => major,
            Err(err) => {
                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
                    version: s.to_string(),
                    err,
                })
        PythonVersion::try_from((major, minor)).map_err(|int_parse_error| {
            TypeshedVersionsParseErrorKind::IntegerParsingFailure {
                version: s.to_string(),
                err: int_parse_error,
            }
        };
        let minor = match u8::from_str(minor) {
            Ok(minor) => minor,
            Err(err) => {
                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
                    version: s.to_string(),
                    err,
                })
            }
        };
        Ok(Self { major, minor })
    }
}

impl fmt::Display for PyVersion {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let PyVersion { major, minor } = self;
        write!(f, "{major}.{minor}")
    }
}

impl From<TargetVersion> for PyVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => PyVersion { major: 3, minor: 7 },
            TargetVersion::Py38 => PyVersion { major: 3, minor: 8 },
            TargetVersion::Py39 => PyVersion { major: 3, minor: 9 },
            TargetVersion::Py310 => PyVersion {
                major: 3,
                minor: 10,
            },
            TargetVersion::Py311 => PyVersion {
                major: 3,
                minor: 11,
            },
            TargetVersion::Py312 => PyVersion {
                major: 3,
                minor: 12,
            },
            TargetVersion::Py313 => PyVersion {
                major: 3,
                minor: 13,
            },
        }
        })
    }
}

@@ -440,7 +331,6 @@ mod tests {
    use std::path::Path;

    use insta::assert_snapshot;
    use ruff_db::program::TargetVersion;

    use super::*;

@@ -478,34 +368,34 @@ mod tests {

    assert!(versions.contains_exact(&asyncio));
    assert_eq!(
        versions.query_module(&asyncio, TargetVersion::Py310.into()),
        versions.query_module(&asyncio, PythonVersion::PY310),
        TypeshedVersionsQueryResult::Exists
    );

    assert!(versions.contains_exact(&asyncio_staggered));
    assert_eq!(
        versions.query_module(&asyncio_staggered, TargetVersion::Py38.into()),
        versions.query_module(&asyncio_staggered, PythonVersion::PY38),
        TypeshedVersionsQueryResult::Exists
    );
    assert_eq!(
        versions.query_module(&asyncio_staggered, TargetVersion::Py37.into()),
        versions.query_module(&asyncio_staggered, PythonVersion::PY37),
        TypeshedVersionsQueryResult::DoesNotExist
    );

    assert!(versions.contains_exact(&audioop));
    assert_eq!(
        versions.query_module(&audioop, TargetVersion::Py312.into()),
        versions.query_module(&audioop, PythonVersion::PY312),
        TypeshedVersionsQueryResult::Exists
    );
    assert_eq!(
        versions.query_module(&audioop, TargetVersion::Py313.into()),
        versions.query_module(&audioop, PythonVersion::PY313),
        TypeshedVersionsQueryResult::DoesNotExist
    );
}

#[test]
fn typeshed_versions_consistent_with_vendored_stubs() {
    const VERSIONS_DATA: &str = include_str!("../../vendor/typeshed/stdlib/VERSIONS");
    const VERSIONS_DATA: &str = include_str!("../../../vendor/typeshed/stdlib/VERSIONS");
    let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
    let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();

@@ -590,15 +480,15 @@ foo: 3.8- # trailing comment

    assert!(parsed_versions.contains_exact(&bar));
    assert_eq!(
        parsed_versions.query_module(&bar, TargetVersion::Py37.into()),
        parsed_versions.query_module(&bar, PythonVersion::PY37),
        TypeshedVersionsQueryResult::Exists
    );
    assert_eq!(
        parsed_versions.query_module(&bar, TargetVersion::Py310.into()),
        parsed_versions.query_module(&bar, PythonVersion::PY310),
        TypeshedVersionsQueryResult::Exists
    );
    assert_eq!(
        parsed_versions.query_module(&bar, TargetVersion::Py311.into()),
        parsed_versions.query_module(&bar, PythonVersion::PY311),
        TypeshedVersionsQueryResult::DoesNotExist
    );
}
@@ -610,15 +500,15 @@ foo: 3.8- # trailing comment

    assert!(parsed_versions.contains_exact(&foo));
    assert_eq!(
        parsed_versions.query_module(&foo, TargetVersion::Py37.into()),
        parsed_versions.query_module(&foo, PythonVersion::PY37),
        TypeshedVersionsQueryResult::DoesNotExist
    );
    assert_eq!(
        parsed_versions.query_module(&foo, TargetVersion::Py38.into()),
        parsed_versions.query_module(&foo, PythonVersion::PY38),
        TypeshedVersionsQueryResult::Exists
    );
    assert_eq!(
        parsed_versions.query_module(&foo, TargetVersion::Py311.into()),
        parsed_versions.query_module(&foo, PythonVersion::PY311),
        TypeshedVersionsQueryResult::Exists
    );
}
@@ -630,15 +520,15 @@ foo: 3.8- # trailing comment

    assert!(parsed_versions.contains_exact(&bar_baz));
    assert_eq!(
        parsed_versions.query_module(&bar_baz, TargetVersion::Py37.into()),
        parsed_versions.query_module(&bar_baz, PythonVersion::PY37),
        TypeshedVersionsQueryResult::Exists
    );
    assert_eq!(
        parsed_versions.query_module(&bar_baz, TargetVersion::Py39.into()),
        parsed_versions.query_module(&bar_baz, PythonVersion::PY39),
        TypeshedVersionsQueryResult::Exists
    );
    assert_eq!(
        parsed_versions.query_module(&bar_baz, TargetVersion::Py310.into()),
        parsed_versions.query_module(&bar_baz, PythonVersion::PY310),
        TypeshedVersionsQueryResult::DoesNotExist
    );
}
@@ -650,15 +540,15 @@ foo: 3.8- # trailing comment

    assert!(!parsed_versions.contains_exact(&bar_eggs));
    assert_eq!(
        parsed_versions.query_module(&bar_eggs, TargetVersion::Py37.into()),
        parsed_versions.query_module(&bar_eggs, PythonVersion::PY37),
        TypeshedVersionsQueryResult::MaybeExists
    );
    assert_eq!(
        parsed_versions.query_module(&bar_eggs, TargetVersion::Py310.into()),
        parsed_versions.query_module(&bar_eggs, PythonVersion::PY310),
        TypeshedVersionsQueryResult::MaybeExists
    );
    assert_eq!(
        parsed_versions.query_module(&bar_eggs, TargetVersion::Py311.into()),
        parsed_versions.query_module(&bar_eggs, PythonVersion::PY311),
        TypeshedVersionsQueryResult::DoesNotExist
    );
}
@@ -670,11 +560,11 @@ foo: 3.8- # trailing comment

    assert!(!parsed_versions.contains_exact(&spam));
    assert_eq!(
        parsed_versions.query_module(&spam, TargetVersion::Py37.into()),
        parsed_versions.query_module(&spam, PythonVersion::PY37),
        TypeshedVersionsQueryResult::DoesNotExist
    );
    assert_eq!(
        parsed_versions.query_module(&spam, TargetVersion::Py313.into()),
        parsed_versions.query_module(&spam, PythonVersion::PY313),
        TypeshedVersionsQueryResult::DoesNotExist
    );
}
100 crates/red_knot_python_semantic/src/program.rs (new file)
@@ -0,0 +1,100 @@
use crate::python_version::PythonVersion;
use anyhow::Context;
use salsa::Durability;
use salsa::Setter;

use ruff_db::system::{SystemPath, SystemPathBuf};

use crate::module_resolver::SearchPaths;
use crate::Db;

#[salsa::input(singleton)]
pub struct Program {
    pub target_version: PythonVersion,

    #[return_ref]
    pub(crate) search_paths: SearchPaths,
}

impl Program {
    pub fn from_settings(db: &dyn Db, settings: &ProgramSettings) -> anyhow::Result<Self> {
        let ProgramSettings {
            target_version,
            search_paths,
        } = settings;

        tracing::info!("Target version: Python {target_version}");

        let search_paths = SearchPaths::from_settings(db, search_paths)
            .with_context(|| "Invalid search path settings")?;

        Ok(Program::builder(settings.target_version, search_paths)
            .durability(Durability::HIGH)
            .new(db))
    }

    pub fn update_search_paths(
        self,
        db: &mut dyn Db,
        search_path_settings: &SearchPathSettings,
    ) -> anyhow::Result<()> {
        let search_paths = SearchPaths::from_settings(db, search_path_settings)?;

        if self.search_paths(db) != &search_paths {
            tracing::debug!("Update search paths");
            self.set_search_paths(db).to(search_paths);
        }

        Ok(())
    }

    pub fn custom_stdlib_search_path(self, db: &dyn Db) -> Option<&SystemPath> {
        self.search_paths(db).custom_stdlib()
    }
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ProgramSettings {
    pub target_version: PythonVersion,
    pub search_paths: SearchPathSettings,
}

/// Configures the search paths for module resolution.
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct SearchPathSettings {
    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's MYPYPATH environment variable,
    /// or pyright's stubPath configuration setting.
    pub extra_paths: Vec<SystemPathBuf>,

    /// The root of the workspace, used for finding first-party modules.
    pub src_root: SystemPathBuf,

    /// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.
    /// If this is not provided, we will fall back to our vendored typeshed stubs for the stdlib,
    /// bundled as a zip file in the binary.
    pub custom_typeshed: Option<SystemPathBuf>,

    /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
    pub site_packages: SitePackages,
}

impl SearchPathSettings {
    pub fn new(src_root: SystemPathBuf) -> Self {
        Self {
            src_root,
            extra_paths: vec![],
            custom_typeshed: None,
            site_packages: SitePackages::Known(vec![]),
        }
    }
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum SitePackages {
    Derived {
        venv_path: SystemPathBuf,
    },
    /// Resolved site packages paths
    Known(Vec<SystemPathBuf>),
}
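How the pieces above fit together for a caller, sketched with illustrative paths (the real call sites live in the CLI and workspace crates; `SystemPathBuf::from` is assumed to accept a string, as the tests elsewhere in this diff do):

```rust
use ruff_db::system::SystemPathBuf;

// Hypothetical example values; a caller would derive these from its config.
fn example_settings() -> ProgramSettings {
    ProgramSettings {
        target_version: PythonVersion::PY312,
        search_paths: SearchPathSettings {
            site_packages: SitePackages::Known(vec![SystemPathBuf::from(
                ".venv/lib/python3.12/site-packages",
            )]),
            // Everything else keeps the defaults from `new`.
            ..SearchPathSettings::new(SystemPathBuf::from("./src"))
        },
    }
}
```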
62 crates/red_knot_python_semantic/src/python_version.rs (new file)
@@ -0,0 +1,62 @@
use std::fmt;

/// Representation of a Python version.
///
/// Unlike the `TargetVersion` enums in the CLI crates,
/// this does not necessarily represent a Python version that we actually support.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct PythonVersion {
    pub major: u8,
    pub minor: u8,
}

impl PythonVersion {
    pub const PY37: PythonVersion = PythonVersion { major: 3, minor: 7 };
    pub const PY38: PythonVersion = PythonVersion { major: 3, minor: 8 };
    pub const PY39: PythonVersion = PythonVersion { major: 3, minor: 9 };
    pub const PY310: PythonVersion = PythonVersion {
        major: 3,
        minor: 10,
    };
    pub const PY311: PythonVersion = PythonVersion {
        major: 3,
        minor: 11,
    };
    pub const PY312: PythonVersion = PythonVersion {
        major: 3,
        minor: 12,
    };
    pub const PY313: PythonVersion = PythonVersion {
        major: 3,
        minor: 13,
    };

    pub fn free_threaded_build_available(self) -> bool {
        self >= PythonVersion::PY313
    }
}

impl Default for PythonVersion {
    fn default() -> Self {
        Self::PY38
    }
}

impl TryFrom<(&str, &str)> for PythonVersion {
    type Error = std::num::ParseIntError;

    fn try_from(value: (&str, &str)) -> Result<Self, Self::Error> {
        let (major, minor) = value;
        Ok(Self {
            major: major.parse()?,
            minor: minor.parse()?,
        })
    }
}

impl fmt::Display for PythonVersion {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let PythonVersion { major, minor } = self;
        write!(f, "{major}.{minor}")
    }
}
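Because this file appears above in full, its public API can be exercised directly. A small usage sketch, assuming the `red_knot_python_semantic` crate as a dependency (the `demo` function itself is illustrative only):

```rust
fn demo() -> Result<(), std::num::ParseIntError> {
    // `TryFrom<(&str, &str)>` parses a ("major", "minor") pair.
    let version = PythonVersion::try_from(("3", "13"))?;
    assert_eq!(version, PythonVersion::PY313);

    // The derived `Ord` compares `major`, then `minor`, which is what
    // `free_threaded_build_available` relies on.
    assert!(version > PythonVersion::default()); // the default is 3.8
    assert!(version.free_threaded_build_available());

    // `Display` renders "major.minor".
    assert_eq!(version.to_string(), "3.13");
    Ok(())
}
```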
@@ -2,6 +2,7 @@ use std::iter::FusedIterator;
use std::sync::Arc;

use rustc_hash::FxHashMap;
use salsa::plumbing::AsId;

use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
@@ -15,16 +16,20 @@ use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
    FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolTable,
};
use crate::semantic_index::use_def::UseDefMap;
use crate::Db;

pub mod ast_ids;
mod builder;
pub(crate) mod constraint;
pub mod definition;
pub mod expression;
pub mod symbol;
mod use_def;

pub(crate) use self::use_def::UseDefMap;
pub(crate) use self::use_def::{
    BindingWithConstraints, BindingWithConstraintsIterator, DeclarationsIterator,
};

type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;

@@ -33,7 +38,7 @@ type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;
/// Prefer using [`symbol_table`] when working with symbols from a single scope.
#[salsa::tracked(return_ref, no_eq)]
pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
    let _span = tracing::trace_span!("semantic_index", ?file).entered();
    let _span = tracing::trace_span!("semantic_index", file = %file.path(db)).entered();

    let parsed = parsed_module(db.upcast(), file);

@@ -47,8 +52,10 @@ pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
/// is unchanged.
#[salsa::tracked]
pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<SymbolTable> {
    let _span = tracing::trace_span!("symbol_table", ?scope).entered();
    let index = semantic_index(db, scope.file(db));
    let file = scope.file(db);
    let _span =
        tracing::trace_span!("symbol_table", scope=?scope.as_id(), file=%file.path(db)).entered();
    let index = semantic_index(db, file);

    index.symbol_table(scope.file_scope_id(db))
}
@@ -60,8 +67,10 @@ pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<Sym
/// is unchanged.
#[salsa::tracked]
pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseDefMap<'db>> {
    let _span = tracing::trace_span!("use_def_map", ?scope).entered();
    let index = semantic_index(db, scope.file(db));
    let file = scope.file(db);
    let _span =
        tracing::trace_span!("use_def_map", scope=?scope.as_id(), file=%file.path(db)).entered();
    let index = semantic_index(db, file);

    index.use_def_map(scope.file_scope_id(db))
}
@@ -69,7 +78,7 @@ pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseD
/// Returns the module global scope of `file`.
#[salsa::tracked]
pub(crate) fn global_scope(db: &dyn Db, file: File) -> ScopeId<'_> {
    let _span = tracing::trace_span!("global_scope", ?file).entered();
    let _span = tracing::trace_span!("global_scope", file = %file.path(db)).entered();

    FileScopeId::global().to_scope_id(db, file)
}
@@ -84,8 +93,6 @@ pub(crate) struct SemanticIndex<'db> {
    scopes: IndexVec<FileScopeId, Scope>,

    /// Map expressions to their corresponding scope.
    /// We can't use [`ExpressionId`] here, because the challenge is how to get from
    /// an [`ast::Expr`] to an [`ExpressionId`] (which requires knowing the scope).
    scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,

    /// Map from a node creating a definition to its definition.
@@ -108,12 +115,15 @@ pub(crate) struct SemanticIndex<'db> {
    /// Note: We should not depend on this map when analysing other files or
    /// changing a file invalidates all dependents.
    ast_ids: IndexVec<FileScopeId, AstIds>,

    /// Flags about the global scope (code usage impacting inference)
    has_future_annotations: bool,
}

impl<'db> SemanticIndex<'db> {
    /// Returns the symbol table for a specific scope.
    ///
    /// Use the Salsa cached [`symbol_table`] query if you only need the
    /// Use the Salsa cached [`symbol_table()`] query if you only need the
    /// symbol table for a single scope.
    pub(super) fn symbol_table(&self, scope_id: FileScopeId) -> Arc<SymbolTable> {
        self.symbol_tables[scope_id].clone()
@@ -121,7 +131,7 @@ impl<'db> SemanticIndex<'db> {

    /// Returns the use-def map for a specific scope.
    ///
    /// Use the Salsa cached [`use_def_map`] query if you only need the
    /// Use the Salsa cached [`use_def_map()`] query if you only need the
    /// use-def map for a single scope.
    pub(super) fn use_def_map(&self, scope_id: FileScopeId) -> Arc<UseDefMap> {
        self.use_def_maps[scope_id].clone()
@@ -150,6 +160,10 @@ impl<'db> SemanticIndex<'db> {
        &self.scopes[id]
    }

    pub(crate) fn scope_ids(&self) -> impl Iterator<Item = ScopeId> {
        self.scope_ids_by_scope.iter().copied()
    }

    /// Returns the id of the parent scope.
    pub(crate) fn parent_scope_id(&self, scope_id: FileScopeId) -> Option<FileScopeId> {
        let scope = self.scope(scope_id);
@@ -204,6 +218,12 @@ impl<'db> SemanticIndex<'db> {
    pub(crate) fn node_scope(&self, node: NodeWithScopeRef) -> FileScopeId {
        self.scopes_by_node[&node.node_key()]
    }

    /// Checks if there is an import of `__future__.annotations` in the global scope, which affects
    /// the logic for type inference.
    pub(super) fn has_future_annotations(&self) -> bool {
        self.has_future_annotations
    }
}

pub struct AncestorsIter<'a> {
@@ -304,14 +324,32 @@ mod tests {
    use ruff_db::parsed::parsed_module;
    use ruff_db::system::DbWithTestSystem;
    use ruff_python_ast as ast;
    use ruff_text_size::{Ranged, TextRange};

    use crate::db::tests::TestDb;
    use crate::semantic_index::ast_ids::HasScopedUseId;
    use crate::semantic_index::definition::DefinitionKind;
    use crate::semantic_index::symbol::{FileScopeId, Scope, ScopeKind, SymbolTable};
    use crate::semantic_index::ast_ids::{HasScopedUseId, ScopedUseId};
    use crate::semantic_index::definition::{Definition, DefinitionKind};
    use crate::semantic_index::symbol::{
        FileScopeId, Scope, ScopeKind, ScopedSymbolId, SymbolTable,
    };
    use crate::semantic_index::use_def::UseDefMap;
    use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
    use crate::Db;

    impl UseDefMap<'_> {
        fn first_public_binding(&self, symbol: ScopedSymbolId) -> Option<Definition<'_>> {
            self.public_bindings(symbol)
                .next()
                .map(|constrained_binding| constrained_binding.binding)
        }

        fn first_binding_at_use(&self, use_id: ScopedUseId) -> Option<Definition<'_>> {
            self.bindings_at_use(use_id)
                .next()
                .map(|constrained_binding| constrained_binding.binding)
        }
    }

    struct TestCase {
        db: TestDb,
        file: File,
@@ -370,10 +408,8 @@ mod tests {
        let foo = global_table.symbol_id_by_name("foo").unwrap();

        let use_def = use_def_map(&db, scope);
        let [definition] = use_def.public_definitions(foo) else {
            panic!("expected one definition");
        };
        assert!(matches!(definition.node(&db), DefinitionKind::Import(_)));
        let binding = use_def.first_public_binding(foo).unwrap();
        assert!(matches!(binding.kind(&db), DefinitionKind::Import(_)));
    }

    #[test]
@@ -402,22 +438,19 @@ mod tests {
        assert!(
            global_table
                .symbol_by_name("foo")
                .is_some_and(|symbol| { symbol.is_defined() && !symbol.is_used() }),
                .is_some_and(|symbol| { symbol.is_bound() && !symbol.is_used() }),
            "symbols that are defined get the defined flag"
        );

        let use_def = use_def_map(&db, scope);
        let [definition] = use_def.public_definitions(
            global_table
                .symbol_id_by_name("foo")
                .expect("symbol to exist"),
        ) else {
            panic!("expected one definition");
        };
        assert!(matches!(
            definition.node(&db),
            DefinitionKind::ImportFrom(_)
        ));
        let binding = use_def
            .first_public_binding(
                global_table
                    .symbol_id_by_name("foo")
                    .expect("symbol to exist"),
            )
            .unwrap();
        assert!(matches!(binding.kind(&db), DefinitionKind::ImportFrom(_)));
    }

    #[test]
@@ -430,18 +463,32 @@ mod tests {
        assert!(
            global_table
                .symbol_by_name("foo")
                .is_some_and(|symbol| { !symbol.is_defined() && symbol.is_used() }),
            "a symbol used but not defined in a scope should have only the used flag"
                .is_some_and(|symbol| { !symbol.is_bound() && symbol.is_used() }),
            "a symbol used but not bound in a scope should have only the used flag"
        );
        let use_def = use_def_map(&db, scope);
        let [definition] =
            use_def.public_definitions(global_table.symbol_id_by_name("x").expect("symbol exists"))
        else {
            panic!("expected one definition");
        };
        let binding = use_def
            .first_public_binding(global_table.symbol_id_by_name("x").expect("symbol exists"))
            .unwrap();
        assert!(matches!(binding.kind(&db), DefinitionKind::Assignment(_)));
    }

    #[test]
    fn augmented_assignment() {
        let TestCase { db, file } = test_case("x += 1");
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(names(&global_table), vec!["x"]);

        let use_def = use_def_map(&db, scope);
        let binding = use_def
            .first_public_binding(global_table.symbol_id_by_name("x").unwrap())
            .unwrap();

        assert!(matches!(
            definition.node(&db),
            DefinitionKind::Assignment(_)
            binding.kind(&db),
            DefinitionKind::AugmentedAssignment(_)
        ));
    }

@@ -473,15 +520,10 @@ y = 2
        assert_eq!(names(&class_table), vec!["x"]);

        let use_def = index.use_def_map(class_scope_id);
        let [definition] =
            use_def.public_definitions(class_table.symbol_id_by_name("x").expect("symbol exists"))
        else {
            panic!("expected one definition");
        };
        assert!(matches!(
            definition.node(&db),
            DefinitionKind::Assignment(_)
        ));
        let binding = use_def
            .first_public_binding(class_table.symbol_id_by_name("x").expect("symbol exists"))
            .unwrap();
        assert!(matches!(binding.kind(&db), DefinitionKind::Assignment(_)));
    }

    #[test]
@@ -511,17 +553,298 @@ y = 2
        assert_eq!(names(&function_table), vec!["x"]);

        let use_def = index.use_def_map(function_scope_id);
        let [definition] = use_def.public_definitions(
            function_table
                .symbol_id_by_name("x")
                .expect("symbol exists"),
        ) else {
            panic!("expected one definition");
        let binding = use_def
            .first_public_binding(
                function_table
                    .symbol_id_by_name("x")
                    .expect("symbol exists"),
            )
            .unwrap();
        assert!(matches!(binding.kind(&db), DefinitionKind::Assignment(_)));
    }

    #[test]
    fn function_parameter_symbols() {
        let TestCase { db, file } = test_case(
            "
def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
    pass
",
        );

        let index = semantic_index(&db, file);
        let global_table = symbol_table(&db, global_scope(&db, file));

        assert_eq!(names(&global_table), vec!["str", "int", "f"]);

        let [(function_scope_id, _function_scope)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("Expected a function scope")
        };
        assert!(matches!(
            definition.node(&db),
            DefinitionKind::Assignment(_)
        ));

        let function_table = index.symbol_table(function_scope_id);
        assert_eq!(
            names(&function_table),
            vec!["a", "b", "c", "args", "d", "kwargs"],
        );

        let use_def = index.use_def_map(function_scope_id);
        for name in ["a", "b", "c", "d"] {
            let binding = use_def
                .first_public_binding(
                    function_table
                        .symbol_id_by_name(name)
                        .expect("symbol exists"),
                )
                .unwrap();
            assert!(matches!(
                binding.kind(&db),
                DefinitionKind::ParameterWithDefault(_)
            ));
        }
        for name in ["args", "kwargs"] {
            let binding = use_def
                .first_public_binding(
                    function_table
                        .symbol_id_by_name(name)
                        .expect("symbol exists"),
                )
                .unwrap();
            assert!(matches!(binding.kind(&db), DefinitionKind::Parameter(_)));
        }
    }

    #[test]
    fn lambda_parameter_symbols() {
        let TestCase { db, file } = test_case("lambda a, b, c=1, *args, d=2, **kwargs: None");

        let index = semantic_index(&db, file);
        let global_table = symbol_table(&db, global_scope(&db, file));

        assert!(names(&global_table).is_empty());

        let [(lambda_scope_id, _lambda_scope)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("Expected a lambda scope")
        };

        let lambda_table = index.symbol_table(lambda_scope_id);
        assert_eq!(
            names(&lambda_table),
            vec!["a", "b", "c", "args", "d", "kwargs"],
        );

        let use_def = index.use_def_map(lambda_scope_id);
        for name in ["a", "b", "c", "d"] {
            let binding = use_def
                .first_public_binding(lambda_table.symbol_id_by_name(name).expect("symbol exists"))
                .unwrap();
            assert!(matches!(
                binding.kind(&db),
                DefinitionKind::ParameterWithDefault(_)
            ));
        }
        for name in ["args", "kwargs"] {
            let binding = use_def
                .first_public_binding(lambda_table.symbol_id_by_name(name).expect("symbol exists"))
                .unwrap();
            assert!(matches!(binding.kind(&db), DefinitionKind::Parameter(_)));
        }
    }

    /// Test case to validate that the comprehension scope is correctly identified and that the target
    /// variable is defined only in the comprehension scope and not in the global scope.
    #[test]
    fn comprehension_scope() {
        let TestCase { db, file } = test_case(
            "
[x for x, y in iter1]
",
        );

        let index = semantic_index(&db, file);
        let global_table = index.symbol_table(FileScopeId::global());

        assert_eq!(names(&global_table), vec!["iter1"]);

        let [(comprehension_scope_id, comprehension_scope)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected one child scope")
        };

        assert_eq!(comprehension_scope.kind(), ScopeKind::Comprehension);
        assert_eq!(
            comprehension_scope_id.to_scope_id(&db, file).name(&db),
            "<listcomp>"
        );

        let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);

        assert_eq!(names(&comprehension_symbol_table), vec!["x", "y"]);

        let use_def = index.use_def_map(comprehension_scope_id);
        for name in ["x", "y"] {
            let binding = use_def
                .first_public_binding(
                    comprehension_symbol_table
                        .symbol_id_by_name(name)
                        .expect("symbol exists"),
                )
                .unwrap();
            assert!(matches!(
                binding.kind(&db),
                DefinitionKind::Comprehension(_)
            ));
        }
    }

    /// Test case to validate that the `x` variable used in the comprehension is referencing the
    /// `x` variable defined by the inner generator (`for x in iter2`) and not the outer one.
    #[test]
    fn multiple_generators() {
        let TestCase { db, file } = test_case(
            "
[x for x in iter1 for x in iter2]
",
        );

        let index = semantic_index(&db, file);
        let [(comprehension_scope_id, _)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected one child scope")
        };

        let use_def = index.use_def_map(comprehension_scope_id);

        let module = parsed_module(&db, file).syntax();
        let element = module.body[0]
            .as_expr_stmt()
            .unwrap()
            .value
            .as_list_comp_expr()
            .unwrap()
            .elt
            .as_name_expr()
            .unwrap();
        let element_use_id =
            element.scoped_use_id(&db, comprehension_scope_id.to_scope_id(&db, file));

        let binding = use_def.first_binding_at_use(element_use_id).unwrap();
        let DefinitionKind::Comprehension(comprehension) = binding.kind(&db) else {
            panic!("expected generator definition")
        };
        let target = comprehension.target();
        let name = target.id().as_str();

        assert_eq!(name, "x");
        assert_eq!(target.range(), TextRange::new(23.into(), 24.into()));
    }

    /// Test case to validate that the nested comprehension creates a new scope which is a child of
    /// the outer comprehension scope and the variables are correctly defined in the respective
    /// scopes.
    #[test]
    fn nested_generators() {
        let TestCase { db, file } = test_case(
            "
[{x for x in iter2} for y in iter1]
",
        );

        let index = semantic_index(&db, file);
        let global_table = index.symbol_table(FileScopeId::global());

        assert_eq!(names(&global_table), vec!["iter1"]);

        let [(comprehension_scope_id, comprehension_scope)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected one child scope")
        };

        assert_eq!(comprehension_scope.kind(), ScopeKind::Comprehension);
        assert_eq!(
            comprehension_scope_id.to_scope_id(&db, file).name(&db),
            "<listcomp>"
        );

        let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);

        assert_eq!(names(&comprehension_symbol_table), vec!["y", "iter2"]);

        let [(inner_comprehension_scope_id, inner_comprehension_scope)] = index
            .child_scopes(comprehension_scope_id)
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected one inner generator scope")
        };

        assert_eq!(inner_comprehension_scope.kind(), ScopeKind::Comprehension);
        assert_eq!(
            inner_comprehension_scope_id
                .to_scope_id(&db, file)
                .name(&db),
            "<setcomp>"
        );

        let inner_comprehension_symbol_table = index.symbol_table(inner_comprehension_scope_id);

        assert_eq!(names(&inner_comprehension_symbol_table), vec!["x"]);
    }

    #[test]
    fn with_item_definition() {
        let TestCase { db, file } = test_case(
            "
with item1 as x, item2 as y:
    pass
",
        );

        let index = semantic_index(&db, file);
        let global_table = index.symbol_table(FileScopeId::global());

        assert_eq!(names(&global_table), vec!["item1", "x", "item2", "y"]);

        let use_def = index.use_def_map(FileScopeId::global());
        for name in ["x", "y"] {
            let binding = use_def
                .first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
                .expect("Expected with item definition for {name}");
            assert!(matches!(binding.kind(&db), DefinitionKind::WithItem(_)));
        }
    }

    #[test]
    fn with_item_unpacked_definition() {
        let TestCase { db, file } = test_case(
            "
with context() as (x, y):
    pass
",
        );

        let index = semantic_index(&db, file);
        let global_table = index.symbol_table(FileScopeId::global());

        assert_eq!(names(&global_table), vec!["context", "x", "y"]);

        let use_def = index.use_def_map(FileScopeId::global());
        for name in ["x", "y"] {
            let binding = use_def
                .first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
                .expect("Expected with item definition for {name}");
            assert!(matches!(binding.kind(&db), DefinitionKind::WithItem(_)));
        }
    }

    #[test]
@@ -557,14 +880,14 @@ def func():
        assert_eq!(names(&func2_table), vec!["y"]);

        let use_def = index.use_def_map(FileScopeId::global());
        let [definition] = use_def.public_definitions(
            global_table
                .symbol_id_by_name("func")
                .expect("symbol exists"),
        ) else {
            panic!("expected one definition");
        };
        assert!(matches!(definition.node(&db), DefinitionKind::Function(_)));
        let binding = use_def
            .first_public_binding(
                global_table
                    .symbol_id_by_name("func")
                    .expect("symbol exists"),
            )
            .unwrap();
        assert!(matches!(binding.kind(&db), DefinitionKind::Function(_)));
    }

    #[test]
@@ -632,7 +955,7 @@ class C[T]:
        assert!(
            ann_table
                .symbol_by_name("T")
                .is_some_and(|s| s.is_defined() && !s.is_used()),
                .is_some_and(|s| s.is_bound() && !s.is_used()),
            "type parameters are defined by the scope that introduces them"
        );

@@ -664,10 +987,8 @@ class C[T]:
        };
        let x_use_id = x_use_expr_name.scoped_use_id(&db, scope);
        let use_def = use_def_map(&db, scope);
        let [definition] = use_def.use_definitions(x_use_id) else {
            panic!("expected one definition");
        };
        let DefinitionKind::Assignment(assignment) = definition.node(&db) else {
        let binding = use_def.first_binding_at_use(x_use_id).unwrap();
        let DefinitionKind::Assignment(assignment) = binding.kind(&db) else {
            panic!("should be an assignment definition")
        };
        let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
@@ -715,7 +1036,7 @@ class C[T]:
    }

        let TestCase { db, file } = test_case(
            r#"
            r"
class Test:
    def foo():
        def bar():
@@ -724,7 +1045,7 @@ class Test:
            pass

def x():
    pass"#,
    pass",
        );

        let index = semantic_index(&db, file);
@@ -757,4 +1078,136 @@ def x():
            vec!["bar", "foo", "Test", "<module>"]
        );
    }

    #[test]
    fn match_stmt() {
        let TestCase { db, file } = test_case(
            "
match subject:
    case a: ...
    case [b, c, *d]: ...
    case e as f: ...
    case {'x': g, **h}: ...
    case Foo(i, z=j): ...
    case k | l: ...
    case _: ...
",
        );

        let global_scope_id = global_scope(&db, file);
        let global_table = symbol_table(&db, global_scope_id);

        assert!(global_table.symbol_by_name("Foo").unwrap().is_used());
        assert_eq!(
            names(&global_table),
            vec!["subject", "a", "b", "c", "d", "e", "f", "g", "h", "Foo", "i", "j", "k", "l"]
        );

        let use_def = use_def_map(&db, global_scope_id);
        for (name, expected_index) in [
            ("a", 0),
            ("b", 0),
            ("c", 1),
            ("d", 2),
            ("e", 0),
            ("f", 1),
            ("g", 0),
            ("h", 1),
            ("i", 0),
            ("j", 1),
            ("k", 0),
            ("l", 1),
        ] {
            let binding = use_def
                .first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
                .expect("Expected with item definition for {name}");
            if let DefinitionKind::MatchPattern(pattern) = binding.kind(&db) {
                assert_eq!(pattern.index(), expected_index);
            } else {
                panic!("Expected match pattern definition for {name}");
            }
        }
    }

    #[test]
    fn nested_match_case() {
        let TestCase { db, file } = test_case(
            "
match 1:
    case first:
        match 2:
            case second:
                pass
",
        );

        let global_scope_id = global_scope(&db, file);
        let global_table = symbol_table(&db, global_scope_id);

        assert_eq!(names(&global_table), vec!["first", "second"]);

        let use_def = use_def_map(&db, global_scope_id);
        for (name, expected_index) in [("first", 0), ("second", 0)] {
            let binding = use_def
                .first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
                .expect("Expected with item definition for {name}");
            if let DefinitionKind::MatchPattern(pattern) = binding.kind(&db) {
                assert_eq!(pattern.index(), expected_index);
            } else {
                panic!("Expected match pattern definition for {name}");
            }
        }
    }

    #[test]
    fn for_loops_single_assignment() {
        let TestCase { db, file } = test_case("for x in a: pass");
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(&names(&global_table), &["a", "x"]);

        let use_def = use_def_map(&db, scope);
        let binding = use_def
            .first_public_binding(global_table.symbol_id_by_name("x").unwrap())
            .unwrap();

        assert!(matches!(binding.kind(&db), DefinitionKind::For(_)));
    }

    #[test]
    fn for_loops_simple_unpacking() {
        let TestCase { db, file } = test_case("for (x, y) in a: pass");
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(&names(&global_table), &["a", "x", "y"]);

        let use_def = use_def_map(&db, scope);
        let x_binding = use_def
            .first_public_binding(global_table.symbol_id_by_name("x").unwrap())
            .unwrap();
        let y_binding = use_def
            .first_public_binding(global_table.symbol_id_by_name("y").unwrap())
            .unwrap();

        assert!(matches!(x_binding.kind(&db), DefinitionKind::For(_)));
        assert!(matches!(y_binding.kind(&db), DefinitionKind::For(_)));
    }

    #[test]
    fn for_loops_complex_unpacking() {
        let TestCase { db, file } = test_case("for [((a,) b), (c, d)] in e: pass");
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(&names(&global_table), &["e", "a", "b", "c", "d"]);

        let use_def = use_def_map(&db, scope);
        let binding = use_def
            .first_public_binding(global_table.symbol_id_by_name("a").unwrap())
            .unwrap();

        assert!(matches!(binding.kind(&db), DefinitionKind::For(_)));
    }
}

@@ -26,9 +26,9 @@ use crate::Db;
/// ```
#[derive(Debug)]
pub(crate) struct AstIds {
    /// Maps expressions to their expression id. Uses `NodeKey` because it avoids cloning [`Parsed`].
    /// Maps expressions to their expression id.
    expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
    /// Maps expressions which "use" a symbol (that is, [`ExprName`]) to a use id.
    /// Maps expressions which "use" a symbol (that is, [`ast::ExprName`]) to a use id.
    uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
}

(File diff suppressed because it is too large.)
@@ -0,0 +1,39 @@
use ruff_db::files::File;
use ruff_python_ast as ast;

use crate::ast_node_ref::AstNodeRef;
use crate::db::Db;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{FileScopeId, ScopeId};

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum Constraint<'db> {
    Expression(Expression<'db>),
    Pattern(PatternConstraint<'db>),
}

#[salsa::tracked]
pub(crate) struct PatternConstraint<'db> {
    #[id]
    pub(crate) file: File,

    #[id]
    pub(crate) file_scope: FileScopeId,

    #[no_eq]
    #[return_ref]
    pub(crate) subject: AstNodeRef<ast::Expr>,

    #[no_eq]
    #[return_ref]
    pub(crate) pattern: AstNodeRef<ast::Pattern>,

    #[no_eq]
    count: countme::Count<PatternConstraint<'static>>,
}

impl<'db> PatternConstraint<'db> {
    pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
        self.file_scope(db).to_scope_id(db, self.file(db))
    }
}
@@ -23,24 +23,46 @@ pub struct Definition<'db> {

    #[no_eq]
    #[return_ref]
    pub(crate) node: DefinitionKind,
    pub(crate) kind: DefinitionKind,

    #[no_eq]
    count: countme::Count<Definition<'static>>,
}

impl<'db> Definition<'db> {
    pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
        self.file_scope(db).to_scope_id(db, self.file(db))
    }

    pub(crate) fn category(self, db: &'db dyn Db) -> DefinitionCategory {
        self.kind(db).category()
    }

    pub(crate) fn is_declaration(self, db: &'db dyn Db) -> bool {
        self.kind(db).category().is_declaration()
    }

    pub(crate) fn is_binding(self, db: &'db dyn Db) -> bool {
        self.kind(db).category().is_binding()
    }
}

#[derive(Copy, Clone, Debug)]
pub(crate) enum DefinitionNodeRef<'a> {
    Import(&'a ast::Alias),
    ImportFrom(ImportFromDefinitionNodeRef<'a>),
    For(ForStmtDefinitionNodeRef<'a>),
    Function(&'a ast::StmtFunctionDef),
    Class(&'a ast::StmtClassDef),
    NamedExpression(&'a ast::ExprNamed),
    Assignment(AssignmentDefinitionNodeRef<'a>),
    AnnotatedAssignment(&'a ast::StmtAnnAssign),
    AugmentedAssignment(&'a ast::StmtAugAssign),
    Comprehension(ComprehensionDefinitionNodeRef<'a>),
    Parameter(ast::AnyParameterRef<'a>),
    WithItem(WithItemDefinitionNodeRef<'a>),
    MatchPattern(MatchPatternDefinitionNodeRef<'a>),
    ExceptHandler(ExceptHandlerDefinitionNodeRef<'a>),
}

impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
@@ -67,6 +89,12 @@ impl<'a> From<&'a ast::StmtAnnAssign> for DefinitionNodeRef<'a> {
    }
}

impl<'a> From<&'a ast::StmtAugAssign> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::StmtAugAssign) -> Self {
        Self::AugmentedAssignment(node)
    }
}

impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
    fn from(node_ref: &'a ast::Alias) -> Self {
        Self::Import(node_ref)
@@ -79,12 +107,42 @@ impl<'a> From<ImportFromDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    }
}

impl<'a> From<ForStmtDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(value: ForStmtDefinitionNodeRef<'a>) -> Self {
        Self::For(value)
    }
}

impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node_ref: AssignmentDefinitionNodeRef<'a>) -> Self {
        Self::Assignment(node_ref)
    }
}

impl<'a> From<WithItemDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node_ref: WithItemDefinitionNodeRef<'a>) -> Self {
        Self::WithItem(node_ref)
    }
}

impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node: ComprehensionDefinitionNodeRef<'a>) -> Self {
        Self::Comprehension(node)
    }
}

impl<'a> From<ast::AnyParameterRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node: ast::AnyParameterRef<'a>) -> Self {
        Self::Parameter(node)
    }
}

impl<'a> From<MatchPatternDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node: MatchPatternDefinitionNodeRef<'a>) -> Self {
        Self::MatchPattern(node)
    }
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
    pub(crate) node: &'a ast::StmtImportFrom,
@@ -97,6 +155,44 @@ pub(crate) struct AssignmentDefinitionNodeRef<'a> {
    pub(crate) target: &'a ast::ExprName,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct WithItemDefinitionNodeRef<'a> {
    pub(crate) node: &'a ast::WithItem,
    pub(crate) target: &'a ast::ExprName,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ForStmtDefinitionNodeRef<'a> {
    pub(crate) iterable: &'a ast::Expr,
    pub(crate) target: &'a ast::ExprName,
    pub(crate) is_async: bool,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ExceptHandlerDefinitionNodeRef<'a> {
    pub(crate) handler: &'a ast::ExceptHandlerExceptHandler,
    pub(crate) is_star: bool,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
    pub(crate) iterable: &'a ast::Expr,
    pub(crate) target: &'a ast::ExprName,
    pub(crate) first: bool,
    pub(crate) is_async: bool,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct MatchPatternDefinitionNodeRef<'a> {
    /// The outermost pattern node in which the identifier being defined occurs.
    pub(crate) pattern: &'a ast::Pattern,
    /// The identifier being defined.
    pub(crate) identifier: &'a ast::Identifier,
    /// The index of the identifier in the pattern when visiting the `pattern` node in evaluation
    /// order.
    pub(crate) index: u32,
}

impl DefinitionNodeRef<'_> {
    #[allow(unsafe_code)]
    pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind {
@@ -128,6 +224,59 @@ impl DefinitionNodeRef<'_> {
            DefinitionNodeRef::AnnotatedAssignment(assign) => {
                DefinitionKind::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
            }
            DefinitionNodeRef::AugmentedAssignment(augmented_assignment) => {
                DefinitionKind::AugmentedAssignment(AstNodeRef::new(parsed, augmented_assignment))
            }
            DefinitionNodeRef::For(ForStmtDefinitionNodeRef {
                iterable,
                target,
                is_async,
            }) => DefinitionKind::For(ForStmtDefinitionKind {
                iterable: AstNodeRef::new(parsed.clone(), iterable),
                target: AstNodeRef::new(parsed, target),
                is_async,
            }),
            DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef {
                iterable,
                target,
                first,
                is_async,
            }) => DefinitionKind::Comprehension(ComprehensionDefinitionKind {
                iterable: AstNodeRef::new(parsed.clone(), iterable),
                target: AstNodeRef::new(parsed, target),
                first,
                is_async,
            }),
            DefinitionNodeRef::Parameter(parameter) => match parameter {
                ast::AnyParameterRef::Variadic(parameter) => {
                    DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
                }
                ast::AnyParameterRef::NonVariadic(parameter) => {
                    DefinitionKind::ParameterWithDefault(AstNodeRef::new(parsed, parameter))
                }
            },
            DefinitionNodeRef::WithItem(WithItemDefinitionNodeRef { node, target }) => {
                DefinitionKind::WithItem(WithItemDefinitionKind {
                    node: AstNodeRef::new(parsed.clone(), node),
                    target: AstNodeRef::new(parsed, target),
                })
            }
            DefinitionNodeRef::MatchPattern(MatchPatternDefinitionNodeRef {
                pattern,
                identifier,
                index,
            }) => DefinitionKind::MatchPattern(MatchPatternDefinitionKind {
                pattern: AstNodeRef::new(parsed.clone(), pattern),
                identifier: AstNodeRef::new(parsed, identifier),
                index,
            }),
            DefinitionNodeRef::ExceptHandler(ExceptHandlerDefinitionNodeRef {
                handler,
                is_star,
            }) => DefinitionKind::ExceptHandler(ExceptHandlerDefinitionKind {
                handler: AstNodeRef::new(parsed.clone(), handler),
                is_star,
            }),
        }
    }

@@ -145,10 +294,61 @@ impl DefinitionNodeRef<'_> {
                target,
            }) => target.into(),
            Self::AnnotatedAssignment(node) => node.into(),
            Self::AugmentedAssignment(node) => node.into(),
            Self::For(ForStmtDefinitionNodeRef {
                iterable: _,
                target,
                is_async: _,
            }) => target.into(),
            Self::Comprehension(ComprehensionDefinitionNodeRef { target, .. }) => target.into(),
            Self::Parameter(node) => match node {
                ast::AnyParameterRef::Variadic(parameter) => parameter.into(),
                ast::AnyParameterRef::NonVariadic(parameter) => parameter.into(),
            },
            Self::WithItem(WithItemDefinitionNodeRef { node: _, target }) => target.into(),
            Self::MatchPattern(MatchPatternDefinitionNodeRef { identifier, .. }) => {
                identifier.into()
            }
            Self::ExceptHandler(ExceptHandlerDefinitionNodeRef { handler, .. }) => handler.into(),
        }
    }
}

#[derive(Clone, Copy, Debug)]
pub(crate) enum DefinitionCategory {
    /// A Definition which binds a value to a name (e.g. `x = 1`).
    Binding,
    /// A Definition which declares the upper-bound of acceptable types for this name (`x: int`).
    Declaration,
    /// A Definition which both declares a type and binds a value (e.g. `x: int = 1`).
    DeclarationAndBinding,
}

impl DefinitionCategory {
    /// True if this definition establishes a "declared type" for the symbol.
    ///
    /// If so, any assignments reached by this definition are in error if they assign a value of a
    /// type not assignable to the declared type.
    ///
    /// Annotations establish a declared type. So do function and class definitions, and imports.
    pub(crate) fn is_declaration(self) -> bool {
        matches!(
            self,
            DefinitionCategory::Declaration | DefinitionCategory::DeclarationAndBinding
        )
    }

    /// True if this definition assigns a value to the symbol.
    ///
    /// False only for annotated assignments without a RHS.
    pub(crate) fn is_binding(self) -> bool {
        matches!(
            self,
            DefinitionCategory::Binding | DefinitionCategory::DeclarationAndBinding
        )
    }
}
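The three-variant category forms a small lattice: `is_declaration` and `is_binding` each accept two of the three variants, and only `DeclarationAndBinding` satisfies both. A self-contained mirror of that truth table (re-declaring the enum locally rather than using the crate-internal type):

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Category {
    Binding,               // `x = 1`
    Declaration,           // `x: int`
    DeclarationAndBinding, // `x: int = 1`
}

fn is_declaration(c: Category) -> bool {
    matches!(c, Category::Declaration | Category::DeclarationAndBinding)
}

fn is_binding(c: Category) -> bool {
    matches!(c, Category::Binding | Category::DeclarationAndBinding)
}

fn main() {
    // An annotation without a value declares but does not bind.
    assert!(is_declaration(Category::Declaration) && !is_binding(Category::Declaration));
    // A plain assignment binds but does not declare.
    assert!(is_binding(Category::Binding) && !is_declaration(Category::Binding));
    // An annotated assignment with a value does both.
    assert!(is_declaration(Category::DeclarationAndBinding) && is_binding(Category::DeclarationAndBinding));
}
```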

#[derive(Clone, Debug)]
pub enum DefinitionKind {
    Import(AstNodeRef<ast::Alias>),
@@ -158,6 +358,103 @@ pub enum DefinitionKind {
    NamedExpression(AstNodeRef<ast::ExprNamed>),
    Assignment(AssignmentDefinitionKind),
    AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
    AugmentedAssignment(AstNodeRef<ast::StmtAugAssign>),
    For(ForStmtDefinitionKind),
    Comprehension(ComprehensionDefinitionKind),
    Parameter(AstNodeRef<ast::Parameter>),
    ParameterWithDefault(AstNodeRef<ast::ParameterWithDefault>),
    WithItem(WithItemDefinitionKind),
    MatchPattern(MatchPatternDefinitionKind),
    ExceptHandler(ExceptHandlerDefinitionKind),
}

impl DefinitionKind {
    pub(crate) fn category(&self) -> DefinitionCategory {
        match self {
            // functions, classes, and imports always bind, and we consider them declarations
            DefinitionKind::Function(_)
            | DefinitionKind::Class(_)
            | DefinitionKind::Import(_)
            | DefinitionKind::ImportFrom(_) => DefinitionCategory::DeclarationAndBinding,
            // a parameter always binds a value, but is only a declaration if annotated
            DefinitionKind::Parameter(parameter) => {
                if parameter.annotation.is_some() {
                    DefinitionCategory::DeclarationAndBinding
                } else {
                    DefinitionCategory::Binding
                }
            }
            // presence of a default is irrelevant, same logic as for a no-default parameter
            DefinitionKind::ParameterWithDefault(parameter_with_default) => {
                if parameter_with_default.parameter.annotation.is_some() {
                    DefinitionCategory::DeclarationAndBinding
                } else {
                    DefinitionCategory::Binding
                }
            }
            // annotated assignment is always a declaration, only a binding if there is a RHS
            DefinitionKind::AnnotatedAssignment(ann_assign) => {
                if ann_assign.value.is_some() {
                    DefinitionCategory::DeclarationAndBinding
                } else {
                    DefinitionCategory::Declaration
                }
            }
            // all of these bind values without declaring a type
            DefinitionKind::NamedExpression(_)
            | DefinitionKind::Assignment(_)
            | DefinitionKind::AugmentedAssignment(_)
            | DefinitionKind::For(_)
            | DefinitionKind::Comprehension(_)
            | DefinitionKind::WithItem(_)
            | DefinitionKind::MatchPattern(_)
            | DefinitionKind::ExceptHandler(_) => DefinitionCategory::Binding,
        }
    }
}

#[derive(Clone, Debug)]
#[allow(dead_code)]
pub struct MatchPatternDefinitionKind {
    pattern: AstNodeRef<ast::Pattern>,
    identifier: AstNodeRef<ast::Identifier>,
    index: u32,
}

impl MatchPatternDefinitionKind {
    pub(crate) fn pattern(&self) -> &ast::Pattern {
        self.pattern.node()
    }

    pub(crate) fn index(&self) -> u32 {
        self.index
    }
}

#[derive(Clone, Debug)]
pub struct ComprehensionDefinitionKind {
    iterable: AstNodeRef<ast::Expr>,
    target: AstNodeRef<ast::ExprName>,
    first: bool,
    is_async: bool,
}

impl ComprehensionDefinitionKind {
    pub(crate) fn iterable(&self) -> &ast::Expr {
        self.iterable.node()
    }

    pub(crate) fn target(&self) -> &ast::ExprName {
        self.target.node()
    }

    pub(crate) fn is_first(&self) -> bool {
        self.first
    }

    pub(crate) fn is_async(&self) -> bool {
        self.is_async
    }
}

#[derive(Clone, Debug)]
@@ -177,7 +474,6 @@ impl ImportFromDefinitionKind {
}

#[derive(Clone, Debug)]
#[allow(dead_code)]
pub struct AssignmentDefinitionKind {
    assignment: AstNodeRef<ast::StmtAssign>,
    target: AstNodeRef<ast::ExprName>,
@@ -187,6 +483,67 @@ impl AssignmentDefinitionKind {
    pub(crate) fn assignment(&self) -> &ast::StmtAssign {
        self.assignment.node()
    }

    pub(crate) fn target(&self) -> &ast::ExprName {
        self.target.node()
    }
}

#[derive(Clone, Debug)]
pub struct WithItemDefinitionKind {
    node: AstNodeRef<ast::WithItem>,
    target: AstNodeRef<ast::ExprName>,
}

impl WithItemDefinitionKind {
    pub(crate) fn node(&self) -> &ast::WithItem {
        self.node.node()
    }

    pub(crate) fn target(&self) -> &ast::ExprName {
        self.target.node()
    }
}

#[derive(Clone, Debug)]
pub struct ForStmtDefinitionKind {
    iterable: AstNodeRef<ast::Expr>,
    target: AstNodeRef<ast::ExprName>,
    is_async: bool,
}

impl ForStmtDefinitionKind {
    pub(crate) fn iterable(&self) -> &ast::Expr {
        self.iterable.node()
    }

    pub(crate) fn target(&self) -> &ast::ExprName {
        self.target.node()
    }

    pub(crate) fn is_async(&self) -> bool {
        self.is_async
    }
}

#[derive(Clone, Debug)]
pub struct ExceptHandlerDefinitionKind {
    handler: AstNodeRef<ast::ExceptHandlerExceptHandler>,
    is_star: bool,
}

impl ExceptHandlerDefinitionKind {
    pub(crate) fn node(&self) -> &ast::ExceptHandlerExceptHandler {
        self.handler.node()
    }

    pub(crate) fn handled_exceptions(&self) -> Option<&ast::Expr> {
        self.node().type_.as_deref()
    }

    pub(crate) fn is_star(&self) -> bool {
        self.is_star
    }
}

#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
@@ -227,3 +584,39 @@ impl From<&ast::StmtAnnAssign> for DefinitionNodeKey {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::StmtAugAssign> for DefinitionNodeKey {
    fn from(node: &ast::StmtAugAssign) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::StmtFor> for DefinitionNodeKey {
    fn from(value: &ast::StmtFor) -> Self {
        Self(NodeKey::from_node(value))
    }
}

impl From<&ast::Parameter> for DefinitionNodeKey {
    fn from(node: &ast::Parameter) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::ParameterWithDefault> for DefinitionNodeKey {
    fn from(node: &ast::ParameterWithDefault) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::Identifier> for DefinitionNodeKey {
    fn from(identifier: &ast::Identifier) -> Self {
        Self(NodeKey::from_node(identifier))
    }
}

impl From<&ast::ExceptHandlerExceptHandler> for DefinitionNodeKey {
    fn from(handler: &ast::ExceptHandlerExceptHandler) -> Self {
        Self(NodeKey::from_node(handler))
    }
}

@@ -21,7 +21,10 @@ pub(crate) struct Expression<'db> {
    /// The expression node.
    #[no_eq]
    #[return_ref]
    pub(crate) node: AstNodeRef<ast::Expr>,
    pub(crate) node_ref: AstNodeRef<ast::Expr>,

    #[no_eq]
    count: countme::Count<Expression<'static>>,
}

impl<'db> Expression<'db> {

@@ -44,16 +44,16 @@ impl Symbol {
    }

    /// Is the symbol defined in its containing scope?
    pub fn is_defined(&self) -> bool {
        self.flags.contains(SymbolFlags::IS_DEFINED)
    pub fn is_bound(&self) -> bool {
        self.flags.contains(SymbolFlags::IS_BOUND)
    }
}

bitflags! {
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    pub(super) struct SymbolFlags: u8 {
    struct SymbolFlags: u8 {
        const IS_USED = 1 << 0;
        const IS_DEFINED = 1 << 1;
        const IS_BOUND = 1 << 1;
        /// TODO: This flag is not yet set by anything
        const MARKED_GLOBAL = 1 << 2;
        /// TODO: This flag is not yet set by anything
@@ -100,6 +100,9 @@ pub struct ScopeId<'db> {
    #[no_eq]
    #[return_ref]
    pub node: NodeWithScopeKind,

    #[no_eq]
    count: countme::Count<ScopeId<'static>>,
}

impl<'db> ScopeId<'db> {
@@ -111,6 +114,10 @@ impl<'db> ScopeId<'db> {
            NodeWithScopeKind::ClassTypeParameters(_)
                | NodeWithScopeKind::FunctionTypeParameters(_)
                | NodeWithScopeKind::Function(_)
                | NodeWithScopeKind::ListComprehension(_)
                | NodeWithScopeKind::SetComprehension(_)
                | NodeWithScopeKind::DictComprehension(_)
                | NodeWithScopeKind::GeneratorExpression(_)
        )
    }

@@ -123,6 +130,11 @@ impl<'db> ScopeId<'db> {
            }
            NodeWithScopeKind::Function(function)
            | NodeWithScopeKind::FunctionTypeParameters(function) => function.name.as_str(),
            NodeWithScopeKind::Lambda(_) => "<lambda>",
            NodeWithScopeKind::ListComprehension(_) => "<listcomp>",
            NodeWithScopeKind::SetComprehension(_) => "<setcomp>",
            NodeWithScopeKind::DictComprehension(_) => "<dictcomp>",
            NodeWithScopeKind::GeneratorExpression(_) => "<generator>",
        }
    }
}
@@ -137,6 +149,10 @@ impl FileScopeId {
        FileScopeId::from_u32(0)
    }

    pub fn is_global(self) -> bool {
        self == FileScopeId::global()
    }

    pub fn to_scope_id(self, db: &dyn Db, file: File) -> ScopeId<'_> {
        let index = semantic_index(db, file);
        index.scope_ids_by_scope[self]
@@ -166,6 +182,13 @@ pub enum ScopeKind {
    Annotation,
    Class,
    Function,
    Comprehension,
}

impl ScopeKind {
    pub const fn is_comprehension(self) -> bool {
        matches!(self, ScopeKind::Comprehension)
    }
}

/// Symbol table for a specific [`Scope`].
@@ -249,11 +272,7 @@ impl SymbolTableBuilder {
        }
    }

    pub(super) fn add_or_update_symbol(
        &mut self,
        name: Name,
        flags: SymbolFlags,
    ) -> (ScopedSymbolId, bool) {
    pub(super) fn add_symbol(&mut self, name: Name) -> (ScopedSymbolId, bool) {
        let hash = SymbolTable::hash_name(&name);
        let entry = self
            .table
@@ -262,15 +281,9 @@ impl SymbolTableBuilder {
            .from_hash(hash, |id| self.table.symbols[*id].name() == &name);

        match entry {
            RawEntryMut::Occupied(entry) => {
                let symbol = &mut self.table.symbols[*entry.key()];
                symbol.insert_flags(flags);

                (*entry.key(), false)
            }
            RawEntryMut::Occupied(entry) => (*entry.key(), false),
            RawEntryMut::Vacant(entry) => {
                let mut symbol = Symbol::new(name);
                symbol.insert_flags(flags);
                let symbol = Symbol::new(name);

                let id = self.table.symbols.push(symbol);
                entry.insert_with_hasher(hash, id, (), |id| {
@@ -281,6 +294,14 @@ impl SymbolTableBuilder {
        }
    }

    pub(super) fn mark_symbol_bound(&mut self, id: ScopedSymbolId) {
        self.table.symbols[id].insert_flags(SymbolFlags::IS_BOUND);
    }

    pub(super) fn mark_symbol_used(&mut self, id: ScopedSymbolId) {
        self.table.symbols[id].insert_flags(SymbolFlags::IS_USED);
    }

    pub(super) fn finish(mut self) -> SymbolTable {
        self.table.shrink_to_fit();
        self.table
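The hunks above replace the old `add_or_update_symbol(name, flags)` with a split protocol: intern the name once, then record "bound" and "used" separately at the point where the builder's visitor actually sees a binding or a use. A sketch of the intended call pattern (this is crate-internal API, so the fragment is illustrative rather than compilable on its own):

```rust
// Inside the semantic-index builder (hypothetical visitor fragment):
let (symbol_id, _is_newly_added) = builder.add_symbol(name);
// ...when the visited node binds the name (e.g. an assignment target):
builder.mark_symbol_bound(symbol_id);
// ...when the visited node reads the name (e.g. a name load):
builder.mark_symbol_used(symbol_id);
```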
@@ -293,8 +314,13 @@ pub(crate) enum NodeWithScopeRef<'a> {
    Module,
    Class(&'a ast::StmtClassDef),
    Function(&'a ast::StmtFunctionDef),
    Lambda(&'a ast::ExprLambda),
    FunctionTypeParameters(&'a ast::StmtFunctionDef),
    ClassTypeParameters(&'a ast::StmtClassDef),
    ListComprehension(&'a ast::ExprListComp),
    SetComprehension(&'a ast::ExprSetComp),
    DictComprehension(&'a ast::ExprDictComp),
    GeneratorExpression(&'a ast::ExprGenerator),
}

impl NodeWithScopeRef<'_> {
@@ -312,11 +338,26 @@ impl NodeWithScopeRef<'_> {
            NodeWithScopeRef::Function(function) => {
                NodeWithScopeKind::Function(AstNodeRef::new(module, function))
            }
            NodeWithScopeRef::Lambda(lambda) => {
                NodeWithScopeKind::Lambda(AstNodeRef::new(module, lambda))
            }
            NodeWithScopeRef::FunctionTypeParameters(function) => {
                NodeWithScopeKind::FunctionTypeParameters(AstNodeRef::new(module, function))
            }
            NodeWithScopeRef::ClassTypeParameters(class) => {
                NodeWithScopeKind::Class(AstNodeRef::new(module, class))
                NodeWithScopeKind::ClassTypeParameters(AstNodeRef::new(module, class))
            }
            NodeWithScopeRef::ListComprehension(comprehension) => {
                NodeWithScopeKind::ListComprehension(AstNodeRef::new(module, comprehension))
            }
            NodeWithScopeRef::SetComprehension(comprehension) => {
                NodeWithScopeKind::SetComprehension(AstNodeRef::new(module, comprehension))
            }
            NodeWithScopeRef::DictComprehension(comprehension) => {
                NodeWithScopeKind::DictComprehension(AstNodeRef::new(module, comprehension))
            }
            NodeWithScopeRef::GeneratorExpression(generator) => {
                NodeWithScopeKind::GeneratorExpression(AstNodeRef::new(module, generator))
            }
        }
    }
@@ -326,8 +367,13 @@ impl NodeWithScopeRef<'_> {
            NodeWithScopeRef::Module => ScopeKind::Module,
            NodeWithScopeRef::Class(_) => ScopeKind::Class,
            NodeWithScopeRef::Function(_) => ScopeKind::Function,
            NodeWithScopeRef::Lambda(_) => ScopeKind::Function,
            NodeWithScopeRef::FunctionTypeParameters(_)
            | NodeWithScopeRef::ClassTypeParameters(_) => ScopeKind::Annotation,
            NodeWithScopeRef::ListComprehension(_)
            | NodeWithScopeRef::SetComprehension(_)
            | NodeWithScopeRef::DictComprehension(_)
            | NodeWithScopeRef::GeneratorExpression(_) => ScopeKind::Comprehension,
        }
    }

@@ -338,12 +384,27 @@ impl NodeWithScopeRef<'_> {
            NodeWithScopeRef::Function(function) => {
                NodeWithScopeKey::Function(NodeKey::from_node(function))
            }
            NodeWithScopeRef::Lambda(lambda) => {
                NodeWithScopeKey::Lambda(NodeKey::from_node(lambda))
            }
            NodeWithScopeRef::FunctionTypeParameters(function) => {
                NodeWithScopeKey::FunctionTypeParameters(NodeKey::from_node(function))
            }
            NodeWithScopeRef::ClassTypeParameters(class) => {
                NodeWithScopeKey::ClassTypeParameters(NodeKey::from_node(class))
            }
            NodeWithScopeRef::ListComprehension(comprehension) => {
                NodeWithScopeKey::ListComprehension(NodeKey::from_node(comprehension))
            }
            NodeWithScopeRef::SetComprehension(comprehension) => {
                NodeWithScopeKey::SetComprehension(NodeKey::from_node(comprehension))
            }
            NodeWithScopeRef::DictComprehension(comprehension) => {
                NodeWithScopeKey::DictComprehension(NodeKey::from_node(comprehension))
            }
            NodeWithScopeRef::GeneratorExpression(generator) => {
                NodeWithScopeKey::GeneratorExpression(NodeKey::from_node(generator))
            }
        }
    }
}
@@ -356,6 +417,11 @@ pub enum NodeWithScopeKind {
    ClassTypeParameters(AstNodeRef<ast::StmtClassDef>),
    Function(AstNodeRef<ast::StmtFunctionDef>),
    FunctionTypeParameters(AstNodeRef<ast::StmtFunctionDef>),
    Lambda(AstNodeRef<ast::ExprLambda>),
    ListComprehension(AstNodeRef<ast::ExprListComp>),
    SetComprehension(AstNodeRef<ast::ExprSetComp>),
    DictComprehension(AstNodeRef<ast::ExprDictComp>),
    GeneratorExpression(AstNodeRef<ast::ExprGenerator>),
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
@@ -365,4 +431,9 @@ pub(crate) enum NodeWithScopeKey {
    ClassTypeParameters(NodeKey),
    Function(NodeKey),
    FunctionTypeParameters(NodeKey),
    Lambda(NodeKey),
    ListComprehension(NodeKey),
    SetComprehension(NodeKey),
    DictComprehension(NodeKey),
    GeneratorExpression(NodeKey),
}

@@ -1,4 +1,79 @@
|
||||
//! Build a map from each use of a symbol to the definitions visible from that use.
|
||||
//! First, some terminology:
|
||||
//!
|
||||
//! * A "binding" gives a new value to a variable. This includes many different Python statements
|
||||
//! (assignment statements of course, but also imports, `def` and `class` statements, `as`
|
||||
//! clauses in `with` and `except` statements, match patterns, and others) and even one
|
||||
//! expression kind (named expressions). It notably does not include annotated assignment
|
||||
//! statements without a right-hand side value; these do not assign any new value to the
|
||||
//! variable. We consider function parameters to be bindings as well, since (from the perspective
|
||||
//! of the function's internal scope), a function parameter begins the scope bound to a value.
|
||||
//!
|
||||
//! * A "declaration" establishes an upper bound type for the values that a variable may be
|
||||
//! permitted to take on. Annotated assignment statements (with or without an RHS value) are
|
||||
//! declarations; annotated function parameters are also declarations. We consider `def` and
|
||||
//! `class` statements to also be declarations, so as to prohibit accidentally shadowing them.
|
||||
//!
|
||||
//! Annotated assignments with a right-hand side, and annotated function parameters, are both
|
||||
//! bindings and declarations.
|
||||
//!
|
||||
//! We use [`Definition`] as the universal term (and Salsa tracked struct) encompassing both
|
||||
//! bindings and declarations. (This sacrifices a bit of type safety in exchange for improved
|
||||
//! performance via fewer Salsa tracked structs and queries, since most declarations -- typed
|
||||
//! parameters and annotated assignments with RHS -- are both bindings and declarations.)
|
||||
//!
|
||||
//! At any given use of a variable, we can ask about both its "declared type" and its "inferred
|
||||
//! type". These may be different, but the inferred type must always be assignable to the declared
|
||||
//! type; that is, the declared type is always wider, and the inferred type may be more precise. If
|
||||
//! we see an invalid assignment, we emit a diagnostic and abandon our inferred type, deferring to
|
||||
//! the declared type (this allows an explicit annotation to override bad inference, without a
|
||||
//! cast), maintaining the invariant.
|
||||
//!
|
||||
//! The **inferred type** represents the most precise type we believe encompasses all possible
|
||||
//! values for the variable at a given use. It is based on a union of the bindings which can reach
|
||||
//! that use through some control flow path, and the narrowing constraints that control flow must
|
||||
//! have passed through between the binding and the use. For example, in this code:
|
||||
//!
|
||||
//! ```python
|
||||
//! x = 1 if flag else None
|
||||
//! if x is not None:
|
||||
//! use(x)
|
||||
//! ```
|
||||
//!
|
||||
//! For the use of `x` on the third line, the inferred type should be `Literal[1]`. This is based
|
||||
//! on the binding on the first line, which assigns the type `Literal[1] | None`, and the narrowing
|
||||
//! constraint on the second line, which rules out the type `None`, since control flow must pass
|
||||
//! through this constraint to reach the use in question.
|
||||
//!
|
||||
//! The **declared type** represents the code author's declaration (usually through a type
|
||||
//! annotation) that a given variable should not be assigned any type outside the declared type. In
|
||||
//! our model, declared types are also control-flow-sensitive; we allow the code author to
|
||||
//! explicitly re-declare the same variable with a different type. So for a given binding of a
|
||||
//! variable, we will want to ask which declarations of that variable can reach that binding, in
|
||||
//! order to determine whether the binding is permitted, or should be a type error. For example:
|
||||
//!
|
||||
//! ```python
|
||||
//! from pathlib import Path
|
||||
//! def f(path: str):
|
||||
//! path: Path = Path(path)
|
||||
//! ```
|
||||
//!
|
||||
//! In this function, the initial declared type of `path` is `str`, meaning that the assignment
|
||||
//! `path = Path(path)` would be a type error, since it assigns to `path` a value whose type is not
|
||||
//! assignable to `str`. This is the purpose of declared types: they prevent accidental assignment
|
||||
//! of the wrong type to a variable.
|
||||
//!
|
||||
//! But in some cases it is useful to "shadow" or "re-declare" a variable with a new type, and we
|
||||
//! permit this, as long as it is done with an explicit re-annotation. So `path: Path =
|
||||
//! Path(path)`, with the explicit `: Path` annotation, is permitted.
|
||||
//!
|
||||
//! The general rule is that whatever declaration(s) can reach a given binding determine the
|
||||
//! validity of that binding. If there is a path in which the symbol is not declared, that is a
|
||||
//! declaration of `Unknown`. If multiple declarations can reach a binding, we union them, but by
|
||||
//! default we also issue a type error, since this implicit union of declared types may hide an
|
||||
//! error.
|
||||
//!
|
||||
//! To support type inference, we build a map from each use of a symbol to the bindings live at
|
||||
//! that use, and the type narrowing constraints that apply to each binding.
|
||||
//!
|
||||
//! Let's take this code sample:
|
||||
//!
|
||||
@@ -13,341 +88,488 @@
//! z = x
//! ```
//!
//! In this snippet, we have four definitions of `x` (the statements assigning `1`, `2`, `3`,
//! and `4` to it), and two uses of `x` (the `y = x` and `z = x` assignments). The first
//! [`Definition`] of `x` is never visible to any use, because it's immediately replaced by the
//! second definition, before any use happens. (A linter could thus flag the statement `x = 1`
//! as likely superfluous.)
//! In this snippet, we have four bindings of `x` (the statements assigning `1`, `2`, `3`, and `4`
//! to it), and two uses of `x` (the `y = x` and `z = x` assignments). The first binding of `x`
//! does not reach any use, because it's immediately replaced by the second binding, before any use
//! happens. (A linter could thus flag the statement `x = 1` as likely superfluous.)
//!
//! The first use of `x` has one definition visible to it: the assignment `x = 2`.
//! The first use of `x` has one live binding: the assignment `x = 2`.
//!
//! Things get a bit more complex when we have branches. We will definitely take either the `if` or
//! the `else` branch. Thus, the second use of `x` has two definitions visible to it: `x = 3` and
//! `x = 4`. The `x = 2` definition is no longer visible, because it must be replaced by either `x
//! = 3` or `x = 4`, no matter which branch was taken. We don't know which branch was taken, so we
//! must consider both definitions as visible, which means eventually we would (in type inference)
//! look at these two definitions and infer a type of `Literal[3, 4]` -- the union of `Literal[3]`
//! and `Literal[4]` -- for the second use of `x`.
//! the `else` branch. Thus, the second use of `x` has two live bindings: `x = 3` and `x = 4`. The
//! `x = 2` assignment is no longer visible, because it must be replaced by either `x = 3` or `x =
//! 4`, no matter which branch was taken. We don't know which branch was taken, so we must consider
//! both bindings as live, which means eventually we would (in type inference) look at these two
//! bindings and infer a type of `Literal[3, 4]` -- the union of `Literal[3]` and `Literal[4]` --
//! for the second use of `x`.
//!
//! So that's one question our use-def map needs to answer: given a specific use of a symbol, which
//! definition(s) is/are visible from that use. In
//! [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number all uses (that means a `Name` node
//! with `Load` context) so we have a `ScopedUseId` to efficiently represent each use.
//! binding(s) can reach that use. In [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number
//! all uses (that means a `Name` node with `Load` context) so we have a `ScopedUseId` to
//! efficiently represent each use.
//!
//! The other case we need to handle is when a symbol is referenced from a different scope (the
//! most obvious example of this is an import). We call this "public" use of a symbol. So the other
//! question we need to be able to answer is, what are the publicly-visible definitions of each
//! symbol?
//! We also need to know, for a given definition of a symbol, what type narrowing constraints apply
//! to it. For instance, in this code sample:
//!
//! Technically, public use of a symbol could also occur from any point in control flow of the
//! scope where the symbol is defined (via inline imports and import cycles, in the case of an
//! import, or via a function call partway through the local scope that ends up using a symbol from
//! the scope via a global or nonlocal reference.) But modeling this fully accurately requires
//! whole-program analysis that isn't tractable for an efficient incremental compiler, since it
//! means a given symbol could have a different type every place it's referenced throughout the
//! program, depending on the shape of arbitrarily-sized call/import graphs. So we follow other
//! Python type-checkers in making the simplifying assumption that usually the scope will finish
//! execution before its symbols are made visible to other scopes; for instance, most imports will
//! import from a complete module, not a partially-executed module. (We may want to get a little
//! smarter than this in the future, in particular for closures, but for now this is where we
//! start.)
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//!     use(x)
//! ```
//!
//! So this means that the publicly-visible definitions of a symbol are the definitions still
//! visible at the end of the scope.
//! At the use of `x`, the live binding of `x` is `1 if flag else None`, which would infer as the
//! type `Literal[1] | None`. But the constraint `x is not None` dominates this use, which means we
//! can rule out the possibility that `x` is `None` here, which should give us the type
//! `Literal[1]` for this use.
//!
//! The data structure we build to answer these two questions is the `UseDefMap`. It has a
//! `definitions_by_use` vector indexed by [`ScopedUseId`] and a `public_definitions` vector
//! indexed by [`ScopedSymbolId`]. The values in each of these vectors are (in principle) a list of
//! visible definitions at that use, or at the end of the scope for that symbol.
//! For declared types, we need to be able to answer the question "given a binding to a symbol,
//! which declarations of that symbol can reach the binding?" This allows us to emit a diagnostic
//! if the binding is attempting to bind a value of a type that is not assignable to the declared
//! type for that symbol, at that point in control flow.
//!
//! In order to avoid vectors-of-vectors and all the allocations that would entail, we don't
//! actually store these "list of visible definitions" as a vector of [`Definition`] IDs. Instead,
//! the values in `definitions_by_use` and `public_definitions` are a [`Definitions`] struct that
//! keeps a [`Range`] into a third vector of [`Definition`] IDs, `all_definitions`. The trick with
//! this representation is that it requires that the definitions visible at any given use of a
//! symbol are stored sequentially in `all_definitions`.
//! We also need to know, given a declaration of a symbol, what the inferred type of that symbol is
//! at that point. This allows us to emit a diagnostic in a case like `x = "foo"; x: int`. The
//! binding `x = "foo"` occurs before the declaration `x: int`, so according to our
//! control-flow-sensitive interpretation of declarations, the assignment is not an error. But the
//! declaration is an error, since it would violate the "inferred type must be assignable to
//! declared type" rule.
//!
//! There is another special kind of possible "definition" for a symbol: it might be unbound in the
//! scope. (This isn't equivalent to "zero visible definitions", since we may go through an `if`
//! that has a definition for the symbol, leaving us with one visible definition, but still also
//! the "unbound" possibility, since we might not have taken the `if` branch.)
//! Another case we need to handle is when a symbol is referenced from a different scope (for
//! example, an import or a nonlocal reference). We call this "public" use of a symbol. For public
//! use of a symbol, we prefer the declared type, if there are any declarations of that symbol; if
//! not, we fall back to the inferred type. So we also need to know which declarations and bindings
//! can reach the end of the scope.
//!
//! The simplest way to model "unbound" would be as an actual [`Definition`] itself: the initial
//! visible [`Definition`] for each symbol in a scope. But actually modeling it this way would
//! dramatically increase the number of [`Definition`]s that Salsa must track. Since "unbound" is a
//! special definition in that all symbols share it, and it doesn't have any additional per-symbol
//! state, we can represent it more efficiently: we use the `may_be_unbound` boolean on the
//! [`Definitions`] struct. If this flag is `true`, it means the symbol/use really has one
//! additional visible "definition", which is the unbound state. If this flag is `false`, it means
//! we've eliminated the possibility of unbound: every path we've followed includes a definition
//! Technically, public use of a symbol could occur from any point in control flow of the scope
//! where the symbol is defined (via inline imports and import cycles, in the case of an import, or
//! via a function call partway through the local scope that ends up using a symbol from the scope
//! via a global or nonlocal reference.) But modeling this fully accurately requires whole-program
//! analysis that isn't tractable for an efficient analysis, since it means a given symbol could
//! have a different type every place it's referenced throughout the program, depending on the
//! shape of arbitrarily-sized call/import graphs. So we follow other Python type checkers in
//! making the simplifying assumption that usually the scope will finish execution before its
//! symbols are made visible to other scopes; for instance, most imports will import from a
//! complete module, not a partially-executed module. (We may want to get a little smarter than
//! this in the future for some closures, but for now this is where we start.)
//!
//! The data structure we build to answer these questions is the `UseDefMap`. It has a
//! `bindings_by_use` vector of [`SymbolBindings`] indexed by [`ScopedUseId`], a
//! `declarations_by_binding` vector of [`SymbolDeclarations`] indexed by [`ScopedDefinitionId`], a
//! `bindings_by_declaration` vector of [`SymbolBindings`] indexed by [`ScopedDefinitionId`], and
//! `public_bindings` and `public_definitions` vectors indexed by [`ScopedSymbolId`]. The values in
//! each of these vectors are (in principle) a list of live bindings at that use/definition, or at
//! the end of the scope for that symbol, with a list of the dominating constraints for each
//! binding.
//!
//! In order to avoid vectors-of-vectors-of-vectors and all the allocations that would entail, we
//! don't actually store these "list of visible definitions" as a vector of [`Definition`].
//! Instead, [`SymbolBindings`] and [`SymbolDeclarations`] are structs which use bit-sets to track
//! definitions (and constraints, in the case of bindings) in terms of [`ScopedDefinitionId`] and
//! [`ScopedConstraintId`], which are indices into the `all_definitions` and `all_constraints`
//! indexvecs in the [`UseDefMap`].
//!
//! There is another special kind of possible "definition" for a symbol: there might be a path from
//! the scope entry to a given use in which the symbol is never bound.
//!
//! The simplest way to model "unbound" would be as a "binding" itself: the initial "binding" for
//! each symbol in a scope. But actually modeling it this way would unnecessarily increase the
//! number of [`Definition`]s that Salsa must track. Since "unbound" is special in that all symbols
//! share it, and it doesn't have any additional per-symbol state, and constraints are irrelevant
//! to it, we can represent it more efficiently: we use the `may_be_unbound` boolean on the
//! [`SymbolBindings`] struct. If this flag is `true` for a use of a symbol, it means the symbol
//! has a path to the use in which it is never bound. If this flag is `false`, it means we've
//! eliminated the possibility of unbound: every control flow path to the use includes a binding
//! for this symbol.
//!
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use and definition
//! as they are encountered by the
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use, definition, and
//! constraint as they are encountered by the
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder) AST visit. For
//! each symbol, the builder tracks the currently-visible definitions for that symbol. When we hit
//! a use of a symbol, it records the currently-visible definitions for that symbol as the visible
//! definitions for that use. When we reach the end of the scope, it records the currently-visible
//! definitions for each symbol as the public definitions of that symbol.
//! each symbol, the builder tracks the `SymbolState` (`SymbolBindings` and `SymbolDeclarations`)
//! for that symbol. When we hit a use or definition of a symbol, we record the necessary parts of
//! the current state for that symbol that we need for that use or definition. When we reach the
//! end of the scope, it records the state for each symbol as the public definitions of that
//! symbol.
//!
//! Let's walk through the above example. Initially we record for `x` that it has no visible
//! definitions, and may be unbound. When we see `x = 1`, we record that as the sole visible
//! definition of `x`, and flip `may_be_unbound` to `false`. Then we see `x = 2`, and it replaces
//! `x = 1` as the sole visible definition of `x`. When we get to `y = x`, we record that the
//! visible definitions for that use of `x` are just the `x = 2` definition.
//! Let's walk through the above example. Initially we record for `x` that it has no bindings, and
//! may be unbound. When we see `x = 1`, we record that as the sole live binding of `x`, and flip
//! `may_be_unbound` to `false`. Then we see `x = 2`, and we replace `x = 1` as the sole live
//! binding of `x`. When we get to `y = x`, we record that the live bindings for that use of `x`
//! are just the `x = 2` definition.
//!
//! Then we hit the `if` branch. We visit the `test` node (`flag` in this case), since that will
//! happen regardless. Then we take a pre-branch snapshot of the currently visible definitions for
//! all symbols, which we'll need later. Then we go ahead and visit the `if` body. When we see `x =
//! 3`, it replaces `x = 2` as the sole visible definition of `x`. At the end of the `if` body, we
//! take another snapshot of the currently-visible definitions; we'll call this the post-if-body
//! snapshot.
//! happen regardless. Then we take a pre-branch snapshot of the current state for all symbols,
//! which we'll need later. Then we record `flag` as a possible constraint on the current binding
//! (`x = 2`), and go ahead and visit the `if` body. When we see `x = 3`, it replaces `x = 2`
//! (constrained by `flag`) as the sole live binding of `x`. At the end of the `if` body, we take
//! another snapshot of the current symbol state; we'll call this the post-if-body snapshot.
//!
//! Now we need to visit the `else` clause. The conditions when entering the `else` clause should
//! be the pre-if conditions; if we are entering the `else` clause, we know that the `if` test
//! failed and we didn't execute the `if` body. So we first reset the builder to the pre-if state,
//! using the snapshot we took previously (meaning we now have `x = 2` as the sole visible
//! definition for `x` again), then visit the `else` clause, where `x = 4` replaces `x = 2` as the
//! sole visible definition of `x`.
//! using the snapshot we took previously (meaning we now have `x = 2` as the sole binding for `x`
//! again), then visit the `else` clause, where `x = 4` replaces `x = 2` as the sole live binding
//! of `x`.
//!
//! Now we reach the end of the if/else, and want to visit the following code. The state here needs
//! to reflect that we might have gone through the `if` branch, or we might have gone through the
//! `else` branch, and we don't know which. So we need to "merge" our current builder state
//! (reflecting the end-of-else state, with `x = 4` as the only visible definition) with our
//! post-if-body snapshot (which has `x = 3` as the only visible definition). The result of this
//! merge is that we now have two visible definitions of `x`: `x = 3` and `x = 4`.
//! (reflecting the end-of-else state, with `x = 4` as the only live binding) with our post-if-body
//! snapshot (which has `x = 3` as the only live binding). The result of this merge is that we now
//! have two live bindings of `x`: `x = 3` and `x = 4`.
//!
//! The [`UseDefMapBuilder`] itself just exposes methods for taking a snapshot, resetting to a
//! snapshot, and merging a snapshot into the current state. The logic using these methods lives in
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder), e.g. where it
//! visits a `StmtIf` node.
//!
//! (In the future we may have some other questions we want to answer as well, such as "is this
//! definition used?", which will require tracking a bit more info in our map, e.g. a "used" bit
//! for each [`Definition`] which is flipped to true when we record that definition for a use.)
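To make the walk-through above concrete, here is a minimal, self-contained Rust sketch (an editorial illustration, not code from this change) that models the snapshot/restore/merge discipline on the same example, with plain `Vec`s of binding indices standing in for the builder's bit-set machinery:

```rust
fn main() {
    // Live bindings of `x`, as hypothetical indices into `all_definitions`.
    let mut live: Vec<usize>;
    live = vec![0]; // x = 1: sole live binding; may_be_unbound flips to false
    live = vec![1]; // x = 2 replaces x = 1
    let use_y = live.clone(); // y = x: record the live bindings for this use
    let pre_if = live.clone(); // pre-branch snapshot
    live = vec![2]; // `if` body: x = 3 replaces x = 2
    let post_if_body = live.clone(); // post-if-body snapshot
    live = pre_if; // restore: entering `else` means the `if` body didn't run
    live = vec![3]; // `else` body: x = 4 replaces x = 2
    // Merge: either branch may have executed, so union the live bindings.
    let mut merged = live;
    merged.extend(post_if_body);
    merged.sort_unstable();
    assert_eq!(use_y, vec![1]); // y = x sees only x = 2
    assert_eq!(merged, vec![2, 3]); // z = x sees x = 3 and x = 4
}
```

The real builder does the same bookkeeping per symbol via `FlowSnapshot` and `SymbolState`, as the code below shows.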
use self::symbol_state::{
    BindingIdWithConstraintsIterator, ConstraintIdIterator, DeclarationIdIterator,
    ScopedConstraintId, ScopedDefinitionId, SymbolBindings, SymbolDeclarations, SymbolState,
};
use crate::semantic_index::ast_ids::ScopedUseId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::ScopedSymbolId;
use ruff_index::IndexVec;
use std::ops::Range;
use rustc_hash::FxHashMap;

/// All definitions that can reach a given use of a name.
use super::constraint::Constraint;

mod bitset;
mod symbol_state;

/// Applicable definitions and constraints for every use of a name.
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct UseDefMap<'db> {
    // TODO store constraints with definitions for type narrowing
    /// Definition IDs array for `definitions_by_use` and `public_definitions` to slice into.
    all_definitions: Vec<Definition<'db>>,
    /// Array of [`Definition`] in this scope.
    all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,

    /// Definitions that can reach a [`ScopedUseId`].
    definitions_by_use: IndexVec<ScopedUseId, Definitions>,
    /// Array of [`Constraint`] in this scope.
    all_constraints: IndexVec<ScopedConstraintId, Constraint<'db>>,

    /// Definitions of each symbol visible at end of scope.
    public_definitions: IndexVec<ScopedSymbolId, Definitions>,
    /// [`SymbolBindings`] reaching a [`ScopedUseId`].
    bindings_by_use: IndexVec<ScopedUseId, SymbolBindings>,

    /// [`SymbolBindings`] or [`SymbolDeclarations`] reaching a given [`Definition`].
    ///
    /// If the definition is a binding (only) -- `x = 1` for example -- then we need
    /// [`SymbolDeclarations`] to know whether this binding is permitted by the live declarations.
    ///
    /// If the definition is a declaration (only) -- `x: int` for example -- then we need
    /// [`SymbolBindings`] to know whether this declaration is consistent with the previously
    /// inferred type.
    ///
    /// If the definition is both a declaration and a binding -- `x: int = 1` for example -- then
    /// we don't actually need anything here; all we'll need to validate is that our own RHS is a
    /// valid assignment to our own annotation.
    definitions_by_definition: FxHashMap<Definition<'db>, SymbolDefinitions>,

    /// [`SymbolState`] visible at end of scope for each symbol.
    public_symbols: IndexVec<ScopedSymbolId, SymbolState>,
}

impl<'db> UseDefMap<'db> {
    pub(crate) fn use_definitions(&self, use_id: ScopedUseId) -> &[Definition<'db>] {
        &self.all_definitions[self.definitions_by_use[use_id].definitions_range.clone()]
    pub(crate) fn bindings_at_use(
        &self,
        use_id: ScopedUseId,
    ) -> BindingWithConstraintsIterator<'_, 'db> {
        self.bindings_iterator(&self.bindings_by_use[use_id])
    }

    pub(crate) fn use_may_be_unbound(&self, use_id: ScopedUseId) -> bool {
        self.definitions_by_use[use_id].may_be_unbound
        self.bindings_by_use[use_id].may_be_unbound()
    }

    pub(crate) fn public_definitions(&self, symbol: ScopedSymbolId) -> &[Definition<'db>] {
        &self.all_definitions[self.public_definitions[symbol].definitions_range.clone()]
    pub(crate) fn public_bindings(
        &self,
        symbol: ScopedSymbolId,
    ) -> BindingWithConstraintsIterator<'_, 'db> {
        self.bindings_iterator(self.public_symbols[symbol].bindings())
    }

    pub(crate) fn public_may_be_unbound(&self, symbol: ScopedSymbolId) -> bool {
        self.public_definitions[symbol].may_be_unbound
        self.public_symbols[symbol].may_be_unbound()
    }
}

/// Definitions visible for a symbol at a particular use (or end-of-scope).
#[derive(Clone, Debug, PartialEq, Eq)]
struct Definitions {
    /// [`Range`] in `all_definitions` of the visible definition IDs.
    definitions_range: Range<usize>,
    /// Is the symbol possibly unbound at this point?
    may_be_unbound: bool,
}
    pub(crate) fn bindings_at_declaration(
        &self,
        declaration: Definition<'db>,
    ) -> BindingWithConstraintsIterator<'_, 'db> {
        if let SymbolDefinitions::Bindings(bindings) = &self.definitions_by_definition[&declaration]
        {
            self.bindings_iterator(bindings)
        } else {
            unreachable!("Declaration has non-Bindings in definitions_by_definition");
        }
    }

impl Definitions {
    /// The default state of a symbol is "no definitions, may be unbound", aka definitely-unbound.
    fn unbound() -> Self {
        Self {
            definitions_range: Range::default(),
            may_be_unbound: true,
    pub(crate) fn declarations_at_binding(
        &self,
        binding: Definition<'db>,
    ) -> DeclarationsIterator<'_, 'db> {
        if let SymbolDefinitions::Declarations(declarations) =
            &self.definitions_by_definition[&binding]
        {
            self.declarations_iterator(declarations)
        } else {
            unreachable!("Binding has non-Declarations in definitions_by_definition");
        }
    }

    pub(crate) fn public_declarations(
        &self,
        symbol: ScopedSymbolId,
    ) -> DeclarationsIterator<'_, 'db> {
        let declarations = self.public_symbols[symbol].declarations();
        self.declarations_iterator(declarations)
    }

    pub(crate) fn has_public_declarations(&self, symbol: ScopedSymbolId) -> bool {
        !self.public_symbols[symbol].declarations().is_empty()
    }

    fn bindings_iterator<'a>(
        &'a self,
        bindings: &'a SymbolBindings,
    ) -> BindingWithConstraintsIterator<'a, 'db> {
        BindingWithConstraintsIterator {
            all_definitions: &self.all_definitions,
            all_constraints: &self.all_constraints,
            inner: bindings.iter(),
        }
    }

    fn declarations_iterator<'a>(
        &'a self,
        declarations: &'a SymbolDeclarations,
    ) -> DeclarationsIterator<'a, 'db> {
        DeclarationsIterator {
            all_definitions: &self.all_definitions,
            inner: declarations.iter(),
            may_be_undeclared: declarations.may_be_undeclared(),
        }
    }
}

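As a hedged, self-contained illustration of the two validity checks these lookups enable (hypothetical simplified types, not this crate's API) — a binding is checked against the declarations live before it, and a declaration is checked against the type inferred at that point, as in the `x = "foo"; x: int` case from the module docs:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Ty {
    Str,
    Int,
    Unknown,
}

// A value of `inferred` type may be assigned where `declared` is the declared type.
fn assignable(inferred: Ty, declared: Ty) -> bool {
    declared == Ty::Unknown || inferred == declared
}

fn main() {
    // `x = "foo"` followed by `x: int`:
    // The binding `x = "foo"` is checked against the declarations live before it
    // (none, i.e. `Unknown`), so the assignment itself is fine.
    assert!(assignable(Ty::Str, Ty::Unknown));
    // The declaration `x: int` is checked against the inferred type at that
    // point (`str`), which is not assignable to `int`: a diagnostic is emitted.
    assert!(!assignable(Ty::Str, Ty::Int));
}
```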
impl Default for Definitions {
    fn default() -> Self {
        Definitions::unbound()
/// Either live bindings or live declarations for a symbol.
#[derive(Debug, PartialEq, Eq)]
enum SymbolDefinitions {
    Bindings(SymbolBindings),
    Declarations(SymbolDeclarations),
}

#[derive(Debug)]
pub(crate) struct BindingWithConstraintsIterator<'map, 'db> {
    all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
    all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
    inner: BindingIdWithConstraintsIterator<'map>,
}

impl<'map, 'db> Iterator for BindingWithConstraintsIterator<'map, 'db> {
    type Item = BindingWithConstraints<'map, 'db>;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner
            .next()
            .map(|def_id_with_constraints| BindingWithConstraints {
                binding: self.all_definitions[def_id_with_constraints.definition],
                constraints: ConstraintsIterator {
                    all_constraints: self.all_constraints,
                    constraint_ids: def_id_with_constraints.constraint_ids,
                },
            })
    }
}

/// A snapshot of the visible definitions for each symbol at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {
    definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
impl std::iter::FusedIterator for BindingWithConstraintsIterator<'_, '_> {}

pub(crate) struct BindingWithConstraints<'map, 'db> {
    pub(crate) binding: Definition<'db>,
    pub(crate) constraints: ConstraintsIterator<'map, 'db>,
}

pub(crate) struct ConstraintsIterator<'map, 'db> {
    all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
    constraint_ids: ConstraintIdIterator<'map>,
}

impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
    type Item = Constraint<'db>;

    fn next(&mut self) -> Option<Self::Item> {
        self.constraint_ids
            .next()
            .map(|constraint_id| self.all_constraints[constraint_id])
    }
}

impl std::iter::FusedIterator for ConstraintsIterator<'_, '_> {}

pub(crate) struct DeclarationsIterator<'map, 'db> {
    all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
    inner: DeclarationIdIterator<'map>,
    may_be_undeclared: bool,
}

impl DeclarationsIterator<'_, '_> {
    pub(crate) fn may_be_undeclared(&self) -> bool {
        self.may_be_undeclared
    }
}

impl<'map, 'db> Iterator for DeclarationsIterator<'map, 'db> {
    type Item = Definition<'db>;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().map(|def_id| self.all_definitions[def_id])
    }
}

impl std::iter::FusedIterator for DeclarationsIterator<'_, '_> {}

/// A snapshot of the definitions and constraints state at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {
    symbol_states: IndexVec<ScopedSymbolId, SymbolState>,
}

#[derive(Debug, Default)]
pub(super) struct UseDefMapBuilder<'db> {
    /// Definition IDs array for `definitions_by_use` and `definitions_by_symbol` to slice into.
    all_definitions: Vec<Definition<'db>>,
    /// Append-only array of [`Definition`].
    all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,

    /// Visible definitions at each so-far-recorded use.
    definitions_by_use: IndexVec<ScopedUseId, Definitions>,
    /// Append-only array of [`Constraint`].
    all_constraints: IndexVec<ScopedConstraintId, Constraint<'db>>,

    /// Currently visible definitions for each symbol.
    definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
    /// Live bindings at each so-far-recorded use.
    bindings_by_use: IndexVec<ScopedUseId, SymbolBindings>,

    /// Live bindings or declarations for each so-far-recorded definition.
    definitions_by_definition: FxHashMap<Definition<'db>, SymbolDefinitions>,

    /// Currently live bindings and declarations for each symbol.
    symbol_states: IndexVec<ScopedSymbolId, SymbolState>,
}

impl<'db> UseDefMapBuilder<'db> {
    pub(super) fn new() -> Self {
        Self {
            all_definitions: Vec::new(),
            definitions_by_use: IndexVec::new(),
            definitions_by_symbol: IndexVec::new(),
        }
        Self::default()
    }

    pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
        let new_symbol = self.definitions_by_symbol.push(Definitions::unbound());
        let new_symbol = self.symbol_states.push(SymbolState::undefined());
        debug_assert_eq!(symbol, new_symbol);
    }

    pub(super) fn record_definition(
    pub(super) fn record_binding(&mut self, symbol: ScopedSymbolId, binding: Definition<'db>) {
        let def_id = self.all_definitions.push(binding);
        let symbol_state = &mut self.symbol_states[symbol];
        self.definitions_by_definition.insert(
            binding,
            SymbolDefinitions::Declarations(symbol_state.declarations().clone()),
        );
        symbol_state.record_binding(def_id);
    }

    pub(super) fn record_constraint(&mut self, constraint: Constraint<'db>) {
        let constraint_id = self.all_constraints.push(constraint);
        for state in &mut self.symbol_states {
            state.record_constraint(constraint_id);
        }
    }

    pub(super) fn record_declaration(
        &mut self,
        symbol: ScopedSymbolId,
        declaration: Definition<'db>,
    ) {
        let def_id = self.all_definitions.push(declaration);
        let symbol_state = &mut self.symbol_states[symbol];
        self.definitions_by_definition.insert(
            declaration,
            SymbolDefinitions::Bindings(symbol_state.bindings().clone()),
        );
        symbol_state.record_declaration(def_id);
    }

    pub(super) fn record_declaration_and_binding(
        &mut self,
        symbol: ScopedSymbolId,
        definition: Definition<'db>,
    ) {
        // We have a new definition of a symbol; this replaces any previous definitions in this
        // path.
        let def_idx = self.all_definitions.len();
        self.all_definitions.push(definition);
        self.definitions_by_symbol[symbol] = Definitions {
            #[allow(clippy::range_plus_one)]
            definitions_range: def_idx..(def_idx + 1),
            may_be_unbound: false,
        };
        // We don't need to store anything in self.definitions_by_definition.
        let def_id = self.all_definitions.push(definition);
        let symbol_state = &mut self.symbol_states[symbol];
        symbol_state.record_declaration(def_id);
        symbol_state.record_binding(def_id);
    }

    pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {
        // We have a use of a symbol; clone the currently visible definitions for that symbol, and
        // record them as the visible definitions for this use.
        // We have a use of a symbol; clone the current bindings for that symbol, and record them
        // as the live bindings for this use.
        let new_use = self
            .definitions_by_use
            .push(self.definitions_by_symbol[symbol].clone());
            .bindings_by_use
            .push(self.symbol_states[symbol].bindings().clone());
        debug_assert_eq!(use_id, new_use);
    }

    /// Take a snapshot of the current visible-symbols state.
    pub(super) fn snapshot(&self) -> FlowSnapshot {
        FlowSnapshot {
            definitions_by_symbol: self.definitions_by_symbol.clone(),
            symbol_states: self.symbol_states.clone(),
        }
    }

    /// Restore the current builder visible-definitions state to the given snapshot.
    /// Restore the current builder symbols state to the given snapshot.
    pub(super) fn restore(&mut self, snapshot: FlowSnapshot) {
        // We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
        // We never remove symbols from `symbol_states` (it's an IndexVec, and the symbol
        // IDs must line up), so the current number of known symbols must always be equal to or
        // greater than the number of known symbols in a previously-taken snapshot.
        let num_symbols = self.definitions_by_symbol.len();
        debug_assert!(num_symbols >= snapshot.definitions_by_symbol.len());
        let num_symbols = self.symbol_states.len();
        debug_assert!(num_symbols >= snapshot.symbol_states.len());

        // Restore the current visible-definitions state to the given snapshot.
        self.definitions_by_symbol = snapshot.definitions_by_symbol;
        self.symbol_states = snapshot.symbol_states;

        // If the snapshot we are restoring is missing some symbols we've recorded since, we need
        // to fill them in so the symbol IDs continue to line up. Since they don't exist in the
        // snapshot, the correct state to fill them in with is "unbound", the default.
        self.definitions_by_symbol
            .resize(num_symbols, Definitions::unbound());
        // snapshot, the correct state to fill them in with is "undefined".
        self.symbol_states
            .resize(num_symbols, SymbolState::undefined());
    }

    /// Merge the given snapshot into the current state, reflecting that we might have taken either
    /// path to get here. The new visible-definitions state for each symbol should include
    /// definitions from both the prior state and the snapshot.
    pub(super) fn merge(&mut self, snapshot: &FlowSnapshot) {
        // The tricky thing about merging two Ranges pointing into `all_definitions` is that if the
        // two Ranges aren't already adjacent in `all_definitions`, we will have to copy at least
        // one or the other of the ranges to the end of `all_definitions` so as to make them
        // adjacent. We can't ever move things around in `all_definitions` because previously
        // recorded uses may still have ranges pointing to any part of it; all we can do is append.
        // It's possible we may end up with some old entries in `all_definitions` that nobody is
        // pointing to, but that's OK.

        // We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
    /// path to get here. The new state for each symbol should include definitions from both the
    /// prior state and the snapshot.
    pub(super) fn merge(&mut self, snapshot: FlowSnapshot) {
        // We never remove symbols from `symbol_states` (it's an IndexVec, and the symbol
        // IDs must line up), so the current number of known symbols must always be equal to or
        // greater than the number of known symbols in a previously-taken snapshot.
        debug_assert!(self.definitions_by_symbol.len() >= snapshot.definitions_by_symbol.len());
        debug_assert!(self.symbol_states.len() >= snapshot.symbol_states.len());

        for (symbol_id, current) in self.definitions_by_symbol.iter_mut_enumerated() {
            let Some(snapshot) = snapshot.definitions_by_symbol.get(symbol_id) else {
                // Symbol not present in snapshot, so it's unbound from that path.
                current.may_be_unbound = true;
                continue;
            };

            // If the symbol can be unbound in either predecessor, it can be unbound post-merge.
            current.may_be_unbound |= snapshot.may_be_unbound;

            // Merge the definition ranges.
            let current = &mut current.definitions_range;
            let snapshot = &snapshot.definitions_range;

            // We never create reversed ranges.
            debug_assert!(current.end >= current.start);
            debug_assert!(snapshot.end >= snapshot.start);

            if current == snapshot {
                // Ranges already identical, nothing to do.
            } else if snapshot.is_empty() {
                // Merging from an empty range; nothing to do.
            } else if (*current).is_empty() {
                // Merging to an empty range; just use the incoming range.
                *current = snapshot.clone();
            } else if snapshot.end >= current.start && snapshot.start <= current.end {
                // Ranges are adjacent or overlapping, merge them in-place.
                *current = current.start.min(snapshot.start)..current.end.max(snapshot.end);
            } else if current.end == self.all_definitions.len() {
                // Ranges are not adjacent or overlapping, `current` is at the end of
                // `all_definitions`, we need to copy `snapshot` to the end so they are adjacent
                // and can be merged into one range.
                self.all_definitions.extend_from_within(snapshot.clone());
                current.end = self.all_definitions.len();
            } else if snapshot.end == self.all_definitions.len() {
                // Ranges are not adjacent or overlapping, `snapshot` is at the end of
                // `all_definitions`, we need to copy `current` to the end so they are adjacent and
                // can be merged into one range.
                self.all_definitions.extend_from_within(current.clone());
                current.start = snapshot.start;
                current.end = self.all_definitions.len();
        let mut snapshot_definitions_iter = snapshot.symbol_states.into_iter();
        for current in &mut self.symbol_states {
            if let Some(snapshot) = snapshot_definitions_iter.next() {
                current.merge(snapshot);
            } else {
                // Ranges are not adjacent and neither one is at the end of `all_definitions`, we
                // have to copy both to the end so they are adjacent and we can merge them.
                let start = self.all_definitions.len();
                self.all_definitions.extend_from_within(current.clone());
                self.all_definitions.extend_from_within(snapshot.clone());
                current.start = start;
                current.end = self.all_definitions.len();
                // Symbol not present in snapshot, so it's unbound/undeclared from that path.
                current.set_may_be_unbound();
                current.set_may_be_undeclared();
            }
        }
    }

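A toy model (plain `Vec`s instead of the `SymbolState` bit-sets merged above; hypothetical values) of what merging two flow states means for one symbol: union the live bindings, and the symbol may be unbound after the merge if either predecessor allowed it:

```rust
fn main() {
    // State after the `if` body: x = 3 (binding 2) is the sole live binding.
    let (if_bindings, if_may_be_unbound) = (vec![2u32], false);
    // State after the `else` body: x = 4 (binding 3) is the sole live binding.
    let (else_bindings, else_may_be_unbound) = (vec![3u32], false);

    // Merge: either branch may have run.
    let mut merged = if_bindings;
    merged.extend(else_bindings);
    merged.sort_unstable();
    let may_be_unbound = if_may_be_unbound || else_may_be_unbound;

    assert_eq!(merged, vec![2, 3]);
    assert!(!may_be_unbound);
}
```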
    pub(super) fn finish(mut self) -> UseDefMap<'db> {
        self.all_definitions.shrink_to_fit();
        self.definitions_by_symbol.shrink_to_fit();
        self.definitions_by_use.shrink_to_fit();
        self.all_constraints.shrink_to_fit();
        self.symbol_states.shrink_to_fit();
        self.bindings_by_use.shrink_to_fit();
        self.definitions_by_definition.shrink_to_fit();

        UseDefMap {
            all_definitions: self.all_definitions,
            definitions_by_use: self.definitions_by_use,
            public_definitions: self.definitions_by_symbol,
            all_constraints: self.all_constraints,
            bindings_by_use: self.bindings_by_use,
            public_symbols: self.symbol_states,
            definitions_by_definition: self.definitions_by_definition,
        }
    }
}

@@ -0,0 +1,309 @@
/// Ordered set of `u32`.
///
/// Uses an inline bit-set for small values (up to 64 * B), falls back to heap allocated vector of
/// blocks for larger values.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(super) enum BitSet<const B: usize> {
    /// Bit-set (in 64-bit blocks) for the first 64 * B entries.
    Inline([u64; B]),

    /// Overflow beyond 64 * B.
    Heap(Vec<u64>),
}

impl<const B: usize> Default for BitSet<B> {
    fn default() -> Self {
        // B * 64 must fit in a u32, or else we have unusable bits; this assertion makes the
        // truncating casts to u32 below safe. This would be better as a const assertion, but
        // that's not possible on stable with const generic params. (B should never really be
        // anywhere close to this large.)
        assert!(B * 64 < (u32::MAX as usize));
        // This implementation requires usize >= 32 bits.
        static_assertions::const_assert!(usize::BITS >= 32);
        Self::Inline([0; B])
    }
}

impl<const B: usize> BitSet<B> {
    /// Create and return a new [`BitSet`] with a single `value` inserted.
    pub(super) fn with(value: u32) -> Self {
        let mut bitset = Self::default();
        bitset.insert(value);
        bitset
    }

    pub(super) fn is_empty(&self) -> bool {
        self.blocks().iter().all(|&b| b == 0)
    }

    /// Convert from Inline to Heap, if needed, and resize the Heap vector, if needed.
    fn resize(&mut self, value: u32) {
        let num_blocks_needed = (value / 64) + 1;
        self.resize_blocks(num_blocks_needed as usize);
    }

    fn resize_blocks(&mut self, num_blocks_needed: usize) {
        match self {
            Self::Inline(blocks) => {
                let mut vec = blocks.to_vec();
                vec.resize(num_blocks_needed, 0);
                *self = Self::Heap(vec);
            }
            Self::Heap(vec) => {
                vec.resize(num_blocks_needed, 0);
            }
        }
    }

    fn blocks_mut(&mut self) -> &mut [u64] {
        match self {
            Self::Inline(blocks) => blocks.as_mut_slice(),
            Self::Heap(blocks) => blocks.as_mut_slice(),
        }
    }

    fn blocks(&self) -> &[u64] {
        match self {
            Self::Inline(blocks) => blocks.as_slice(),
            Self::Heap(blocks) => blocks.as_slice(),
        }
    }

    /// Insert a value into the [`BitSet`].
    ///
    /// Return true if the value was newly inserted, false if already present.
    pub(super) fn insert(&mut self, value: u32) -> bool {
        let value_usize = value as usize;
        let (block, index) = (value_usize / 64, value_usize % 64);
        if block >= self.blocks().len() {
            self.resize(value);
        }
        let blocks = self.blocks_mut();
        let missing = blocks[block] & (1 << index) == 0;
        blocks[block] |= 1 << index;
        missing
    }

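A small worked example (hypothetical values) of the block/index arithmetic `insert` uses, which also shows when the inline representation no longer suffices:

```rust
fn main() {
    // Value 70 lands in block 1 (70 / 64) at bit index 6 (70 % 64).
    let (block, index) = (70usize / 64, 70usize % 64);
    assert_eq!((block, index), (1, 6));
    // For BitSet::<1>, block 1 is beyond the single inline block, so inserting
    // 70 would first promote the set to the Heap representation via `resize`.
}
```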
    /// Intersect in-place with another [`BitSet`].
    pub(super) fn intersect(&mut self, other: &BitSet<B>) {
        let my_blocks = self.blocks_mut();
        let other_blocks = other.blocks();
        let min_len = my_blocks.len().min(other_blocks.len());
        for i in 0..min_len {
            my_blocks[i] &= other_blocks[i];
        }
        for block in my_blocks.iter_mut().skip(min_len) {
            *block = 0;
        }
    }

    /// Union in-place with another [`BitSet`].
    pub(super) fn union(&mut self, other: &BitSet<B>) {
        let mut max_len = self.blocks().len();
        let other_len = other.blocks().len();
        if other_len > max_len {
            max_len = other_len;
            self.resize_blocks(max_len);
        }
        for (my_block, other_block) in self.blocks_mut().iter_mut().zip(other.blocks()) {
            *my_block |= other_block;
        }
    }

    /// Return an iterator over the values (in ascending order) in this [`BitSet`].
    pub(super) fn iter(&self) -> BitSetIterator<'_, B> {
        let blocks = self.blocks();
        BitSetIterator {
            blocks,
            current_block_index: 0,
            current_block: blocks[0],
        }
    }
}

/// Iterator over values in a [`BitSet`].
#[derive(Debug)]
pub(super) struct BitSetIterator<'a, const B: usize> {
    /// The blocks we are iterating over.
    blocks: &'a [u64],

    /// The index of the block we are currently iterating through.
    current_block_index: usize,

    /// The block we are currently iterating through (and zeroing as we go.)
    current_block: u64,
}

impl<const B: usize> Iterator for BitSetIterator<'_, B> {
    type Item = u32;

    fn next(&mut self) -> Option<Self::Item> {
        while self.current_block == 0 {
            if self.current_block_index + 1 >= self.blocks.len() {
                return None;
            }
            self.current_block_index += 1;
            self.current_block = self.blocks[self.current_block_index];
        }
        let lowest_bit_set = self.current_block.trailing_zeros();
        // Reset the lowest set bit, without a data dependency on `lowest_bit_set`.
        self.current_block &= self.current_block.wrapping_sub(1);
        // SAFETY: `lowest_bit_set` cannot be more than 64, `current_block_index` cannot be more
        // than `B - 1`, and we check above that `B * 64 < u32::MAX`. So both `64 *
        // current_block_index` and the final value here must fit in u32.
        #[allow(clippy::cast_possible_truncation)]
        Some(lowest_bit_set + (64 * self.current_block_index) as u32)
    }
}

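A worked example (hypothetical values) of the two bit tricks `next()` relies on: `trailing_zeros` finds the lowest set bit, and `x & (x - 1)` clears it:

```rust
fn main() {
    let block: u64 = 0b0110_1000; // bits 3, 5, and 6 are set
    assert_eq!(block.trailing_zeros(), 3); // lowest set bit is bit 3
    // Clearing the lowest set bit without branching:
    assert_eq!(block & block.wrapping_sub(1), 0b0110_0000);
}
```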
impl<const B: usize> std::iter::FusedIterator for BitSetIterator<'_, B> {}

#[cfg(test)]
mod tests {
    use super::BitSet;

    fn assert_bitset<const B: usize>(bitset: &BitSet<B>, contents: &[u32]) {
        assert_eq!(bitset.iter().collect::<Vec<_>>(), contents);
    }

    #[test]
    fn iter() {
        let mut b = BitSet::<1>::with(3);
        b.insert(27);
        b.insert(6);
        assert!(matches!(b, BitSet::Inline(_)));
        assert_bitset(&b, &[3, 6, 27]);
    }

    #[test]
    fn iter_overflow() {
        let mut b = BitSet::<1>::with(140);
        b.insert(100);
        b.insert(129);
        assert!(matches!(b, BitSet::Heap(_)));
        assert_bitset(&b, &[100, 129, 140]);
    }

    #[test]
    fn intersect() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(23);
        b2.insert(5);

        b1.intersect(&b2);
        assert_bitset(&b1, &[4]);
    }

    #[test]
    fn intersect_mixed_1() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(89);
        b2.insert(5);

        b1.intersect(&b2);
        assert_bitset(&b1, &[4]);
    }

    #[test]
    fn intersect_mixed_2() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(23);
        b2.insert(89);

        b1.intersect(&b2);
        assert_bitset(&b1, &[4]);
    }

    #[test]
    fn intersect_heap() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(89);
        b2.insert(90);

        b1.intersect(&b2);
        assert_bitset(&b1, &[4]);
    }

    #[test]
    fn intersect_heap_2() {
        let mut b1 = BitSet::<1>::with(89);
        let mut b2 = BitSet::<1>::with(89);
        b1.insert(91);
        b2.insert(90);

        b1.intersect(&b2);
        assert_bitset(&b1, &[89]);
    }

    #[test]
    fn union() {
        let mut b1 = BitSet::<1>::with(2);
        let b2 = BitSet::<1>::with(4);

        b1.union(&b2);
        assert_bitset(&b1, &[2, 4]);
    }

    #[test]
    fn union_mixed_1() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(89);
        b2.insert(5);

        b1.union(&b2);
        assert_bitset(&b1, &[4, 5, 89]);
    }

    #[test]
    fn union_mixed_2() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(23);
        b2.insert(89);

        b1.union(&b2);
        assert_bitset(&b1, &[4, 23, 89]);
    }

    #[test]
    fn union_heap() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(89);
        b2.insert(90);

        b1.union(&b2);
        assert_bitset(&b1, &[4, 89, 90]);
    }

    #[test]
    fn union_heap_2() {
        let mut b1 = BitSet::<1>::with(89);
        let mut b2 = BitSet::<1>::with(89);
        b1.insert(91);
        b2.insert(90);

        b1.union(&b2);
        assert_bitset(&b1, &[89, 90, 91]);
    }

    #[test]
    fn multiple_blocks() {
        let mut b = BitSet::<2>::with(120);
        b.insert(45);
        assert!(matches!(b, BitSet::Inline(_)));
        assert_bitset(&b, &[45, 120]);
    }

    #[test]
    fn empty() {
        let b = BitSet::<1>::default();

        assert!(b.is_empty());
    }
}
@@ -0,0 +1,588 @@
//! Track live bindings per symbol, applicable constraints per binding, and live declarations.
//!
//! These data structures operate entirely on scope-local newtype-indices for definitions and
//! constraints, referring to their location in the `all_definitions` and `all_constraints`
//! indexvecs in [`super::UseDefMapBuilder`].
//!
//! We need to track arbitrary associations between bindings and constraints, not just a single set
//! of currently dominating constraints (where "dominating" means "control flow must have passed
//! through it to reach this point"), because we can have dominating constraints that apply to some
//! bindings but not others, as in this code:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//!     if flag2:
//!         x = 2 if flag else None
//! x
//! ```
//!
//! The `x is not None` constraint dominates the final use of `x`, but it applies only to the first
//! binding of `x`, not the second, so `None` is a possible value for `x`.
//!
//! And we can't just track, for each binding, an index into a list of dominating constraints,
//! either, because we can have bindings which are still visible, but subject to constraints that
//! are no longer dominating, as in this code:
//!
//! ```python
//! x = 0
//! if flag1:
//!     x = 1 if flag2 else None
//!     assert x is not None
//! x
//! ```
//!
//! From the point of view of the final use of `x`, the `x is not None` constraint no longer
//! dominates, but it does dominate the `x = 1 if flag2 else None` binding, so we have to keep
//! track of that.
//!
//! The data structures used here ([`BitSet`] and [`smallvec::SmallVec`]) optimize for keeping all
//! data inline (avoiding lots of scattered allocations) in small-to-medium cases, and falling back
//! to heap allocation to be able to scale to arbitrary numbers of live bindings and constraints
//! when needed.
//!
//! Tracking live declarations is simpler, since constraints are not involved, but otherwise very
//! similar to tracking live bindings.
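A self-contained sketch (plain `Vec`s standing in for the bit-sets defined below; hypothetical indices) of the association this module tracks for the first example above: one constraint set per live binding, kept in lockstep with the live bindings:

```rust
fn main() {
    // Binding 0 is `x = 1 if flag else None`, binding 1 is
    // `x = 2 if flag else None`; constraint 0 is `x is not None`.
    let live_bindings: Vec<u32> = vec![0, 1];
    // One set of applicable constraints per live binding, in the same order.
    let constraints_per_binding: Vec<Vec<u32>> = vec![
        vec![0], // the first binding is narrowed by `x is not None`
        vec![],  // the second binding is not; `None` remains possible for `x`
    ];
    for (binding, constraints) in live_bindings.iter().zip(&constraints_per_binding) {
        println!("binding {binding}: applicable constraints {constraints:?}");
    }
}
```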
use super::bitset::{BitSet, BitSetIterator};
use ruff_index::newtype_index;
use smallvec::SmallVec;

/// A newtype-index for a definition in a particular scope.
#[newtype_index]
pub(super) struct ScopedDefinitionId;

/// A newtype-index for a constraint expression in a particular scope.
#[newtype_index]
pub(super) struct ScopedConstraintId;

/// Can reference this * 64 total definitions inline; more will fall back to the heap.
const INLINE_BINDING_BLOCKS: usize = 3;

/// A [`BitSet`] of [`ScopedDefinitionId`], representing live bindings of a symbol in a scope.
type Bindings = BitSet<INLINE_BINDING_BLOCKS>;
type BindingsIterator<'a> = BitSetIterator<'a, INLINE_BINDING_BLOCKS>;

/// Can reference this * 64 total declarations inline; more will fall back to the heap.
const INLINE_DECLARATION_BLOCKS: usize = 3;

/// A [`BitSet`] of [`ScopedDefinitionId`], representing live declarations of a symbol in a scope.
type Declarations = BitSet<INLINE_DECLARATION_BLOCKS>;
type DeclarationsIterator<'a> = BitSetIterator<'a, INLINE_DECLARATION_BLOCKS>;

/// Can reference this * 64 total constraints inline; more will fall back to the heap.
const INLINE_CONSTRAINT_BLOCKS: usize = 2;

/// Can keep inline this many live bindings per symbol at a given time; more will go to heap.
const INLINE_BINDINGS_PER_SYMBOL: usize = 4;

/// One [`BitSet`] of applicable [`ScopedConstraintId`] per live binding.
type InlineConstraintArray = [BitSet<INLINE_CONSTRAINT_BLOCKS>; INLINE_BINDINGS_PER_SYMBOL];
type Constraints = SmallVec<InlineConstraintArray>;
type ConstraintsIterator<'a> = std::slice::Iter<'a, BitSet<INLINE_CONSTRAINT_BLOCKS>>;
type ConstraintsIntoIterator = smallvec::IntoIter<InlineConstraintArray>;

/// Live declarations for a single symbol at some point in control flow.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(super) struct SymbolDeclarations {
|
||||
/// [`BitSet`]: which declarations (as [`ScopedDefinitionId`]) can reach the current location?
|
||||
live_declarations: Declarations,
|
||||
|
||||
/// Could the symbol be un-declared at this point?
|
||||
may_be_undeclared: bool,
|
||||
}
|
||||
|
||||
impl SymbolDeclarations {
|
||||
fn undeclared() -> Self {
|
||||
Self {
|
||||
live_declarations: Declarations::default(),
|
||||
may_be_undeclared: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Record a newly-encountered declaration for this symbol.
|
||||
fn record_declaration(&mut self, declaration_id: ScopedDefinitionId) {
|
||||
self.live_declarations = Declarations::with(declaration_id.into());
|
||||
self.may_be_undeclared = false;
|
||||
}
|
||||
|
||||
/// Add undeclared as a possibility for this symbol.
|
||||
fn set_may_be_undeclared(&mut self) {
|
||||
self.may_be_undeclared = true;
|
||||
}
|
||||
|
||||
/// Return an iterator over live declarations for this symbol.
|
||||
pub(super) fn iter(&self) -> DeclarationIdIterator {
|
||||
DeclarationIdIterator {
|
||||
inner: self.live_declarations.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn is_empty(&self) -> bool {
|
||||
self.live_declarations.is_empty()
|
||||
}
|
||||
|
||||
pub(super) fn may_be_undeclared(&self) -> bool {
|
||||
self.may_be_undeclared
|
||||
}
|
||||
}
|
||||
|
||||
/// Live bindings and narrowing constraints for a single symbol at some point in control flow.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(super) struct SymbolBindings {
|
||||
/// [`BitSet`]: which bindings (as [`ScopedDefinitionId`]) can reach the current location?
|
||||
live_bindings: Bindings,
|
||||
|
||||
/// For each live binding, which [`ScopedConstraintId`] apply?
|
||||
///
|
||||
/// This is a [`smallvec::SmallVec`] which should always have one [`BitSet`] of constraints per
|
||||
/// binding in `live_bindings`.
|
||||
constraints: Constraints,
|
||||
|
||||
/// Could the symbol be unbound at this point?
|
||||
may_be_unbound: bool,
|
||||
}
|
||||
|
||||
impl SymbolBindings {
|
||||
fn unbound() -> Self {
|
||||
Self {
|
||||
live_bindings: Bindings::default(),
|
||||
constraints: Constraints::default(),
|
||||
may_be_unbound: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Add Unbound as a possibility for this symbol.
|
||||
fn set_may_be_unbound(&mut self) {
|
||||
self.may_be_unbound = true;
|
||||
}
|
||||
|
||||
/// Record a newly-encountered binding for this symbol.
|
||||
pub(super) fn record_binding(&mut self, binding_id: ScopedDefinitionId) {
|
||||
// The new binding replaces all previous live bindings in this path, and has no
|
||||
// constraints.
|
||||
self.live_bindings = Bindings::with(binding_id.into());
|
||||
self.constraints = Constraints::with_capacity(1);
|
||||
self.constraints.push(BitSet::default());
|
||||
self.may_be_unbound = false;
|
||||
}
|
||||
|
||||
/// Add given constraint to all live bindings.
|
||||
pub(super) fn record_constraint(&mut self, constraint_id: ScopedConstraintId) {
|
||||
for bitset in &mut self.constraints {
|
||||
bitset.insert(constraint_id.into());
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterate over currently live bindings for this symbol.
|
||||
pub(super) fn iter(&self) -> BindingIdWithConstraintsIterator {
|
||||
BindingIdWithConstraintsIterator {
|
||||
definitions: self.live_bindings.iter(),
|
||||
constraints: self.constraints.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn may_be_unbound(&self) -> bool {
|
||||
self.may_be_unbound
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(super) struct SymbolState {
|
||||
declarations: SymbolDeclarations,
|
||||
bindings: SymbolBindings,
|
||||
}
|
||||
|
||||
impl SymbolState {
|
||||
/// Return a new [`SymbolState`] representing an unbound, undeclared symbol.
|
||||
pub(super) fn undefined() -> Self {
|
||||
Self {
|
||||
declarations: SymbolDeclarations::undeclared(),
|
||||
bindings: SymbolBindings::unbound(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Add Unbound as a possibility for this symbol.
|
||||
pub(super) fn set_may_be_unbound(&mut self) {
|
||||
self.bindings.set_may_be_unbound();
|
||||
}
|
||||
|
||||
/// Record a newly-encountered binding for this symbol.
|
||||
pub(super) fn record_binding(&mut self, binding_id: ScopedDefinitionId) {
|
||||
self.bindings.record_binding(binding_id);
|
||||
}
|
||||
|
||||
/// Add given constraint to all live bindings.
|
||||
pub(super) fn record_constraint(&mut self, constraint_id: ScopedConstraintId) {
|
||||
self.bindings.record_constraint(constraint_id);
|
||||
}
|
||||
|
||||
/// Add undeclared as a possibility for this symbol.
|
||||
pub(super) fn set_may_be_undeclared(&mut self) {
|
||||
self.declarations.set_may_be_undeclared();
|
||||
}
|
||||
|
||||
/// Record a newly-encountered declaration of this symbol.
|
||||
pub(super) fn record_declaration(&mut self, declaration_id: ScopedDefinitionId) {
|
||||
self.declarations.record_declaration(declaration_id);
|
||||
}
|
||||
|
||||
/// Merge another [`SymbolState`] into this one.
|
||||
pub(super) fn merge(&mut self, b: SymbolState) {
|
||||
let mut a = Self {
|
||||
bindings: SymbolBindings {
|
||||
live_bindings: Bindings::default(),
|
||||
constraints: Constraints::default(),
|
||||
may_be_unbound: self.bindings.may_be_unbound || b.bindings.may_be_unbound,
|
||||
},
|
||||
declarations: SymbolDeclarations {
|
||||
live_declarations: self.declarations.live_declarations.clone(),
|
||||
may_be_undeclared: self.declarations.may_be_undeclared
|
||||
|| b.declarations.may_be_undeclared,
|
||||
},
|
||||
};
|
||||
|
||||
std::mem::swap(&mut a, self);
|
||||
self.declarations
|
||||
.live_declarations
|
||||
.union(&b.declarations.live_declarations);
|
||||
|
||||
let mut a_defs_iter = a.bindings.live_bindings.iter();
|
||||
let mut b_defs_iter = b.bindings.live_bindings.iter();
|
||||
let mut a_constraints_iter = a.bindings.constraints.into_iter();
|
||||
let mut b_constraints_iter = b.bindings.constraints.into_iter();
|
||||
|
||||
let mut opt_a_def: Option<u32> = a_defs_iter.next();
|
||||
let mut opt_b_def: Option<u32> = b_defs_iter.next();
|
||||
|
||||
// Iterate through the definitions from `a` and `b`, always processing the lower definition
|
||||
// ID first, and pushing each definition onto the merged `SymbolState` with its
|
||||
// constraints. If a definition is found in both `a` and `b`, push it with the intersection
|
||||
// of the constraints from the two paths; a constraint that applies from only one possible
|
||||
// path is irrelevant.
|
||||
|
||||
// Helper to push `def`, with constraints in `constraints_iter`, onto `self`.
|
||||
let push = |def, constraints_iter: &mut ConstraintsIntoIterator, merged: &mut Self| {
|
||||
merged.bindings.live_bindings.insert(def);
|
||||
// SAFETY: we only ever create SymbolState with either no definitions and no constraint
|
||||
// bitsets (`::unbound`) or one definition and one constraint bitset (`::with`), and
|
||||
// `::merge` always pushes one definition and one constraint bitset together (just
|
||||
// below), so the number of definitions and the number of constraint bitsets can never
|
||||
// get out of sync.
|
||||
let constraints = constraints_iter
|
||||
.next()
|
||||
.expect("definitions and constraints length mismatch");
|
||||
merged.bindings.constraints.push(constraints);
|
||||
};
|
||||
|
||||
loop {
|
||||
match (opt_a_def, opt_b_def) {
|
||||
(Some(a_def), Some(b_def)) => match a_def.cmp(&b_def) {
|
||||
std::cmp::Ordering::Less => {
|
||||
// Next definition ID is only in `a`, push it to `self` and advance `a`.
|
||||
push(a_def, &mut a_constraints_iter, self);
|
||||
opt_a_def = a_defs_iter.next();
|
||||
}
|
||||
std::cmp::Ordering::Greater => {
|
||||
// Next definition ID is only in `b`, push it to `self` and advance `b`.
|
||||
push(b_def, &mut b_constraints_iter, self);
|
||||
opt_b_def = b_defs_iter.next();
|
||||
}
|
||||
std::cmp::Ordering::Equal => {
|
||||
// Next definition is in both; push to `self` and intersect constraints.
|
||||
push(a_def, &mut b_constraints_iter, self);
|
||||
// SAFETY: we only ever create SymbolState with either no definitions and
|
||||
// no constraint bitsets (`::unbound`) or one definition and one constraint
|
||||
// bitset (`::with`), and `::merge` always pushes one definition and one
|
||||
// constraint bitset together (just below), so the number of definitions
|
||||
// and the number of constraint bitsets can never get out of sync.
|
||||
let a_constraints = a_constraints_iter
|
||||
.next()
|
||||
.expect("definitions and constraints length mismatch");
|
||||
// If the same definition is visible through both paths, any constraint
|
||||
// that applies on only one path is irrelevant to the resulting type from
|
||||
// unioning the two paths, so we intersect the constraints.
|
||||
self.bindings
|
||||
.constraints
|
||||
.last_mut()
|
||||
.unwrap()
|
||||
.intersect(&a_constraints);
|
||||
opt_a_def = a_defs_iter.next();
|
||||
opt_b_def = b_defs_iter.next();
|
||||
}
|
||||
},
|
||||
(Some(a_def), None) => {
|
||||
// We've exhausted `b`, just push the def from `a` and move on to the next.
|
||||
push(a_def, &mut a_constraints_iter, self);
|
||||
opt_a_def = a_defs_iter.next();
|
||||
}
|
||||
(None, Some(b_def)) => {
|
||||
// We've exhausted `a`, just push the def from `b` and move on to the next.
|
||||
push(b_def, &mut b_constraints_iter, self);
|
||||
opt_b_def = b_defs_iter.next();
|
||||
}
|
||||
(None, None) => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn bindings(&self) -> &SymbolBindings {
|
||||
&self.bindings
|
||||
}
|
||||
|
||||
pub(super) fn declarations(&self) -> &SymbolDeclarations {
|
||||
&self.declarations
|
||||
}
|
||||
|
||||
/// Could the symbol be unbound?
|
||||
pub(super) fn may_be_unbound(&self) -> bool {
|
||||
self.bindings.may_be_unbound()
|
||||
}
|
||||
}
|
||||
|
||||
/// The default state of a symbol, if we've seen no definitions of it, is undefined (that is,
|
||||
/// both unbound and undeclared).
|
||||
impl Default for SymbolState {
|
||||
fn default() -> Self {
|
||||
SymbolState::undefined()
|
||||
}
|
||||
}
|
||||
|
||||
/// A single binding (as [`ScopedDefinitionId`]) with an iterator of its applicable
|
||||
/// [`ScopedConstraintId`].
|
||||
#[derive(Debug)]
|
||||
pub(super) struct BindingIdWithConstraints<'a> {
|
||||
pub(super) definition: ScopedDefinitionId,
|
||||
pub(super) constraint_ids: ConstraintIdIterator<'a>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct BindingIdWithConstraintsIterator<'a> {
|
||||
definitions: BindingsIterator<'a>,
|
||||
constraints: ConstraintsIterator<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for BindingIdWithConstraintsIterator<'a> {
|
||||
type Item = BindingIdWithConstraints<'a>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match (self.definitions.next(), self.constraints.next()) {
|
||||
(None, None) => None,
|
||||
(Some(def), Some(constraints)) => Some(BindingIdWithConstraints {
|
||||
definition: ScopedDefinitionId::from_u32(def),
|
||||
constraint_ids: ConstraintIdIterator {
|
||||
wrapped: constraints.iter(),
|
||||
},
|
||||
}),
|
||||
// SAFETY: see above.
|
||||
_ => unreachable!("definitions and constraints length mismatch"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::iter::FusedIterator for BindingIdWithConstraintsIterator<'_> {}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct ConstraintIdIterator<'a> {
|
||||
wrapped: BitSetIterator<'a, INLINE_CONSTRAINT_BLOCKS>,
|
||||
}
|
||||
|
||||
impl Iterator for ConstraintIdIterator<'_> {
|
||||
type Item = ScopedConstraintId;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.wrapped.next().map(ScopedConstraintId::from_u32)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::iter::FusedIterator for ConstraintIdIterator<'_> {}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct DeclarationIdIterator<'a> {
|
||||
inner: DeclarationsIterator<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for DeclarationIdIterator<'a> {
|
||||
type Item = ScopedDefinitionId;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.inner.next().map(ScopedDefinitionId::from_u32)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::iter::FusedIterator for DeclarationIdIterator<'_> {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{ScopedConstraintId, ScopedDefinitionId, SymbolState};
|
||||
|
||||
fn assert_bindings(symbol: &SymbolState, may_be_unbound: bool, expected: &[&str]) {
|
||||
assert_eq!(symbol.may_be_unbound(), may_be_unbound);
|
||||
let actual = symbol
|
||||
.bindings()
|
||||
.iter()
|
||||
.map(|def_id_with_constraints| {
|
||||
format!(
|
||||
"{}<{}>",
|
||||
def_id_with_constraints.definition.as_u32(),
|
||||
def_id_with_constraints
|
||||
.constraint_ids
|
||||
.map(ScopedConstraintId::as_u32)
|
||||
.map(|idx| idx.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
pub(crate) fn assert_declarations(
|
||||
symbol: &SymbolState,
|
||||
may_be_undeclared: bool,
|
||||
expected: &[u32],
|
||||
) {
|
||||
assert_eq!(symbol.declarations.may_be_undeclared(), may_be_undeclared);
|
||||
let actual = symbol
|
||||
.declarations()
|
||||
.iter()
|
||||
.map(ScopedDefinitionId::as_u32)
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unbound() {
|
||||
let sym = SymbolState::undefined();
|
||||
|
||||
assert_bindings(&sym, true, &[]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn with() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
|
||||
assert_bindings(&sym, false, &["0<>"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn set_may_be_unbound() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
sym.set_may_be_unbound();
|
||||
|
||||
assert_bindings(&sym, true, &["0<>"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn record_constraint() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
sym.record_constraint(ScopedConstraintId::from_u32(0));
|
||||
|
||||
assert_bindings(&sym, false, &["0<0>"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn merge() {
|
||||
// merging the same definition with the same constraint keeps the constraint
|
||||
let mut sym0a = SymbolState::undefined();
|
||||
sym0a.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
sym0a.record_constraint(ScopedConstraintId::from_u32(0));
|
||||
|
||||
let mut sym0b = SymbolState::undefined();
|
||||
sym0b.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
sym0b.record_constraint(ScopedConstraintId::from_u32(0));
|
||||
|
||||
sym0a.merge(sym0b);
|
||||
let mut sym0 = sym0a;
|
||||
assert_bindings(&sym0, false, &["0<0>"]);
|
||||
|
||||
// merging the same definition with differing constraints drops all constraints
|
||||
let mut sym1a = SymbolState::undefined();
|
||||
sym1a.record_binding(ScopedDefinitionId::from_u32(1));
|
||||
sym1a.record_constraint(ScopedConstraintId::from_u32(1));
|
||||
|
||||
let mut sym1b = SymbolState::undefined();
|
||||
sym1b.record_binding(ScopedDefinitionId::from_u32(1));
|
||||
sym1b.record_constraint(ScopedConstraintId::from_u32(2));
|
||||
|
||||
sym1a.merge(sym1b);
|
||||
let sym1 = sym1a;
|
||||
assert_bindings(&sym1, false, &["1<>"]);
|
||||
|
||||
// merging a constrained definition with unbound keeps both
|
||||
let mut sym2a = SymbolState::undefined();
|
||||
sym2a.record_binding(ScopedDefinitionId::from_u32(2));
|
||||
sym2a.record_constraint(ScopedConstraintId::from_u32(3));
|
||||
|
||||
let sym2b = SymbolState::undefined();
|
||||
|
||||
sym2a.merge(sym2b);
|
||||
let sym2 = sym2a;
|
||||
assert_bindings(&sym2, true, &["2<3>"]);
|
||||
|
||||
// merging different definitions keeps them each with their existing constraints
|
||||
sym0.merge(sym2);
|
||||
let sym = sym0;
|
||||
assert_bindings(&sym, true, &["0<0>", "2<3>"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_declaration() {
|
||||
let sym = SymbolState::undefined();
|
||||
|
||||
assert_declarations(&sym, true, &[]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn record_declaration() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_declaration(ScopedDefinitionId::from_u32(1));
|
||||
|
||||
assert_declarations(&sym, false, &[1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn record_declaration_override() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_declaration(ScopedDefinitionId::from_u32(1));
|
||||
sym.record_declaration(ScopedDefinitionId::from_u32(2));
|
||||
|
||||
assert_declarations(&sym, false, &[2]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn record_declaration_merge() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_declaration(ScopedDefinitionId::from_u32(1));
|
||||
|
||||
let mut sym2 = SymbolState::undefined();
|
||||
sym2.record_declaration(ScopedDefinitionId::from_u32(2));
|
||||
|
||||
sym.merge(sym2);
|
||||
|
||||
assert_declarations(&sym, false, &[1, 2]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn record_declaration_merge_partial_undeclared() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_declaration(ScopedDefinitionId::from_u32(1));
|
||||
|
||||
let sym2 = SymbolState::undefined();
|
||||
|
||||
sym.merge(sym2);
|
||||
|
||||
assert_declarations(&sym, true, &[1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn set_may_be_undeclared() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_declaration(ScopedDefinitionId::from_u32(0));
|
||||
sym.set_may_be_undeclared();
|
||||
|
||||
assert_declarations(&sym, true, &[0]);
|
||||
}
|
||||
}
|
||||
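As a hedged sketch of the merge semantics defined above, mirroring the module doc's second Python example (the constraint survives only on the path that recorded it, so the merged state drops it). This test is illustrative and not part of the PR; it uses only APIs defined in this file:

    // Sketch (would live in the tests module above): both paths bind definition 1,
    // but only one path records constraint 0, so the merge intersects it away.
    #[test]
    fn merge_drops_one_sided_constraint() {
        let mut path_a = SymbolState::undefined();
        path_a.record_binding(ScopedDefinitionId::from_u32(1));
        path_a.record_constraint(ScopedConstraintId::from_u32(0));

        let mut path_b = SymbolState::undefined();
        path_b.record_binding(ScopedDefinitionId::from_u32(1));
        // No constraint recorded on this path.

        path_a.merge(path_b);
        // Same binding on both sides, constraints intersected: 0 is gone.
        assert_bindings(&path_a, false, &["1<>"]);
    }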
@@ -1,11 +1,14 @@
-use red_knot_module_resolver::{resolve_module, Module, ModuleName};
-use ruff_db::files::File;
+use ruff_db::files::{File, FilePath};
+use ruff_db::source::line_index;
 use ruff_python_ast as ast;
-use ruff_python_ast::{Expr, ExpressionRef, StmtClassDef};
+use ruff_python_ast::{Expr, ExpressionRef};
+use ruff_source_file::LineIndex;

+use crate::module_name::ModuleName;
+use crate::module_resolver::{resolve_module, Module};
 use crate::semantic_index::ast_ids::HasScopedAstId;
 use crate::semantic_index::semantic_index;
-use crate::types::{definition_ty, global_symbol_ty_by_name, infer_scope_types, Type};
+use crate::types::{binding_ty, global_symbol_ty, infer_scope_types, Type};
 use crate::Db;

 pub struct SemanticModel<'db> {
@@ -24,12 +27,20 @@ impl<'db> SemanticModel<'db> {
         self.db
     }

+    pub fn file_path(&self) -> &FilePath {
+        self.file.path(self.db)
+    }
+
+    pub fn line_index(&self) -> LineIndex {
+        line_index(self.db.upcast(), self.file)
+    }
+
     pub fn resolve_module(&self, module_name: ModuleName) -> Option<Module> {
-        resolve_module(self.db.upcast(), module_name)
+        resolve_module(self.db, module_name)
     }

     pub fn global_symbol_ty(&self, module: &Module, symbol_name: &str) -> Type<'db> {
-        global_symbol_ty_by_name(self.db, module.file(), symbol_name)
+        global_symbol_ty(self.db, module.file(), symbol_name)
     }
 }
@@ -136,62 +147,56 @@ impl HasTy for ast::Expr {
     }
 }

-impl HasTy for ast::StmtFunctionDef {
-    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
-        let index = semantic_index(model.db, model.file);
-        let definition = index.definition(self);
-        definition_ty(model.db, definition)
-    }
+macro_rules! impl_binding_has_ty {
+    ($ty: ty) => {
+        impl HasTy for $ty {
+            #[inline]
+            fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
+                let index = semantic_index(model.db, model.file);
+                let binding = index.definition(self);
+                binding_ty(model.db, binding)
+            }
+        }
+    };
 }

-impl HasTy for StmtClassDef {
-    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
-        let index = semantic_index(model.db, model.file);
-        let definition = index.definition(self);
-        definition_ty(model.db, definition)
-    }
-}
-
-impl HasTy for ast::Alias {
-    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
-        let index = semantic_index(model.db, model.file);
-        let definition = index.definition(self);
-        definition_ty(model.db, definition)
-    }
-}
+impl_binding_has_ty!(ast::StmtFunctionDef);
+impl_binding_has_ty!(ast::StmtClassDef);
+impl_binding_has_ty!(ast::Alias);
+impl_binding_has_ty!(ast::Parameter);
+impl_binding_has_ty!(ast::ParameterWithDefault);

 #[cfg(test)]
 mod tests {
     use ruff_db::files::system_path_to_file;
     use ruff_db::parsed::parsed_module;
-    use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
     use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

     use crate::db::tests::TestDb;
+    use crate::program::{Program, SearchPathSettings};
+    use crate::python_version::PythonVersion;
     use crate::types::Type;
-    use crate::{HasTy, SemanticModel};
+    use crate::{HasTy, ProgramSettings, SemanticModel};

-    fn setup_db() -> TestDb {
-        let db = TestDb::new();
-        Program::new(
+    fn setup_db<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<TestDb> {
+        let mut db = TestDb::new();
+        db.write_files(files)?;
+
+        Program::from_settings(
             &db,
-            TargetVersion::Py38,
-            SearchPathSettings {
-                extra_paths: vec![],
-                workspace_root: SystemPathBuf::from("/src"),
-                site_packages: None,
-                custom_typeshed: None,
+            &ProgramSettings {
+                target_version: PythonVersion::default(),
+                search_paths: SearchPathSettings::new(SystemPathBuf::from("/src")),
             },
-        );
+        )?;

-        db
+        Ok(db)
     }

     #[test]
     fn function_ty() -> anyhow::Result<()> {
-        let mut db = setup_db();
+        let db = setup_db([("/src/foo.py", "def test(): pass")])?;

-        db.write_file("/src/foo.py", "def test(): pass")?;
         let foo = system_path_to_file(&db, "/src/foo.py").unwrap();

         let ast = parsed_module(&db, foo);
@@ -207,9 +212,8 @@ mod tests {

     #[test]
     fn class_ty() -> anyhow::Result<()> {
-        let mut db = setup_db();
+        let db = setup_db([("/src/foo.py", "class Test: pass")])?;

-        db.write_file("/src/foo.py", "class Test: pass")?;
         let foo = system_path_to_file(&db, "/src/foo.py").unwrap();

         let ast = parsed_module(&db, foo);
@@ -225,12 +229,11 @@ mod tests {

     #[test]
     fn alias_ty() -> anyhow::Result<()> {
-        let mut db = setup_db();
-
-        db.write_files([
+        let db = setup_db([
             ("/src/foo.py", "class Test: pass"),
             ("/src/bar.py", "from foo import Test"),
         ])?;

         let bar = system_path_to_file(&db, "/src/bar.py").unwrap();

         let ast = parsed_module(&db, bar);
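The `impl_binding_has_ty!` macro in the hunk above collapses three hand-written `HasTy` impls into one declarative macro, then stamps it out for five AST node types. A self-contained sketch of the same macro_rules pattern, with hypothetical trait and type names (`Describe`, `FunctionDef`, `ClassDef` are not from this PR):

// Standalone illustration of the macro-based impl deduplication used above.
trait Describe {
    fn describe(&self) -> String;
}

macro_rules! impl_describe {
    ($ty:ty, $label:literal) => {
        impl Describe for $ty {
            #[inline]
            fn describe(&self) -> String {
                // `$label` is substituted at expansion time for each type.
                format!("{}: {self:?}", $label)
            }
        }
    };
}

#[derive(Debug)]
struct FunctionDef;
#[derive(Debug)]
struct ClassDef;

impl_describe!(FunctionDef, "function");
impl_describe!(ClassDef, "class");

fn main() {
    assert_eq!(FunctionDef.describe(), "function: FunctionDef");
    assert_eq!(ClassDef.describe(), "class: ClassDef");
}

The payoff is the same as in the diff: adding a new type (here, a sixth AST node) is one macro invocation instead of a repeated impl block.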
crates/red_knot_python_semantic/src/site_packages.rs (new file, 842 lines)
@@ -0,0 +1,842 @@
//! Utilities for finding the `site-packages` directory,
//! into which third-party packages are installed.
//!
//! The routines exposed by this module have different behaviour depending
//! on the platform of the *host machine*, which may be
//! different from the *target platform for type checking*. (A user
//! might be running red-knot on a Windows machine, but might
//! reasonably ask us to type-check code assuming that the code runs
//! on Linux.)

use std::fmt;
use std::io;
use std::num::NonZeroUsize;
use std::ops::Deref;

use ruff_db::system::{System, SystemPath, SystemPathBuf};

use crate::PythonVersion;

type SitePackagesDiscoveryResult<T> = Result<T, SitePackagesDiscoveryError>;

/// Abstraction for a Python virtual environment.
///
/// Most of this information is derived from the virtual environment's `pyvenv.cfg` file.
/// The format of this file is not defined anywhere, and exactly which keys are present
/// depends on the tool that was used to create the virtual environment.
#[derive(Debug)]
pub(crate) struct VirtualEnvironment {
    venv_path: SysPrefixPath,
    base_executable_home_path: PythonHomePath,
    include_system_site_packages: bool,

    /// The version of the Python executable that was used to create this virtual environment.
    ///
    /// The Python version is encoded under different keys and in different formats
    /// by different virtual-environment creation tools,
    /// and the key is never read by the standard-library `site.py` module,
    /// so it's possible that we might not be able to find this information
    /// in an acceptable format under any of the keys we expect.
    /// This field will be `None` if so.
    version: Option<PythonVersion>,
}

impl VirtualEnvironment {
    pub(crate) fn new(
        path: impl AsRef<SystemPath>,
        system: &dyn System,
    ) -> SitePackagesDiscoveryResult<Self> {
        Self::new_impl(path.as_ref(), system)
    }

    fn new_impl(path: &SystemPath, system: &dyn System) -> SitePackagesDiscoveryResult<Self> {
        fn pyvenv_cfg_line_number(index: usize) -> NonZeroUsize {
            index.checked_add(1).and_then(NonZeroUsize::new).unwrap()
        }

        let venv_path = SysPrefixPath::new(path, system)?;
        let pyvenv_cfg_path = venv_path.join("pyvenv.cfg");
        tracing::debug!("Attempting to parse virtual environment metadata at '{pyvenv_cfg_path}'");

        let pyvenv_cfg = system
            .read_to_string(&pyvenv_cfg_path)
            .map_err(SitePackagesDiscoveryError::NoPyvenvCfgFile)?;

        let mut include_system_site_packages = false;
        let mut base_executable_home_path = None;
        let mut version_info_string = None;

        // A `pyvenv.cfg` file *looks* like a `.ini` file, but actually isn't valid `.ini` syntax!
        // The Python standard-library's `site` module parses these files by splitting each line on
        // '=' characters, so that's what we should do as well.
        //
        // See also: https://snarky.ca/how-virtual-environments-work/
        for (index, line) in pyvenv_cfg.lines().enumerate() {
            if let Some((key, value)) = line.split_once('=') {
                let key = key.trim();
                if key.is_empty() {
                    return Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
                        pyvenv_cfg_path,
                        PyvenvCfgParseErrorKind::MalformedKeyValuePair {
                            line_number: pyvenv_cfg_line_number(index),
                        },
                    ));
                }

                let value = value.trim();
                if value.is_empty() {
                    return Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
                        pyvenv_cfg_path,
                        PyvenvCfgParseErrorKind::MalformedKeyValuePair {
                            line_number: pyvenv_cfg_line_number(index),
                        },
                    ));
                }

                if value.contains('=') {
                    return Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
                        pyvenv_cfg_path,
                        PyvenvCfgParseErrorKind::TooManyEquals {
                            line_number: pyvenv_cfg_line_number(index),
                        },
                    ));
                }

                match key {
                    "include-system-site-packages" => {
                        include_system_site_packages = value.eq_ignore_ascii_case("true");
                    }
                    "home" => base_executable_home_path = Some(value),
                    // `virtualenv` and `uv` call this key `version_info`,
                    // but the stdlib venv module calls it `version`
                    "version" | "version_info" => version_info_string = Some(value),
                    _ => continue,
                }
            }
        }

        // The `home` key is read by the standard library's `site.py` module,
        // so if it's missing from the `pyvenv.cfg` file
        // (or the provided value is invalid),
        // it's reasonable to consider the virtual environment irredeemably broken.
        let Some(base_executable_home_path) = base_executable_home_path else {
            return Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
                pyvenv_cfg_path,
                PyvenvCfgParseErrorKind::NoHomeKey,
            ));
        };
        let base_executable_home_path = PythonHomePath::new(base_executable_home_path, system)
            .map_err(|io_err| {
                SitePackagesDiscoveryError::PyvenvCfgParseError(
                    pyvenv_cfg_path,
                    PyvenvCfgParseErrorKind::InvalidHomeValue(io_err),
                )
            })?;

        // but the `version`/`version_info` key is not read by the standard library,
        // and is provided under different keys depending on which virtual-environment creation tool
        // created the `pyvenv.cfg` file. Lenient parsing is appropriate here:
        // the file isn't really *invalid* if it doesn't have this key,
        // or if the value doesn't parse according to our expectations.
        let version = version_info_string.and_then(|version_string| {
            let mut version_info_parts = version_string.split('.');
            let (major, minor) = (version_info_parts.next()?, version_info_parts.next()?);
            PythonVersion::try_from((major, minor)).ok()
        });

        let metadata = Self {
            venv_path,
            base_executable_home_path,
            include_system_site_packages,
            version,
        };

        tracing::trace!("Resolved metadata for virtual environment: {metadata:?}");
        Ok(metadata)
    }
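A hedged sketch of driving the parser above through the in-memory test system used elsewhere in this PR. The `pyvenv.cfg` body and paths are illustrative; the APIs (`TestSystem`, `memory_file_system`, `write_file`, `create_directory_all`) are the ones exercised by the tests at the bottom of this file:

    // Sketch only (would live alongside the tests at the bottom of this file).
    #[test]
    fn parses_a_uv_style_pyvenv_cfg() {
        let system = TestSystem::default();
        let memory_fs = system.memory_file_system();
        // The `home` value must resolve to a real directory, so create one.
        memory_fs.create_directory_all("/Python3.12/bin").unwrap();
        memory_fs
            .write_file(
                "/.venv/pyvenv.cfg",
                "home = /Python3.12/bin\nversion_info = 3.12\n",
            )
            .unwrap();

        let venv = VirtualEnvironment::new("/.venv", &system).unwrap();
        assert_eq!(
            venv.version,
            Some(PythonVersion { major: 3, minor: 12 })
        );
    }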
    /// Return a list of `site-packages` directories that are available from this virtual environment.
    ///
    /// See the documentation for `site_packages_directory_from_sys_prefix` for more details.
    pub(crate) fn site_packages_directories(
        &self,
        system: &dyn System,
    ) -> SitePackagesDiscoveryResult<Vec<SystemPathBuf>> {
        let VirtualEnvironment {
            venv_path,
            base_executable_home_path,
            include_system_site_packages,
            version,
        } = self;

        let mut site_packages_directories = vec![site_packages_directory_from_sys_prefix(
            venv_path, *version, system,
        )?];

        if *include_system_site_packages {
            let system_sys_prefix =
                SysPrefixPath::from_executable_home_path(base_executable_home_path);

            // If we fail to resolve the `sys.prefix` path from the base executable home path,
            // or if we fail to resolve the `site-packages` from the `sys.prefix` path,
            // we should probably print a warning but *not* abort type checking
            if let Some(sys_prefix_path) = system_sys_prefix {
                match site_packages_directory_from_sys_prefix(&sys_prefix_path, *version, system) {
                    Ok(site_packages_directory) => {
                        site_packages_directories.push(site_packages_directory);
                    }
                    Err(error) => tracing::warn!(
                        "{error}. System site-packages will not be used for module resolution."
                    ),
                }
            } else {
                tracing::warn!(
                    "Failed to resolve `sys.prefix` of the system Python installation \
                    from the `home` value in the `pyvenv.cfg` file at '{}'. \
                    System site-packages will not be used for module resolution.",
                    venv_path.join("pyvenv.cfg")
                );
            }
        }

        tracing::debug!("Resolved site-packages directories for this virtual environment are: {site_packages_directories:?}");
        Ok(site_packages_directories)
    }
}

#[derive(Debug, thiserror::Error)]
pub(crate) enum SitePackagesDiscoveryError {
    #[error("Invalid --venv-path argument: {0} could not be canonicalized")]
    VenvDirCanonicalizationError(SystemPathBuf, #[source] io::Error),
    #[error("Invalid --venv-path argument: {0} does not point to a directory on disk")]
    VenvDirIsNotADirectory(SystemPathBuf),
    #[error("--venv-path points to a broken venv with no pyvenv.cfg file")]
    NoPyvenvCfgFile(#[source] io::Error),
    #[error("Failed to parse the pyvenv.cfg file at {0} because {1}")]
    PyvenvCfgParseError(SystemPathBuf, PyvenvCfgParseErrorKind),
    #[error("Failed to search the `lib` directory of the Python installation at {1} for `site-packages`")]
    CouldNotReadLibDirectory(#[source] io::Error, SysPrefixPath),
    #[error("Could not find the `site-packages` directory for the Python installation at {0}")]
    NoSitePackagesDirFound(SysPrefixPath),
}

/// The various ways in which parsing a `pyvenv.cfg` file could fail
#[derive(Debug)]
pub(crate) enum PyvenvCfgParseErrorKind {
    TooManyEquals { line_number: NonZeroUsize },
    MalformedKeyValuePair { line_number: NonZeroUsize },
    NoHomeKey,
    InvalidHomeValue(io::Error),
}

impl fmt::Display for PyvenvCfgParseErrorKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::TooManyEquals { line_number } => {
                write!(f, "line {line_number} has too many '=' characters")
            }
            Self::MalformedKeyValuePair { line_number } => write!(
                f,
                "line {line_number} has a malformed `<key> = <value>` pair"
            ),
            Self::NoHomeKey => f.write_str("the file does not have a `home` key"),
            Self::InvalidHomeValue(io_err) => {
                write!(
                    f,
                    "the following error was encountered \
                    when trying to resolve the `home` value to a directory on disk: {io_err}"
                )
            }
        }
    }
}

/// Attempt to retrieve the `site-packages` directory
/// associated with a given Python installation.
///
/// The location of the `site-packages` directory can vary according to the
/// Python version that this installation represents. The Python version may
/// or may not be known at this point, which is why the `python_version`
/// parameter is an `Option`.
fn site_packages_directory_from_sys_prefix(
    sys_prefix_path: &SysPrefixPath,
    python_version: Option<PythonVersion>,
    system: &dyn System,
) -> SitePackagesDiscoveryResult<SystemPathBuf> {
    tracing::debug!("Searching for site-packages directory in {sys_prefix_path}");

    if cfg!(target_os = "windows") {
        let site_packages = sys_prefix_path.join(r"Lib\site-packages");
        return system
            .is_directory(&site_packages)
            .then_some(site_packages)
            .ok_or(SitePackagesDiscoveryError::NoSitePackagesDirFound(
                sys_prefix_path.to_owned(),
            ));
    }

    // In the Python standard library's `site.py` module (used for finding `site-packages`
    // at runtime), we can find this in [the non-Windows branch]:
    //
    // ```py
    // libdirs = [sys.platlibdir]
    // if sys.platlibdir != "lib":
    //     libdirs.append("lib")
    // ```
    //
    // Pyright therefore searches for both a `lib/python3.X/site-packages` directory
    // and a `lib64/python3.X/site-packages` directory on non-MacOS Unix systems,
    // since `sys.platlibdir` can sometimes be equal to `"lib64"`.
    //
    // However, we only care about the `site-packages` directory insofar as it allows
    // us to discover Python source code that can be used for inferring type
    // information regarding third-party dependencies. That means that we don't need
    // to care about any possible `lib64/site-packages` directories, since
    // [the `sys`-module documentation] states that `sys.platlibdir` is *only* ever
    // used for C extensions, never for pure-Python modules.
    //
    // [the non-Windows branch]: https://github.com/python/cpython/blob/a8be8fc6c4682089be45a87bd5ee1f686040116c/Lib/site.py#L401-L410
    // [the `sys`-module documentation]: https://docs.python.org/3/library/sys.html#sys.platlibdir

    // If we were able to figure out what Python version this installation is,
    // we should be able to avoid iterating through all items in the `lib/` directory:
    if let Some(version) = python_version {
        let expected_path = sys_prefix_path.join(format!("lib/python{version}/site-packages"));
        if system.is_directory(&expected_path) {
            return Ok(expected_path);
        }
        if version.free_threaded_build_available() {
            // Nearly the same as `expected_path`, but with an additional `t` after {version}:
            let alternative_path =
                sys_prefix_path.join(format!("lib/python{version}t/site-packages"));
            if system.is_directory(&alternative_path) {
                return Ok(alternative_path);
            }
        }
    }

    // Either we couldn't figure out the version before calling this function
    // (e.g., from a `pyvenv.cfg` file if this was a venv),
    // or we couldn't find a `site-packages` folder at the expected location given
    // the parsed version
    //
    // Note: the `python3.x` part of the `site-packages` path can't be computed from
    // the `--target-version` the user has passed, as they might be running Python 3.12 locally
    // even if they've requested that we type check their code "as if" they're running 3.8.
    for entry_result in system
        .read_directory(&sys_prefix_path.join("lib"))
        .map_err(|io_err| {
            SitePackagesDiscoveryError::CouldNotReadLibDirectory(io_err, sys_prefix_path.to_owned())
        })?
    {
        let Ok(entry) = entry_result else {
            continue;
        };

        if !entry.file_type().is_directory() {
            continue;
        }

        let mut path = entry.into_path();

        let name = path
            .file_name()
            .expect("File name to be non-null because path is guaranteed to be a child of `lib`");

        if !name.starts_with("python3.") {
            continue;
        }

        path.push("site-packages");
        if system.is_directory(&path) {
            return Ok(path);
        }
    }
    Err(SitePackagesDiscoveryError::NoSitePackagesDirFound(
        sys_prefix_path.to_owned(),
    ))
}
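To make the probe order above concrete, here is the lookup sequence for a hypothetical Unix host with `sys.prefix = "/.venv"` and a parsed version of 3.13, written out as comments:

// Probe order (illustrative; mirrors the function above):
//
//   1. /.venv/lib/python3.13/site-packages    -- exact-version guess
//   2. /.venv/lib/python3.13t/site-packages   -- free-threaded variant, tried only
//                                                when the version supports it
//   3. /.venv/lib/python3.*/site-packages     -- fallback: scan every `python3.`-prefixed
//                                                directory under `lib/`
//
// On Windows there is a single, version-independent candidate:
//   /.venv/Lib/site-packages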
/// A path that represents the value of [`sys.prefix`] at runtime in Python
/// for a given Python executable.
///
/// For the case of a virtual environment, where a
/// Python binary is at `/.venv/bin/python`, `sys.prefix` is the path to
/// the virtual environment the Python binary lies inside, i.e. `/.venv`,
/// and `site-packages` will be at `.venv/lib/python3.X/site-packages`.
/// System Python installations generally work the same way: if a system
/// Python installation lies at `/opt/homebrew/bin/python`, `sys.prefix`
/// will be `/opt/homebrew`, and `site-packages` will be at
/// `/opt/homebrew/lib/python3.X/site-packages`.
///
/// [`sys.prefix`]: https://docs.python.org/3/library/sys.html#sys.prefix
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct SysPrefixPath(SystemPathBuf);

impl SysPrefixPath {
    fn new(
        unvalidated_path: impl AsRef<SystemPath>,
        system: &dyn System,
    ) -> SitePackagesDiscoveryResult<Self> {
        Self::new_impl(unvalidated_path.as_ref(), system)
    }

    fn new_impl(
        unvalidated_path: &SystemPath,
        system: &dyn System,
    ) -> SitePackagesDiscoveryResult<Self> {
        // It's important to resolve symlinks here rather than simply making the path absolute,
        // since system Python installations often only put symlinks in the "expected"
        // locations for `home` and `site-packages`
        let canonicalized = system
            .canonicalize_path(unvalidated_path)
            .map_err(|io_err| {
                SitePackagesDiscoveryError::VenvDirCanonicalizationError(
                    unvalidated_path.to_path_buf(),
                    io_err,
                )
            })?;
        system
            .is_directory(&canonicalized)
            .then_some(Self(canonicalized))
            .ok_or_else(|| {
                SitePackagesDiscoveryError::VenvDirIsNotADirectory(unvalidated_path.to_path_buf())
            })
    }

    fn from_executable_home_path(path: &PythonHomePath) -> Option<Self> {
        // No need to check whether `path.parent()` is a directory:
        // the parent of a canonicalised path that is known to exist
        // is guaranteed to be a directory.
        if cfg!(target_os = "windows") {
            Some(Self(path.to_path_buf()))
        } else {
            path.parent().map(|path| Self(path.to_path_buf()))
        }
    }
}

impl Deref for SysPrefixPath {
    type Target = SystemPath;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl fmt::Display for SysPrefixPath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "`sys.prefix` path '{}'", self.0)
    }
}
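`SysPrefixPath` above (and `PythonHomePath` below) both wrap an already-validated `SystemPathBuf` and `Deref` to `SystemPath`, so callers get the full borrowed-path API without re-validating. A minimal standalone sketch of that newtype-plus-Deref pattern, using `std::path` types so it compiles on its own (names are hypothetical):

use std::ops::Deref;
use std::path::{Path, PathBuf};

// A path that some constructor has already checked is a real directory.
struct ValidatedDir(PathBuf);

impl Deref for ValidatedDir {
    type Target = Path;

    fn deref(&self) -> &Path {
        &self.0
    }
}

fn main() {
    let dir = ValidatedDir(PathBuf::from("/usr"));
    // Deref lets callers use &Path methods directly on the newtype:
    assert_eq!(dir.join("lib"), PathBuf::from("/usr/lib"));
}

Keeping the inner field private means the only way to obtain one of these types is through the validating constructor, which is what lets later code (like `from_executable_home_path` above) skip re-checking.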
/// The value given by the `home` key in `pyvenv.cfg` files.
|
||||
///
|
||||
/// This is equivalent to `{sys_prefix_path}/bin`, and points
|
||||
/// to a directory in which a Python executable can be found.
|
||||
/// Confusingly, it is *not* the same as the [`PYTHONHOME`]
|
||||
/// environment variable that Python provides! However, it's
|
||||
/// consistent among all mainstream creators of Python virtual
|
||||
/// environments (the stdlib Python `venv` module, the third-party
|
||||
/// `virtualenv` library, and `uv`), was specified by
|
||||
/// [the original PEP adding the `venv` module],
|
||||
/// and it's one of the few fields that's read by the Python
|
||||
/// standard library's `site.py` module.
|
||||
///
|
||||
/// Although it doesn't appear to be specified anywhere,
|
||||
/// all existing virtual environment tools always use an absolute path
|
||||
/// for the `home` value, and the Python standard library also assumes
|
||||
/// that the `home` value will be an absolute path.
|
||||
///
|
||||
/// Other values, such as the path to the Python executable or the
|
||||
/// base-executable `sys.prefix` value, are either only provided in
|
||||
/// `pyvenv.cfg` files by some virtual-environment creators,
|
||||
/// or are included under different keys depending on which
|
||||
/// virtual-environment creation tool you've used.
|
||||
///
|
||||
/// [`PYTHONHOME`]: https://docs.python.org/3/using/cmdline.html#envvar-PYTHONHOME
|
||||
/// [the original PEP adding the `venv` module]: https://peps.python.org/pep-0405/
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
struct PythonHomePath(SystemPathBuf);
|
||||
|
||||
impl PythonHomePath {
|
||||
fn new(path: impl AsRef<SystemPath>, system: &dyn System) -> io::Result<Self> {
|
||||
let path = path.as_ref();
|
||||
// It's important to resolve symlinks here rather than simply making the path absolute,
|
||||
// since system Python installations often only put symlinks in the "expected"
|
||||
// locations for `home` and `site-packages`
|
||||
let canonicalized = system.canonicalize_path(path)?;
|
||||
system
|
||||
.is_directory(&canonicalized)
|
||||
.then_some(Self(canonicalized))
|
||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "not a directory"))
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for PythonHomePath {
|
||||
type Target = SystemPath;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for PythonHomePath {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "`home` location '{}'", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<SystemPath> for PythonHomePath {
|
||||
fn eq(&self, other: &SystemPath) -> bool {
|
||||
&*self.0 == other
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<SystemPathBuf> for PythonHomePath {
|
||||
fn eq(&self, other: &SystemPathBuf) -> bool {
|
||||
self == &**other
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_db::system::TestSystem;
|
||||
|
||||
use super::*;
|
||||
|
||||
struct VirtualEnvironmentTester {
|
||||
system: TestSystem,
|
||||
minor_version: u8,
|
||||
free_threaded: bool,
|
||||
system_site_packages: bool,
|
||||
pyvenv_cfg_version_field: Option<&'static str>,
|
||||
}
|
||||
|
||||
impl VirtualEnvironmentTester {
|
||||
/// Builds a mock virtual environment, and returns the path to the venv
|
||||
fn build_mock_venv(&self) -> SystemPathBuf {
|
||||
let VirtualEnvironmentTester {
|
||||
system,
|
||||
minor_version,
|
||||
system_site_packages,
|
||||
free_threaded,
|
||||
pyvenv_cfg_version_field,
|
||||
} = self;
|
||||
let memory_fs = system.memory_file_system();
|
||||
let unix_site_packages = if *free_threaded {
|
||||
format!("lib/python3.{minor_version}t/site-packages")
|
||||
} else {
|
||||
format!("lib/python3.{minor_version}/site-packages")
|
||||
};
|
||||
|
||||
let system_install_sys_prefix =
|
||||
SystemPathBuf::from(&*format!("/Python3.{minor_version}"));
|
||||
let (system_home_path, system_exe_path, system_site_packages_path) =
|
||||
if cfg!(target_os = "windows") {
|
||||
let system_home_path = system_install_sys_prefix.clone();
|
||||
let system_exe_path = system_home_path.join("python.exe");
|
||||
let system_site_packages_path =
|
||||
system_install_sys_prefix.join(r"Lib\site-packages");
|
||||
(system_home_path, system_exe_path, system_site_packages_path)
|
||||
} else {
|
||||
let system_home_path = system_install_sys_prefix.join("bin");
|
||||
let system_exe_path = system_home_path.join("python");
|
||||
let system_site_packages_path =
|
||||
system_install_sys_prefix.join(&unix_site_packages);
|
||||
(system_home_path, system_exe_path, system_site_packages_path)
|
||||
};
|
||||
memory_fs.write_file(system_exe_path, "").unwrap();
|
||||
memory_fs
|
||||
.create_directory_all(&system_site_packages_path)
|
||||
.unwrap();
|
||||
|
||||
let venv_sys_prefix = SystemPathBuf::from("/.venv");
|
||||
let (venv_exe, site_packages_path) = if cfg!(target_os = "windows") {
|
||||
(
|
||||
venv_sys_prefix.join(r"Scripts\python.exe"),
|
||||
venv_sys_prefix.join(r"Lib\site-packages"),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
venv_sys_prefix.join("bin/python"),
|
||||
venv_sys_prefix.join(&unix_site_packages),
|
||||
)
|
||||
};
|
||||
memory_fs.write_file(&venv_exe, "").unwrap();
|
||||
memory_fs.create_directory_all(&site_packages_path).unwrap();
|
||||
|
||||
let pyvenv_cfg_path = venv_sys_prefix.join("pyvenv.cfg");
|
||||
let mut pyvenv_cfg_contents = format!("home = {system_home_path}\n");
|
||||
if let Some(version_field) = pyvenv_cfg_version_field {
|
||||
pyvenv_cfg_contents.push_str(version_field);
|
||||
pyvenv_cfg_contents.push('\n');
|
||||
}
|
||||
// Deliberately using weird casing here to test that our pyvenv.cfg parsing is case-insensitive:
|
||||
if *system_site_packages {
|
||||
pyvenv_cfg_contents.push_str("include-system-site-packages = TRuE\n");
|
||||
}
|
||||
memory_fs
|
||||
.write_file(pyvenv_cfg_path, &pyvenv_cfg_contents)
|
||||
.unwrap();
|
||||
|
||||
venv_sys_prefix
|
||||
}
|
||||
|
||||
fn test(self) {
|
||||
let venv_path = self.build_mock_venv();
|
||||
let venv = VirtualEnvironment::new(venv_path.clone(), &self.system).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
venv.venv_path,
|
||||
SysPrefixPath(self.system.canonicalize_path(&venv_path).unwrap())
|
||||
);
|
||||
assert_eq!(venv.include_system_site_packages, self.system_site_packages);
|
||||
|
||||
if self.pyvenv_cfg_version_field.is_some() {
|
||||
assert_eq!(
|
||||
venv.version,
|
||||
Some(PythonVersion {
|
||||
major: 3,
|
||||
minor: self.minor_version
|
||||
})
|
||||
);
|
||||
} else {
|
||||
assert_eq!(venv.version, None);
|
||||
}
|
||||
|
||||
let expected_home = if cfg!(target_os = "windows") {
|
||||
SystemPathBuf::from(&*format!(r"\Python3.{}", self.minor_version))
|
||||
} else {
|
||||
SystemPathBuf::from(&*format!("/Python3.{}/bin", self.minor_version))
|
||||
};
|
||||
assert_eq!(venv.base_executable_home_path, expected_home);
|
||||
|
||||
let site_packages_directories = venv.site_packages_directories(&self.system).unwrap();
|
||||
let expected_venv_site_packages = if cfg!(target_os = "windows") {
|
||||
SystemPathBuf::from(r"\.venv\Lib\site-packages")
|
||||
} else if self.free_threaded {
|
||||
SystemPathBuf::from(&*format!(
|
||||
"/.venv/lib/python3.{}t/site-packages",
|
||||
self.minor_version
|
||||
))
|
||||
} else {
|
||||
SystemPathBuf::from(&*format!(
|
||||
"/.venv/lib/python3.{}/site-packages",
|
||||
self.minor_version
|
||||
))
|
||||
};
|
||||
|
||||
let expected_system_site_packages = if cfg!(target_os = "windows") {
|
||||
SystemPathBuf::from(&*format!(
|
||||
r"\Python3.{}\Lib\site-packages",
|
||||
self.minor_version
|
||||
))
|
||||
} else if self.free_threaded {
|
||||
SystemPathBuf::from(&*format!(
|
||||
"/Python3.{minor_version}/lib/python3.{minor_version}t/site-packages",
|
||||
minor_version = self.minor_version
|
||||
))
|
||||
} else {
|
||||
SystemPathBuf::from(&*format!(
|
||||
"/Python3.{minor_version}/lib/python3.{minor_version}/site-packages",
|
||||
minor_version = self.minor_version
|
||||
))
|
||||
};
|
||||
|
||||
if self.system_site_packages {
|
||||
assert_eq!(
|
||||
&site_packages_directories,
|
||||
&[expected_venv_site_packages, expected_system_site_packages]
|
||||
);
|
||||
} else {
|
||||
assert_eq!(&site_packages_directories, &[expected_venv_site_packages]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_find_site_packages_directory_no_version_field_in_pyvenv_cfg() {
|
||||
let tester = VirtualEnvironmentTester {
|
||||
system: TestSystem::default(),
|
||||
minor_version: 12,
|
||||
free_threaded: false,
|
||||
system_site_packages: false,
|
||||
pyvenv_cfg_version_field: None,
|
||||
};
|
||||
tester.test();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_find_site_packages_directory_venv_style_version_field_in_pyvenv_cfg() {
|
||||
let tester = VirtualEnvironmentTester {
|
||||
system: TestSystem::default(),
|
||||
minor_version: 12,
|
||||
free_threaded: false,
|
||||
system_site_packages: false,
|
||||
pyvenv_cfg_version_field: Some("version = 3.12"),
|
||||
};
|
||||
tester.test();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_find_site_packages_directory_uv_style_version_field_in_pyvenv_cfg() {
|
||||
let tester = VirtualEnvironmentTester {
|
||||
system: TestSystem::default(),
|
||||
minor_version: 12,
|
||||
free_threaded: false,
|
||||
system_site_packages: false,
|
||||
pyvenv_cfg_version_field: Some("version_info = 3.12"),
|
||||
};
|
||||
tester.test();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_find_site_packages_directory_virtualenv_style_version_field_in_pyvenv_cfg() {
|
||||
let tester = VirtualEnvironmentTester {
|
||||
system: TestSystem::default(),
|
||||
minor_version: 12,
|
||||
free_threaded: false,
|
||||
system_site_packages: false,
|
||||
pyvenv_cfg_version_field: Some("version_info = 3.12.0rc2"),
|
||||
};
|
||||
tester.test();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_find_site_packages_directory_freethreaded_build() {
|
||||
let tester = VirtualEnvironmentTester {
|
||||
system: TestSystem::default(),
|
            minor_version: 13,
            free_threaded: true,
            system_site_packages: false,
            pyvenv_cfg_version_field: Some("version_info = 3.13"),
        };
        tester.test();
    }

    #[test]
    fn finds_system_site_packages() {
        let tester = VirtualEnvironmentTester {
            system: TestSystem::default(),
            minor_version: 13,
            free_threaded: true,
            system_site_packages: true,
            pyvenv_cfg_version_field: Some("version_info = 3.13"),
        };
        tester.test();
    }

    #[test]
    fn reject_venv_that_does_not_exist() {
        let system = TestSystem::default();
        assert!(matches!(
            VirtualEnvironment::new("/.venv", &system),
            Err(SitePackagesDiscoveryError::VenvDirIsNotADirectory(_))
        ));
    }

    #[test]
    fn reject_venv_with_no_pyvenv_cfg_file() {
        let system = TestSystem::default();
        system
            .memory_file_system()
            .create_directory_all("/.venv")
            .unwrap();
        assert!(matches!(
            VirtualEnvironment::new("/.venv", &system),
            Err(SitePackagesDiscoveryError::NoPyvenvCfgFile(_))
        ));
    }

    #[test]
    fn parsing_pyvenv_cfg_with_too_many_equals() {
        let system = TestSystem::default();
        let memory_fs = system.memory_file_system();
        let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
        memory_fs
            .write_file(&pyvenv_cfg_path, "home = bar = /.venv/bin")
            .unwrap();
        let venv_result = VirtualEnvironment::new("/.venv", &system);
        assert!(matches!(
            venv_result,
            Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
                path,
                PyvenvCfgParseErrorKind::TooManyEquals { line_number }
            ))
            if path == pyvenv_cfg_path && Some(line_number) == NonZeroUsize::new(1)
        ));
    }

    #[test]
    fn parsing_pyvenv_cfg_with_key_but_no_value_fails() {
        let system = TestSystem::default();
        let memory_fs = system.memory_file_system();
        let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
        memory_fs.write_file(&pyvenv_cfg_path, "home =").unwrap();
        let venv_result = VirtualEnvironment::new("/.venv", &system);
        assert!(matches!(
            venv_result,
            Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
                path,
                PyvenvCfgParseErrorKind::MalformedKeyValuePair { line_number }
            ))
            if path == pyvenv_cfg_path && Some(line_number) == NonZeroUsize::new(1)
        ));
    }

    #[test]
    fn parsing_pyvenv_cfg_with_value_but_no_key_fails() {
        let system = TestSystem::default();
        let memory_fs = system.memory_file_system();
        let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
        memory_fs
            .write_file(&pyvenv_cfg_path, "= whatever")
            .unwrap();
        let venv_result = VirtualEnvironment::new("/.venv", &system);
        assert!(matches!(
            venv_result,
            Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
                path,
                PyvenvCfgParseErrorKind::MalformedKeyValuePair { line_number }
            ))
            if path == pyvenv_cfg_path && Some(line_number) == NonZeroUsize::new(1)
        ));
    }

    #[test]
    fn parsing_pyvenv_cfg_with_no_home_key_fails() {
        let system = TestSystem::default();
        let memory_fs = system.memory_file_system();
        let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
        memory_fs.write_file(&pyvenv_cfg_path, "").unwrap();
        let venv_result = VirtualEnvironment::new("/.venv", &system);
        assert!(matches!(
            venv_result,
            Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
                path,
                PyvenvCfgParseErrorKind::NoHomeKey
            ))
            if path == pyvenv_cfg_path
        ));
    }

    #[test]
    fn parsing_pyvenv_cfg_with_invalid_home_key_fails() {
        let system = TestSystem::default();
        let memory_fs = system.memory_file_system();
        let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
        memory_fs
            .write_file(&pyvenv_cfg_path, "home = foo")
            .unwrap();
        let venv_result = VirtualEnvironment::new("/.venv", &system);
        assert!(matches!(
            venv_result,
            Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
                path,
                PyvenvCfgParseErrorKind::InvalidHomeValue(_)
            ))
            if path == pyvenv_cfg_path
        ));
    }
}
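The error cases above pin down the line grammar these tests expect from `pyvenv.cfg`: exactly one `=` per line, with a non-empty key and a non-empty value. A minimal, self-contained sketch of that per-line rule (hypothetical names, std only; not the crate's actual parser):

```rust
#[derive(Debug, PartialEq)]
enum LineError {
    TooManyEquals,
    MalformedKeyValuePair,
}

// Sketch of the per-line rule the tests above encode.
fn parse_pyvenv_cfg_line(line: &str) -> Result<(&str, &str), LineError> {
    let mut parts = line.split('=');
    let key = parts.next().unwrap_or("").trim();
    let value = parts.next().map(str::trim);
    if parts.next().is_some() {
        // e.g. "home = bar = /.venv/bin"
        return Err(LineError::TooManyEquals);
    }
    match value {
        Some(value) if !key.is_empty() && !value.is_empty() => Ok((key, value)),
        // e.g. "home =" (no value) or "= whatever" (no key)
        _ => Err(LineError::MalformedKeyValuePair),
    }
}
```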
crates/red_knot_python_semantic/src/stdlib.rs (new file, 87 lines)
@@ -0,0 +1,87 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::types::{global_symbol_ty, Type};
use crate::Db;

/// Enumeration of various core stdlib modules, for which we have dedicated Salsa queries.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CoreStdlibModule {
    Builtins,
    Types,
    Typeshed,
    TypingExtensions,
}

impl CoreStdlibModule {
    fn name(self) -> ModuleName {
        let module_name = match self {
            Self::Builtins => "builtins",
            Self::Types => "types",
            Self::Typeshed => "_typeshed",
            Self::TypingExtensions => "typing_extensions",
        };
        ModuleName::new_static(module_name)
            .unwrap_or_else(|| panic!("{module_name} should be a valid module name!"))
    }
}

/// Look up the type of `symbol` in a given core module.
///
/// Returns `Unbound` if the given core module cannot be resolved for some reason.
fn core_module_symbol_ty<'db>(
    db: &'db dyn Db,
    core_module: CoreStdlibModule,
    symbol: &str,
) -> Type<'db> {
    resolve_module(db, core_module.name())
        .map(|module| global_symbol_ty(db, module.file(), symbol))
        .unwrap_or(Type::Unbound)
}

/// Look up the type of `symbol` in the builtins namespace.
///
/// Returns `Unbound` if the `builtins` module isn't available for some reason.
#[inline]
pub(crate) fn builtins_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
    core_module_symbol_ty(db, CoreStdlibModule::Builtins, symbol)
}

/// Look up the type of `symbol` in the `types` module namespace.
///
/// Returns `Unbound` if the `types` module isn't available for some reason.
#[inline]
pub(crate) fn types_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
    core_module_symbol_ty(db, CoreStdlibModule::Types, symbol)
}

/// Look up the type of `symbol` in the `_typeshed` module namespace.
///
/// Returns `Unbound` if the `_typeshed` module isn't available for some reason.
#[inline]
pub(crate) fn typeshed_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
    core_module_symbol_ty(db, CoreStdlibModule::Typeshed, symbol)
}

/// Look up the type of `symbol` in the `typing_extensions` module namespace.
///
/// Returns `Unbound` if the `typing_extensions` module isn't available for some reason.
#[inline]
pub(crate) fn typing_extensions_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
    core_module_symbol_ty(db, CoreStdlibModule::TypingExtensions, symbol)
}

/// Get the scope of a core stdlib module.
///
/// Can return `None` if a custom typeshed is used that is missing the core module in question.
fn core_module_scope(db: &dyn Db, core_module: CoreStdlibModule) -> Option<ScopeId<'_>> {
    resolve_module(db, core_module.name()).map(|module| global_scope(db, module.file()))
}

/// Get the `builtins` module scope.
///
/// Can return `None` if a custom typeshed is used that is missing `builtins.pyi`.
pub(crate) fn builtins_module_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
    core_module_scope(db, CoreStdlibModule::Builtins)
}
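All four lookups share the same fallback, which keeps callers panic-free when a custom typeshed omits a core module. A hedged usage sketch (`bool_instance_ty` is hypothetical, not part of this file):

```rust
fn bool_instance_ty<'db>(db: &'db dyn Db) -> Type<'db> {
    // If `builtins` resolves, this is the class `bool`, and `to_instance`
    // (defined on `Type` elsewhere in this crate) yields the instance type;
    // if not, the lookup falls back to `Type::Unbound` instead of panicking.
    builtins_symbol_ty(db, "bool").to_instance(db)
}
```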
File diff suppressed because it is too large
crates/red_knot_python_semantic/src/types/builder.rs (new file, 552 lines)
@@ -0,0 +1,552 @@
//! Smart builders for union and intersection types.
//!
//! Invariants we maintain here:
//! * No single-element union types (should just be the contained type instead).
//! * No single-positive-element intersection types. Single-negative-element intersections are
//!   OK; we don't have a standalone negation type, so there's no other representation for them.
//! * The same type should never appear more than once in a union or intersection. (This should
//!   be expanded to cover subtyping -- see below -- but for now we only implement it for type
//!   identity.)
//! * Disjunctive normal form (DNF): the tree of unions and intersections can never be deeper
//!   than a union-of-intersections. Unions cannot contain other unions (the inner union just
//!   flattens into the outer one), intersections cannot contain other intersections (also
//!   flattens), and intersections cannot contain unions (the intersection distributes over the
//!   union, inverting it into a union-of-intersections).
//!
//! The implication of these invariants is that a [`UnionBuilder`] does not necessarily build a
//! [`Type::Union`]. For example, if only one type is added to the [`UnionBuilder`], `build()` will
//! just return that type directly. The same is true for [`IntersectionBuilder`]; for example, if a
//! union type is added to the intersection, it will distribute and [`IntersectionBuilder::build`]
//! may end up returning a [`Type::Union`] of intersections.
//!
//! In the future we should have these additional invariants, but they aren't implemented yet:
//! * No type in a union can be a subtype of any other type in the union (just eliminate the
//!   subtype from the union).
//! * No type in an intersection can be a supertype of any other type in the intersection (just
//!   eliminate the supertype from the intersection).
//! * An intersection containing two non-overlapping types should simplify to [`Type::Never`].
use crate::types::{builtins_symbol_ty, IntersectionType, Type, UnionType};
use crate::{Db, FxOrderSet};
use ordermap::set::MutableValues;

pub(crate) struct UnionBuilder<'db> {
    elements: FxOrderSet<Type<'db>>,
    db: &'db dyn Db,
}

impl<'db> UnionBuilder<'db> {
    pub(crate) fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            elements: FxOrderSet::default(),
        }
    }

    /// Adds a type to this union.
    pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
        match ty {
            Type::Union(union) => {
                for element in union.elements(self.db) {
                    self = self.add(*element);
                }
            }
            Type::Never => {}
            _ => {
                let mut remove = vec![];
                for element in &self.elements {
                    if ty.is_subtype_of(self.db, *element) {
                        return self;
                    } else if element.is_subtype_of(self.db, ty) {
                        remove.push(*element);
                    }
                }
                for element in remove {
                    self.elements.remove(&element);
                }
                self.elements.insert(ty);
            }
        }

        self
    }

    /// Performs the following normalizations:
    /// - Replaces `Literal[True, False]` with `bool`.
    /// - TODO: For enums `E` with members `X1`, ..., `Xn`, replaces
    ///   `Literal[E.X1, ..., E.Xn]` with `E`.
    fn simplify(&mut self) {
        if let Some(true_index) = self.elements.get_index_of(&Type::BooleanLiteral(true)) {
            if self.elements.contains(&Type::BooleanLiteral(false)) {
                *self.elements.get_index_mut2(true_index).unwrap() =
                    builtins_symbol_ty(self.db, "bool");
                self.elements.remove(&Type::BooleanLiteral(false));
            }
        }
    }

    pub(crate) fn build(mut self) -> Type<'db> {
        match self.elements.len() {
            0 => Type::Never,
            1 => self.elements[0],
            _ => {
                self.simplify();

                match self.elements.len() {
                    0 => Type::Never,
                    1 => self.elements[0],
                    _ => {
                        self.elements.shrink_to_fit();
                        Type::Union(UnionType::new(self.db, self.elements))
                    }
                }
            }
        }
    }
}

#[derive(Clone)]
pub(crate) struct IntersectionBuilder<'db> {
    // Really this builds a union-of-intersections, because we always keep our set-theoretic types
    // in disjunctive normal form (DNF), a union of intersections. In the simplest case there's
    // just a single intersection in this vector, and we are building a single intersection type,
    // but if a union is added to the intersection, we'll distribute ourselves over that union and
    // create a union of intersections.
    intersections: Vec<InnerIntersectionBuilder<'db>>,
    db: &'db dyn Db,
}

impl<'db> IntersectionBuilder<'db> {
    pub(crate) fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            intersections: vec![InnerIntersectionBuilder::new()],
        }
    }

    fn empty(db: &'db dyn Db) -> Self {
        Self {
            db,
            intersections: vec![],
        }
    }

    pub(crate) fn add_positive(mut self, ty: Type<'db>) -> Self {
        if let Type::Union(union) = ty {
            // Distribute ourself over this union: for each union element, clone ourself and
            // intersect with that union element, then create a new union-of-intersections with all
            // of those sub-intersections in it. E.g. if `self` is a simple intersection `T1 & T2`
            // and we add `T3 | T4` to the intersection, we don't get `T1 & T2 & (T3 | T4)` (that's
            // not in DNF), we distribute the union and get `(T1 & T3) | (T2 & T3) | (T1 & T4) |
            // (T2 & T4)`. If `self` is already a union-of-intersections `(T1 & T2) | (T3 & T4)`
            // and we add `T5 | T6` to it, that flattens all the way out to `(T1 & T2 & T5) | (T1 &
            // T2 & T6) | (T3 & T4 & T5) ...` -- you get the idea.
            union
                .elements(self.db)
                .iter()
                .map(|elem| self.clone().add_positive(*elem))
                .fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
                    builder.intersections.extend(sub.intersections);
                    builder
                })
        } else {
            // If we are already a union-of-intersections, distribute the new intersected element
            // across all of those intersections.
            for inner in &mut self.intersections {
                inner.add_positive(self.db, ty);
            }
            self
        }
    }

    pub(crate) fn add_negative(mut self, ty: Type<'db>) -> Self {
        // See comments above in `add_positive`; this is just the negated version.
        if let Type::Union(union) = ty {
            union
                .elements(self.db)
                .iter()
                .map(|elem| self.clone().add_negative(*elem))
                .fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
                    builder.intersections.extend(sub.intersections);
                    builder
                })
        } else {
            for inner in &mut self.intersections {
                inner.add_negative(self.db, ty);
            }
            self
        }
    }

    pub(crate) fn build(mut self) -> Type<'db> {
        // Avoid allocating the UnionBuilder unnecessarily if we have just one intersection:
        if self.intersections.len() == 1 {
            self.intersections.pop().unwrap().build(self.db)
        } else {
            UnionType::from_elements(
                self.db,
                self.intersections
                    .into_iter()
                    .map(|inner| inner.build(self.db)),
            )
        }
    }
}

#[derive(Debug, Clone, Default)]
struct InnerIntersectionBuilder<'db> {
    positive: FxOrderSet<Type<'db>>,
    negative: FxOrderSet<Type<'db>>,
}

impl<'db> InnerIntersectionBuilder<'db> {
    fn new() -> Self {
        Self::default()
    }

    /// Adds a positive type to this intersection.
    fn add_positive(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        match ty {
            Type::Intersection(inter) => {
                let pos = inter.positive(db);
                let neg = inter.negative(db);
                self.positive.extend(pos.difference(&self.negative));
                self.negative.extend(neg.difference(&self.positive));
                self.positive.retain(|elem| !neg.contains(elem));
                self.negative.retain(|elem| !pos.contains(elem));
            }
            _ => {
                if !self.negative.remove(&ty) {
                    self.positive.insert(ty);
                };
            }
        }
    }

    /// Adds a negative type to this intersection.
    fn add_negative(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        // TODO: Any/Unknown actually should not self-cancel
        match ty {
            Type::Intersection(intersection) => {
                // Negating an intersection swaps its positive and negative sides.
                let pos = intersection.negative(db);
                let neg = intersection.positive(db);
                self.positive.extend(pos.difference(&self.negative));
                self.negative.extend(neg.difference(&self.positive));
                self.positive.retain(|elem| !neg.contains(elem));
                self.negative.retain(|elem| !pos.contains(elem));
            }
            Type::Never => {}
            Type::Unbound => {}
            _ => {
                if !self.positive.remove(&ty) {
                    self.negative.insert(ty);
                };
            }
        }
    }

    fn simplify(&mut self) {
        // TODO: this should be generalized based on subtyping; for now we just handle a few cases.

        // Never is a subtype of all types.
        if self.positive.contains(&Type::Never) {
            self.positive.retain(Type::is_never);
            self.negative.clear();
        }

        if self.positive.contains(&Type::Unbound) {
            self.positive.retain(Type::is_unbound);
            self.negative.clear();
        }

        // None intersects only with object.
        for pos in &self.positive {
            if let Type::Instance(_) = pos {
                // could be `object` type
            } else {
                self.negative.remove(&Type::None);
                break;
            }
        }
    }

    fn build(mut self, db: &'db dyn Db) -> Type<'db> {
        self.simplify();
        match (self.positive.len(), self.negative.len()) {
            (0, 0) => Type::Never,
            (1, 0) => self.positive[0],
            _ => {
                self.positive.shrink_to_fit();
                self.negative.shrink_to_fit();
                Type::Intersection(IntersectionType::new(db, self.positive, self.negative))
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::{IntersectionBuilder, IntersectionType, Type, UnionType};
    use crate::db::tests::TestDb;
    use crate::program::{Program, SearchPathSettings};
    use crate::python_version::PythonVersion;
    use crate::types::{builtins_symbol_ty, UnionBuilder};
    use crate::ProgramSettings;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

    impl<'db> UnionType<'db> {
        fn elements_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.elements(db).into_iter().copied().collect()
        }
    }

    fn setup_db() -> TestDb {
        let db = TestDb::new();

        let src_root = SystemPathBuf::from("/src");
        db.memory_file_system()
            .create_directory_all(&src_root)
            .unwrap();

        Program::from_settings(
            &db,
            &ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings::new(src_root),
            },
        )
        .expect("Valid search path settings");

        db
    }

    #[test]
    fn build_union() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let union = UnionType::from_elements(&db, [t0, t1]).expect_union();

        assert_eq!(union.elements_vec(&db), &[t0, t1]);
    }

    #[test]
    fn build_union_single() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionType::from_elements(&db, [t0]);
        assert_eq!(ty, t0);
    }

    #[test]
    fn build_union_empty() {
        let db = setup_db();
        let ty = UnionBuilder::new(&db).build();
        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_union_never() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionType::from_elements(&db, [t0, Type::Never]);
        assert_eq!(ty, t0);
    }

    #[test]
    fn build_union_bool() {
        let db = setup_db();
        let bool_ty = builtins_symbol_ty(&db, "bool");

        let t0 = Type::BooleanLiteral(true);
        let t1 = Type::BooleanLiteral(true);
        let t2 = Type::BooleanLiteral(false);
        let t3 = Type::IntLiteral(17);

        let union = UnionType::from_elements(&db, [t0, t1, t3]).expect_union();
        assert_eq!(union.elements_vec(&db), &[t0, t3]);

        let union = UnionType::from_elements(&db, [t0, t1, t2, t3]).expect_union();
        assert_eq!(union.elements_vec(&db), &[bool_ty, t3]);
    }

    #[test]
    fn build_union_flatten() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let u1 = UnionType::from_elements(&db, [t0, t1]);
        let union = UnionType::from_elements(&db, [u1, t2]).expect_union();

        assert_eq!(union.elements_vec(&db), &[t0, t1, t2]);
    }

    #[test]
    fn build_union_simplify_subtype() {
        let db = setup_db();
        let t0 = builtins_symbol_ty(&db, "str").to_instance(&db);
        let t1 = Type::LiteralString;
        let t2 = Type::Unknown;
        let u0 = UnionType::from_elements(&db, [t0, t1]);
        let u1 = UnionType::from_elements(&db, [t1, t0]);
        let u2 = UnionType::from_elements(&db, [t0, t1, t2]);

        assert_eq!(u0, t0);
        assert_eq!(u1, t0);
        assert_eq!(u2.expect_union().elements_vec(&db), &[t0, t2]);
    }

    #[test]
    fn build_union_no_simplify_any() {}

    impl<'db> IntersectionType<'db> {
        fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.positive(db).into_iter().copied().collect()
        }

        fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.negative(db).into_iter().copied().collect()
        }
    }

    #[test]
    fn build_intersection() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ta = Type::Any;
        let intersection = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t0)
            .build()
            .expect_intersection();

        assert_eq!(intersection.pos_vec(&db), &[ta]);
        assert_eq!(intersection.neg_vec(&db), &[t0]);
    }

    #[test]
    fn build_intersection_flatten_positive() {
        let db = setup_db();
        let ta = Type::Any;
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let i0 = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let intersection = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_positive(i0)
            .build()
            .expect_intersection();

        assert_eq!(intersection.pos_vec(&db), &[t2, ta]);
        assert_eq!(intersection.neg_vec(&db), &[t1]);
    }

    #[test]
    fn build_intersection_flatten_negative() {
        let db = setup_db();
        let ta = Type::Any;
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let i0 = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let intersection = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_negative(i0)
            .build()
            .expect_intersection();

        assert_eq!(intersection.pos_vec(&db), &[t2, t1]);
        assert_eq!(intersection.neg_vec(&db), &[ta]);
    }

    #[test]
    fn intersection_distributes_over_union() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let ta = Type::Any;
        let u0 = UnionType::from_elements(&db, [t0, t1]);

        let union = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_positive(u0)
            .build()
            .expect_union();
        let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements_vec(&db)[..] else {
            panic!("expected a union of two intersections");
        };
        assert_eq!(i0.pos_vec(&db), &[ta, t0]);
        assert_eq!(i1.pos_vec(&db), &[ta, t1]);
    }

    #[test]
    fn build_intersection_self_negation() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_negative(Type::None)
            .build();

        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_intersection_simplify_negative_never() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_negative(Type::Never)
            .build();

        assert_eq!(ty, Type::None);
    }

    #[test]
    fn build_intersection_simplify_positive_never() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_positive(Type::Never)
            .build();

        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_intersection_simplify_positive_unbound() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::Unbound)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::Unbound);
    }

    #[test]
    fn build_intersection_simplify_negative_unbound() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_negative(Type::Unbound)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::IntLiteral(1));
    }

    #[test]
    fn build_intersection_simplify_negative_none() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_negative(Type::None)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::IntLiteral(1));
    }
}
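The distribution in `add_positive`/`add_negative` is what keeps everything in DNF. A hedged sketch of the resulting shape when a union meets a negative element, written as though it sat in the `tests` module above (so `setup_db`, `elements_vec`, and the builder imports are in scope):

```rust
#[test]
fn dnf_distribution_sketch() {
    let db = setup_db();
    // (Literal[0] | Literal[1]) & ~Literal[2]
    let u0 = UnionType::from_elements(&db, [Type::IntLiteral(0), Type::IntLiteral(1)]);
    let ty = IntersectionBuilder::new(&db)
        .add_positive(u0)
        .add_negative(Type::IntLiteral(2))
        .build();
    // Distributes to (Literal[0] & ~Literal[2]) | (Literal[1] & ~Literal[2]),
    // so `build()` hands back a union of two intersections.
    let union = ty.expect_union();
    assert_eq!(union.elements_vec(&db).len(), 2);
}
```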
crates/red_knot_python_semantic/src/types/diagnostic.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
use ruff_db::files::File;
use ruff_text_size::{Ranged, TextRange};
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;

#[derive(Debug, Eq, PartialEq)]
pub struct TypeCheckDiagnostic {
    // TODO: Don't use string keys for rules
    pub(super) rule: String,
    pub(super) message: String,
    pub(super) range: TextRange,
    pub(super) file: File,
}

impl TypeCheckDiagnostic {
    pub fn rule(&self) -> &str {
        &self.rule
    }

    pub fn message(&self) -> &str {
        &self.message
    }

    pub fn file(&self) -> File {
        self.file
    }
}

impl Ranged for TypeCheckDiagnostic {
    fn range(&self) -> TextRange {
        self.range
    }
}

/// A collection of type check diagnostics.
///
/// The diagnostics are wrapped in an `Arc` because they need to be cloned multiple times
/// when going from `infer_expression` to `check_file`. We could consider making
/// [`TypeCheckDiagnostic`] a Salsa struct to have them arena-allocated (once the Tables
/// refactor is done). Using a Salsa struct does have the downside that it leaks the Salsa
/// dependency into diagnostics, and each Salsa struct comes with an overhead.
#[derive(Default, Eq, PartialEq)]
pub struct TypeCheckDiagnostics {
    inner: Vec<std::sync::Arc<TypeCheckDiagnostic>>,
}

impl TypeCheckDiagnostics {
    pub fn new() -> Self {
        Self { inner: Vec::new() }
    }

    pub(super) fn push(&mut self, diagnostic: TypeCheckDiagnostic) {
        self.inner.push(Arc::new(diagnostic));
    }

    pub(crate) fn shrink_to_fit(&mut self) {
        self.inner.shrink_to_fit();
    }
}

impl Extend<TypeCheckDiagnostic> for TypeCheckDiagnostics {
    fn extend<T: IntoIterator<Item = TypeCheckDiagnostic>>(&mut self, iter: T) {
        self.inner.extend(iter.into_iter().map(std::sync::Arc::new));
    }
}

impl Extend<std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
    fn extend<T: IntoIterator<Item = Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
        self.inner.extend(iter);
    }
}

impl<'a> Extend<&'a std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
    fn extend<T: IntoIterator<Item = &'a Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
        self.inner
            .extend(iter.into_iter().map(std::sync::Arc::clone));
    }
}

impl std::fmt::Debug for TypeCheckDiagnostics {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        self.inner.fmt(f)
    }
}

impl Deref for TypeCheckDiagnostics {
    type Target = [std::sync::Arc<TypeCheckDiagnostic>];

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

impl IntoIterator for TypeCheckDiagnostics {
    type Item = Arc<TypeCheckDiagnostic>;
    type IntoIter = std::vec::IntoIter<std::sync::Arc<TypeCheckDiagnostic>>;

    fn into_iter(self) -> Self::IntoIter {
        self.inner.into_iter()
    }
}

impl<'a> IntoIterator for &'a TypeCheckDiagnostics {
    type Item = &'a Arc<TypeCheckDiagnostic>;
    type IntoIter = std::slice::Iter<'a, std::sync::Arc<TypeCheckDiagnostic>>;

    fn into_iter(self) -> Self::IntoIter {
        self.inner.iter()
    }
}
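The `Extend<&Arc<TypeCheckDiagnostic>>` impl is the one that makes the `Arc` wrapper pay off. A hedged sketch (`collect_file_diagnostics` is hypothetical, not part of this file) of merging many per-expression collections into a file-level one:

```rust
fn collect_file_diagnostics<'a>(
    per_expression: impl IntoIterator<Item = &'a TypeCheckDiagnostics>,
) -> TypeCheckDiagnostics {
    let mut all = TypeCheckDiagnostics::new();
    for diagnostics in per_expression {
        // This hits `Extend<&Arc<TypeCheckDiagnostic>>`, so only the `Arc`
        // pointers are cloned, never the diagnostic payloads themselves.
        all.extend(diagnostics);
    }
    all
}
```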
@@ -1,14 +1,22 @@
//! Display implementations for types.

use std::fmt::{Display, Formatter};
use std::fmt::{self, Display, Formatter};

use ruff_db::display::FormatterJoinExtension;
use ruff_python_ast::str::Quote;
use ruff_python_literal::escape::AsciiEscape;

use crate::types::{IntersectionType, Type, UnionType};
use crate::Db;
use rustc_hash::FxHashMap;

impl<'db> Type<'db> {
    pub fn display(&'db self, db: &'db dyn Db) -> DisplayType<'db> {
    pub fn display(&self, db: &'db dyn Db) -> DisplayType {
        DisplayType { ty: self, db }
    }
    fn representation(self, db: &'db dyn Db) -> DisplayRepresentation<'db> {
        DisplayRepresentation { db, ty: self }
    }
}

#[derive(Copy, Clone)]
@@ -18,7 +26,41 @@ pub struct DisplayType<'db> {
}

impl Display for DisplayType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let representation = self.ty.representation(self.db);
        if matches!(
            self.ty,
            Type::IntLiteral(_)
                | Type::BooleanLiteral(_)
                | Type::StringLiteral(_)
                | Type::BytesLiteral(_)
                | Type::Class(_)
                | Type::Function(_)
                | Type::RevealTypeFunction(_)
        ) {
            write!(f, "Literal[{representation}]")
        } else {
            representation.fmt(f)
        }
    }
}

impl fmt::Debug for DisplayType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(self, f)
    }
}

/// Writes the string representation of a type, which is the value displayed either as
/// `Literal[<repr>]` or `Literal[<repr1>, <repr2>]` for literal types or as `<repr>` for
/// non-literals.
struct DisplayRepresentation<'db> {
    ty: Type<'db>,
    db: &'db dyn Db,
}

impl Display for DisplayRepresentation<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self.ty {
            Type::Any => f.write_str("Any"),
            Type::Never => f.write_str("Never"),
@@ -26,26 +68,39 @@ impl Display for DisplayType<'_> {
            Type::Unbound => f.write_str("Unbound"),
            Type::None => f.write_str("None"),
            Type::Module(file) => {
                write!(f, "<module '{:?}'>", file.path(self.db.upcast()))
                write!(f, "<module '{:?}'>", file.path(self.db))
            }
            // TODO: functions and classes should display using a fully qualified name
            Type::Class(class) => {
                f.write_str("Literal[")?;
                f.write_str(&class.name(self.db))?;
                f.write_str("]")
            Type::Class(class) => f.write_str(class.name(self.db)),
            Type::Instance(class) => f.write_str(class.name(self.db)),
            Type::Function(function) | Type::RevealTypeFunction(function) => {
                f.write_str(function.name(self.db))
            }
            Type::Instance(class) => f.write_str(&class.name(self.db)),
            Type::Function(function) => f.write_str(&function.name(self.db)),
            Type::Union(union) => union.display(self.db).fmt(f),
            Type::Intersection(intersection) => intersection.display(self.db).fmt(f),
            Type::IntLiteral(n) => write!(f, "Literal[{n}]"),
        }
    }
}
            Type::IntLiteral(n) => n.fmt(f),
            Type::BooleanLiteral(boolean) => f.write_str(if boolean { "True" } else { "False" }),
            Type::StringLiteral(string) => {
                write!(f, r#""{}""#, string.value(self.db).replace('"', r#"\""#))
            }
            Type::LiteralString => f.write_str("LiteralString"),
            Type::BytesLiteral(bytes) => {
                let escape =
                    AsciiEscape::with_preferred_quote(bytes.value(self.db).as_ref(), Quote::Double);

impl std::fmt::Debug for DisplayType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
                escape.bytes_repr().write(f)
            }
            Type::Tuple(tuple) => {
                f.write_str("tuple[")?;
                let elements = tuple.elements(self.db);
                if elements.is_empty() {
                    f.write_str("()")?;
                } else {
                    elements.display(self.db).fmt(f)?;
                }
                f.write_str("]")
            }
        }
    }
}

@@ -61,54 +116,90 @@ struct DisplayUnionType<'db> {
}

impl Display for DisplayUnionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let union = self.ty;
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let elements = self.ty.elements(self.db);

        let (int_literals, other_types): (Vec<Type>, Vec<Type>) = union
            .elements(self.db)
            .iter()
            .copied()
            .partition(|ty| matches!(ty, Type::IntLiteral(_)));
        // Group literal types by kind.
        let mut grouped_literals = FxHashMap::default();

        let mut first = true;
        if !int_literals.is_empty() {
            f.write_str("Literal[")?;
            let mut nums: Vec<_> = int_literals
                .into_iter()
                .filter_map(|ty| {
                    if let Type::IntLiteral(n) = ty {
                        Some(n)
                    } else {
                        None
                    }
                })
                .collect();
            nums.sort_unstable();
            for num in nums {
                if !first {
                    f.write_str(", ")?;
                }
                write!(f, "{num}")?;
                first = false;
        for element in elements {
            if let Ok(literal_kind) = LiteralTypeKind::try_from(*element) {
                grouped_literals
                    .entry(literal_kind)
                    .or_insert_with(Vec::new)
                    .push(*element);
            }
            f.write_str("]")?;
        }

        for ty in other_types {
            if !first {
                f.write_str(" | ")?;
            };
            first = false;
            write!(f, "{}", ty.display(self.db))?;
        let mut join = f.join(" | ");

        for element in elements {
            if let Ok(literal_kind) = LiteralTypeKind::try_from(*element) {
                let Some(mut literals) = grouped_literals.remove(&literal_kind) else {
                    continue;
                };
                if literal_kind == LiteralTypeKind::IntLiteral {
                    literals.sort_unstable_by_key(|ty| ty.expect_int_literal());
                }
                join.entry(&DisplayLiteralGroup {
                    literals,
                    db: self.db,
                });
            } else {
                join.entry(&element.display(self.db));
            }
        }

        join.finish()?;

        debug_assert!(grouped_literals.is_empty());

        Ok(())
    }
}

impl std::fmt::Debug for DisplayUnionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
impl fmt::Debug for DisplayUnionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(self, f)
    }
}

struct DisplayLiteralGroup<'db> {
    literals: Vec<Type<'db>>,
    db: &'db dyn Db,
}

impl Display for DisplayLiteralGroup<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.write_str("Literal[")?;
        f.join(", ")
            .entries(self.literals.iter().map(|ty| ty.representation(self.db)))
            .finish()?;
        f.write_str("]")
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
enum LiteralTypeKind {
    Class,
    Function,
    IntLiteral,
    StringLiteral,
    BytesLiteral,
}

impl TryFrom<Type<'_>> for LiteralTypeKind {
    type Error = ();

    fn try_from(value: Type<'_>) -> Result<Self, Self::Error> {
        match value {
            Type::Class(_) => Ok(Self::Class),
            Type::Function(_) | Type::RevealTypeFunction(_) => Ok(Self::Function),
            Type::IntLiteral(_) => Ok(Self::IntLiteral),
            Type::StringLiteral(_) => Ok(Self::StringLiteral),
            Type::BytesLiteral(_) => Ok(Self::BytesLiteral),
            _ => Err(()),
        }
    }
}

@@ -124,30 +215,159 @@ struct DisplayIntersectionType<'db> {
}

impl Display for DisplayIntersectionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut first = true;
        for (neg, ty) in self
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let tys = self
            .ty
            .positive(self.db)
            .iter()
            .map(|ty| (false, ty))
            .chain(self.ty.negative(self.db).iter().map(|ty| (true, ty)))
        {
            if !first {
                f.write_str(" & ")?;
            };
            first = false;
            if neg {
                f.write_str("~")?;
            };
            write!(f, "{}", ty.display(self.db))?;
        }
        Ok(())
            .map(|&ty| DisplayMaybeNegatedType {
                ty,
                db: self.db,
                negated: false,
            })
            .chain(
                self.ty
                    .negative(self.db)
                    .iter()
                    .map(|&ty| DisplayMaybeNegatedType {
                        ty,
                        db: self.db,
                        negated: true,
                    }),
            );
        f.join(" & ").entries(tys).finish()
    }
}

impl std::fmt::Debug for DisplayIntersectionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
impl fmt::Debug for DisplayIntersectionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(self, f)
    }
}

struct DisplayMaybeNegatedType<'db> {
    ty: Type<'db>,
    db: &'db dyn Db,
    negated: bool,
}

impl<'db> Display for DisplayMaybeNegatedType<'db> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        if self.negated {
            f.write_str("~")?;
        }
        self.ty.display(self.db).fmt(f)
    }
}

pub(crate) trait TypeArrayDisplay<'db> {
    fn display(&self, db: &'db dyn Db) -> DisplayTypeArray;
}

impl<'db> TypeArrayDisplay<'db> for Box<[Type<'db>]> {
    fn display(&self, db: &'db dyn Db) -> DisplayTypeArray {
        DisplayTypeArray { types: self, db }
    }
}

impl<'db> TypeArrayDisplay<'db> for Vec<Type<'db>> {
    fn display(&self, db: &'db dyn Db) -> DisplayTypeArray {
        DisplayTypeArray { types: self, db }
    }
}

pub(crate) struct DisplayTypeArray<'b, 'db> {
    types: &'b [Type<'db>],
    db: &'db dyn Db,
}

impl<'db> Display for DisplayTypeArray<'_, 'db> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.join(", ")
            .entries(self.types.iter().map(|ty| ty.display(self.db)))
            .finish()
    }
}

#[cfg(test)]
mod tests {
    use ruff_db::files::system_path_to_file;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

    use crate::db::tests::TestDb;
    use crate::types::{global_symbol_ty, BytesLiteralType, StringLiteralType, Type, UnionType};
    use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};

    fn setup_db() -> TestDb {
        let db = TestDb::new();

        let src_root = SystemPathBuf::from("/src");
        db.memory_file_system()
            .create_directory_all(&src_root)
            .unwrap();

        Program::from_settings(
            &db,
            &ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings::new(src_root),
            },
        )
        .expect("Valid search path settings");

        db
    }

    #[test]
    fn test_condense_literal_display_by_type() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_dedented(
            "src/main.py",
            "
            def foo(x: int) -> int:
                return x + 1

            def bar(s: str) -> str:
                return s

            class A: ...
            class B: ...
            ",
        )?;
        let mod_file = system_path_to_file(&db, "src/main.py").expect("Expected file to exist.");

        let union_elements = &[
            Type::Unknown,
            Type::IntLiteral(-1),
            global_symbol_ty(&db, mod_file, "A"),
            Type::StringLiteral(StringLiteralType::new(&db, Box::from("A"))),
            Type::BytesLiteral(BytesLiteralType::new(&db, Box::from([0]))),
            Type::BytesLiteral(BytesLiteralType::new(&db, Box::from([7]))),
            Type::IntLiteral(0),
            Type::IntLiteral(1),
            Type::StringLiteral(StringLiteralType::new(&db, Box::from("B"))),
            global_symbol_ty(&db, mod_file, "foo"),
            global_symbol_ty(&db, mod_file, "bar"),
            global_symbol_ty(&db, mod_file, "B"),
            Type::BooleanLiteral(true),
            Type::None,
        ];
        let union = UnionType::from_elements(&db, union_elements).expect_union();
        let display = format!("{}", union.display(&db));
        assert_eq!(
            display,
            concat!(
                "Unknown | ",
                "Literal[-1, 0, 1] | ",
                "Literal[A, B] | ",
                "Literal[\"A\", \"B\"] | ",
                "Literal[b\"\\x00\", b\"\\x07\"] | ",
                "Literal[foo, bar] | ",
                "Literal[True] | ",
                "None"
            )
        );
        Ok(())
    }
}
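The new union `Display` impl works in two passes: bucket literal elements by `LiteralTypeKind`, then emit each bucket once, at the position of its first member. A self-contained sketch of that strategy over plain strings (hypothetical `render` helper, std only):

```rust
use std::collections::HashMap;

// Each element is (rendered text, Some(kind) for literals, None otherwise).
// render(&[("Unknown", None), ("-1", Some("int")), ("\"A\"", Some("str")), ("0", Some("int"))])
// yields: Unknown | Literal[-1, 0] | Literal["A"]
fn render(elements: &[(&str, Option<&str>)]) -> String {
    // First pass: group literal elements by kind.
    let mut groups: HashMap<&str, Vec<&str>> = HashMap::new();
    for &(text, kind) in elements {
        if let Some(kind) = kind {
            groups.entry(kind).or_default().push(text);
        }
    }
    // Second pass: a literal renders its whole group at its first occurrence;
    // non-literals render on their own.
    let mut parts = Vec::new();
    for &(text, kind) in elements {
        match kind {
            Some(kind) => {
                if let Some(group) = groups.remove(kind) {
                    parts.push(format!("Literal[{}]", group.join(", ")));
                }
            }
            None => parts.push(text.to_string()),
        }
    }
    parts.join(" | ")
}
```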
File diff suppressed because it is too large
crates/red_knot_python_semantic/src/types/narrow.rs (new file, 186 lines)
@@ -0,0 +1,186 @@
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::constraint::{Constraint, PatternConstraint};
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId, SymbolTable};
use crate::semantic_index::symbol_table;
use crate::types::{infer_expression_types, IntersectionBuilder, Type};
use crate::Db;
use ruff_python_ast as ast;
use rustc_hash::FxHashMap;
use std::sync::Arc;

/// Return the type constraint that `test` (if true) would place on `definition`, if any.
///
/// For example, if we have this code:
///
/// ```python
/// y = 1 if flag else None
/// x = 1 if flag else None
/// if x is not None:
///     ...
/// ```
///
/// The `test` expression `x is not None` places the constraint "not None" on the definition of
/// `x`, so in that case we'd return `Some(Type::Intersection(negative=[Type::None]))`.
///
/// But if we called this with the same `test` expression, but the `definition` of `y`, no
/// constraint is applied to that definition, so we'd just return `None`.
pub(crate) fn narrowing_constraint<'db>(
    db: &'db dyn Db,
    constraint: Constraint<'db>,
    definition: Definition<'db>,
) -> Option<Type<'db>> {
    match constraint {
        Constraint::Expression(expression) => {
            all_narrowing_constraints_for_expression(db, expression)
                .get(&definition.symbol(db))
                .copied()
        }
        Constraint::Pattern(pattern) => all_narrowing_constraints_for_pattern(db, pattern)
            .get(&definition.symbol(db))
            .copied(),
    }
}

#[salsa::tracked(return_ref)]
fn all_narrowing_constraints_for_pattern<'db>(
    db: &'db dyn Db,
    pattern: PatternConstraint<'db>,
) -> NarrowingConstraints<'db> {
    NarrowingConstraintsBuilder::new(db, Constraint::Pattern(pattern)).finish()
}

#[salsa::tracked(return_ref)]
fn all_narrowing_constraints_for_expression<'db>(
    db: &'db dyn Db,
    expression: Expression<'db>,
) -> NarrowingConstraints<'db> {
    NarrowingConstraintsBuilder::new(db, Constraint::Expression(expression)).finish()
}

type NarrowingConstraints<'db> = FxHashMap<ScopedSymbolId, Type<'db>>;

struct NarrowingConstraintsBuilder<'db> {
    db: &'db dyn Db,
    constraint: Constraint<'db>,
    constraints: NarrowingConstraints<'db>,
}

impl<'db> NarrowingConstraintsBuilder<'db> {
    fn new(db: &'db dyn Db, constraint: Constraint<'db>) -> Self {
        Self {
            db,
            constraint,
            constraints: NarrowingConstraints::default(),
        }
    }

    fn finish(mut self) -> NarrowingConstraints<'db> {
        match self.constraint {
            Constraint::Expression(expression) => self.evaluate_expression_constraint(expression),
            Constraint::Pattern(pattern) => self.evaluate_pattern_constraint(pattern),
        }

        self.constraints.shrink_to_fit();
        self.constraints
    }

    fn evaluate_expression_constraint(&mut self, expression: Expression<'db>) {
        if let ast::Expr::Compare(expr_compare) = expression.node_ref(self.db).node() {
            self.add_expr_compare(expr_compare, expression);
        }
        // TODO: other test expression kinds
    }

    fn evaluate_pattern_constraint(&mut self, pattern: PatternConstraint<'db>) {
        let subject = pattern.subject(self.db);

        match pattern.pattern(self.db).node() {
            ast::Pattern::MatchValue(_) => {
                // TODO
            }
            ast::Pattern::MatchSingleton(singleton_pattern) => {
                self.add_match_pattern_singleton(subject, singleton_pattern);
            }
            ast::Pattern::MatchSequence(_) => {
                // TODO
            }
            ast::Pattern::MatchMapping(_) => {
                // TODO
            }
            ast::Pattern::MatchClass(_) => {
                // TODO
            }
            ast::Pattern::MatchStar(_) => {
                // TODO
            }
            ast::Pattern::MatchAs(_) => {
                // TODO
            }
            ast::Pattern::MatchOr(_) => {
                // TODO
            }
        }
    }

    fn symbols(&self) -> Arc<SymbolTable> {
        symbol_table(self.db, self.scope())
    }

    fn scope(&self) -> ScopeId<'db> {
        match self.constraint {
            Constraint::Expression(expression) => expression.scope(self.db),
            Constraint::Pattern(pattern) => pattern.scope(self.db),
        }
    }

    fn add_expr_compare(&mut self, expr_compare: &ast::ExprCompare, expression: Expression<'db>) {
        let ast::ExprCompare {
            range: _,
            left,
            ops,
            comparators,
        } = expr_compare;

        if let ast::Expr::Name(ast::ExprName {
            range: _,
            id,
            ctx: _,
        }) = left.as_ref()
        {
            // SAFETY: we should always have a symbol for every Name node.
            let symbol = self.symbols().symbol_id_by_name(id).unwrap();
            let scope = self.scope();
            let inference = infer_expression_types(self.db, expression);
            for (op, comparator) in std::iter::zip(&**ops, &**comparators) {
                let comp_ty = inference.expression_ty(comparator.scoped_ast_id(self.db, scope));
                if matches!(op, ast::CmpOp::IsNot) {
                    let ty = IntersectionBuilder::new(self.db)
                        .add_negative(comp_ty)
                        .build();
                    self.constraints.insert(symbol, ty);
                };
                // TODO: other comparison types
            }
        }
    }

    fn add_match_pattern_singleton(
        &mut self,
        subject: &ast::Expr,
        pattern: &ast::PatternMatchSingleton,
    ) {
        if let Some(ast::ExprName { id, .. }) = subject.as_name_expr() {
            // SAFETY: we should always have a symbol for every Name node.
            let symbol = self.symbols().symbol_id_by_name(id).unwrap();

            let ty = match pattern.value {
                ast::Singleton::None => Type::None,
                ast::Singleton::True => Type::BooleanLiteral(true),
                ast::Singleton::False => Type::BooleanLiteral(false),
            };
            self.constraints.insert(symbol, ty);
        }
    }
}
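The `IsNot` branch above stores `~None` (an intersection with a single negative element) as the constraint for `x`. A hedged sketch of what applying that constraint looks like, written as though it sat in the `types/builder.rs` tests (so `setup_db` and the builder types are in scope):

```rust
#[test]
fn apply_is_not_none_constraint_sketch() {
    let db = setup_db();
    // Unnarrowed type of `x`: Literal[1] | None.
    let unnarrowed = UnionType::from_elements(&db, [Type::IntLiteral(1), Type::None]);
    // The stored constraint for `x is not None` is "~None"; applying it means
    // intersecting it with the unnarrowed type. The `None` branch cancels out.
    let narrowed = IntersectionBuilder::new(&db)
        .add_positive(unnarrowed)
        .add_negative(Type::None)
        .build();
    assert_eq!(narrowed, Type::IntLiteral(1));
}
```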
crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt (vendored, new file, 1 line)
@@ -0,0 +1 @@
9e506eb5e8fc2823db8c60ad561b1145ff114947
||||
@@ -41,7 +41,7 @@ _json: 3.0-
|
||||
_locale: 3.0-
|
||||
_lsprof: 3.0-
|
||||
_markupbase: 3.0-
|
||||
_msi: 3.0-
|
||||
_msi: 3.0-3.12
|
||||
_operator: 3.4-
|
||||
_osx_support: 3.0-
|
||||
_posixsubprocess: 3.2-
|
||||
@@ -1,7 +1,7 @@
|
||||
import sys
|
||||
import typing_extensions
|
||||
from typing import Any, ClassVar, Generic, Literal, TypedDict, overload
|
||||
from typing_extensions import Unpack
|
||||
from typing_extensions import Self, Unpack
|
||||
|
||||
PyCF_ONLY_AST: Literal[1024]
|
||||
PyCF_TYPE_COMMENTS: Literal[4096]
|
||||
@@ -34,6 +34,9 @@ class AST:
|
||||
if sys.version_info >= (3, 13):
|
||||
_field_types: ClassVar[dict[str, Any]]
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self) -> Self: ...
|
||||
|
||||
class mod(AST): ...
|
||||
class type_ignore(AST): ...
|
||||
|
||||
@@ -44,6 +47,9 @@ class TypeIgnore(type_ignore):
|
||||
tag: str
|
||||
def __init__(self, lineno: int, tag: str) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, lineno: int = ..., tag: str = ...) -> Self: ...
|
||||
|
||||
class FunctionType(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("argtypes", "returns")
|
||||
@@ -57,6 +63,9 @@ class FunctionType(mod):
|
||||
else:
|
||||
def __init__(self, argtypes: list[expr], returns: expr) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, argtypes: list[expr] = ..., returns: expr = ...) -> Self: ...
|
||||
|
||||
class Module(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body", "type_ignores")
|
||||
@@ -67,6 +76,9 @@ class Module(mod):
|
||||
else:
|
||||
def __init__(self, body: list[stmt], type_ignores: list[TypeIgnore]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> Self: ...
|
||||
|
||||
class Interactive(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body",)
|
||||
@@ -76,12 +88,18 @@ class Interactive(mod):
|
||||
else:
|
||||
def __init__(self, body: list[stmt]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, body: list[stmt] = ...) -> Self: ...
|
||||
|
||||
class Expression(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body",)
|
||||
body: expr
|
||||
def __init__(self, body: expr) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, body: expr = ...) -> Self: ...
|
||||
|
||||
class stmt(AST):
|
||||
lineno: int
|
||||
col_offset: int
|
||||
@@ -89,6 +107,9 @@ class stmt(AST):
|
||||
end_col_offset: int | None
|
||||
def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class FunctionDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
|
||||
@@ -152,6 +173,19 @@ class FunctionDef(stmt):
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
name: _Identifier = ...,
|
||||
args: arguments = ...,
|
||||
body: list[stmt] = ...,
|
||||
decorator_list: list[expr] = ...,
|
||||
returns: expr | None = ...,
|
||||
type_comment: str | None = ...,
|
||||
type_params: list[type_param] = ...,
|
||||
) -> Self: ...
|
||||
|
||||
class AsyncFunctionDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
|
||||
@@ -215,6 +249,19 @@ class AsyncFunctionDef(stmt):
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
name: _Identifier = ...,
|
||||
args: arguments = ...,
|
||||
body: list[stmt],
|
||||
decorator_list: list[expr],
|
||||
returns: expr | None,
|
||||
type_comment: str | None,
|
||||
type_params: list[type_param],
|
||||
) -> Self: ...
|
||||
|
||||
class ClassDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params")
|
||||
@@ -260,12 +307,28 @@ class ClassDef(stmt):
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
name: _Identifier,
|
||||
bases: list[expr],
|
||||
keywords: list[keyword],
|
||||
body: list[stmt],
|
||||
decorator_list: list[expr],
|
||||
type_params: list[type_param],
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> Self: ...
|
||||
|
||||
class Return(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr | None
|
||||
def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class Delete(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("targets",)
|
||||
@@ -275,6 +338,9 @@ class Delete(stmt):
|
||||
else:
|
||||
def __init__(self, targets: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class Assign(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("targets", "value", "type_comment")
|
||||
@@ -295,6 +361,11 @@ class Assign(stmt):
|
||||
self, targets: list[expr], value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes]
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, targets: list[expr] = ..., value: expr = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes]
|
||||
) -> Self: ...
|
||||
|
||||
class AugAssign(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "op", "value")
|
||||
@@ -305,6 +376,16 @@ class AugAssign(stmt):
|
||||
self, target: Name | Attribute | Subscript, op: operator, value: expr, **kwargs: Unpack[_Attributes]
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
target: Name | Attribute | Subscript = ...,
|
||||
op: operator = ...,
|
||||
value: expr = ...,
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> Self: ...
|
||||
|
||||
class AnnAssign(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "annotation", "value", "simple")
|
||||
@@ -332,6 +413,17 @@ class AnnAssign(stmt):
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
target: Name | Attribute | Subscript = ...,
|
||||
annotation: expr = ...,
|
||||
value: expr | None = ...,
|
||||
simple: int = ...,
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> Self: ...
|
||||
|
||||
class For(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
|
||||
@@ -361,6 +453,18 @@ class For(stmt):
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
target: expr = ...,
|
||||
iter: expr = ...,
|
||||
body: list[stmt] = ...,
|
||||
orelse: list[stmt] = ...,
|
||||
type_comment: str | None = ...,
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> Self: ...
|
||||
|
||||
class AsyncFor(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
|
||||
@@ -390,6 +494,18 @@ class AsyncFor(stmt):
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
target: expr = ...,
|
||||
iter: expr = ...,
|
||||
body: list[stmt] = ...,
|
||||
orelse: list[stmt] = ...,
|
||||
type_comment: str | None = ...,
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> Self: ...
|
||||
|
||||
class While(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "body", "orelse")
|
||||
@@ -403,6 +519,9 @@ class While(stmt):
|
||||
else:
|
||||
def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class If(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "body", "orelse")
|
||||
@@ -416,6 +535,11 @@ class If(stmt):
|
||||
else:
|
||||
def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes]
|
||||
) -> Self: ...
|
||||
|
||||
class With(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("items", "body", "type_comment")
@@ -435,6 +559,16 @@ class With(stmt):
        self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes]
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            items: list[withitem] = ...,
            body: list[stmt] = ...,
            type_comment: str | None = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class AsyncWith(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("items", "body", "type_comment")
@@ -454,6 +588,16 @@ class AsyncWith(stmt):
        self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes]
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            items: list[withitem] = ...,
            body: list[stmt] = ...,
            type_comment: str | None = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class Raise(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("exc", "cause")
@@ -461,6 +605,9 @@ class Raise(stmt):
    cause: expr | None
    def __init__(self, exc: expr | None = None, cause: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, exc: expr | None = ..., cause: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Try(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("body", "handlers", "orelse", "finalbody")
@@ -487,6 +634,17 @@ class Try(stmt):
        **kwargs: Unpack[_Attributes],
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            body: list[stmt] = ...,
            handlers: list[ExceptHandler] = ...,
            orelse: list[stmt] = ...,
            finalbody: list[stmt] = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

if sys.version_info >= (3, 11):
    class TryStar(stmt):
        __match_args__ = ("body", "handlers", "orelse", "finalbody")
@@ -513,6 +671,17 @@ if sys.version_info >= (3, 11):
            **kwargs: Unpack[_Attributes],
        ) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self,
                *,
                body: list[stmt] = ...,
                handlers: list[ExceptHandler] = ...,
                orelse: list[stmt] = ...,
                finalbody: list[stmt] = ...,
                **kwargs: Unpack[_Attributes],
            ) -> Self: ...

class Assert(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "msg")
@@ -520,6 +689,9 @@ class Assert(stmt):
    msg: expr | None
    def __init__(self, test: expr, msg: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
    if sys.version_info >= (3, 14):
        def __replace__(self, *, test: expr = ..., msg: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Import(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("names",)
@@ -529,6 +701,9 @@ class Import(stmt):
    else:
        def __init__(self, names: list[alias], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class ImportFrom(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("module", "names", "level")
@@ -550,6 +725,11 @@ class ImportFrom(stmt):
        self, module: str | None = None, *, names: list[alias], level: int, **kwargs: Unpack[_Attributes]
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, module: str | None = ..., names: list[alias] = ..., level: int = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Global(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("names",)
@@ -559,6 +739,9 @@ class Global(stmt):
    else:
        def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ...
    if sys.version_info >= (3, 14):
        def __replace__(self, *, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Nonlocal(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("names",)
@@ -568,12 +751,18 @@ class Nonlocal(stmt):
    else:
        def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Expr(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr
    def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Pass(stmt): ...
class Break(stmt): ...
class Continue(stmt): ...
@@ -585,6 +774,9 @@ class expr(AST):
    end_col_offset: int | None
    def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ...
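The `**kwargs: Unpack[_Attributes]` seen in every signature above covers the four location attributes (`lineno`, `col_offset`, `end_lineno`, `end_col_offset`). They are optional at construction time, and `ast.fix_missing_locations()` backfills whatever was omitted. A quick editorial sketch:

import ast

# Pass locations explicitly, or let fix_missing_locations() backfill them.
name = ast.Name(id="answer", ctx=ast.Load(), lineno=1, col_offset=0)
node = ast.fix_missing_locations(ast.Expression(body=name))
print(eval(compile(node, "<ast>", "eval"), {"answer": 42}))  # 42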
class BoolOp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("op", "values")
@@ -595,6 +787,9 @@ class BoolOp(expr):
    else:
        def __init__(self, op: boolop, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, op: boolop = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class BinOp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("left", "op", "right")
@@ -603,6 +798,11 @@ class BinOp(expr):
    right: expr
    def __init__(self, left: expr, op: operator, right: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, left: expr = ..., op: operator = ..., right: expr = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class UnaryOp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("op", "operand")
@@ -610,6 +810,9 @@ class UnaryOp(expr):
    operand: expr
    def __init__(self, op: unaryop, operand: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, op: unaryop = ..., operand: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Lambda(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("args", "body")
@@ -617,6 +820,9 @@ class Lambda(expr):
    body: expr
    def __init__(self, args: arguments, body: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, args: arguments = ..., body: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class IfExp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "body", "orelse")
@@ -625,6 +831,11 @@ class IfExp(expr):
    orelse: expr
    def __init__(self, test: expr, body: expr, orelse: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, test: expr = ..., body: expr = ..., orelse: expr = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Dict(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("keys", "values")
@@ -635,6 +846,11 @@ class Dict(expr):
    else:
        def __init__(self, keys: list[expr | None], values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Set(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elts",)
@@ -644,6 +860,9 @@ class Set(expr):
    else:
        def __init__(self, elts: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class ListComp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elt", "generators")
@@ -654,6 +873,11 @@ class ListComp(expr):
    else:
        def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class SetComp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elt", "generators")
@@ -664,6 +888,11 @@ class SetComp(expr):
    else:
        def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class DictComp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("key", "value", "generators")
@@ -677,6 +906,11 @@ class DictComp(expr):
    else:
        def __init__(self, key: expr, value: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, key: expr = ..., value: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...
class GeneratorExp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elt", "generators")
@@ -687,24 +921,38 @@ class GeneratorExp(expr):
    else:
        def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Await(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr
    def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Yield(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr | None
    def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class YieldFrom(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr
    def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Compare(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("left", "ops", "comparators")
@@ -718,6 +966,11 @@ class Compare(expr):
    else:
        def __init__(self, left: expr, ops: list[cmpop], comparators: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, left: expr = ..., ops: list[cmpop] = ..., comparators: list[expr] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Call(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("func", "args", "keywords")
@@ -731,6 +984,11 @@ class Call(expr):
    else:
        def __init__(self, func: expr, args: list[expr], keywords: list[keyword], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, func: expr = ..., args: list[expr] = ..., keywords: list[keyword] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...
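Call's three fields map directly onto call syntax: `func`, positional `args`, and `keywords` (each an `ast.keyword`). A hand-built example (editorial sketch):

import ast

call = ast.Call(
    func=ast.Name(id="print", ctx=ast.Load()),
    args=[ast.Constant("hi"), ast.Constant("there")],
    keywords=[ast.keyword(arg="sep", value=ast.Constant("-"))],
)
mod = ast.fix_missing_locations(ast.Module(body=[ast.Expr(value=call)], type_ignores=[]))
exec(compile(mod, "<ast>", "exec"))  # prints: hi-there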
class FormattedValue(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "conversion", "format_spec")
@@ -739,6 +997,11 @@ class FormattedValue(expr):
    format_spec: expr | None
    def __init__(self, value: expr, conversion: int, format_spec: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, value: expr = ..., conversion: int = ..., format_spec: expr | None = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class JoinedStr(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("values",)
@@ -748,16 +1011,24 @@ class JoinedStr(expr):
    else:
        def __init__(self, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
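F-strings parse to a `JoinedStr` whose `values` interleave `Constant` text with `FormattedValue` nodes; `conversion` is -1 for no conversion, or `ord("s")`/`ord("r")`/`ord("a")` for `!s`/`!r`/`!a`. For instance (editorial sketch):

import ast

node = ast.parse('f"x={x!r}"', mode="eval").body
assert isinstance(node, ast.JoinedStr)
for part in node.values:
    if isinstance(part, ast.FormattedValue):
        print(part.conversion)  # 114, i.e. ord("r")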
class Constant(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "kind")
    value: Any  # None, str, bytes, bool, int, float, complex, Ellipsis
    kind: str | None
    # Aliases for value, for backwards compatibility
    s: Any
    n: int | float | complex
    if sys.version_info < (3, 14):
        # Aliases for value, for backwards compatibility
        s: Any
        n: int | float | complex

    def __init__(self, value: Any, kind: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: Any = ..., kind: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
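This hunk moves the legacy `s`/`n` aliases behind a `< (3, 14)` gate: the `ast.Str`/`ast.Num`-era attribute aliases are gone in Python 3.14, and `.value` is the portable spelling. A small editorial sketch:

import ast
import sys

const = ast.parse("42", mode="eval").body
assert isinstance(const, ast.Constant)
print(const.value)   # 42, on every version
if sys.version_info < (3, 14):
    print(const.n)   # deprecated alias for .value; removed in 3.14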
class NamedExpr(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "value")
@@ -765,6 +1036,9 @@ class NamedExpr(expr):
    value: expr
    def __init__(self, target: Name, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, target: Name = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Attribute(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "attr", "ctx")
@@ -773,6 +1047,11 @@ class Attribute(expr):
    ctx: expr_context  # Not present in Python < 3.13 if not passed to `__init__`
    def __init__(self, value: expr, attr: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, value: expr = ..., attr: _Identifier = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

if sys.version_info >= (3, 9):
    _Slice: typing_extensions.TypeAlias = expr
    _SliceAttributes: typing_extensions.TypeAlias = _Attributes
@@ -792,6 +1071,16 @@ class Slice(_Slice):
        self, lower: expr | None = None, upper: expr | None = None, step: expr | None = None, **kwargs: Unpack[_SliceAttributes]
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            lower: expr | None = ...,
            upper: expr | None = ...,
            step: expr | None = ...,
            **kwargs: Unpack[_SliceAttributes],
        ) -> Self: ...
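Since 3.9, `Slice` is itself an `expr` (the `_Slice` alias above), so a subscript's `.slice` holds the slice node directly rather than an `ast.Index`/`ast.ExtSlice` wrapper. Editorial sketch (3.9+):

import ast

sub = ast.parse("items[1:n]", mode="eval").body
assert isinstance(sub, ast.Subscript)
sl = sub.slice             # an ast.Slice on 3.9+, no Index wrapper
print(ast.dump(sl.lower))  # Constant(value=1)
print(sl.step)             # None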
if sys.version_info < (3, 9):
    class ExtSlice(slice):
        dims: list[slice]
@@ -809,6 +1098,11 @@ class Subscript(expr):
    ctx: expr_context  # Not present in Python < 3.13 if not passed to `__init__`
    def __init__(self, value: expr, slice: _Slice, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, value: expr = ..., slice: _Slice = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...
class Starred(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "ctx")
@@ -816,6 +1110,9 @@ class Starred(expr):
    ctx: expr_context  # Not present in Python < 3.13 if not passed to `__init__`
    def __init__(self, value: expr, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Name(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("id", "ctx")
@@ -823,6 +1120,9 @@ class Name(expr):
    ctx: expr_context  # Not present in Python < 3.13 if not passed to `__init__`
    def __init__(self, id: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, id: _Identifier = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class List(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elts", "ctx")
@@ -833,6 +1133,9 @@ class List(expr):
    else:
        def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Tuple(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elts", "ctx")
@@ -845,6 +1148,9 @@ class Tuple(expr):
    else:
        def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class expr_context(AST): ...

if sys.version_info < (3, 9):
@@ -908,6 +1214,9 @@ class comprehension(AST):
    else:
        def __init__(self, target: expr, iter: expr, ifs: list[expr], is_async: int) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, target: expr = ..., iter: expr = ..., ifs: list[expr] = ..., is_async: int = ...) -> Self: ...
class excepthandler(AST):
    lineno: int
    col_offset: int
@@ -915,6 +1224,11 @@ class excepthandler(AST):
    end_col_offset: int | None
    def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int | None = ..., end_col_offset: int | None = ...
        ) -> Self: ...

class ExceptHandler(excepthandler):
    if sys.version_info >= (3, 10):
        __match_args__ = ("type", "name", "body")
@@ -935,6 +1249,16 @@ class ExceptHandler(excepthandler):
        self, type: expr | None = None, name: _Identifier | None = None, *, body: list[stmt], **kwargs: Unpack[_Attributes]
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            type: expr | None = ...,
            name: _Identifier | None = ...,
            body: list[stmt] = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...
class arguments(AST):
    if sys.version_info >= (3, 10):
        __match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults")
@@ -993,6 +1317,19 @@ class arguments(AST):
        defaults: list[expr],
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            posonlyargs: list[arg] = ...,
            args: list[arg] = ...,
            vararg: arg | None = ...,
            kwonlyargs: list[arg] = ...,
            kw_defaults: list[expr | None] = ...,
            kwarg: arg | None = ...,
            defaults: list[expr] = ...,
        ) -> Self: ...
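The seven fields of `arguments` mirror the parameter sections of a def. An editorial sketch of how parsed parameters land in them:

import ast

fn = ast.parse("def f(a, /, b, *rest, c=0, **kw): ...").body[0]
params = fn.args  # an ast.arguments node
print([p.arg for p in params.posonlyargs])  # ['a']
print([p.arg for p in params.args])         # ['b']
print(params.vararg.arg, params.kwarg.arg)  # rest kw
print([p.arg for p in params.kwonlyargs])   # ['c'] (its default sits in kw_defaults)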
class arg(AST):
    lineno: int
    col_offset: int
@@ -1007,6 +1344,16 @@ class arg(AST):
        self, arg: _Identifier, annotation: expr | None = None, type_comment: str | None = None, **kwargs: Unpack[_Attributes]
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            arg: _Identifier = ...,
            annotation: expr | None = ...,
            type_comment: str | None = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class keyword(AST):
    lineno: int
    col_offset: int
@@ -1021,6 +1368,9 @@ class keyword(AST):
    @overload
    def __init__(self, arg: _Identifier | None = None, *, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, arg: _Identifier | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class alias(AST):
    lineno: int
    col_offset: int
@@ -1032,6 +1382,9 @@ class alias(AST):
    asname: _Identifier | None
    def __init__(self, name: str, asname: _Identifier | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, name: str = ..., asname: _Identifier | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class withitem(AST):
    if sys.version_info >= (3, 10):
        __match_args__ = ("context_expr", "optional_vars")
@@ -1039,6 +1392,9 @@ class withitem(AST):
    optional_vars: expr | None
    def __init__(self, context_expr: expr, optional_vars: expr | None = None) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, context_expr: expr = ..., optional_vars: expr | None = ...) -> Self: ...
if sys.version_info >= (3, 10):
    class Match(stmt):
        __match_args__ = ("subject", "cases")
@@ -1049,6 +1405,11 @@ if sys.version_info >= (3, 10):
        else:
            def __init__(self, subject: expr, cases: list[match_case], **kwargs: Unpack[_Attributes]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self, *, subject: expr = ..., cases: list[match_case] = ..., **kwargs: Unpack[_Attributes]
            ) -> Self: ...

    class pattern(AST):
        lineno: int
        col_offset: int
@@ -1056,6 +1417,11 @@ if sys.version_info >= (3, 10):
        end_col_offset: int
        def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int = ..., end_col_offset: int = ...
            ) -> Self: ...

    # Without the alias, Pyright complains variables named pattern are recursively defined
    _Pattern: typing_extensions.TypeAlias = pattern

@@ -1072,16 +1438,25 @@ if sys.version_info >= (3, 10):
        @overload
        def __init__(self, pattern: _Pattern, guard: expr | None = None, *, body: list[stmt]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, *, pattern: _Pattern = ..., guard: expr | None = ..., body: list[stmt] = ...) -> Self: ...

    class MatchValue(pattern):
        __match_args__ = ("value",)
        value: expr
        def __init__(self, value: expr, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...

    class MatchSingleton(pattern):
        __match_args__ = ("value",)
        value: Literal[True, False] | None
        def __init__(self, value: Literal[True, False] | None, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, *, value: Literal[True, False] | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
    class MatchSequence(pattern):
        __match_args__ = ("patterns",)
        patterns: list[pattern]
@@ -1090,11 +1465,17 @@ if sys.version_info >= (3, 10):
        else:
            def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...

    class MatchStar(pattern):
        __match_args__ = ("name",)
        name: _Identifier | None
        def __init__(self, name: _Identifier | None, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, *, name: _Identifier | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...

    class MatchMapping(pattern):
        __match_args__ = ("keys", "patterns", "rest")
        keys: list[expr]
@@ -1117,6 +1498,16 @@ if sys.version_info >= (3, 10):
            **kwargs: Unpack[_Attributes[int]],
        ) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self,
                *,
                keys: list[expr] = ...,
                patterns: list[pattern] = ...,
                rest: _Identifier | None = ...,
                **kwargs: Unpack[_Attributes[int]],
            ) -> Self: ...

    class MatchClass(pattern):
        __match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns")
        cls: expr
@@ -1142,6 +1533,17 @@ if sys.version_info >= (3, 10):
            **kwargs: Unpack[_Attributes[int]],
        ) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self,
                *,
                cls: expr = ...,
                patterns: list[pattern] = ...,
                kwd_attrs: list[_Identifier] = ...,
                kwd_patterns: list[pattern] = ...,
                **kwargs: Unpack[_Attributes[int]],
            ) -> Self: ...

    class MatchAs(pattern):
        __match_args__ = ("pattern", "name")
        pattern: _Pattern | None
@@ -1150,6 +1552,11 @@ if sys.version_info >= (3, 10):
            self, pattern: _Pattern | None = None, name: _Identifier | None = None, **kwargs: Unpack[_Attributes[int]]
        ) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self, *, pattern: _Pattern | None = ..., name: _Identifier | None = ..., **kwargs: Unpack[_Attributes[int]]
            ) -> Self: ...

    class MatchOr(pattern):
        __match_args__ = ("patterns",)
        patterns: list[pattern]
@@ -1158,6 +1565,9 @@ if sys.version_info >= (3, 10):
        else:
            def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
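Each `case` in a parsed `match` statement is a `match_case` whose `.pattern` is one of these node types. A short editorial sketch (3.10+):

import ast

code = """
match cmd:
    case {"op": "add", **rest}:
        pass
    case [first, *others]:
        pass
"""
m = ast.parse(code).body[0]
print([type(c.pattern).__name__ for c in m.cases])
# ['MatchMapping', 'MatchSequence']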
if sys.version_info >= (3, 12):
    class type_param(AST):
        lineno: int
@@ -1166,6 +1576,9 @@ if sys.version_info >= (3, 12):
        end_col_offset: int
        def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, **kwargs: Unpack[_Attributes[int]]) -> Self: ...

    class TypeVar(type_param):
        if sys.version_info >= (3, 13):
            __match_args__ = ("name", "bound", "default_value")
@@ -1185,6 +1598,16 @@ if sys.version_info >= (3, 12):
        else:
            def __init__(self, name: _Identifier, bound: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self,
                *,
                name: _Identifier = ...,
                bound: expr | None = ...,
                default_value: expr | None = ...,
                **kwargs: Unpack[_Attributes[int]],
            ) -> Self: ...

    class ParamSpec(type_param):
        if sys.version_info >= (3, 13):
            __match_args__ = ("name", "default_value")
@@ -1199,6 +1622,11 @@ if sys.version_info >= (3, 12):
        else:
            def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self, *, name: _Identifier = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]]
            ) -> Self: ...

    class TypeVarTuple(type_param):
        if sys.version_info >= (3, 13):
            __match_args__ = ("name", "default_value")
@@ -1213,6 +1641,11 @@ if sys.version_info >= (3, 12):
        else:
            def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self, *, name: _Identifier = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]]
            ) -> Self: ...

    class TypeAlias(stmt):
        __match_args__ = ("name", "type_params", "value")
        name: Name
@@ -1231,3 +1664,13 @@ if sys.version_info >= (3, 12):
        def __init__(
            self, name: Name, type_params: list[type_param], value: expr, **kwargs: Unpack[_Attributes[int]]
        ) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self,
                *,
                name: Name = ...,
                type_params: list[type_param] = ...,
                value: expr = ...,
                **kwargs: Unpack[_Attributes[int]],
            ) -> Self: ...
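`TypeAlias` and the `type_param` nodes model PEP 695 syntax; the `default_value` field matched in the 3.13 branches above carries PEP 696 defaults. Editorial sketch, assuming Python 3.12+:

import ast

ta = ast.parse("type Pair[T] = tuple[T, T]").body[0]
assert isinstance(ta, ast.TypeAlias)
print(ta.name.id)                          # Pair
print([tp.name for tp in ta.type_params])  # ['T'] (ast.TypeVar nodes)
print(ast.unparse(ta.value))               # tuple[T, T]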
Some files were not shown because too many files have changed in this diff.