Compare commits: `podman_mou...dhruv/rest` (605 commits)
```text
1dc0a523de e1e9143c47 3c4ec82aee 29c36a56b2 dfee65882b 50c8ee5175 c2aac5f826 387af831f9
9d517061f2 facf6febf0 46e687e8d1 599103c933 54df960a4a 3463683632 6b973b2556 c0e2c13d0d
591a7a152c b7c7b4b387 ea0246c51a 0f85769976 47f0b45be3 f4bed22b05 17eb65b26f 9986397d56
58c641c92f 227fa4e035 2b21b77ee6 ba272b093c 8972e5d175 9ac2e61bad 6deb056117 c4aad4b161
3abd5c08a5 2014cba87f 5661353334 dd5d0d523c 1be8c2e340 52d8847b60 d3b6e8f58b bf620dcb38
fae0573817 0c23b868dc 3ceedf76b8 828871dc5c ee21fc7fd8 28ab5f4065 a73bebcf15 34dafb67a2
f8656ff35e 34b4732c46 ce68f1cc1b 281e6d9791 ee258caed7 b4d9d26020 a99832088a 770ef2ab27
c6023c03a2 df694ca1c1 2e75cfbfe7 cfafaa7637 3e9c7adeee 81cd438d88 483748c188 eb3dc37faa
aba1802828 96b42b0c8f e6d0c4a65d 4e1b289a67 a5ef124201 390bb43276 fe8b15291f c8e01d7c53
c4d628cc4c ab3648c4c5 a822fd6642 f8f2e2a442 0b5828a1e8 5af48337a5 39ad6b9472 41dec93cd2
aee2caa733 fe5544e137 14c014a48b ecd0597d6b 202271fba6 4bdb0b4f86 2286f916c1 1e4c944251
f50f8732e9 ecab04e338 8c09496b07 d19fd1b91c 99df859e20 2d5fe9a6d3 1f2cb09853 cfe25ab465
551ed2706b 21c5606793 c73a7bb929 4f6accb5c6 1ca14e4335 b9c8113a8a 2edd32aa31 02c4373a49
d37e2e5d33 d1d067896c 93f9023ea3 8144a11f98 dce87c21fd f873d2ac12 ecd9e6a650 785c39927b
a35cdbb275 0c98b5949c e5f37a8254 5c5dfc11f0 678045e1aa dedefd73da 37a60460ed 0bd258a370
9baab8672a c65e3310d5 38c19fb96e abb4cdbf3d fc811f5168 1a8f29ea41 aefaddeae7 df09045176
049cda2ff3 358792f2c9 e6d5a7af37 f5bff82e70 ab44152eb5 f4c8c7eb70 65de8f2c9b e6226436fd
0345d46759 4d0d3b00cb 2be1c4ff04 edd86d5603 78ad7959ca d72ecd6ded 8617a508bd c88bd4e884
fbcda90316 169d4390cb 80ade591df 4881d32c80 81a2220ce1 900e98b584 f9d8189670 52ba94191a
96802d6a7f dd0a7ec73e 25f5ae44c4 251efe5c41 6359e55383 a9847af6e8 d61d75d4fa 499c0bd875
4cb30b598f aba0d83c11 c319414e54 ef1f6d98a0 b850b812de a87b27c075 9b73532b11 d8debb7a36
bd4a947b29 f121f8b31b 80efb865e9 52d27befe8 6ed06afd28 b9da31610a ac7b1770e2 e4c2859c0f
6dcd743111 73160dc8b6 15aa5a6d57 33512a4249 d8ebb03591 2e211c5c22 9fd8aaaf29 d110bd4e60
eb9c7ae869 7defc0d136 45f459bafd 99e946a005 78a7ac0722 fa2f3f9f2f 3898d737d8 c487149b7d
bebed67bf1 3ddcad64f5 05c35b6975 7fc39ad624 2520ebb145 89c8b49027 e05953a991 d0ac38f9d3
ff53db3d99 899a52390b 82a3e69b8a 7027344dfc fb9f0c448f 75131c6f4a 4b9ddc4a06 99dc208b00
540023262e 2ea79572ae aa0db338d9 a99a45868c fabf19fdc9 59f712a566 1d080465de 3481e16cdf
d7e9280e1e f237d36d2f 12f22b1fdd 47d05ee9ea 9caec36b59 cb364780b3 71b8bf211f 109b9cc4f9
5d02627794 65444bb00e 8822a79b4d 2df4d23113 603b62607a 2b71fc4510 1b78d872ec feba5031dc
0c2b88f224 cf1a57df5a 597c5f9124 69e1c567d4 37b9bac403 83db48d316 c4e651921b b595346213
253474b312 a176679b24 1f51048fa4 2abfab0f9b 64f1f3468d ffaa35eafe c906b0183b bc5b9b81dd
221ea662e0 d28c5afd14 f1de08c2a0 33e9a6a54e f577e03021 f53733525c 2daa914334 6d9205e346
df7345e118 dc6aafecc2 5107a50ae7 a631d600ac f34b9a77f0 7997da47f5 d380b37a09 b14fee9320
037e817450 7fcfedd430 50ff5c7544 90e5bc2bd9 aae9619d3d 7fa76a2b2b 14dd6d980e 846f57fd15
8e6aa78796 e91a0fe94a d2c627efb3 10e977d5f5 f0318ff889 5cc3fed9a8 39dd732e27 52630a1d55
7b5fd63ce8 5499821c67 7ee7c68f36 2393d19f91 a8e2ba508e 0b4d3ce39b 0a345dc627 ff2aa3ea00
0d3bad877d 756060d676 b647f3fba8 82e69ebf23 2c79045342 3497f5257b 25aabec814 0e71485ea9
43a9d282f7 6f357b8b45 73d9f11a9c d6c6db5a44 56d985a972 b3e0655cc9 06baffec9e 67a2ae800a
7a2c75e2fc 9ee44637ca 733341ab39 341a25eec1 38e178e914 daccb3f4f3 c858afe03a 3c1c3199d0
fbfe2cb2f5 1c311e4fdb 12177a42e3 dfb08856eb 94d817e1a5 9296bd4e3f da824ba316 012198a1b0
fbab04fbe1 9aa43d5f91 966563c79b 27edadec29 2e2b1b460f a3e67abf4c ee0518e8f7 d774a3bd48
7e6b19048e 8e383b9587 3f49ab126f c1bc7f4dee a44d579f21 a3900d2b0b 83b1c48a93 138e70bd5c
ee103ffb25 18f87b9497 adc8d4e1e7 90db361199 4738135801 264cd750e9 7a4419a2a5 ac1666d6e2
459c85ba27 aaa56eb0bd f3c14a4276 3169d408fa a2286c8e47 fb9f566f56 381bd1ff4a 2f54d05d97
e18b4e42d3 9495331a5f e1076db7d0 1986c9e8e2 d7e80dc955 87d09f77cd bd37ef13b8 ec23c974db
122e5ab428 2f2149aca8 9d5c31e7da 25f3ad6238 79926329a4 9cdc578dd9 665c75f7ab f37b39d6cc
e18c45c310 d930052de8 7ad4df9e9f 425761e960 4b69271809 bf23d38a21 49f51583fa 1fe4a5faed
998bfe0847 6f4db8675b 71f7aa4971 9f72f474e6 10c993e21a 2d3914296d 7571da8778 2ceac5f868
5ce80827d2 e047b9685a fc16d8d04d 175e5d7b88 c03f257ed7 6bbb4a28c2 2ce3e3ae60 2a64cccb61
928ffd6650 e52be0951a 889073578e eac965ecaf 8659f2f4ea c1b292a0dc 3af6ccb720 f0fc6a95fe
f96a3c71ff b9b7deff17 40d9324f5a a9f8bd59b2 143e172431 b2d3a05ee4 ef1ca0dd38 c7b13bb8fc
dbbe3526ef f22c8ab811 2a8f95c437 ea2d51c2bb ed238e0c76 3ace12943e 978909fcf4 f8735e1ee8
d70ceb6a56 fc7d9e95b8 b578fca9cb 8d3146c2b2 fa5c841154 f8fcbc19d9 97fdd48208 731ed2e40b
3a742c17f8 053243635c 82355712c3 4bc73dd87e 53b84ab054 3664f85f45 2c1926beeb 4bcc96ae51
c0a2b49bac ca22248628 d8cf8ac2ef 1c7b84059e f82bb67555 5f96f69151 ad19b3fd0e a62e2d2000
d61747093c 0ba7fc63d0 fa5b19d4b6 181e7b3c0d 519eca9fe7 f0d589d7a3 512c8b2cc5 811f78d94d
8f1be31289 8cfbac71a4 9460857932 a028ca22f0 7953f6aa79 764d9ab4ee 9b9d701500 648cca199b
2e77b775b0 ebe5b06c95 b2a49d8140 985a999234 1df51b1fbf 1435b0f022 e39298dcbc 1de8ff3308
72e02206d6 80f0116641 79b535587b 6e0cbe0f35 91338ae902 0c72577b5d fe04f2b09d 073588b48e
9a2dafb43d 595b1aa4a1 30cef67b45 d0c5925672 b1487b6b4f 85ae02d62e 9a817a2922 ecd4b4d943
b9a8cd390f 2348714081 3817b207cf b1cf9ea663 8ad10b9307 9c5524a9a2 1530223311 b9671522c4
9918202422 42e7147860 25feab93f8 dc8db1afb0 18c364d5df 7a7c601d5e 3bfbbbc78c 1a3ee45b23
65848869d5 456d6a2fb2 940df67823 e58713e2ac aa5c53b38b 4e6ecb2348 6febd96dfe 17e84d5f40
b6545ce5d6 90e9aae3f4 bd01004a42 d0298dc26d bbb9fe1692 5b21922420 abcf07c8c5 e8b5341c97
880c31d164 d365f1a648 4cc7bc9d32 0bb2fc6eec 855d62cdde 88abc6aed8 6fa4e32ad3 000dabcd88
f8ff42a13d b5834d57af 3d3ff10bb9 ac04380f36 16a63c88cf 10f07d88a2 1e04bd0b73 2041b0e5fb
bf3d903939 64855c5f06 b5ab4ce293 c396b9f08b 9ed3893e6d 30c9604c1d 7e4a1c2b33 e379160941
38b503ebcc dac476f2c0 754e5d6a7d d9c15e7a12 757c75752e 9d61727289 a62a432a48 8198723201
7df10ea3e9 0e44235981 7b50061b43 3a72400202 1b3bff0330 0f6f73ecf3 7910beecc4 f3ccd152e9
1e07bfa373 5e7ba05612 d12570ea00 2f3264e148 e2e0889a30 497fd4c505 6cdf3e7af8 4d385b60c8
262053f85c 3ce8b9fcae e6e09ea93a d870720841 8210c1ed5b a184f84f69 c487b99e93 24524771f2
b950a6c389 47eb6ee42b b4f7d5b2fb c13c60bc47 ee90017d3f adfd78e05a 7c8112614a 8f40928534
88a4cc41f7 dcb9523b1e 25080acb7a 228b1c4235 955138b74a 5677614079 37f260b5af 3f25561511
eaf33d85ed aaa6cabf3a 9a4d9072c1 4cb6a09fc0 5109b50bb3 db6ee74cbe d1aeadc009 d80a9d9ce9
85ede4a88c d2fefc8bf3 5fd3f43de1 ab372f5f48 6a8a7b65e9 211cafc571 0b1b94567a 168112d343
a5355084b5 deedb29e75 f765d19402 d1079680bb da78de0439
```
```diff
@@ -20,7 +20,7 @@
   "extensions": [
     "ms-python.python",
     "rust-lang.rust-analyzer",
-    "serayuzgur.crates",
+    "fill-labs.dependi",
     "tamasfe.even-better-toml",
     "Swellaby.vscode-rust-test-adapter",
     "charliermarsh.ruff"
```
`.github/CODEOWNERS` (vendored, 3 changes)

```diff
@@ -17,4 +17,5 @@
 /scripts/fuzz-parser/ @AlexWaygood
 
 # red-knot
-/crates/red_knot/ @carljm @MichaReiser
+/crates/red_knot* @carljm @MichaReiser @AlexWaygood
+/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood
```
`.github/renovate.json5` (vendored, 27 changes)

```diff
@@ -8,15 +8,32 @@
   semanticCommits: "disabled",
   separateMajorMinor: false,
   prHourlyLimit: 10,
-  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
+  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
+  cargo: {
+    // See https://docs.renovatebot.com/configuration-options/#rangestrategy
+    rangeStrategy: "update-lockfile",
+  },
   pep621: {
     // The default for this package manager is to only search for `pyproject.toml` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
     fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
   },
+  pip_requirements: {
+    // The default for this package manager is to run on all requirements.txt files:
+    // https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
+    // `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
+    // outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
+    // which takes a regex string, `ignorePaths` takes a glob string, so we have to use
+    // a "negative glob pattern".
+    // See:
+    // - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
+    // - https://docs.renovatebot.com/configuration-options/#ignorepaths
+    // - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
+    ignorePaths: ["!docs/requirements*.txt"]
+  },
   npm: {
     // The default for this package manager is to only search for `package.json` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
     fileMatch: ["^playground/.*package\\.json$"],
   },
   "pre-commit": {
@@ -48,6 +65,14 @@
       matchManagers: ["cargo"],
       enabled: false,
     },
+    {
+      // `mkdocs-material` requires a manual update to keep the version in sync
+      // with `mkdocs-material-insider`.
+      // See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
+      matchManagers: ["pip_requirements"],
+      matchPackagePatterns: ["mkdocs-material"],
+      enabled: false,
+    },
     {
       groupName: "pre-commit dependencies",
       matchManagers: ["pre-commit"],
```
`.github/workflows/build-docker.yml` (vendored, 2 changes)

```diff
@@ -56,7 +56,7 @@ jobs:
           fi
 
       - name: "Build and push Docker image"
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
         with:
           context: .
           platforms: linux/amd64,linux/arm64
```
`.github/workflows/ci.yaml` (vendored, 19 changes)

```diff
@@ -37,7 +37,7 @@ jobs:
         with:
           fetch-depth: 0
 
-      - uses: tj-actions/changed-files@v44
+      - uses: tj-actions/changed-files@v45
        id: changed
        with:
          files_yaml: |
@@ -111,7 +111,7 @@ jobs:
       - name: "Clippy"
        run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
      - name: "Clippy (wasm)"
-        run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
+        run: cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
 
   cargo-test-linux:
     name: "cargo test (linux)"
@@ -142,6 +142,13 @@
 
       # Check for broken links in the documentation.
       - run: cargo doc --all --no-deps
         env:
+          RUSTDOCFLAGS: "-D warnings"
+      # Use --document-private-items so that all our doc comments are kept in
+      # sync, not just public items. Eventually we should do this for all
+      # crates; for now add crates here as they are warning-clean to prevent
+      # regression.
+      - run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p ruff_db --document-private-items
+        env:
           # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
           RUSTDOCFLAGS: "-D warnings"
@@ -191,10 +198,14 @@
           cache-dependency-path: playground/package-lock.json
       - uses: jetli/wasm-pack-action@v0.4.0
       - uses: Swatinem/rust-cache@v2
-      - name: "Run wasm-pack"
+      - name: "Test ruff_wasm"
         run: |
           cd crates/ruff_wasm
           wasm-pack test --node
+      - name: "Test red_knot_wasm"
+        run: |
+          cd crates/red_knot_wasm
+          wasm-pack test --node
 
   cargo-build-release:
     name: "cargo build (release)"
@@ -619,7 +630,7 @@ jobs:
         run: cargo codspeed build --features codspeed -p ruff_benchmark
 
       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@v2
+        uses: CodSpeedHQ/action@v3
         with:
           run: cargo codspeed run
           token: ${{ secrets.CODSPEED_TOKEN }}
```
`.github/workflows/pr-comment.yaml` (vendored, 2 changes)

```diff
@@ -23,6 +23,7 @@ jobs:
           name: pr-number
           run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
           if_no_artifact_found: ignore
+          allow_forks: true
 
       - name: Parse pull request number
         id: pr-number
@@ -43,6 +44,7 @@
           path: pr/ecosystem
           workflow_conclusion: completed
           if_no_artifact_found: ignore
+          allow_forks: true
 
       - name: Generate comment content
         id: generate-comment
```
`.github/workflows/publish-docs.yaml` (vendored, deleted, 62 lines)

```diff
@@ -1,62 +0,0 @@
-# Publish the Ruff documentation.
-#
-# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
-# job within `cargo-dist`.
-name: mkdocs
-
-on:
-  workflow_dispatch:
-    inputs:
-      ref:
-        description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
-        default: ""
-        type: string
-  workflow_call:
-    inputs:
-      plan:
-        required: true
-        type: string
-
-jobs:
-  mkdocs:
-    runs-on: ubuntu-latest
-    env:
-      CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
-      MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          ref: ${{ inputs.ref }}
-      - uses: actions/setup-python@v5
-      - name: "Add SSH key"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        uses: webfactory/ssh-agent@v0.9.0
-        with:
-          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
-      - name: "Install Rust toolchain"
-        run: rustup show
-      - uses: Swatinem/rust-cache@v2
-      - name: "Install Insiders dependencies"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        run: pip install -r docs/requirements-insiders.txt
-      - name: "Install dependencies"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
-        run: pip install -r docs/requirements.txt
-      - name: "Copy README File"
-        run: |
-          python scripts/transform_readme.py --target mkdocs
-          python scripts/generate_mkdocs.py
-      - name: "Build Insiders docs"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        run: mkdocs build --strict -f mkdocs.insiders.yml
-      - name: "Build docs"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
-        run: mkdocs build --strict -f mkdocs.public.yml
-      - name: "Deploy to Cloudflare Pages"
-        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-        uses: cloudflare/wrangler-action@v3.6.1
-        with:
-          apiToken: ${{ secrets.CF_API_TOKEN }}
-          accountId: ${{ secrets.CF_ACCOUNT_ID }}
-          # `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
-          command: pages deploy site --project-name=astral-docs --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
```
`.github/workflows/publish-docs.yml` (vendored, new file, 152 lines)

```diff
@@ -0,0 +1,152 @@
+# Publish the Ruff documentation.
+#
+# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
+# job within `cargo-dist`.
+name: mkdocs
+
+on:
+  workflow_dispatch:
+    inputs:
+      ref:
+        description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
+        default: ""
+        type: string
+  workflow_call:
+    inputs:
+      plan:
+        required: true
+        type: string
+
+jobs:
+  mkdocs:
+    runs-on: ubuntu-latest
+    env:
+      MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ inputs.ref }}
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: 3.12
+
+      - name: "Set docs version"
+        run: |
+          version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
+          # if version is missing, use 'latest'
+          if [ -z "$version" ]; then
+            echo "Using 'latest' as version"
+            version="latest"
+          fi
+
+          # Use version as display name for now
+          display_name="$version"
+
+          echo "version=$version" >> $GITHUB_ENV
+          echo "display_name=$display_name" >> $GITHUB_ENV
+
+      - name: "Set branch name"
+        run: |
+          version="${{ env.version }}"
+          display_name="${{ env.display_name }}"
+          timestamp="$(date +%s)"
+
+          # create branch_display_name from display_name by replacing all
+          # characters disallowed in git branch names with hyphens
+          branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"
+
+          echo "branch_name=update-docs-$branch_display_name-$timestamp" >> $GITHUB_ENV
+          echo "timestamp=$timestamp" >> $GITHUB_ENV
+
+      - name: "Add SSH key"
+        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
+        uses: webfactory/ssh-agent@v0.9.0
+        with:
+          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
+
+      - name: "Install Rust toolchain"
+        run: rustup show
+
+      - uses: Swatinem/rust-cache@v2
+
+      - name: "Install Insiders dependencies"
+        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
+        run: pip install -r docs/requirements-insiders.txt
+
+      - name: "Install dependencies"
+        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
+        run: pip install -r docs/requirements.txt
+
+      - name: "Copy README File"
+        run: |
+          python scripts/transform_readme.py --target mkdocs
+          python scripts/generate_mkdocs.py
+
+      - name: "Build Insiders docs"
+        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
+        run: mkdocs build --strict -f mkdocs.insiders.yml
+
+      - name: "Build docs"
+        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
+        run: mkdocs build --strict -f mkdocs.public.yml
+
+      - name: "Clone docs repo"
+        run: |
+          version="${{ env.version }}"
+          git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
+
+      - name: "Copy docs"
+        run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/
+
+      - name: "Commit docs"
+        working-directory: astral-docs
+        run: |
+          branch_name="${{ env.branch_name }}"
+
+          git config user.name "astral-docs-bot"
+          git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
+
+          git checkout -b $branch_name
+          git add site/ruff
+          git commit -m "Update ruff documentation for $version"
+
+      - name: "Create Pull Request"
+        working-directory: astral-docs
+        env:
+          GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
+        run: |
+          version="${{ env.version }}"
+          display_name="${{ env.display_name }}"
+          branch_name="${{ env.branch_name }}"
+
+          # set the PR title
+          pull_request_title="Update ruff documentation for $display_name"
+
+          # Delete any existing pull requests that are open for this version
+          # by checking against pull_request_title because the new PR will
+          # supersede the old one.
+          gh pr list --state open --json title --jq '.[] | select(.title == "$pull_request_title") | .number' | \
+            xargs -I {} gh pr close {}
+
+          # push the branch to GitHub
+          git push origin $branch_name
+
+          # create the PR
+          gh pr create --base main --head $branch_name \
+            --title "$pull_request_title" \
+            --body "Automated documentation update for $display_name" \
+            --label "documentation"
+
+      - name: "Merge Pull Request"
+        if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
+        working-directory: astral-docs
+        env:
+          GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
+        run: |
+          branch_name="${{ env.branch_name }}"
+
+          # auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
+          # give the PR a few seconds to be created before trying to auto-merge it
+          sleep 10
+          gh pr merge --squash $branch_name
```
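The "Set branch name" step above sanitizes the display name before embedding it in a git branch. A minimal local sketch of the same `tr` pipeline (the sample `display_name` is a made-up tag; any ref works):

```bash
#!/usr/bin/env bash
# Sketch of the branch-name sanitization from the "Set branch name" step.
display_name="v0.6.3"
timestamp="$(date +%s)"

# tr -c replaces every character NOT in [:alnum:], '.', '_' with '-';
# tr -s then squeezes runs of '-' down to a single '-'.
branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"

# Note: echo's trailing newline is also outside the allowed set, so the
# sanitized name ends in a '-' (e.g. "v0.6.3-"); the branch name stays valid.
echo "update-docs-$branch_display_name-$timestamp"
```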
```diff
@@ -47,7 +47,7 @@ jobs:
         working-directory: playground
       - name: "Deploy to Cloudflare Pages"
         if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-        uses: cloudflare/wrangler-action@v3.6.1
+        uses: cloudflare/wrangler-action@v3.7.0
         with:
           apiToken: ${{ secrets.CF_API_TOKEN }}
           accountId: ${{ secrets.CF_ACCOUNT_ID }}
```
`.github/workflows/publish-wasm.yml` (vendored, new file, 55 lines)

```diff
@@ -0,0 +1,55 @@
+# Build and publish ruff-api for wasm.
+#
+# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish
+# job within `cargo-dist`.
+name: "Build and publish wasm"
+
+on:
+  workflow_dispatch:
+  workflow_call:
+    inputs:
+      plan:
+        required: true
+        type: string
+
+env:
+  CARGO_INCREMENTAL: 0
+  CARGO_NET_RETRY: 10
+  CARGO_TERM_COLOR: always
+  RUSTUP_MAX_RETRIES: 10
+
+jobs:
+  ruff_wasm:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write
+    strategy:
+      matrix:
+        target: [web, bundler, nodejs]
+      fail-fast: false
+    steps:
+      - uses: actions/checkout@v4
+      - name: "Install Rust toolchain"
+        run: rustup target add wasm32-unknown-unknown
+      - uses: jetli/wasm-pack-action@v0.4.0
+      - uses: jetli/wasm-bindgen-action@v0.2.0
+      - name: "Run wasm-pack build"
+        run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
+      - name: "Rename generated package"
+        run: | # Replace the package name w/ jq
+          jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
+          mv /tmp/package.json crates/ruff_wasm/pkg
+      - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 18
+          registry-url: "https://registry.npmjs.org"
+      - name: "Publish (dry-run)"
+        if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
+        run: npm publish --dry-run crates/ruff_wasm/pkg
+      - name: "Publish"
+        if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
+        run: npm publish --provenance --access public crates/ruff_wasm/pkg
+        env:
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
```
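The "Rename generated package" step rewrites the `name` field of the `package.json` that `wasm-pack` emits; since `jq` cannot edit a file in place, it writes to a temp file and moves it back. The same round-trip, runnable outside the workflow (`target` stands in for `${{ matrix.target }}`; `--arg` is the shell-side equivalent of the inline Actions interpolation):

```bash
#!/usr/bin/env bash
# Sketch of the package-rename step; paths match the workflow, the target is assumed.
set -euo pipefail
target="web"
pkg="crates/ruff_wasm/pkg/package.json"

# Rewrite .name, then move the temp file back over the original.
jq --arg name "@astral-sh/ruff-wasm-$target" '.name = $name' "$pkg" > /tmp/package.json
mv /tmp/package.json "$pkg"
```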
`.github/workflows/release.yml` (vendored, 73 changes)

```diff
@@ -12,9 +12,8 @@
 # title/body based on your changelogs.
 
 name: Release
 
 permissions:
-  contents: write
+  "contents": "write"
 
 # This task will run whenever you workflow_dispatch with a tag that looks like a version
 # like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
@@ -49,7 +48,7 @@ on:
 jobs:
   # Run 'cargo dist plan' (or host) to determine what tasks we need to do
   plan:
-    runs-on: ubuntu-latest
+    runs-on: "ubuntu-20.04"
     outputs:
       val: ${{ steps.plan.outputs.manifest }}
       tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
@@ -65,7 +64,12 @@ jobs:
         # we specify bash to get pipefail; it guards against the `curl` command
         # failing. otherwise `sh` won't catch that `curl` returned non-0
         shell: bash
-        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.14.0/cargo-dist-installer.sh | sh"
+        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.18.0/cargo-dist-installer.sh | sh"
+      - name: Cache cargo-dist
+        uses: actions/upload-artifact@v4
+        with:
+          name: cargo-dist-cache
+          path: ~/.cargo/bin/cargo-dist
       # sure would be cool if github gave us proper conditionals...
       # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
       # functionality based on whether this is a pull_request, and whether it's from a fork.
@@ -101,8 +105,8 @@ jobs:
       plan: ${{ needs.plan.outputs.val }}
     secrets: inherit
     permissions:
-      packages: write
-      contents: read
+      "contents": "read"
+      "packages": "write"
 
   # Build and package all the platform-agnostic(ish) things
   build-global-artifacts:
@@ -118,9 +122,12 @@ jobs:
       - uses: actions/checkout@v4
         with:
           submodules: recursive
-      - name: Install cargo-dist
-        shell: bash
-        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.14.0/cargo-dist-installer.sh | sh"
+      - name: Install cached cargo-dist
+        uses: actions/download-artifact@v4
+        with:
+          name: cargo-dist-cache
+          path: ~/.cargo/bin/
+      - run: chmod +x ~/.cargo/bin/cargo-dist
       # Get all the local artifacts for the global tasks to use (for e.g. checksums)
       - name: Fetch local artifacts
         uses: actions/download-artifact@v4
@@ -165,8 +172,12 @@ jobs:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
-      - name: Install cargo-dist
-        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.14.0/cargo-dist-installer.sh | sh"
+      - name: Install cached cargo-dist
+        uses: actions/download-artifact@v4
+        with:
+          name: cargo-dist-cache
+          path: ~/.cargo/bin/
+      - run: chmod +x ~/.cargo/bin/cargo-dist
       # Fetch artifacts from scratch-storage
       - name: Fetch artifacts
         uses: actions/download-artifact@v4
@@ -200,8 +211,23 @@ jobs:
     secrets: inherit
     # publish jobs get escalated permissions
     permissions:
-      id-token: write
-      packages: write
+      "id-token": "write"
+      "packages": "write"
+
+  custom-publish-wasm:
+    needs:
+      - plan
+      - host
+    if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
+    uses: ./.github/workflows/publish-wasm.yml
+    with:
+      plan: ${{ needs.plan.outputs.val }}
+    secrets: inherit
+    # publish jobs get escalated permissions
+    permissions:
+      "contents": "read"
+      "id-token": "write"
+      "packages": "write"
 
   # Create a GitHub Release while uploading all files to it
   announce:
@@ -209,10 +235,11 @@ jobs:
       - plan
       - host
       - custom-publish-pypi
+      - custom-publish-wasm
     # use "always() && ..." to allow us to wait for all publish jobs while
     # still allowing individual publish jobs to skip themselves (for prereleases).
     # "host" however must run to completion, no skipping allowed!
-    if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') }}
+    if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
     runs-on: "ubuntu-20.04"
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -220,6 +247,7 @@ jobs:
       - uses: actions/checkout@v4
         with:
           submodules: recursive
+      # Create a GitHub Release while uploading all files to it
       - name: "Download GitHub Artifacts"
         uses: actions/download-artifact@v4
         with:
@@ -231,13 +259,16 @@ jobs:
           # Remove the granular manifests
           rm -f artifacts/*-dist-manifest.json
       - name: Create GitHub Release
-        uses: ncipollo/release-action@v1
-        with:
-          tag: ${{ needs.plan.outputs.tag }}
-          name: ${{ fromJson(needs.host.outputs.val).announcement_title }}
-          body: ${{ fromJson(needs.host.outputs.val).announcement_github_body }}
-          prerelease: ${{ fromJson(needs.host.outputs.val).announcement_is_prerelease }}
-          artifacts: "artifacts/*"
+        env:
+          PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"
+          ANNOUNCEMENT_TITLE: "${{ fromJson(needs.host.outputs.val).announcement_title }}"
+          ANNOUNCEMENT_BODY: "${{ fromJson(needs.host.outputs.val).announcement_github_body }}"
+          RELEASE_COMMIT: "${{ github.sha }}"
+        run: |
+          # Write and read notes from a file to avoid quoting breaking things
+          echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt
+
+          gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*
 
   custom-notify-dependents:
     needs:
```
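The rewritten "Create GitHub Release" step routes the announcement body through a file rather than a command-line argument, so quotes, backticks, and dollar signs in the changelog cannot break shell quoting. A minimal sketch of the same pattern (tag and body are placeholders):

```bash
#!/usr/bin/env bash
# Sketch of the notes-file pattern from the new release step.
set -euo pipefail
ANNOUNCEMENT_BODY='Body with "quotes", $dollars and `backticks`'
RUNNER_TEMP="${RUNNER_TEMP:-/tmp}"  # set by the Actions runner; fallback for local runs

# Write and read notes from a file to avoid quoting breaking things
echo "$ANNOUNCEMENT_BODY" > "$RUNNER_TEMP/notes.txt"

# --draft keeps a local experiment from publishing anything real
gh release create "v0.0.0-test" --draft --notes-file "$RUNNER_TEMP/notes.txt"
```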
`.github/workflows/sync_typeshed.yaml` (vendored, 14 changes)

```diff
@@ -37,13 +37,13 @@ jobs:
       - name: Sync typeshed
         id: sync
         run: |
-          rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed
-          mkdir ruff/crates/red_knot_module_resolver/vendor/typeshed
-          cp typeshed/README.md ruff/crates/red_knot_module_resolver/vendor/typeshed
-          cp typeshed/LICENSE ruff/crates/red_knot_module_resolver/vendor/typeshed
-          cp -r typeshed/stdlib ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib
-          rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib/@tests
-          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
+          rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed
+          mkdir ruff/crates/red_knot_python_semantic/vendor/typeshed
+          cp typeshed/README.md ruff/crates/red_knot_python_semantic/vendor/typeshed
+          cp typeshed/LICENSE ruff/crates/red_knot_python_semantic/vendor/typeshed
+          cp -r typeshed/stdlib ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib
+          rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib/@tests
+          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt
       - name: Commit the changes
         id: commit
         if: ${{ steps.sync.outcome == 'success' }}
```
`.gitignore` (vendored, 8 changes)

```diff
@@ -21,6 +21,14 @@ flamegraph.svg
 # `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
 /target*
 
+# samply profiles
+profile.json
+
+# tracing-flame traces
+tracing.folded
+tracing-flamechart.svg
+tracing-flamegraph.svg
+
 ###
 # Rust.gitignore
 ###
```
```diff
@@ -14,6 +14,9 @@ MD041: false
 # MD013/line-length
 MD013: false
 
+# MD014/commands-show-output
+MD014: false
+
 # MD024/no-duplicate-heading
 MD024:
   # Allow when nested under different parents e.g. CHANGELOG.md
```
```diff
@@ -2,9 +2,12 @@ fail_fast: true
 
 exclude: |
   (?x)^(
-    crates/red_knot_module_resolver/vendor/.*|
+    crates/red_knot_python_semantic/vendor/.*|
+    crates/red_knot_workspace/resources/.*|
     crates/ruff_linter/resources/.*|
     crates/ruff_linter/src/rules/.*/snapshots/.*|
     crates/ruff_notebook/resources/.*|
+    crates/ruff_server/resources/.*|
     crates/ruff/resources/.*|
     crates/ruff_python_formatter/resources/.*|
     crates/ruff_python_formatter/tests/snapshots/.*|
@@ -14,7 +17,7 @@ exclude: |
 
 repos:
   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.18
+    rev: v0.19
     hooks:
       - id: validate-pyproject
@@ -42,7 +45,7 @@ repos:
       )$
 
   - repo: https://github.com/crate-ci/typos
-    rev: v1.22.9
+    rev: v1.24.3
     hooks:
       - id: typos
@@ -56,18 +59,13 @@ repos:
         pass_filenames: false # This makes it a lot faster
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.4.10
+    rev: v0.6.3
     hooks:
+      - id: ruff-format
       - id: ruff
         args: [--fix, --exit-non-zero-on-fix]
-        types_or: [python, pyi]
-        require_serial: true
-        exclude: |
-          (?x)^(
-            crates/ruff_linter/resources/.*|
-            crates/ruff_python_formatter/resources/.*
-          )$
 
   # Prettier
   - repo: https://github.com/pre-commit/mirrors-prettier
```
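For reference, the hooks changed above can be exercised locally with pre-commit's standard CLI; a quick sketch (assumes `pre-commit` is installed and this config is in place):

```bash
# Run every hook from .pre-commit-config.yaml against the whole tree.
pre-commit run --all-files

# Or just the two ruff hooks touched in this diff:
pre-commit run ruff-format --all-files
pre-commit run ruff --all-files
```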
````diff
@@ -1,5 +1,43 @@
 # Breaking Changes
 
+## 0.6.0
+
+- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
+
+- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
+
+- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
+
+  You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):
+
+  ```toml
+  [tool.ruff.lint.per-file-ignores]
+  "*.ipynb" = ["E501"] # disable line-too-long in notebooks
+  ```
+
+  If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
+  section-specific `exclude` option to do so. For example, the following would only lint Jupyter
+  Notebook files and not format them:
+
+  ```toml
+  [tool.ruff.format]
+  exclude = ["*.ipynb"]
+  ```
+
+  And, conversely, the following would only format Jupyter Notebook files and not lint them:
+
+  ```toml
+  [tool.ruff.lint]
+  exclude = ["*.ipynb"]
+  ```
+
+  You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:
+
+  ```toml
+  [tool.ruff]
+  extend-exclude = ["*.ipynb"]
+  ```
+
 ## 0.5.0
 
 - Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
````
437
CHANGELOG.md
437
CHANGELOG.md
@@ -1,5 +1,440 @@
|
||||
# Changelog
|
||||
|
||||
## 0.6.3
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with `dbm.sqlite3` (`SIM115`) ([#13104](https://github.com/astral-sh/ruff/pull/13104))
|
||||
- \[`pycodestyle`\] Disable `E741` in stub files (`.pyi`) ([#13119](https://github.com/astral-sh/ruff/pull/13119))
|
||||
- \[`pydoclint`\] Avoid `DOC201` on explicit returns in functions that only return `None` ([#13064](https://github.com/astral-sh/ruff/pull/13064))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-async`\] Disable check for `asyncio` before Python 3.11 (`ASYNC109`) ([#13023](https://github.com/astral-sh/ruff/pull/13023))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`FastAPI`\] Avoid introducing invalid syntax in fix for `fast-api-non-annotated-dependency` (`FAST002`) ([#13133](https://github.com/astral-sh/ruff/pull/13133))
|
||||
- \[`flake8-implicit-str-concat`\] Normalize octals before merging concatenated strings in `single-line-implicit-string-concatenation` (`ISC001`) ([#13118](https://github.com/astral-sh/ruff/pull/13118))
|
||||
- \[`flake8-pytest-style`\] Improve help message for `pytest-incorrect-mark-parentheses-style` (`PT023`) ([#13092](https://github.com/astral-sh/ruff/pull/13092))
|
||||
- \[`pylint`\] Avoid autofix for calls that aren't `min` or `max` as starred expression (`PLW3301`) ([#13089](https://github.com/astral-sh/ruff/pull/13089))
|
||||
- \[`ruff`\] Add `datetime.time`, `datetime.tzinfo`, and `datetime.timezone` as immutable function calls (`RUF009`) ([#13109](https://github.com/astral-sh/ruff/pull/13109))
|
||||
- \[`ruff`\] Extend comment deletion for `RUF100` to include trailing text from `noqa` directives while preserving any following comments on the same line, if any ([#13105](https://github.com/astral-sh/ruff/pull/13105))
|
||||
- Fix dark theme on initial page load for the Ruff playground ([#13077](https://github.com/astral-sh/ruff/pull/13077))
|
||||
|
||||
## 0.6.2
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with other standard-library IO modules (`SIM115`) ([#12959](https://github.com/astral-sh/ruff/pull/12959))
|
||||
- \[`ruff`\] Avoid `unused-async` for functions with FastAPI route decorator (`RUF029`) ([#12938](https://github.com/astral-sh/ruff/pull/12938))
|
||||
- \[`ruff`\] Ignore `fstring-missing-syntax` (`RUF027`) for `fastAPI` paths ([#12939](https://github.com/astral-sh/ruff/pull/12939))
|
||||
- \[`ruff`\] Implement check for Decimal called with a float literal (RUF032) ([#12909](https://github.com/astral-sh/ruff/pull/12909))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-bugbear`\] Update diagnostic message when expression is at the end of function (`B015`) ([#12944](https://github.com/astral-sh/ruff/pull/12944))
|
||||
- \[`flake8-pyi`\] Skip type annotations in `string-or-bytes-too-long` (`PYI053`) ([#13002](https://github.com/astral-sh/ruff/pull/13002))
|
||||
- \[`flake8-type-checking`\] Always recognise relative imports as first-party ([#12994](https://github.com/astral-sh/ruff/pull/12994))
|
||||
- \[`flake8-unused-arguments`\] Ignore unused arguments on stub functions (`ARG001`) ([#12966](https://github.com/astral-sh/ruff/pull/12966))
|
||||
- \[`pylint`\] Ignore augmented assignment for `self-cls-assignment` (`PLW0642`) ([#12957](https://github.com/astral-sh/ruff/pull/12957))
|
||||
|
||||
### Server
|
||||
|
||||
- Show full context in error log messages ([#13029](https://github.com/astral-sh/ruff/pull/13029))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`pep8-naming`\] Don't flag `from` imports following conventional import names (`N817`) ([#12946](https://github.com/astral-sh/ruff/pull/12946))
|
||||
- \[`pylint`\] - Allow `__new__` methods to have `cls` as their first argument even if decorated with `@staticmethod` for `bad-staticmethod-argument` (`PLW0211`) ([#12958](https://github.com/astral-sh/ruff/pull/12958))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add `hyperfine` installation instructions; update `hyperfine` code samples ([#13034](https://github.com/astral-sh/ruff/pull/13034))
|
||||
- Expand note to use Ruff with other language server in Kate ([#12806](https://github.com/astral-sh/ruff/pull/12806))
|
||||
- Update example for `PT001` as per the new default behavior ([#13019](https://github.com/astral-sh/ruff/pull/13019))
|
||||
- \[`perflint`\] Improve docs for `try-except-in-loop` (`PERF203`) ([#12947](https://github.com/astral-sh/ruff/pull/12947))
|
||||
- \[`pydocstyle`\] Add reference to `lint.pydocstyle.ignore-decorators` setting to rule docs ([#12996](https://github.com/astral-sh/ruff/pull/12996))
|
||||
|
||||
## 0.6.1
|
||||
|
||||
This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6,
|
||||
Ruff changed its behavior to lint and format Jupyter notebooks by default;
|
||||
however, due to an oversight, these files were still excluded by default if
|
||||
Ruff was run via pre-commit, leading to inconsistent behavior.
|
||||
This has [now been fixed](https://github.com/astral-sh/ruff-pre-commit/pull/96).
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`fastapi`\] Implement `fast-api-unused-path-parameter` (`FAST003`) ([#12638](https://github.com/astral-sh/ruff/pull/12638))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`pylint`\] Rename `too-many-positional` to `too-many-positional-arguments` (`R0917`) ([#12905](https://github.com/astral-sh/ruff/pull/12905))
|
||||
|
||||
### Server
|
||||
|
||||
- Fix crash when applying "fix-all" code-action to notebook cells ([#12929](https://github.com/astral-sh/ruff/pull/12929))
|
||||
|
||||
### Other changes
|
||||
|
||||
- \[`flake8-naming`\]: Respect import conventions (`N817`) ([#12922](https://github.com/astral-sh/ruff/pull/12922))
|
||||
|
||||
## 0.6.0
|
||||
|
||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.6.0) for a migration guide and overview of the changes!
|
||||
|
||||
### Breaking changes
|
||||
|
||||
See also, the "Remapped rules" section which may result in disabled rules.
|
||||
|
||||
- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
|
||||
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
|
||||
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
|
||||
|
||||
### Deprecations
|
||||
|
||||
The following rules are now deprecated:
|
||||
|
||||
- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
|
||||
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
|
||||
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)
|
||||
|
||||
### Remapped rules
|
||||
|
||||
The following rules have been remapped to new rule codes:
|
||||
|
||||
- [`unnecessary-dict-comprehension-for-iterable`](https://docs.astral.sh/ruff/rules/unnecessary-dict-comprehension-for-iterable/): `RUF025` to `C420`
|
||||
|
||||
### Stabilization
|
||||
|
||||
The following rules have been stabilized and are no longer in preview:
|
||||
|
||||
- [`singledispatch-method`](https://docs.astral.sh/ruff/rules/singledispatch-method/) (`PLE1519`)
|
||||
- [`singledispatchmethod-function`](https://docs.astral.sh/ruff/rules/singledispatchmethod-function/) (`PLE1520`)
|
||||
- [`bad-staticmethod-argument`](https://docs.astral.sh/ruff/rules/bad-staticmethod-argument/) (`PLW0211`)
|
||||
- [`if-stmt-min-max`](https://docs.astral.sh/ruff/rules/if-stmt-min-max/) (`PLR1730`)
|
||||
- [`invalid-bytes-return-type`](https://docs.astral.sh/ruff/rules/invalid-bytes-return-type/) (`PLE0308`)
|
||||
- [`invalid-hash-return-type`](https://docs.astral.sh/ruff/rules/invalid-hash-return-type/) (`PLE0309`)
|
||||
- [`invalid-index-return-type`](https://docs.astral.sh/ruff/rules/invalid-index-return-type/) (`PLE0305`)
|
||||
- [`invalid-length-return-type`](https://docs.astral.sh/ruff/rules/invalid-length-return-type/) (`PLEE303`)
|
||||
- [`self-or-cls-assignment`](https://docs.astral.sh/ruff/rules/self-or-cls-assignment/) (`PLW0642`)
|
||||
- [`byte-string-usage`](https://docs.astral.sh/ruff/rules/byte-string-usage/) (`PYI057`)
|
||||
- [`duplicate-literal-member`](https://docs.astral.sh/ruff/rules/duplicate-literal-member/) (`PYI062`)
|
||||
- [`redirected-noqa`](https://docs.astral.sh/ruff/rules/redirected-noqa/) (`RUF101`)
|
||||
|
||||
The following behaviors have been stabilized:
|
||||
|
||||
- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context mangers.
|
||||
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context mangers.
|
||||
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context mangers.
|
||||
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context mangers.
|
||||
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context mangers.
|
||||
|
||||
The following fixes have been stabilized:
|
||||
|
||||
- [`superfluous-else-return`](https://docs.astral.sh/ruff/rules/superfluous-else-return/) (`RET505`)
|
||||
- [`superfluous-else-raise`](https://docs.astral.sh/ruff/rules/superfluous-else-raise/) (`RET506`)
|
||||
- [`superfluous-else-continue`](https://docs.astral.sh/ruff/rules/superfluous-else-continue/) (`RET507`)
|
||||
- [`superfluous-else-break`](https://docs.astral.sh/ruff/rules/superfluous-else-break/) (`RET508`)
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-simplify`\] Further simplify to binary in preview for (`SIM108`) ([#12796](https://github.com/astral-sh/ruff/pull/12796))
|
||||
- \[`pyupgrade`\] Show violations without auto-fix (`UP031`) ([#11229](https://github.com/astral-sh/ruff/pull/11229))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-import-conventions`\] Add `xml.etree.ElementTree` to default conventions ([#12455](https://github.com/astral-sh/ruff/pull/12455))
|
||||
- \[`flake8-pytest-style`\] Add a space after comma in CSV output (`PT006`) ([#12853](https://github.com/astral-sh/ruff/pull/12853))
|
||||
|
||||
### Server
|
||||
|
||||
- Show a message for incorrect settings ([#12781](https://github.com/astral-sh/ruff/pull/12781))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-async`\] Do not lint yield in context manager (`ASYNC100`) ([#12896](https://github.com/astral-sh/ruff/pull/12896))
|
||||
- \[`flake8-comprehensions`\] Do not lint `async for` comprehensions (`C419`) ([#12895](https://github.com/astral-sh/ruff/pull/12895))
|
||||
- \[`flake8-return`\] Only add return `None` at end of a function (`RET503`) ([#11074](https://github.com/astral-sh/ruff/pull/11074))
|
||||
- \[`flake8-type-checking`\] Avoid treating `dataclasses.KW_ONLY` as typing-only (`TCH003`) ([#12863](https://github.com/astral-sh/ruff/pull/12863))
|
||||
- \[`pep8-naming`\] Treat `type(Protocol)` et al as metaclass base (`N805`) ([#12770](https://github.com/astral-sh/ruff/pull/12770))
|
||||
- \[`pydoclint`\] Don't enforce returns and yields in abstract methods (`DOC201`, `DOC202`) ([#12771](https://github.com/astral-sh/ruff/pull/12771))
|
||||
- \[`ruff`\] Skip tuples with slice expressions in (`RUF031`) ([#12768](https://github.com/astral-sh/ruff/pull/12768))
|
||||
- \[`ruff`\] Ignore unparenthesized tuples in subscripts when the subscript is a type annotation or type alias (`RUF031`) ([#12762](https://github.com/astral-sh/ruff/pull/12762))
|
||||
- \[`ruff`\] Ignore template strings passed to logging and `builtins._()` calls (`RUF027`) ([#12889](https://github.com/astral-sh/ruff/pull/12889))
|
||||
- \[`ruff`\] Do not remove parens for tuples with starred expressions in Python \<=3.10 (`RUF031`) ([#12784](https://github.com/astral-sh/ruff/pull/12784))
|
||||
- Evaluate default parameter values for a function in that function's enclosing scope ([#12852](https://github.com/astral-sh/ruff/pull/12852))

### Other changes

- Respect VS Code cell metadata when detecting the language of Jupyter Notebook cells ([#12864](https://github.com/astral-sh/ruff/pull/12864))
- Respect `kernelspec` notebook metadata when detecting the preferred language for a Jupyter Notebook ([#12875](https://github.com/astral-sh/ruff/pull/12875))

## 0.5.7

### Preview features

- \[`flake8-comprehensions`\] Account for list and set comprehensions in `unnecessary-literal-within-tuple-call` (`C409`) ([#12657](https://github.com/astral-sh/ruff/pull/12657))
- \[`flake8-pyi`\] Add autofix for `future-annotations-in-stub` (`PYI044`) ([#12676](https://github.com/astral-sh/ruff/pull/12676))
- \[`flake8-return`\] Avoid syntax error when auto-fixing `RET505` with mixed indentation (spaces and tabs) ([#12740](https://github.com/astral-sh/ruff/pull/12740))
- \[`pydoclint`\] Add `docstring-missing-yields` (`DOC402`) and `docstring-extraneous-yields` (`DOC403`) ([#12538](https://github.com/astral-sh/ruff/pull/12538))
- \[`pydoclint`\] Avoid `DOC201` if docstring begins with "Return", "Returns", "Yield", or "Yields" ([#12675](https://github.com/astral-sh/ruff/pull/12675))
- \[`pydoclint`\] Deduplicate collected exceptions after traversing function bodies (`DOC501`) ([#12642](https://github.com/astral-sh/ruff/pull/12642))
- \[`pydoclint`\] Ignore `DOC` errors for stub functions ([#12651](https://github.com/astral-sh/ruff/pull/12651))
- \[`pydoclint`\] Teach rules to understand reraised exceptions as being explicitly raised (`DOC501`, `DOC502`) ([#12639](https://github.com/astral-sh/ruff/pull/12639))
- \[`ruff`\] Implement `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#12480](https://github.com/astral-sh/ruff/pull/12480))
- \[`ruff`\] Mark `RUF023` fix as unsafe if `__slots__` is not a set and the binding is used elsewhere ([#12692](https://github.com/astral-sh/ruff/pull/12692))

### Rule changes

- \[`refurb`\] Add autofix for `implicit-cwd` (`FURB177`) ([#12708](https://github.com/astral-sh/ruff/pull/12708))
- \[`ruff`\] Add autofix for `zip-instead-of-pairwise` (`RUF007`) ([#12663](https://github.com/astral-sh/ruff/pull/12663))
- \[`tryceratops`\] Add `BaseException` to `raise-vanilla-class` rule (`TRY002`) ([#12620](https://github.com/astral-sh/ruff/pull/12620))

### Server

- Ignore non-file workspace URLs; Ruff will display a warning notification in this case ([#12725](https://github.com/astral-sh/ruff/pull/12725))

### CLI

- Fix cache invalidation for nested `pyproject.toml` files ([#12727](https://github.com/astral-sh/ruff/pull/12727))

### Bug fixes

- \[`flake8-async`\] Fix false positives with multiple `async with` items (`ASYNC100`) ([#12643](https://github.com/astral-sh/ruff/pull/12643))
- \[`flake8-bandit`\] Avoid false positives for list concatenations in SQL construction (`S608`) ([#12720](https://github.com/astral-sh/ruff/pull/12720))
- \[`flake8-bugbear`\] Treat `return` as equivalent to `break` (`B909`) ([#12646](https://github.com/astral-sh/ruff/pull/12646))
- \[`flake8-comprehensions`\] Set comprehensions are not a violation for `sum` in `unnecessary-comprehension-in-call` (`C419`) ([#12691](https://github.com/astral-sh/ruff/pull/12691))
- \[`flake8-simplify`\] Parenthesize conditions based on precedence when merging if arms (`SIM114`) ([#12737](https://github.com/astral-sh/ruff/pull/12737))
- \[`pydoclint`\] Try both 'Raises' section styles when convention is unspecified (`DOC501`) ([#12649](https://github.com/astral-sh/ruff/pull/12649))

## 0.5.6

Ruff 0.5.6 automatically enables linting and formatting of notebooks in *preview mode*.
You can opt out of this behavior by adding `*.ipynb` to the `extend-exclude` setting.

```toml
[tool.ruff]
extend-exclude = ["*.ipynb"]
```

### Preview features

- Enable notebooks by default in preview mode ([#12621](https://github.com/astral-sh/ruff/pull/12621))
- \[`flake8-builtins`\] Implement import, lambda, and module shadowing ([#12546](https://github.com/astral-sh/ruff/pull/12546))
- \[`pydoclint`\] Add `docstring-missing-returns` (`DOC201`) and `docstring-extraneous-returns` (`DOC202`) ([#12485](https://github.com/astral-sh/ruff/pull/12485))

### Rule changes

- \[`flake8-return`\] Exempt cached properties and other property-like decorators from explicit return rule (`RET501`) ([#12563](https://github.com/astral-sh/ruff/pull/12563))

### Server

- Make server panic hook more error resilient ([#12610](https://github.com/astral-sh/ruff/pull/12610))
- Use `$/logTrace` for server trace logs in Zed and VS Code ([#12564](https://github.com/astral-sh/ruff/pull/12564))
- Keep track of deleted cells for reorder change request ([#12575](https://github.com/astral-sh/ruff/pull/12575))

### Configuration

- \[`flake8-implicit-str-concat`\] Always allow explicit multi-line concatenations when implicit concatenations are banned ([#12532](https://github.com/astral-sh/ruff/pull/12532))

### Bug fixes

- \[`flake8-async`\] Avoid flagging `asyncio.timeout`s as unused when the context manager includes `asyncio.TaskGroup` ([#12605](https://github.com/astral-sh/ruff/pull/12605))
- \[`flake8-slots`\] Avoid recommending `__slots__` for classes that inherit from more than `namedtuple` ([#12531](https://github.com/astral-sh/ruff/pull/12531))
- \[`isort`\] Avoid marking required imports as unused ([#12537](https://github.com/astral-sh/ruff/pull/12537))
- \[`isort`\] Preserve trailing inline comments on import-from statements ([#12498](https://github.com/astral-sh/ruff/pull/12498))
- \[`pycodestyle`\] Add newlines before comments (`E305`) ([#12606](https://github.com/astral-sh/ruff/pull/12606))
- \[`pycodestyle`\] Don't attach comments with mismatched indents ([#12604](https://github.com/astral-sh/ruff/pull/12604))
- \[`pyflakes`\] Fix preview-mode bugs in `F401` when attempting to autofix unused first-party submodule imports in an `__init__.py` file ([#12569](https://github.com/astral-sh/ruff/pull/12569))
- \[`pylint`\] Respect start index in `unnecessary-list-index-lookup` ([#12603](https://github.com/astral-sh/ruff/pull/12603))
- \[`pyupgrade`\] Avoid recommending no-argument super in `slots=True` dataclasses ([#12530](https://github.com/astral-sh/ruff/pull/12530))
- \[`pyupgrade`\] Use colon rather than dot formatting for integer-only types ([#12534](https://github.com/astral-sh/ruff/pull/12534))
- Fix NFKC normalization bug when removing unused imports ([#12571](https://github.com/astral-sh/ruff/pull/12571))

### Other changes

- Consider more stdlib decorators to be property-like ([#12583](https://github.com/astral-sh/ruff/pull/12583))
- Improve handling of metaclasses in various linter rules ([#12579](https://github.com/astral-sh/ruff/pull/12579))
- Improve consistency between linter rules in determining whether a function is a property ([#12581](https://github.com/astral-sh/ruff/pull/12581))

## 0.5.5

### Preview features

- \[`fastapi`\] Implement `fastapi-redundant-response-model` (`FAST001`) and `fastapi-non-annotated-dependency` (`FAST002`) ([#11579](https://github.com/astral-sh/ruff/pull/11579))
- \[`pydoclint`\] Implement `docstring-missing-exception` (`DOC501`) and `docstring-extraneous-exception` (`DOC502`) ([#11471](https://github.com/astral-sh/ruff/pull/11471))

### Rule changes

- \[`numpy`\] Fix NumPy 2.0 rule for `np.alltrue` and `np.sometrue` ([#12473](https://github.com/astral-sh/ruff/pull/12473))
- \[`numpy`\] Ignore `NPY201` inside `except` blocks for compatibility with older numpy versions ([#12490](https://github.com/astral-sh/ruff/pull/12490))
- \[`pep8-naming`\] Avoid applying `ignore-names` to `self` and `cls` function names (`N804`, `N805`) ([#12497](https://github.com/astral-sh/ruff/pull/12497))

### Formatter

- Fix incorrect placement of leading function comment with type params ([#12447](https://github.com/astral-sh/ruff/pull/12447))

### Server

- Do not bail code action resolution when a quick fix is requested ([#12462](https://github.com/astral-sh/ruff/pull/12462))

### Bug fixes

- Fix `Ord` implementation of `cmp_fix` ([#12471](https://github.com/astral-sh/ruff/pull/12471))
- Raise syntax error for unparenthesized generator expression in multi-argument call ([#12445](https://github.com/astral-sh/ruff/pull/12445))
- \[`pydoclint`\] Fix panic in `DOC501` reported in [#12428](https://github.com/astral-sh/ruff/pull/12428) ([#12435](https://github.com/astral-sh/ruff/pull/12435))
- \[`flake8-bugbear`\] Allow singleton tuples with starred expressions in `B013` ([#12484](https://github.com/astral-sh/ruff/pull/12484))

### Documentation

- Add Eglot setup guide for Emacs editor ([#12426](https://github.com/astral-sh/ruff/pull/12426))
- Add note about the breaking change in `nvim-lspconfig` ([#12507](https://github.com/astral-sh/ruff/pull/12507))
- Add note to include notebook files for native server ([#12449](https://github.com/astral-sh/ruff/pull/12449))
- Add setup docs for Zed editor ([#12501](https://github.com/astral-sh/ruff/pull/12501))

## 0.5.4

### Rule changes

- \[`ruff`\] Rename `RUF007` to `zip-instead-of-pairwise` ([#12399](https://github.com/astral-sh/ruff/pull/12399))

### Bug fixes

- \[`flake8-builtins`\] Avoid shadowing diagnostics for `@override` methods ([#12415](https://github.com/astral-sh/ruff/pull/12415))
- \[`flake8-comprehensions`\] Insert parentheses for multi-argument generators ([#12422](https://github.com/astral-sh/ruff/pull/12422))
- \[`pydocstyle`\] Handle escaped docstrings within docstring (`D301`) ([#12192](https://github.com/astral-sh/ruff/pull/12192))

### Documentation

- Fix GitHub link to Neovim setup ([#12410](https://github.com/astral-sh/ruff/pull/12410))
- Fix `output-format` default in settings reference ([#12409](https://github.com/astral-sh/ruff/pull/12409))

## 0.5.3

**Ruff 0.5.3 marks the stable release of the Ruff language server and introduces revamped
[documentation](https://docs.astral.sh/ruff/editors), including [setup guides for your editor of
choice](https://docs.astral.sh/ruff/editors/setup) and [the language server
itself](https://docs.astral.sh/ruff/editors/settings)**.

### Preview features

- Formatter: Insert empty line between suite and alternative branch after function/class definition ([#12294](https://github.com/astral-sh/ruff/pull/12294))
- \[`pyupgrade`\] Implement `unnecessary-default-type-args` (`UP043`) ([#12371](https://github.com/astral-sh/ruff/pull/12371))

### Rule changes

- \[`flake8-bugbear`\] Detect enumerate iterations in `loop-iterator-mutation` (`B909`) ([#12366](https://github.com/astral-sh/ruff/pull/12366))
- \[`flake8-bugbear`\] Remove `discard`, `remove`, and `pop` allowance for `loop-iterator-mutation` (`B909`) ([#12365](https://github.com/astral-sh/ruff/pull/12365))
- \[`pylint`\] Allow `repeated-equality-comparison` for mixed operations (`PLR1714`) ([#12369](https://github.com/astral-sh/ruff/pull/12369))
- \[`pylint`\] Ignore `self` and `cls` when counting arguments (`PLR0913`) ([#12367](https://github.com/astral-sh/ruff/pull/12367))
- \[`pylint`\] Use UTF-8 as default encoding in `unspecified-encoding` fix (`PLW1514`) ([#12370](https://github.com/astral-sh/ruff/pull/12370))

### Server

- Build settings index in parallel for the native server ([#12299](https://github.com/astral-sh/ruff/pull/12299))
- Use fallback settings when indexing the project ([#12362](https://github.com/astral-sh/ruff/pull/12362))
- Consider `--preview` flag for `server` subcommand for the linter and formatter ([#12208](https://github.com/astral-sh/ruff/pull/12208))

### Bug fixes

- \[`flake8-comprehensions`\] Allow additional arguments for `sum` and `max` comprehensions (`C419`) ([#12364](https://github.com/astral-sh/ruff/pull/12364))
- \[`pylint`\] Avoid dropping extra boolean operations in `repeated-equality-comparison` (`PLR1714`) ([#12368](https://github.com/astral-sh/ruff/pull/12368))
- \[`pylint`\] Consider expression before statement when determining binding kind (`PLR1704`) ([#12346](https://github.com/astral-sh/ruff/pull/12346))

### Documentation

- Add docs for Ruff language server ([#12344](https://github.com/astral-sh/ruff/pull/12344))
- Migrate to standalone docs repo ([#12341](https://github.com/astral-sh/ruff/pull/12341))
- Update versioning policy for editor integration ([#12375](https://github.com/astral-sh/ruff/pull/12375))

### Other changes

- Publish Wasm API to npm ([#12317](https://github.com/astral-sh/ruff/pull/12317))

## 0.5.2

### Preview features

- Use `space` separator before parenthesized expressions in comprehensions with leading comments ([#12282](https://github.com/astral-sh/ruff/pull/12282))
- \[`flake8-async`\] Update `ASYNC100` to include `anyio` and `asyncio` ([#12221](https://github.com/astral-sh/ruff/pull/12221))
- \[`flake8-async`\] Update `ASYNC109` to include `anyio` and `asyncio` ([#12236](https://github.com/astral-sh/ruff/pull/12236))
- \[`flake8-async`\] Update `ASYNC110` to include `anyio` and `asyncio` ([#12261](https://github.com/astral-sh/ruff/pull/12261))
- \[`flake8-async`\] Update `ASYNC115` to include `anyio` and `asyncio` ([#12262](https://github.com/astral-sh/ruff/pull/12262))
- \[`flake8-async`\] Update `ASYNC116` to include `anyio` and `asyncio` ([#12266](https://github.com/astral-sh/ruff/pull/12266))

### Rule changes

- \[`flake8-return`\] Exempt properties from explicit return rule (`RET501`) ([#12243](https://github.com/astral-sh/ruff/pull/12243))
- \[`numpy`\] Add `np.NAN`-to-`np.nan` diagnostic ([#12292](https://github.com/astral-sh/ruff/pull/12292))
- \[`refurb`\] Make `list-reverse-copy` an unsafe fix ([#12303](https://github.com/astral-sh/ruff/pull/12303))

### Server

- Consider `include` and `extend-include` settings in native server ([#12252](https://github.com/astral-sh/ruff/pull/12252))
- Include nested configurations in settings reloading ([#12253](https://github.com/astral-sh/ruff/pull/12253))

### CLI

- Omit code frames for fixes with empty ranges ([#12304](https://github.com/astral-sh/ruff/pull/12304))
- Warn about formatter incompatibility for `D203` ([#12238](https://github.com/astral-sh/ruff/pull/12238))

### Bug fixes

- Make cache-write failures non-fatal on Windows ([#12302](https://github.com/astral-sh/ruff/pull/12302))
- Treat `not` operations as boolean tests ([#12301](https://github.com/astral-sh/ruff/pull/12301))
- \[`flake8-bandit`\] Avoid `S310` violations for HTTP-safe f-strings ([#12305](https://github.com/astral-sh/ruff/pull/12305))
- \[`flake8-bandit`\] Support explicit string concatenations in `S310` HTTP detection ([#12315](https://github.com/astral-sh/ruff/pull/12315))
- \[`flake8-bandit`\] Fix `S113` false positive for `httpx` without `timeout` argument ([#12213](https://github.com/astral-sh/ruff/pull/12213))
- \[`pycodestyle`\] Remove "non-obvious" allowance for `E721` ([#12300](https://github.com/astral-sh/ruff/pull/12300))
- \[`pyflakes`\] Consider `with` blocks as single-item branches for redefinition analysis ([#12311](https://github.com/astral-sh/ruff/pull/12311))
- \[`refurb`\] Restrict forwarding for `newline` argument in `open()` calls to Python versions >= 3.10 ([#12244](https://github.com/astral-sh/ruff/pull/12244))

### Documentation

- Update help and documentation to reflect `--output-format full` default ([#12248](https://github.com/astral-sh/ruff/pull/12248))

### Performance

- Use more threads when discovering Python files ([#12258](https://github.com/astral-sh/ruff/pull/12258))

## 0.5.1

### Preview features

- \[`flake8-bugbear`\] Implement `mutable-contextvar-default` (`B039`) ([#12113](https://github.com/astral-sh/ruff/pull/12113))
- \[`pycodestyle`\] Whitespace after decorator (`E204`) ([#12140](https://github.com/astral-sh/ruff/pull/12140))
- \[`pytest`\] Reverse `PT001` and `PT023` defaults ([#12106](https://github.com/astral-sh/ruff/pull/12106))

### Rule changes

- Enable token-based rules on source with syntax errors ([#11950](https://github.com/astral-sh/ruff/pull/11950))
- \[`flake8-bandit`\] Detect `httpx` for `S113` ([#12174](https://github.com/astral-sh/ruff/pull/12174))
- \[`numpy`\] Update `NPY201` to include exception deprecations ([#12065](https://github.com/astral-sh/ruff/pull/12065))
- \[`pylint`\] Generate autofix for `duplicate-bases` (`PLE0241`) ([#12105](https://github.com/astral-sh/ruff/pull/12105))

### Server

- Avoid syntax error notification for source code actions ([#12148](https://github.com/astral-sh/ruff/pull/12148))
- Consider the content of the new cells during notebook sync ([#12203](https://github.com/astral-sh/ruff/pull/12203))
- Fix replacement edit range computation ([#12171](https://github.com/astral-sh/ruff/pull/12171))

### Bug fixes

- Disable auto-fix when source has syntax errors ([#12134](https://github.com/astral-sh/ruff/pull/12134))
- Fix cache key collisions for paths with separators ([#12159](https://github.com/astral-sh/ruff/pull/12159))
- Make `requires-python` inference robust to `==` ([#12091](https://github.com/astral-sh/ruff/pull/12091))
- Use char-wise width instead of `str`-width ([#12135](https://github.com/astral-sh/ruff/pull/12135))
- \[`pycodestyle`\] Avoid `E275` if keyword is followed by a comma ([#12136](https://github.com/astral-sh/ruff/pull/12136))
- \[`pycodestyle`\] Avoid `E275` if keyword is followed by a semicolon ([#12095](https://github.com/astral-sh/ruff/pull/12095))
- \[`pylint`\] Skip [dummy variables](https://docs.astral.sh/ruff/settings/#lint_dummy-variable-rgx) for `PLR1704` ([#12190](https://github.com/astral-sh/ruff/pull/12190))

### Performance

- Remove allocation in `parse_identifier` ([#12103](https://github.com/astral-sh/ruff/pull/12103))
- Use `CompactString` for `Identifier` AST node ([#12101](https://github.com/astral-sh/ruff/pull/12101))

## 0.5.0

Check out the [blog post](https://astral.sh/blog/ruff-v0.5.0) for a migration guide and overview of the changes!

@@ -56,7 +491,7 @@ The following rules have been stabilized and are no longer in preview:

- [`bad-open-mode`](https://docs.astral.sh/ruff/rules/bad-open-mode/) (`PLW1501`)
- [`empty-comment`](https://docs.astral.sh/ruff/rules/empty-comment/) (`PLR2044`)
- [`global-at-module-level`](https://docs.astral.sh/ruff/rules/global-at-module-level/) (`PLW0604`)
- [`misplaced-bare-raise`](https://docs.astral.sh/ruff/rules/misplaced-bare-raise%60/) (`PLE0744`)
- [`misplaced-bare-raise`](https://docs.astral.sh/ruff/rules/misplaced-bare-raise/) (`PLE0744`)
- [`non-ascii-import-name`](https://docs.astral.sh/ruff/rules/non-ascii-import-name/) (`PLC2403`)
- [`non-ascii-name`](https://docs.astral.sh/ruff/rules/non-ascii-name/) (`PLC2401`)
- [`nonlocal-and-global`](https://docs.astral.sh/ruff/rules/nonlocal-and-global/) (`PLE0115`)

@@ -2,35 +2,6 @@

Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.

- [The Basics](#the-basics)
  - [Prerequisites](#prerequisites)
  - [Development](#development)
  - [Project Structure](#project-structure)
  - [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
    - [Rule naming convention](#rule-naming-convention)
    - [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
  - [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
- [MkDocs](#mkdocs)
- [Release Process](#release-process)
  - [Creating a new release](#creating-a-new-release)
- [Ecosystem CI](#ecosystem-ci)
- [Benchmarking and Profiling](#benchmarking-and-profiling)
  - [CPython Benchmark](#cpython-benchmark)
  - [Microbenchmarks](#microbenchmarks)
    - [Benchmark-driven Development](#benchmark-driven-development)
    - [PR Summary](#pr-summary)
    - [Tips](#tips)
  - [Profiling Projects](#profiling-projects)
    - [Linux](#linux)
    - [Mac](#mac)
- [`cargo dev`](#cargo-dev)
- [Subsystems](#subsystems)
  - [Compilation Pipeline](#compilation-pipeline)
  - [Import Categorization](#import-categorization)
    - [Project root](#project-root)
    - [Package root](#package-root)
    - [Import categorization](#import-categorization-1)

## The Basics

Ruff welcomes contributions in the form of pull requests.

@@ -280,7 +251,7 @@ These represent, respectively: the schema used to parse the `pyproject.toml` file; an
intermediate representation; and the final, internal representation used to power Ruff.

To add a new configuration option, you'll likely want to modify these latter few files (along with
`arg.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
`args.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
`dummy_variable_rgx`, which defines a regular expression to match against acceptable unused
variables (e.g., `_`).
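
To see how such an option surfaces to users, here is a configuration sketch; the regex value below is illustrative, not the actual default:

```toml
[tool.ruff.lint]
# Illustrative value: treat any name starting with an underscore as intentionally unused.
dummy-variable-rgx = "^_.*$"
```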

@@ -333,22 +304,34 @@ even patch releases may contain [non-backwards-compatible changes](https://semve

### Creating a new release

1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
1. Run `./scripts/release/bump.sh`; this command will:
1. Run `./scripts/release.sh`; this command will:
   - Generate a temporary virtual environment with `rooster`
   - Generate a changelog entry in `CHANGELOG.md`
   - Update versions in `pyproject.toml` and `Cargo.toml`
   - Update references to versions in the `README.md` and documentation
   - Display contributors for the release
1. The changelog should then be editorialized for consistency
   - Often, labels will be missing from pull requests; these will need to be manually organized into the proper section
   - Changes should be edited to be user-facing descriptions, avoiding internal details
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
1. Run `cargo check`. This should update the lock file with new versions.
1. Create a pull request with the changelog and version updates
1. Merge the PR
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:
   - The new version number (without starting `v`)
1. The release workflow will do the following:
   1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
      uploaded anything; you can restart after pushing a fix. If you just need to rerun the build,
      make sure you're [re-running all the failed
@@ -359,14 +342,25 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
   1. Attach artifacts to draft GitHub release
   1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
      downstream jobs manually if needed.
1. Verify the GitHub release:
   1. The Changelog should match the content of `CHANGELOG.md`
   1. Append the contributors from the `bump.sh` script
   1. Append the contributors from the `scripts/release.sh` script
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
   1. One can determine if an update is needed when
      `git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
   1. Once run successfully, you should follow the link in the output to create a PR.
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
   [`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
   the release instructions in those repositories. `ruff-lsp` should always be updated
   before `ruff-vscode`.

   This step is generally not required for a patch release, but should always be done
   for a minor release.

## Ecosystem CI

@@ -389,7 +383,7 @@ We have several ways of benchmarking and profiling Ruff:

- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects

> \[!NOTE\]
> **Note**
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
> applications, like web browsers). You may also want to switch your CPU to a "performance"
> mode, if it exists, especially when benchmarking short-lived processes.

@@ -403,12 +397,18 @@ which makes it a good target for benchmarking.

```shell
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
```

Install `hyperfine`:

```shell
cargo install hyperfine
```

To benchmark the release build:

```shell
cargo build --release && hyperfine --warmup 10 \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"

Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
  Time (mean ± σ):     293.8 ms ±   3.2 ms    [User: 2384.6 ms, System: 90.3 ms]
```

@@ -427,7 +427,7 @@ To benchmark against the ecosystem's existing tools:

```shell
hyperfine --ignore-failure --warmup 5 \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
  "pyflakes crates/ruff_linter/resources/test/cpython" \
  "autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
  "pycodestyle crates/ruff_linter/resources/test/cpython" \
```

@@ -473,7 +473,7 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:

```shell
cargo build --release && hyperfine --warmup 10 \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
```

You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
@@ -530,6 +530,8 @@ You can run the benchmarks with

```shell
cargo benchmark
```

`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`

#### Benchmark-driven Development

Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
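
For orientation, a minimal Criterion benchmark has this general shape; the `bench_lexer` name and the closure body below are hypothetical placeholders, not Ruff's actual harness:

```rust
// Minimal Criterion sketch (hypothetical names; not Ruff's actual benchmark code).
use criterion::{black_box, criterion_group, criterion_main, Criterion};

fn bench_lexer(c: &mut Criterion) {
    // Replace the closure body with the code under test,
    // e.g. running the lexer over a fixture file.
    c.bench_function("lexer", |b| b.iter(|| black_box(2 + 2)));
}

criterion_group!(benches, bench_lexer);
criterion_main!(benches);
```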

@@ -568,7 +570,7 @@ cargo install critcmp

#### Tips

- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
  to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)

@@ -905,15 +907,11 @@ There are three ways in which an import can be categorized as "first-party":

   package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
   automatically. This check is as simple as comparing the first segment of the current file's
   module path to the first segment of the import.
1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
   sets the directories to scan when identifying first-party imports. The algorithm is
   straightforward: given an import, like `import foo`, iterate over the directories enumerated in
   the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
   file `foo.py`.

   By default, `src` is set to the project root. In the above example, we'd want to set
   `src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
   as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
   unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
   the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
   explicitly.
   By default, `src` is set to the project root, along with the `"src"` subdirectory in the project root.
This ensures that Ruff supports both flat and "src" layouts out of the box.
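
As a sketch, a project that keeps first-party code in more than one directory (the directory names here are illustrative) could declare its source roots explicitly:

```toml
[tool.ruff]
# Scan both directories when classifying first-party imports.
src = ["src", "tests"]
```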

Cargo.lock (generated; 734 lines changed): file diff suppressed because it is too large.

Cargo.toml (41 lines changed):
@@ -4,7 +4,7 @@ resolver = "2"

[workspace.package]
edition = "2021"
rust-version = "1.75"
rust-version = "1.76"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -35,7 +35,9 @@ ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
ruff_workspace = { path = "crates/ruff_workspace" }

red_knot_module_resolver = { path = "crates/red_knot_module_resolver" }
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
red_knot_workspace = { path = "crates/red_knot_workspace" }

aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
@@ -48,16 +50,17 @@ cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.5.1" }
clap_complete_command = { version = "0.6.0" }
clearscreen = { version = "3.0.0" }
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
colored = { version = "2.1.0" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
compact_str = "0.8.0"
criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "5.5.3" }
dashmap = { version = "6.0.1" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
etcetera = { version = "0.8.0" }
@@ -69,7 +72,6 @@ hashbrown = "0.14.3"
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.2.6" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
@@ -92,21 +94,21 @@ mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
ordermap = { version = "0.5.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
parking_lot = "0.12.1"
pep440_rs = { version = "0.6.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.4.0" }
quick-junit = { version = "0.5.0" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f706aa2d32d473ee633a77c1af01d180c85da308" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -119,7 +121,6 @@ serde_with = { version = "3.6.0", default-features = false, features = [
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
smol_str = { version = "0.2.2" }
static_assertions = "1.1.0"
strum = { version = "0.26.0", features = ["strum_macros"] }
strum_macros = { version = "0.26.0" }
@@ -127,12 +128,13 @@ syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "1.0.58" }
tikv-jemallocator = { version = "0.5.0" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.3.0" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt"] }
tracing-tree = { version = "0.4.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
@@ -151,11 +153,12 @@ walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false, features = ["zstd"] }
zip = { version = "0.6.6", default-features = false }

[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
unexpected_cfgs = { level = "warn", check-cfg = ["cfg(fuzzing)", "cfg(codspeed)"] }

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
@@ -227,7 +230,7 @@ inherits = "release"
# Config for 'cargo dist'
[workspace.metadata.dist]
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.14.0"
cargo-dist-version = "0.18.0"
# CI backends to support
ci = ["github"]
# The installers to generate for each app
@@ -258,21 +261,23 @@ targets = [
]
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
auto-includes = false
# Whether cargo-dist should create a Github Release or use an existing draft
# Whether cargo-dist should create a GitHub Release or use an existing draft
create-release = true
# Publish jobs to run in CI
pr-run-mode = "skip"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
# The stage during which the GitHub Release should be created
github-release = "announce"
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
# Local artifacts jobs to run in CI
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi"]
publish-jobs = ["./publish-pypi", "./publish-wasm"]
# Announcement jobs to run in CI
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
# Skip checking whether the specified configuration files are up to date
allow-dirty = ["ci"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
# Whether to install an updater program
install-updater = false

LICENSE (25 lines changed):
@@ -1371,3 +1371,28 @@ are:

OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

- pydoclint, licensed as follows:
  """
  MIT License

  Copyright (c) 2023 jsh9

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
  in the Software without restriction, including without limitation the rights
  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  copies of the Software, and to permit persons to whom the Software is
  furnished to do so, subject to the following conditions:

  The above copyright notice and this permission notice shall be included in all
  copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  SOFTWARE.
  """

README.md (47 lines changed):
@@ -29,14 +29,14 @@ An extremely fast Python linter and code formatter, written in Rust.

- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.13 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
  of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)

Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.
@@ -110,7 +110,7 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).

1. [Who's Using Ruff?](#whos-using-ruff)
1. [License](#license)

## Getting Started
## Getting Started<a id="getting-started"></a>

For more, see the [documentation](https://docs.astral.sh/ruff/).

@@ -119,7 +119,25 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).

Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:

```shell
# With pip.
pip install ruff

# With pipx.
pipx install ruff
```

Starting with version `0.5.0`, Ruff can be installed with our standalone installers:

```shell
# On macOS and Linux.
curl -LsSf https://astral.sh/ruff/install.sh | sh

# On Windows.
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.6.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.6.3/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -152,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff

```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.5.0
  rev: v0.6.3
  hooks:
    # Run the linter.
    - id: ruff
@@ -161,8 +179,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
    - id: ruff-format
```

Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).

Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/chartboost/ruff-action):
@@ -178,7 +195,7 @@ jobs:
  - uses: chartboost/ruff-action@v1
```

### Configuration
### Configuration<a id="configuration"></a>

Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
@@ -274,7 +291,7 @@ features that may change prior to stabilization.

See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.

## Rules
## Rules<a id="rules"></a>

<!-- Begin section: Rules -->

@@ -350,21 +367,21 @@ quality tools, including:

For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).

## Contributing
## Contributing<a id="contributing"></a>

Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).

You can also join us on [**Discord**](https://discord.com/invite/astral-sh).

## Support
## Support<a id="support"></a>

Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).

You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).

## Acknowledgements
## Acknowledgements<a id="acknowledgements"></a>

Ruff's linter draws on both the APIs and implementation details of many other
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
@@ -388,7 +405,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a

Ruff is released under the MIT license.

## Who's Using Ruff?
## Who's Using Ruff?<a id="whos-using-ruff"></a>

Ruff is used by a number of major open-source projects and companies, including:

@@ -406,6 +423,7 @@ Ruff is used by a number of major open-source projects and companies, including:

- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
- [Dify](https://github.com/langgenius/dify)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Godot](https://github.com/godotengine/godot)
- [Gradio](https://github.com/gradio-app/gradio)
@@ -416,6 +434,7 @@ Ruff is used by a number of major open-source projects and companies, including:

- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
  [Datasets](https://github.com/huggingface/datasets),
  [Diffusers](https://github.com/huggingface/diffusers))
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
@@ -505,7 +524,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m

```html
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```

## License
## License<a id="license"></a>

This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)

@@ -1,6 +1,6 @@

[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["crates/red_knot_module_resolver/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = ["crates/red_knot_python_semantic/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`

@@ -10,4 +10,12 @@ doc-valid-idents = [
    "SCREAMING_SNAKE_CASE",
    "SQLAlchemy",
    "StackOverflow",
    "PyCharm",
]

ignore-interior-mutability = [
    # Interned is read-only. The wrapped `Rc` never gets updated.
    "ruff_formatter::format_element::Interned",
    # The expression is read-only.
    "ruff_python_ast::hashable::HashableExpr",
]

@@ -1,6 +1,6 @@

[package]
name = "red_knot"
version = "0.1.0"
version = "0.0.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
@@ -12,32 +12,28 @@ license.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
red_knot_module_resolver = { workspace = true }
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true }
red_knot_server = { workspace = true }

ruff_python_parser = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_text_size = { workspace = true }
ruff_index = { workspace = true }
ruff_notebook = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }

anyhow = { workspace = true }
bitflags = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
colored = { workspace = true }
countme = { workspace = true, features = ["enable"] }
crossbeam = { workspace = true }
ctrlc = { version = "3.4.4" }
dashmap = { workspace = true }
hashbrown = { workspace = true }
indexmap = { workspace = true }
is-macro = { workspace = true }
notify = { workspace = true }
parking_lot = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
smol_str = { version = "0.2.1" }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true, features = ["release_max_level_debug"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
tracing-flame = { workspace = true }
tracing-tree = { workspace = true }

[dev-dependencies]
filetime = { workspace = true }
tempfile = { workspace = true }

[lints]

crates/red_knot/docs/tracing-flamegraph.png (new binary file; 40 KiB): binary file not shown.

crates/red_knot/docs/tracing.md (new file; 128 lines):
@@ -0,0 +1,128 @@

# Tracing

Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
Note that tracing messages with severity `debug` or greater are user-facing; they should be phrased accordingly.
Tracing spans are only shown when using `-vvv`.

## Verbosity levels

The CLI supports different verbosity levels.

- default: Only show errors and warnings.
- `-v` activates `info!`: Show generally useful information, such as the paths of configuration files and the detected platform. This is not a lot of messages; it's the kind of output you might enable in CI by default, much as `cargo build` shows which packages are fresh.
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, you're logging everything. Most of this output is wasted and it's really slow; we dump, for example, the entire resolution graph. It is only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.
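
As a rough sketch of how the flags map to levels (the invocations below are illustrative):

```bash
red_knot          # default: errors and warnings only
red_knot -v       # + info!
red_knot -vv      # + debug! and timestamps
red_knot -vvv     # + trace! (debug builds only) and tracing spans
```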

## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`

By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
but this can result in confusing logging output where messages from different threads are intertwined.
To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.
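
For example, a single-threaded run with debug-level logs might look like the following (the target directory is illustrative):

```bash
RAYON_NUM_THREADS=1 RED_KNOT_LOG=debug red_knot -- --current-directory=../test -vv
```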

### Examples

#### Show all debug messages

Shows debug messages from all crates.

```bash
RED_KNOT_LOG=debug
```

#### Show salsa query execution messages

Show the salsa `execute: my_query` messages in addition to all red knot messages.

```bash
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
```

#### Show typing traces

Only show traces for the `red_knot_python_semantic::types` module.

```bash
RED_KNOT_LOG="red_knot_python_semantic::types"
```

Note: Ensure that you use `-vvv` to see tracing spans.

#### Show messages for a single file

Shows all messages that are inside of a span for a specific file.

```bash
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
```

**Note**: Tracing still shows all spans because tracing can't know at the time of entering the span
whether one of its children has the file `x.py`.

**Note**: Salsa currently logs entire memoized values (in our case, the source text and parsed AST).
This very quickly leads to extremely long outputs.

## Tracing and Salsa

Be mindful about using `tracing` in Salsa queries, especially when using `warn` or `error`, because it isn't guaranteed
that the query will execute after restoring from a persistent cache, in which case the user won't see the message.

For example, don't use `tracing` to show the user a message when generating a lint violation failed,
because the message would only be shown when linting the file the first time, but not on subsequent analysis
runs or when restoring from a persistent cache. This can be confusing for users because they
don't understand why a specific lint violation isn't raised. Instead, change your
query to return the failure as part of the query's result or use a Salsa accumulator.

## Tracing in tests

You can use `ruff_db::testing::setup_logging` or `ruff_db::testing::setup_logging_with_filter` to set up logging in tests.

```rust
use ruff_db::testing::setup_logging;

#[test]
fn test() {
    let _logging = setup_logging();

    tracing::info!("This message will be printed to stderr");
}
```

Note: Most test runners capture stderr and only show its output when a test fails.

Note also that `setup_logging` only sets up logging for the current thread because [`set_global_default`](https://docs.rs/tracing/latest/tracing/subscriber/fn.set_global_default.html) can only be
called **once**.

## Release builds

`trace!` events are removed in release builds.
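
This happens statically through the `tracing` crate's max-level features: as shown in the dependency diff above, the crate enables `release_max_level_debug`, which caps release-build logging at `debug` and compiles `trace!` out.

```toml
# From the crate's Cargo.toml: "release_max_level_debug" statically removes
# trace!-level events from release builds.
tracing = { workspace = true, features = ["release_max_level_debug"] }
```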

## Profiling

Red Knot generates a folded stack trace to the current directory named `tracing.folded` when setting the environment variable `RED_KNOT_LOG_PROFILE` to `1` or `true`.

```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
```

You can convert the textual representation into a visual one using `inferno`.

```shell
cargo install inferno
```

```shell
# flamegraph
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg

# flamechart
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
```

![flamegraph](tracing-flamegraph.png)

See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.

@@ -1,418 +0,0 @@
use std::any::type_name;
use std::fmt::{Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;

use rustc_hash::FxHashMap;

use ruff_index::{Idx, IndexVec};
use ruff_python_ast::visitor::source_order;
use ruff_python_ast::visitor::source_order::{SourceOrderVisitor, TraversalSignal};
use ruff_python_ast::{
    AnyNodeRef, AstNode, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule,
    NodeKind, Parameter, Stmt, StmtAnnAssign, StmtAssign, StmtAugAssign, StmtClassDef,
    StmtFunctionDef, StmtGlobal, StmtImport, StmtImportFrom, StmtNonlocal, StmtTypeAlias,
    TypeParam, TypeParamParamSpec, TypeParamTypeVar, TypeParamTypeVarTuple, WithItem,
};
use ruff_text_size::{Ranged, TextRange};

/// A type agnostic ID that uniquely identifies an AST node in a file.
#[ruff_index::newtype_index]
pub struct AstId;

/// A typed ID that uniquely identifies an AST node in a file.
///
/// This is different from [`AstId`] in that it is a combination of ID and the type of the node the ID identifies.
/// Typing the ID prevents mixing IDs of different node types and allows restricting the API to only accept
/// nodes for which an ID has been created (not all AST nodes get an ID).
pub struct TypedAstId<N: HasAstId> {
    erased: AstId,
    _marker: PhantomData<fn() -> N>,
}

impl<N: HasAstId> TypedAstId<N> {
    /// Upcasts this ID from a more specific node type to a more general node type.
    pub fn upcast<M: HasAstId>(self) -> TypedAstId<M>
    where
        N: Into<M>,
    {
        TypedAstId {
            erased: self.erased,
            _marker: PhantomData,
        }
    }
}

impl<N: HasAstId> Copy for TypedAstId<N> {}
impl<N: HasAstId> Clone for TypedAstId<N> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<N: HasAstId> PartialEq for TypedAstId<N> {
    fn eq(&self, other: &Self) -> bool {
        self.erased == other.erased
    }
}

impl<N: HasAstId> Eq for TypedAstId<N> {}
impl<N: HasAstId> Hash for TypedAstId<N> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.erased.hash(state);
    }
}

impl<N: HasAstId> Debug for TypedAstId<N> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_tuple("TypedAstId")
            .field(&self.erased)
            .field(&type_name::<N>())
            .finish()
    }
}

pub struct AstIds {
    ids: IndexVec<AstId, NodeKey>,
    reverse: FxHashMap<NodeKey, AstId>,
}

impl AstIds {
    // TODO rust-analyzer doesn't allocate an ID for every node. It only allocates IDs for
    // nodes with a corresponding HIR element, that is, nodes that are definitions.
    pub fn from_module(module: &ModModule) -> Self {
        let mut visitor = AstIdsVisitor::default();

        // TODO: visit_module?
        // Make sure we visit the root
        visitor.create_id(module);
        visitor.visit_body(&module.body);

        while let Some(deferred) = visitor.deferred.pop() {
            match deferred {
                DeferredNode::FunctionDefinition(def) => {
                    def.visit_source_order(&mut visitor);
                }
                DeferredNode::ClassDefinition(def) => def.visit_source_order(&mut visitor),
            }
        }

        AstIds {
            ids: visitor.ids,
            reverse: visitor.reverse,
        }
    }

    /// Returns the ID to the root node.
    pub fn root(&self) -> NodeKey {
        self.ids[AstId::new(0)]
    }

    /// Returns the [`TypedAstId`] for a node.
    pub fn ast_id<N: HasAstId>(&self, node: &N) -> TypedAstId<N> {
        let key = node.syntax_node_key();
        TypedAstId {
            erased: self.reverse.get(&key).copied().unwrap(),
            _marker: PhantomData,
        }
    }

    /// Returns the [`TypedAstId`] for the node identified with the given [`TypedNodeKey`].
    pub fn ast_id_for_key<N: HasAstId>(&self, node: &TypedNodeKey<N>) -> TypedAstId<N> {
        let ast_id = self.ast_id_for_node_key(node.inner);

        TypedAstId {
            erased: ast_id,
            _marker: PhantomData,
        }
    }

    /// Returns the untyped [`AstId`] for the node identified by the given `node` key.
    pub fn ast_id_for_node_key(&self, node: NodeKey) -> AstId {
        self.reverse
            .get(&node)
            .copied()
            .expect("Can't find node in AstIds map.")
    }

    /// Returns the [`TypedNodeKey`] for the node identified by the given [`TypedAstId`].
    pub fn key<N: HasAstId>(&self, id: TypedAstId<N>) -> TypedNodeKey<N> {
        let syntax_key = self.ids[id.erased];

        TypedNodeKey::new(syntax_key).unwrap()
    }

    pub fn node_key<H: HasAstId>(&self, id: TypedAstId<H>) -> NodeKey {
        self.ids[id.erased]
    }
}

impl std::fmt::Debug for AstIds {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut map = f.debug_map();
        for (key, value) in self.ids.iter_enumerated() {
            map.entry(&key, &value);
        }

        map.finish()
    }
}

impl PartialEq for AstIds {
    fn eq(&self, other: &Self) -> bool {
        self.ids == other.ids
    }
}

impl Eq for AstIds {}

#[derive(Default)]
struct AstIdsVisitor<'a> {
    ids: IndexVec<AstId, NodeKey>,
    reverse: FxHashMap<NodeKey, AstId>,
    deferred: Vec<DeferredNode<'a>>,
}

impl<'a> AstIdsVisitor<'a> {
    fn create_id<A: HasAstId>(&mut self, node: &A) {
        let node_key = node.syntax_node_key();

        let id = self.ids.push(node_key);
        self.reverse.insert(node_key, id);
    }
}

impl<'a> SourceOrderVisitor<'a> for AstIdsVisitor<'a> {
    fn visit_stmt(&mut self, stmt: &'a Stmt) {
        match stmt {
            Stmt::FunctionDef(def) => {
                self.create_id(def);
                self.deferred.push(DeferredNode::FunctionDefinition(def));
                return;
            }
            // TODO defer visiting the assignment body, type alias parameters etc?
            Stmt::ClassDef(def) => {
                self.create_id(def);
                self.deferred.push(DeferredNode::ClassDefinition(def));
                return;
            }
            Stmt::Expr(_) => {
                // Skip
                return;
            }
            Stmt::Return(_) => {}
            Stmt::Delete(_) => {}
            Stmt::Assign(assignment) => self.create_id(assignment),
            Stmt::AugAssign(assignment) => {
                self.create_id(assignment);
            }
            Stmt::AnnAssign(assignment) => self.create_id(assignment),
            Stmt::TypeAlias(assignment) => self.create_id(assignment),
            Stmt::For(_) => {}
            Stmt::While(_) => {}
            Stmt::If(_) => {}
            Stmt::With(_) => {}
            Stmt::Match(_) => {}
            Stmt::Raise(_) => {}
            Stmt::Try(_) => {}
            Stmt::Assert(_) => {}
            Stmt::Import(import) => self.create_id(import),
            Stmt::ImportFrom(import_from) => self.create_id(import_from),
            Stmt::Global(global) => self.create_id(global),
            Stmt::Nonlocal(non_local) => self.create_id(non_local),
            Stmt::Pass(_) => {}
            Stmt::Break(_) => {}
            Stmt::Continue(_) => {}
            Stmt::IpyEscapeCommand(_) => {}
        }

        source_order::walk_stmt(self, stmt);
    }

    fn visit_expr(&mut self, _expr: &'a Expr) {}

    fn visit_parameter(&mut self, parameter: &'a Parameter) {
        self.create_id(parameter);
        source_order::walk_parameter(self, parameter);
    }

    fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler) {
        match except_handler {
            ExceptHandler::ExceptHandler(except_handler) => {
                self.create_id(except_handler);
            }
        }

        source_order::walk_except_handler(self, except_handler);
    }

    fn visit_with_item(&mut self, with_item: &'a WithItem) {
        self.create_id(with_item);
        source_order::walk_with_item(self, with_item);
    }

    fn visit_match_case(&mut self, match_case: &'a MatchCase) {
        self.create_id(match_case);
        source_order::walk_match_case(self, match_case);
    }

    fn visit_type_param(&mut self, type_param: &'a TypeParam) {
        self.create_id(type_param);
    }
}

enum DeferredNode<'a> {
    FunctionDefinition(&'a StmtFunctionDef),
    ClassDefinition(&'a StmtClassDef),
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct TypedNodeKey<N: AstNode> {
    /// The type erased node key.
    inner: NodeKey,
    _marker: PhantomData<fn() -> N>,
}

impl<N: AstNode> TypedNodeKey<N> {
    pub fn from_node(node: &N) -> Self {
        let inner = NodeKey::from_node(node.as_any_node_ref());
        Self {
            inner,
            _marker: PhantomData,
        }
    }

    pub fn new(node_key: NodeKey) -> Option<Self> {
        N::can_cast(node_key.kind).then_some(TypedNodeKey {
            inner: node_key,
            _marker: PhantomData,
        })
    }

    pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<N::Ref<'a>> {
        let node_ref = self.inner.resolve(root)?;

        Some(N::cast_ref(node_ref).unwrap())
    }

    pub fn resolve_unwrap<'a>(&self, root: AnyNodeRef<'a>) -> N::Ref<'a> {
        self.resolve(root).expect("node should resolve")
    }

    pub fn erased(&self) -> &NodeKey {
        &self.inner
    }
}

struct FindNodeKeyVisitor<'a> {
    key: NodeKey,
    result: Option<AnyNodeRef<'a>>,
}

impl<'a> SourceOrderVisitor<'a> for FindNodeKeyVisitor<'a> {
    fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal {
        if self.result.is_some() {
            return TraversalSignal::Skip;
        }

        if node.range() == self.key.range && node.kind() == self.key.kind {
            self.result = Some(node);
            TraversalSignal::Skip
        } else if node.range().contains_range(self.key.range) {
            TraversalSignal::Traverse
        } else {
            TraversalSignal::Skip
        }
    }

    fn visit_body(&mut self, body: &'a [Stmt]) {
        // TODO it would be more efficient to use binary search instead of linear
        for stmt in body {
            if stmt.range().start() > self.key.range.end() {
                break;
            }

            self.visit_stmt(stmt);
        }
    }
}

// TODO an alternative to this is to have a `NodeId` on each node (in increasing order depending on the position).
// This would allow reducing the size of this to a u32.
// What would be nice is if we could use an `Arc::weak_ref` here, but that only works if we use
// `Arc` internally.
// TODO: Implement the logic to resolve a node, given a db (and the correct file).
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct NodeKey {
    kind: NodeKind,
    range: TextRange,
}

impl NodeKey {
    pub fn from_node(node: AnyNodeRef) -> Self {
        NodeKey {
            kind: node.kind(),
            range: node.range(),
        }
    }
    pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<AnyNodeRef<'a>> {
        // We need to do a binary search here. Only traverse into a node if the range is within the node
        let mut visitor = FindNodeKeyVisitor {
            key: *self,
            result: None,
        };

        if visitor.enter_node(root) == TraversalSignal::Traverse {
            root.visit_preorder(&mut visitor);
        }

        visitor.result
    }
}

/// Marker trait implemented by AST nodes for which we extract the `AstId`.
pub trait HasAstId: AstNode {
    fn node_key(&self) -> TypedNodeKey<Self>
    where
        Self: Sized,
    {
        TypedNodeKey {
            inner: self.syntax_node_key(),
            _marker: PhantomData,
        }
    }

    fn syntax_node_key(&self) -> NodeKey {
        NodeKey {
            kind: self.as_any_node_ref().kind(),
            range: self.range(),
        }
    }
}

impl HasAstId for StmtFunctionDef {}
impl HasAstId for StmtClassDef {}
impl HasAstId for StmtAnnAssign {}
impl HasAstId for StmtAugAssign {}
impl HasAstId for StmtAssign {}
impl HasAstId for StmtTypeAlias {}

impl HasAstId for ModModule {}

impl HasAstId for StmtImport {}

impl HasAstId for StmtImportFrom {}

impl HasAstId for Parameter {}

impl HasAstId for TypeParam {}
impl HasAstId for Stmt {}
impl HasAstId for TypeParamTypeVar {}
impl HasAstId for TypeParamTypeVarTuple {}
impl HasAstId for TypeParamParamSpec {}
impl HasAstId for StmtGlobal {}
impl HasAstId for StmtNonlocal {}

impl HasAstId for ExceptHandlerExceptHandler {}
impl HasAstId for WithItem {}
impl HasAstId for MatchCase {}
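Taken together, `TypedNodeKey` and `NodeKey` let a consumer remember a node and re-find it in a freshly parsed tree, as long as the source text is unchanged. A hypothetical round trip, assuming stand-in helpers `parse_module` and `first_function_of` (not part of this diff):

```rust
// Sketch only: `parse_module` and `first_function_of` are hypothetical helpers.
fn key_survives_reparse(source: &str) {
    let module: ModModule = parse_module(source);
    let function: &StmtFunctionDef = first_function_of(&module);

    // A `NodeKey` is just (NodeKind, TextRange), so the key stays valid for
    // any tree parsed from identical source text.
    let key: TypedNodeKey<StmtFunctionDef> = function.node_key();

    let reparsed: ModModule = parse_module(source);
    let resolved = key.resolve(reparsed.as_any_node_ref());
    assert!(resolved.is_some());
}
```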
@@ -1,165 +0,0 @@
use std::fmt::Formatter;
use std::hash::Hash;
use std::sync::atomic::{AtomicUsize, Ordering};

use crate::db::QueryResult;
use dashmap::mapref::entry::Entry;

use crate::FxDashMap;

/// Simple key value cache that locks on a per-key level.
pub struct KeyValueCache<K, V> {
    map: FxDashMap<K, V>,
    statistics: CacheStatistics,
}

impl<K, V> KeyValueCache<K, V>
where
    K: Eq + Hash + Clone,
    V: Clone,
{
    pub fn try_get(&self, key: &K) -> Option<V> {
        if let Some(existing) = self.map.get(key) {
            self.statistics.hit();
            Some(existing.clone())
        } else {
            self.statistics.miss();
            None
        }
    }

    pub fn get<F>(&self, key: &K, compute: F) -> QueryResult<V>
    where
        F: FnOnce(&K) -> QueryResult<V>,
    {
        Ok(match self.map.entry(key.clone()) {
            Entry::Occupied(cached) => {
                self.statistics.hit();

                cached.get().clone()
            }
            Entry::Vacant(vacant) => {
                self.statistics.miss();

                let value = compute(key)?;
                vacant.insert(value.clone());
                value
            }
        })
    }

    pub fn set(&mut self, key: K, value: V) {
        self.map.insert(key, value);
    }

    pub fn remove(&mut self, key: &K) -> Option<V> {
        self.map.remove(key).map(|(_, value)| value)
    }

    pub fn clear(&mut self) {
        self.map.clear();
        self.map.shrink_to_fit();
    }

    pub fn statistics(&self) -> Option<Statistics> {
        self.statistics.to_statistics()
    }
}

impl<K, V> Default for KeyValueCache<K, V>
where
    K: Eq + Hash,
    V: Clone,
{
    fn default() -> Self {
        Self {
            map: FxDashMap::default(),
            statistics: CacheStatistics::default(),
        }
    }
}

impl<K, V> std::fmt::Debug for KeyValueCache<K, V>
where
    K: std::fmt::Debug + Eq + Hash,
    V: std::fmt::Debug,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_map();

        for entry in &self.map {
            debug.entry(&entry.key(), &entry.value());
        }

        debug.finish()
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Statistics {
    pub hits: usize,
    pub misses: usize,
}

impl Statistics {
    #[allow(clippy::cast_precision_loss)]
    pub fn hit_rate(&self) -> Option<f64> {
        if self.hits + self.misses == 0 {
            return None;
        }

        Some((self.hits as f64) / (self.hits + self.misses) as f64)
    }
}

#[cfg(debug_assertions)]
pub type CacheStatistics = DebugStatistics;

#[cfg(not(debug_assertions))]
pub type CacheStatistics = ReleaseStatistics;

pub trait StatisticsRecorder {
    fn hit(&self);
    fn miss(&self);
    fn to_statistics(&self) -> Option<Statistics>;
}

#[derive(Debug, Default)]
pub struct DebugStatistics {
    hits: AtomicUsize,
    misses: AtomicUsize,
}

impl StatisticsRecorder for DebugStatistics {
    // TODO figure out appropriate Ordering
    fn hit(&self) {
        self.hits.fetch_add(1, Ordering::SeqCst);
    }

    fn miss(&self) {
        self.misses.fetch_add(1, Ordering::SeqCst);
    }

    fn to_statistics(&self) -> Option<Statistics> {
        let hits = self.hits.load(Ordering::SeqCst);
        let misses = self.misses.load(Ordering::SeqCst);

        Some(Statistics { hits, misses })
    }
}

#[derive(Debug, Default)]
pub struct ReleaseStatistics;

impl StatisticsRecorder for ReleaseStatistics {
    #[inline]
    fn hit(&self) {}

    #[inline]
    fn miss(&self) {}

    #[inline]
    fn to_statistics(&self) -> Option<Statistics> {
        None
    }
}
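The `get` entry point above implements compute-if-absent memoization on top of DashMap's per-key entry lock, propagating cancellation through `QueryResult`. A small usage sketch (the value type and computation are made up for illustration; only the cache API comes from the code above):

```rust
// Illustrative only: memoize a derived value per FileId using the cache above.
fn path_len(
    cache: &KeyValueCache<FileId, usize>,
    db: &dyn SourceDb,
    file_id: FileId,
) -> QueryResult<usize> {
    cache.get(&file_id, |file_id| {
        db.cancelled()?; // bail out early if the queries were cancelled
        let path = db.file_path(*file_id);
        Ok(path.as_os_str().len()) // stand-in for a real computation
    })
}
```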
@@ -1,42 +0,0 @@
use std::sync::atomic::AtomicBool;
use std::sync::Arc;

#[derive(Debug, Clone, Default)]
pub struct CancellationTokenSource {
    signal: Arc<AtomicBool>,
}

impl CancellationTokenSource {
    pub fn new() -> Self {
        Self {
            signal: Arc::new(AtomicBool::new(false)),
        }
    }

    #[tracing::instrument(level = "trace", skip_all)]
    pub fn cancel(&self) {
        self.signal.store(true, std::sync::atomic::Ordering::SeqCst);
    }

    pub fn is_cancelled(&self) -> bool {
        self.signal.load(std::sync::atomic::Ordering::SeqCst)
    }

    pub fn token(&self) -> CancellationToken {
        CancellationToken {
            signal: self.signal.clone(),
        }
    }
}

#[derive(Clone, Debug)]
pub struct CancellationToken {
    signal: Arc<AtomicBool>,
}

impl CancellationToken {
    /// Returns `true` if cancellation has been requested.
    pub fn is_cancelled(&self) -> bool {
        self.signal.load(std::sync::atomic::Ordering::SeqCst)
    }
}
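The intended protocol, sketched with standard-library threads (illustrative; only the token types come from the code above): the source stays with the coordinator, tokens travel into workers, and workers poll `is_cancelled` at safe points.

```rust
// Illustrative cooperative-cancellation protocol for the types above.
fn run_with_cancellation() {
    let source = CancellationTokenSource::new();
    let token = source.token();

    let worker = std::thread::spawn(move || {
        while !token.is_cancelled() {
            // ... do a bounded chunk of work, then re-check the token ...
        }
    });

    source.cancel(); // request cancellation; workers observe it on their next check
    worker.join().unwrap();
}
```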
@@ -1,248 +0,0 @@
use std::sync::Arc;

pub use jars::{HasJar, HasJars};
pub use query::{QueryError, QueryResult};
pub use runtime::DbRuntime;
pub use storage::JarsStorage;

use crate::files::FileId;
use crate::lint::{LintSemanticStorage, LintSyntaxStorage};
use crate::module::ModuleResolver;
use crate::parse::ParsedStorage;
use crate::semantic::SemanticIndexStorage;
use crate::semantic::TypeStore;
use crate::source::SourceStorage;

mod jars;
mod query;
mod runtime;
mod storage;

pub trait Database {
    /// Returns a reference to the runtime of the current worker.
    fn runtime(&self) -> &DbRuntime;

    /// Returns a mutable reference to the runtime. Only one worker can hold a mutable reference to the runtime.
    fn runtime_mut(&mut self) -> &mut DbRuntime;

    /// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
    fn cancelled(&self) -> QueryResult<()> {
        self.runtime().cancelled()
    }

    /// Returns `true` if the queries have been cancelled.
    fn is_cancelled(&self) -> bool {
        self.runtime().is_cancelled()
    }
}

/// Database that supports running queries from multiple threads.
pub trait ParallelDatabase: Database + Send {
    /// Creates a snapshot of the database state that can be used to query the database in another thread.
    ///
    /// The snapshot is a read-only view of the database, but query results are shared between threads.
    /// All queries are automatically cancelled when applying any mutation (calling [`HasJars::jars_mut`])
    /// to the database (not to a snapshot, because snapshots are read-only).
    ///
    /// ## Creating a snapshot
    ///
    /// Creating a snapshot of the database's jars is cheap, but creating a snapshot of
    /// other state stored on the database might require deep-cloning data. That's why you should
    /// avoid creating snapshots in a hot function (e.g. don't create a snapshot for each file; instead,
    /// create a snapshot when scheduling the check of an entire program).
    ///
    /// ## Salsa compatibility
    /// Salsa prohibits creating a snapshot while running a local query (it's fine if other workers run a query) [[source](https://github.com/salsa-rs/salsa/issues/80)].
    /// We should avoid creating snapshots while running a query because we might want to adopt Salsa in the future (if we can figure out persistent caching).
    /// Unfortunately, the infrastructure doesn't provide an automated way of knowing when a query is run; that's
    /// why we have to "enforce" this constraint manually.
    #[must_use]
    fn snapshot(&self) -> Snapshot<Self>;
}

pub trait DbWithJar<Jar>: Database + HasJar<Jar> {}

/// Readonly snapshot of a database.
///
/// ## Deadlocks
/// A snapshot should always be dropped as soon as it is no longer necessary to run queries.
/// Storing the snapshot without running a query or periodically checking if cancellation was requested
/// can lead to deadlocks, because mutating the [`Database`] requires cancelling all pending queries
/// and waiting for all [`Snapshot`]s to be dropped.
#[derive(Debug)]
pub struct Snapshot<DB: ?Sized>
where
    DB: ParallelDatabase,
{
    db: DB,
}

impl<DB> Snapshot<DB>
where
    DB: ParallelDatabase,
{
    pub fn new(db: DB) -> Self {
        Snapshot { db }
    }
}

impl<DB> std::ops::Deref for Snapshot<DB>
where
    DB: ParallelDatabase,
{
    type Target = DB;

    fn deref(&self) -> &DB {
        &self.db
    }
}

pub trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

// Red knot specific databases code.

pub trait SourceDb: DbWithJar<SourceJar> {
    // queries
    fn file_id(&self, path: &std::path::Path) -> FileId;

    fn file_path(&self, file_id: FileId) -> Arc<std::path::Path>;
}

pub trait SemanticDb: SourceDb + DbWithJar<SemanticJar> + Upcast<dyn SourceDb> {}

pub trait LintDb: SemanticDb + DbWithJar<LintJar> + Upcast<dyn SemanticDb> {}

pub trait Db: LintDb + Upcast<dyn LintDb> {}

#[derive(Debug, Default)]
pub struct SourceJar {
    pub sources: SourceStorage,
    pub parsed: ParsedStorage,
}

#[derive(Debug, Default)]
pub struct SemanticJar {
    pub module_resolver: ModuleResolver,
    pub semantic_indices: SemanticIndexStorage,
    pub type_store: TypeStore,
}

#[derive(Debug, Default)]
pub struct LintJar {
    pub lint_syntax: LintSyntaxStorage,
    pub lint_semantic: LintSemanticStorage,
}

#[cfg(test)]
pub(crate) mod tests {
    use std::path::Path;
    use std::sync::Arc;

    use crate::db::{
        Database, DbRuntime, DbWithJar, HasJar, HasJars, JarsStorage, LintDb, LintJar, QueryResult,
        SourceDb, SourceJar, Upcast,
    };
    use crate::files::{FileId, Files};

    use super::{SemanticDb, SemanticJar};

    // This can be a partial database used in a single crate for testing.
    // It would hold less data than the full database.
    #[derive(Debug, Default)]
    pub(crate) struct TestDb {
        files: Files,
        jars: JarsStorage<Self>,
    }

    impl HasJar<SourceJar> for TestDb {
        fn jar(&self) -> QueryResult<&SourceJar> {
            Ok(&self.jars()?.0)
        }

        fn jar_mut(&mut self) -> &mut SourceJar {
            &mut self.jars_mut().0
        }
    }

    impl HasJar<SemanticJar> for TestDb {
        fn jar(&self) -> QueryResult<&SemanticJar> {
            Ok(&self.jars()?.1)
        }

        fn jar_mut(&mut self) -> &mut SemanticJar {
            &mut self.jars_mut().1
        }
    }

    impl HasJar<LintJar> for TestDb {
        fn jar(&self) -> QueryResult<&LintJar> {
            Ok(&self.jars()?.2)
        }

        fn jar_mut(&mut self) -> &mut LintJar {
            &mut self.jars_mut().2
        }
    }

    impl SourceDb for TestDb {
        fn file_id(&self, path: &Path) -> FileId {
            self.files.intern(path)
        }

        fn file_path(&self, file_id: FileId) -> Arc<Path> {
            self.files.path(file_id)
        }
    }

    impl DbWithJar<SourceJar> for TestDb {}

    impl Upcast<dyn SourceDb> for TestDb {
        fn upcast(&self) -> &(dyn SourceDb + 'static) {
            self
        }
    }

    impl SemanticDb for TestDb {}

    impl DbWithJar<SemanticJar> for TestDb {}

    impl Upcast<dyn SemanticDb> for TestDb {
        fn upcast(&self) -> &(dyn SemanticDb + 'static) {
            self
        }
    }

    impl LintDb for TestDb {}

    impl Upcast<dyn LintDb> for TestDb {
        fn upcast(&self) -> &(dyn LintDb + 'static) {
            self
        }
    }

    impl DbWithJar<LintJar> for TestDb {}

    impl HasJars for TestDb {
        type Jars = (SourceJar, SemanticJar, LintJar);

        fn jars(&self) -> QueryResult<&Self::Jars> {
            self.jars.jars()
        }

        fn jars_mut(&mut self) -> &mut Self::Jars {
            self.jars.jars_mut()
        }
    }

    impl Database for TestDb {
        fn runtime(&self) -> &DbRuntime {
            self.jars.runtime()
        }

        fn runtime_mut(&mut self) -> &mut DbRuntime {
            self.jars.runtime_mut()
        }
    }
}
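To make the snapshot rules above concrete: a hypothetical scheduler takes one snapshot per scheduled unit of work (here, per file) and hands it to a worker thread. A sketch assuming the traits above; `check_file` is a stand-in for a real query, not part of this diff:

```rust
// Hypothetical fan-out over the traits above; `check_file` is illustrative.
fn check_program<Db: ParallelDatabase>(db: &Db, files: Vec<FileId>) {
    std::thread::scope(|scope| {
        for file_id in files {
            // One snapshot per scheduled unit of work, not per inner query.
            let snapshot = db.snapshot();
            scope.spawn(move || {
                // `Snapshot` derefs to the database; queries observe cancellation
                // through `QueryResult` once the main database is mutated.
                let _ = check_file(&*snapshot, file_id);
            });
        }
    });
}
```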
@@ -1,37 +0,0 @@
use crate::db::query::QueryResult;

/// Gives access to a specific jar in the database.
///
/// Nope, the terminology isn't borrowed from Java but from Salsa <https://salsa-rs.github.io/salsa/>,
/// which is an analogy to storing the salsa in different jars.
///
/// The basic idea is that each crate can define its own jar and the jars can be combined into a single
/// database in the top-level crate. Each crate also defines its own `Database` trait. The combination of
/// the `Database` trait and the jar allows writing queries in isolation without having to know how they get composed at the upper levels.
///
/// Salsa further defines a `HasIngredient` trait which slices the jar to a specific storage (e.g. a specific cache).
/// We don't need this just yet because we write our queries by hand. We may want a similar trait if we decide
/// to use a macro to generate the queries.
pub trait HasJar<T> {
    /// Gives a read-only reference to the jar.
    fn jar(&self) -> QueryResult<&T>;

    /// Gives a mutable reference to the jar.
    fn jar_mut(&mut self) -> &mut T;
}

/// Gives access to the jars in a database.
pub trait HasJars {
    /// A type storing the jars.
    ///
    /// Most commonly, this is a tuple where each jar is a tuple element.
    type Jars: Default;

    /// Gives access to the underlying jars but tests if the queries have been cancelled.
    ///
    /// Returns `Err(QueryError::Cancelled)` if the queries have been cancelled.
    fn jars(&self) -> QueryResult<&Self::Jars>;

    /// Gives mutable access to the underlying jars.
    fn jars_mut(&mut self) -> &mut Self::Jars;
}
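In this scheme, a hand-written query reads its inputs through the jar, so cancellation is checked on every jar access and propagates via `?`. A sketch of the pattern (the `SourceJar`-based body is illustrative, not a query from this diff):

```rust
// Illustrative query shape over the traits above.
fn parsed_module<Db>(db: &Db, _file_id: FileId) -> QueryResult<()>
where
    Db: HasJar<SourceJar> + Database,
{
    // `jar` returns Err(QueryError::Cancelled) once the database is mutated,
    // unwinding the whole query tree through `?`.
    let jar: &SourceJar = db.jar()?;
    let _parsed_storage = &jar.parsed; // look up / compute in the jar's storage
    Ok(())
}
```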
@@ -1,20 +0,0 @@
use std::fmt::{Display, Formatter};

/// Reason why a db query operation failed.
#[derive(Debug, Clone, Copy)]
pub enum QueryError {
    /// The query was cancelled because the DB was mutated or the query was cancelled by the host (e.g. on a file change or when pressing CTRL+C).
    Cancelled,
}

impl Display for QueryError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            QueryError::Cancelled => f.write_str("query was cancelled"),
        }
    }
}

impl std::error::Error for QueryError {}

pub type QueryResult<T> = Result<T, QueryError>;
@@ -1,41 +0,0 @@
use crate::cancellation::CancellationTokenSource;
use crate::db::{QueryError, QueryResult};

/// Holds the jar agnostic state of the database.
#[derive(Debug, Default)]
pub struct DbRuntime {
    /// The cancellation token source used to signal other workers that the queries should be aborted and
    /// exit at the next possible point.
    cancellation_token: CancellationTokenSource,
}

impl DbRuntime {
    pub(super) fn snapshot(&self) -> Self {
        Self {
            cancellation_token: self.cancellation_token.clone(),
        }
    }

    /// Cancels the pending queries of other workers. The current worker cannot have any pending
    /// queries because we're holding a mutable reference to the runtime.
    pub(super) fn cancel_other_workers(&mut self) {
        self.cancellation_token.cancel();
        // Set a new cancellation token so that we're in a non-cancelled state again when running the next
        // query.
        self.cancellation_token = CancellationTokenSource::default();
    }

    /// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
    pub(super) fn cancelled(&self) -> QueryResult<()> {
        if self.cancellation_token.is_cancelled() {
            Err(QueryError::Cancelled)
        } else {
            Ok(())
        }
    }

    /// Returns `true` if the queries have been cancelled.
    pub(super) fn is_cancelled(&self) -> bool {
        self.cancellation_token.is_cancelled()
    }
}
@@ -1,117 +0,0 @@
use std::fmt::Formatter;
use std::sync::Arc;

use crossbeam::sync::WaitGroup;

use crate::db::query::QueryResult;
use crate::db::runtime::DbRuntime;
use crate::db::{HasJars, ParallelDatabase};

/// Stores the jars of a database and the state for each worker.
///
/// Today, all state is shared across all workers, but it may be desirable to store data per worker in the future.
pub struct JarsStorage<T>
where
    T: HasJars + Sized,
{
    // It's important that `jars_wait_group` is declared after `jars` to ensure that `jars` is dropped first.
    // See https://doc.rust-lang.org/reference/destructors.html
    /// Stores the jars of the database.
    jars: Arc<T::Jars>,

    /// Used to count the references to `jars`. Allows implementing `jars_mut` without requiring to clone `jars`.
    jars_wait_group: WaitGroup,

    /// The data agnostic state.
    runtime: DbRuntime,
}

impl<Db> JarsStorage<Db>
where
    Db: HasJars,
{
    pub(super) fn new() -> Self {
        Self {
            jars: Arc::new(Db::Jars::default()),
            jars_wait_group: WaitGroup::default(),
            runtime: DbRuntime::default(),
        }
    }

    /// Creates a snapshot of the jars.
    ///
    /// Creating the snapshot is cheap because it doesn't clone the jars; it only increments a ref counter.
    #[must_use]
    pub fn snapshot(&self) -> JarsStorage<Db>
    where
        Db: ParallelDatabase,
    {
        Self {
            jars: self.jars.clone(),
            jars_wait_group: self.jars_wait_group.clone(),
            runtime: self.runtime.snapshot(),
        }
    }

    pub(crate) fn jars(&self) -> QueryResult<&Db::Jars> {
        self.runtime.cancelled()?;
        Ok(&self.jars)
    }

    /// Returns a mutable reference to the jars without cloning their content.
    ///
    /// The method cancels any pending queries of other workers and waits for them to complete so that
    /// this instance is the only instance holding a reference to the jars.
    pub(crate) fn jars_mut(&mut self) -> &mut Db::Jars {
        // We have a mutable ref here, so no more workers can be spawned between calling this function and taking the mut ref below.
        self.cancel_other_workers();

        // Now all other references to `self.jars` should have been released. We can now safely return a mutable reference
        // to the Arc's content.
        let jars =
            Arc::get_mut(&mut self.jars).expect("All references to jars should have been released");

        jars
    }

    pub(crate) fn runtime(&self) -> &DbRuntime {
        &self.runtime
    }

    pub(crate) fn runtime_mut(&mut self) -> &mut DbRuntime {
        // Note: This method may need to use a similar trick to `jars_mut` if `DbRuntime` is ever to store data that is shared between workers.
        &mut self.runtime
    }

    #[tracing::instrument(level = "trace", skip(self))]
    fn cancel_other_workers(&mut self) {
        self.runtime.cancel_other_workers();

        // Wait for all other workers to complete.
        let existing_wait = std::mem::take(&mut self.jars_wait_group);
        existing_wait.wait();
    }
}

impl<Db> Default for JarsStorage<Db>
where
    Db: HasJars,
{
    fn default() -> Self {
        Self::new()
    }
}

impl<T> std::fmt::Debug for JarsStorage<T>
where
    T: HasJars,
    <T as HasJars>::Jars: std::fmt::Debug,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SharedStorage")
            .field("jars", &self.jars)
            .field("jars_wait_group", &self.jars_wait_group)
            .field("runtime", &self.runtime)
            .finish()
    }
}
@@ -1,180 +0,0 @@
use std::fmt::{Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::path::Path;
use std::sync::Arc;

use hashbrown::hash_map::RawEntryMut;
use parking_lot::RwLock;
use rustc_hash::FxHasher;

use ruff_index::{newtype_index, IndexVec};

type Map<K, V> = hashbrown::HashMap<K, V, ()>;

#[newtype_index]
pub struct FileId;

// TODO we'll need a higher-level virtual file system abstraction that allows testing if a file exists
// or retrieving its content (ideally lazily and in a way that the memory can be retained later).
// I suspect that we'll end up with a FileSystem trait and our own Path abstraction.
#[derive(Default)]
pub struct Files {
    inner: Arc<RwLock<FilesInner>>,
}

impl Files {
    #[tracing::instrument(level = "debug", skip(self))]
    pub fn intern(&self, path: &Path) -> FileId {
        self.inner.write().intern(path)
    }

    pub fn try_get(&self, path: &Path) -> Option<FileId> {
        self.inner.read().try_get(path)
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub fn path(&self, id: FileId) -> Arc<Path> {
        self.inner.read().path(id)
    }

    /// Snapshots files for a new database snapshot.
    ///
    /// This method should not be used outside a database snapshot.
    #[must_use]
    pub fn snapshot(&self) -> Files {
        Files {
            inner: self.inner.clone(),
        }
    }
}

impl Debug for Files {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let files = self.inner.read();
        let mut debug = f.debug_map();
        for item in files.iter() {
            debug.entry(&item.0, &item.1);
        }

        debug.finish()
    }
}

impl PartialEq for Files {
    fn eq(&self, other: &Self) -> bool {
        self.inner.read().eq(&other.inner.read())
    }
}

impl Eq for Files {}

#[derive(Default)]
struct FilesInner {
    by_path: Map<FileId, ()>,
    // TODO should we use a map here to reclaim the space for removed files?
    // TODO I think we should use our own path abstraction here to avoid having to normalize paths
    // and dealing with non-UTF-8 paths everywhere.
    by_id: IndexVec<FileId, Arc<Path>>,
}

impl FilesInner {
    /// Inserts the path and returns a new id for it, or returns the id if it is an existing path.
    // TODO should this accept Path or PathBuf?
    pub(crate) fn intern(&mut self, path: &Path) -> FileId {
        let hash = FilesInner::hash_path(path);

        let entry = self
            .by_path
            .raw_entry_mut()
            .from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path);

        match entry {
            RawEntryMut::Occupied(entry) => *entry.key(),
            RawEntryMut::Vacant(entry) => {
                let id = self.by_id.push(Arc::from(path));
                entry.insert_with_hasher(hash, id, (), |file| {
                    FilesInner::hash_path(&self.by_id[*file])
                });
                id
            }
        }
    }

    fn hash_path(path: &Path) -> u64 {
        let mut hasher = FxHasher::default();
        path.hash(&mut hasher);
        hasher.finish()
    }

    pub(crate) fn try_get(&self, path: &Path) -> Option<FileId> {
        let mut hasher = FxHasher::default();
        path.hash(&mut hasher);
        let hash = hasher.finish();

        Some(
            *self
                .by_path
                .raw_entry()
                .from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path)?
                .0,
        )
    }

    /// Returns the path for the file with the given id.
    pub(crate) fn path(&self, id: FileId) -> Arc<Path> {
        self.by_id[id].clone()
    }

    pub(crate) fn iter(&self) -> impl Iterator<Item = (FileId, Arc<Path>)> + '_ {
        self.by_path.keys().map(|id| (*id, self.by_id[*id].clone()))
    }
}

impl PartialEq for FilesInner {
    fn eq(&self, other: &Self) -> bool {
        self.by_id == other.by_id
    }
}

impl Eq for FilesInner {}

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::PathBuf;

    #[test]
    fn insert_path_twice_same_id() {
        let files = Files::default();
        let path = PathBuf::from("foo/bar");
        let id1 = files.intern(&path);
        let id2 = files.intern(&path);
        assert_eq!(id1, id2);
    }

    #[test]
    fn insert_different_paths_different_ids() {
        let files = Files::default();
        let path1 = PathBuf::from("foo/bar");
        let path2 = PathBuf::from("foo/bar/baz");
        let id1 = files.intern(&path1);
        let id2 = files.intern(&path2);
        assert_ne!(id1, id2);
    }

    #[test]
    fn four_files() {
        let files = Files::default();
        let foo_path = PathBuf::from("foo");
        let foo_id = files.intern(&foo_path);
        let bar_path = PathBuf::from("bar");
        files.intern(&bar_path);
        let baz_path = PathBuf::from("baz");
        files.intern(&baz_path);
        let qux_path = PathBuf::from("qux");
        files.intern(&qux_path);

        let foo_id_2 = files.try_get(&foo_path).expect("foo_path to be found");
        assert_eq!(foo_id_2, foo_id);
    }
}
@@ -1,67 +0,0 @@
//! Key observations
//!
//! The HIR (High-Level Intermediate Representation) avoids allocations to a large extent by:
//! * Using an arena per node type
//! * Using ids and id ranges to reference items.
//!
//! Using a separate arena per node type has the advantage that the IDs are relatively stable, because
//! they only change when a node of the same kind has been added or removed. (What's unclear is if that matters or if
//! it still triggers a re-compute because the AST-id in the node has changed.)
//!
//! The HIR does not store all details. It mainly stores the *public* interface. There's a reference
//! back to the AST node to get more details.

use crate::ast_ids::{HasAstId, TypedAstId};
use crate::files::FileId;
use std::fmt::Formatter;
use std::hash::{Hash, Hasher};

pub struct HirAstId<N: HasAstId> {
    file_id: FileId,
    node_id: TypedAstId<N>,
}

impl<N: HasAstId> Copy for HirAstId<N> {}
impl<N: HasAstId> Clone for HirAstId<N> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<N: HasAstId> PartialEq for HirAstId<N> {
    fn eq(&self, other: &Self) -> bool {
        self.file_id == other.file_id && self.node_id == other.node_id
    }
}

impl<N: HasAstId> Eq for HirAstId<N> {}

impl<N: HasAstId> std::fmt::Debug for HirAstId<N> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("HirAstId")
            .field("file_id", &self.file_id)
            .field("node_id", &self.node_id)
            .finish()
    }
}

impl<N: HasAstId> Hash for HirAstId<N> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.file_id.hash(state);
        self.node_id.hash(state);
    }
}

impl<N: HasAstId> HirAstId<N> {
    pub fn upcast<M: HasAstId>(self) -> HirAstId<M>
    where
        N: Into<M>,
    {
        HirAstId {
            file_id: self.file_id,
            node_id: self.node_id.upcast(),
        }
    }
}
@@ -1,556 +0,0 @@
use std::ops::{Index, Range};

use ruff_index::{newtype_index, IndexVec};
use ruff_python_ast::visitor::preorder;
use ruff_python_ast::visitor::preorder::PreorderVisitor;
use ruff_python_ast::{
    Decorator, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule, Stmt,
    StmtAnnAssign, StmtAssign, StmtClassDef, StmtFunctionDef, StmtGlobal, StmtImport,
    StmtImportFrom, StmtNonlocal, StmtTypeAlias, TypeParam, TypeParamParamSpec, TypeParamTypeVar,
    TypeParamTypeVarTuple, WithItem,
};

use crate::ast_ids::{AstIds, HasAstId};
use crate::files::FileId;
use crate::hir::HirAstId;
use crate::Name;

#[newtype_index]
pub struct FunctionId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Function {
    ast_id: HirAstId<StmtFunctionDef>,
    name: Name,
    parameters: Range<ParameterId>,
    type_parameters: Range<TypeParameterId>, // TODO: type_parameters, return expression, decorators
}

#[newtype_index]
pub struct ParameterId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Parameter {
    kind: ParameterKind,
    name: Name,
    default: Option<()>, // TODO use expression HIR
    ast_id: HirAstId<ruff_python_ast::Parameter>,
}

// TODO or should `Parameter` be an enum?
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ParameterKind {
    PositionalOnly,
    Arguments,
    Vararg,
    KeywordOnly,
    Kwarg,
}

#[newtype_index]
pub struct ClassId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Class {
    name: Name,
    ast_id: HirAstId<StmtClassDef>,
    // TODO type parameters, inheritance, decorators, members
}

#[newtype_index]
pub struct AssignmentId;

// This can have more than one name...
// but that means we can't implement `name()` on `ModuleItem`.

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Assignment {
    // TODO: Handle multiple names / targets
    name: Name,
    ast_id: HirAstId<StmtAssign>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct AnnotatedAssignment {
    name: Name,
    ast_id: HirAstId<StmtAnnAssign>,
}

#[newtype_index]
pub struct AnnotatedAssignmentId;

#[newtype_index]
pub struct TypeAliasId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypeAlias {
    name: Name,
    ast_id: HirAstId<StmtTypeAlias>,
    parameters: Range<TypeParameterId>,
}

#[newtype_index]
pub struct TypeParameterId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TypeParameter {
    TypeVar(TypeParameterTypeVar),
    ParamSpec(TypeParameterParamSpec),
    TypeVarTuple(TypeParameterTypeVarTuple),
}

impl TypeParameter {
    pub fn ast_id(&self) -> HirAstId<TypeParam> {
        match self {
            TypeParameter::TypeVar(type_var) => type_var.ast_id.upcast(),
            TypeParameter::ParamSpec(param_spec) => param_spec.ast_id.upcast(),
            TypeParameter::TypeVarTuple(type_var_tuple) => type_var_tuple.ast_id.upcast(),
        }
    }
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypeParameterTypeVar {
    name: Name,
    ast_id: HirAstId<TypeParamTypeVar>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypeParameterParamSpec {
    name: Name,
    ast_id: HirAstId<TypeParamParamSpec>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypeParameterTypeVarTuple {
    name: Name,
    ast_id: HirAstId<TypeParamTypeVarTuple>,
}

#[newtype_index]
pub struct GlobalId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Global {
    // TODO track names
    ast_id: HirAstId<StmtGlobal>,
}

#[newtype_index]
pub struct NonLocalId;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct NonLocal {
    // TODO track names
    ast_id: HirAstId<StmtNonlocal>,
}

pub enum DefinitionId {
    Function(FunctionId),
    Parameter(ParameterId),
    Class(ClassId),
    Assignment(AssignmentId),
    AnnotatedAssignment(AnnotatedAssignmentId),
    Global(GlobalId),
    NonLocal(NonLocalId),
    TypeParameter(TypeParameterId),
    TypeAlias(TypeAlias),
}

pub enum DefinitionItem {
    Function(Function),
    Parameter(Parameter),
    Class(Class),
    Assignment(Assignment),
    AnnotatedAssignment(AnnotatedAssignment),
    Global(Global),
    NonLocal(NonLocal),
    TypeParameter(TypeParameter),
    TypeAlias(TypeAlias),
}

// The closest analogue is rust-analyzer's item tree. It only represents "items", which make up the public interface
// of a module (it excludes any other statement or expression). rust-analyzer uses it as the main input to the name
// resolution algorithm:
// > It is the input to the name resolution algorithm, as well as to the queries defined in `adt.rs`,
// > `data.rs`, and most things in `attr.rs`.
//
// > One important purpose of this layer is to provide an "invalidation barrier" for incremental
// > computations: when typing inside an item body, the `ItemTree` of the modified file is typically
// > unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`).
//
// I haven't fully figured this out, but I think that this composes the "public" interface of a module?
// But maybe that's too optimistic.
#[derive(Debug, Clone, Default, Eq, PartialEq)]
pub struct Definitions {
    functions: IndexVec<FunctionId, Function>,
    parameters: IndexVec<ParameterId, Parameter>,
    classes: IndexVec<ClassId, Class>,
    assignments: IndexVec<AssignmentId, Assignment>,
    annotated_assignments: IndexVec<AnnotatedAssignmentId, AnnotatedAssignment>,
    type_aliases: IndexVec<TypeAliasId, TypeAlias>,
    type_parameters: IndexVec<TypeParameterId, TypeParameter>,
    globals: IndexVec<GlobalId, Global>,
    non_locals: IndexVec<NonLocalId, NonLocal>,
}

impl Definitions {
    pub fn from_module(module: &ModModule, ast_ids: &AstIds, file_id: FileId) -> Self {
        let mut visitor = DefinitionsVisitor {
            definitions: Definitions::default(),
            ast_ids,
            file_id,
        };

        visitor.visit_body(&module.body);

        visitor.definitions
    }
}

impl Index<FunctionId> for Definitions {
    type Output = Function;

    fn index(&self, index: FunctionId) -> &Self::Output {
        &self.functions[index]
    }
}

impl Index<ParameterId> for Definitions {
    type Output = Parameter;

    fn index(&self, index: ParameterId) -> &Self::Output {
        &self.parameters[index]
    }
}

impl Index<ClassId> for Definitions {
    type Output = Class;

    fn index(&self, index: ClassId) -> &Self::Output {
        &self.classes[index]
    }
}

impl Index<AssignmentId> for Definitions {
    type Output = Assignment;

    fn index(&self, index: AssignmentId) -> &Self::Output {
        &self.assignments[index]
    }
}

impl Index<AnnotatedAssignmentId> for Definitions {
    type Output = AnnotatedAssignment;

    fn index(&self, index: AnnotatedAssignmentId) -> &Self::Output {
        &self.annotated_assignments[index]
    }
}

impl Index<TypeAliasId> for Definitions {
    type Output = TypeAlias;

    fn index(&self, index: TypeAliasId) -> &Self::Output {
        &self.type_aliases[index]
    }
}

impl Index<GlobalId> for Definitions {
    type Output = Global;

    fn index(&self, index: GlobalId) -> &Self::Output {
        &self.globals[index]
    }
}

impl Index<NonLocalId> for Definitions {
    type Output = NonLocal;

    fn index(&self, index: NonLocalId) -> &Self::Output {
        &self.non_locals[index]
    }
}

impl Index<TypeParameterId> for Definitions {
    type Output = TypeParameter;

    fn index(&self, index: TypeParameterId) -> &Self::Output {
        &self.type_parameters[index]
    }
}

struct DefinitionsVisitor<'a> {
    definitions: Definitions,
    ast_ids: &'a AstIds,
    file_id: FileId,
}

impl DefinitionsVisitor<'_> {
    fn ast_id<N: HasAstId>(&self, node: &N) -> HirAstId<N> {
        HirAstId {
            file_id: self.file_id,
            node_id: self.ast_ids.ast_id(node),
        }
    }

    fn lower_function_def(&mut self, function: &StmtFunctionDef) -> FunctionId {
        let name = Name::new(&function.name);

        let first_type_parameter_id = self.definitions.type_parameters.next_index();
        let mut last_type_parameter_id = first_type_parameter_id;

        if let Some(type_params) = &function.type_params {
            for parameter in &type_params.type_params {
                let id = self.lower_type_parameter(parameter);
                last_type_parameter_id = id;
            }
        }

        let parameters = self.lower_parameters(&function.parameters);

        self.definitions.functions.push(Function {
            name,
            ast_id: self.ast_id(function),
            parameters,
            type_parameters: first_type_parameter_id..last_type_parameter_id,
        })
    }

    fn lower_parameters(&mut self, parameters: &ruff_python_ast::Parameters) -> Range<ParameterId> {
        let first_parameter_id = self.definitions.parameters.next_index();
        let mut last_parameter_id = first_parameter_id;

        for parameter in &parameters.posonlyargs {
            last_parameter_id = self.definitions.parameters.push(Parameter {
                kind: ParameterKind::PositionalOnly,
                name: Name::new(&parameter.parameter.name),
                default: None,
                ast_id: self.ast_id(&parameter.parameter),
            });
        }

        if let Some(vararg) = &parameters.vararg {
            last_parameter_id = self.definitions.parameters.push(Parameter {
                kind: ParameterKind::Vararg,
                name: Name::new(&vararg.name),
                default: None,
                ast_id: self.ast_id(vararg),
            });
        }

        for parameter in &parameters.kwonlyargs {
            last_parameter_id = self.definitions.parameters.push(Parameter {
                kind: ParameterKind::KeywordOnly,
                name: Name::new(&parameter.parameter.name),
                default: None,
                ast_id: self.ast_id(&parameter.parameter),
            });
        }

        if let Some(kwarg) = &parameters.kwarg {
            last_parameter_id = self.definitions.parameters.push(Parameter {
                kind: ParameterKind::Kwarg,
                name: Name::new(&kwarg.name),
                default: None,
                ast_id: self.ast_id(kwarg),
            });
        }

        first_parameter_id..last_parameter_id
    }

    fn lower_class_def(&mut self, class: &StmtClassDef) -> ClassId {
        let name = Name::new(&class.name);

        self.definitions.classes.push(Class {
            name,
            ast_id: self.ast_id(class),
        })
    }

    fn lower_assignment(&mut self, assignment: &StmtAssign) {
        // FIXME handle multiple names
        if let Some(Expr::Name(name)) = assignment.targets.first() {
            self.definitions.assignments.push(Assignment {
                name: Name::new(&name.id),
                ast_id: self.ast_id(assignment),
            });
        }
    }

    fn lower_annotated_assignment(&mut self, annotated_assignment: &StmtAnnAssign) {
        if let Expr::Name(name) = &*annotated_assignment.target {
            self.definitions
                .annotated_assignments
                .push(AnnotatedAssignment {
                    name: Name::new(&name.id),
                    ast_id: self.ast_id(annotated_assignment),
                });
        }
    }

    fn lower_type_alias(&mut self, type_alias: &StmtTypeAlias) {
        if let Expr::Name(name) = &*type_alias.name {
            let name = Name::new(&name.id);

            let lower_parameters_id = self.definitions.type_parameters.next_index();
            let mut last_parameter_id = lower_parameters_id;

            if let Some(type_params) = &type_alias.type_params {
                for type_parameter in &type_params.type_params {
                    let id = self.lower_type_parameter(type_parameter);
                    last_parameter_id = id;
                }
            }

            self.definitions.type_aliases.push(TypeAlias {
                name,
                ast_id: self.ast_id(type_alias),
                parameters: lower_parameters_id..last_parameter_id,
            });
        }
    }

    fn lower_type_parameter(&mut self, type_parameter: &TypeParam) -> TypeParameterId {
        match type_parameter {
            TypeParam::TypeVar(type_var) => {
                self.definitions
                    .type_parameters
                    .push(TypeParameter::TypeVar(TypeParameterTypeVar {
                        name: Name::new(&type_var.name),
                        ast_id: self.ast_id(type_var),
                    }))
            }
            TypeParam::ParamSpec(param_spec) => {
                self.definitions
                    .type_parameters
                    .push(TypeParameter::ParamSpec(TypeParameterParamSpec {
                        name: Name::new(&param_spec.name),
                        ast_id: self.ast_id(param_spec),
                    }))
            }
            TypeParam::TypeVarTuple(type_var_tuple) => {
                self.definitions
                    .type_parameters
                    .push(TypeParameter::TypeVarTuple(TypeParameterTypeVarTuple {
                        name: Name::new(&type_var_tuple.name),
                        ast_id: self.ast_id(type_var_tuple),
                    }))
            }
        }
    }

    fn lower_import(&mut self, _import: &StmtImport) {
        // TODO
    }

    fn lower_import_from(&mut self, _import_from: &StmtImportFrom) {
        // TODO
    }

    fn lower_global(&mut self, global: &StmtGlobal) -> GlobalId {
        self.definitions.globals.push(Global {
            ast_id: self.ast_id(global),
        })
    }

    fn lower_non_local(&mut self, non_local: &StmtNonlocal) -> NonLocalId {
        self.definitions.non_locals.push(NonLocal {
            ast_id: self.ast_id(non_local),
        })
    }

    fn lower_except_handler(&mut self, _except_handler: &ExceptHandlerExceptHandler) {
        // TODO
    }

    fn lower_with_item(&mut self, _with_item: &WithItem) {
        // TODO
    }

    fn lower_match_case(&mut self, _match_case: &MatchCase) {
        // TODO
    }
}

impl PreorderVisitor<'_> for DefinitionsVisitor<'_> {
    fn visit_stmt(&mut self, stmt: &Stmt) {
        match stmt {
            // Definition statements
            Stmt::FunctionDef(definition) => {
                self.lower_function_def(definition);
                self.visit_body(&definition.body);
            }
            Stmt::ClassDef(definition) => {
                self.lower_class_def(definition);
                self.visit_body(&definition.body);
            }
            Stmt::Assign(assignment) => {
                self.lower_assignment(assignment);
            }
            Stmt::AnnAssign(annotated_assignment) => {
                self.lower_annotated_assignment(annotated_assignment);
            }
            Stmt::TypeAlias(type_alias) => {
                self.lower_type_alias(type_alias);
            }

            Stmt::Import(import) => self.lower_import(import),
            Stmt::ImportFrom(import_from) => self.lower_import_from(import_from),
            Stmt::Global(global) => {
                self.lower_global(global);
            }
            Stmt::Nonlocal(non_local) => {
                self.lower_non_local(non_local);
            }

            // Visit the compound statement bodies because they can contain other definitions.
            Stmt::For(_)
            | Stmt::While(_)
            | Stmt::If(_)
            | Stmt::With(_)
            | Stmt::Match(_)
            | Stmt::Try(_) => {
                preorder::walk_stmt(self, stmt);
            }

            // Skip over simple statements because they can't contain any other definitions.
            Stmt::Return(_)
            | Stmt::Delete(_)
            | Stmt::AugAssign(_)
            | Stmt::Raise(_)
            | Stmt::Assert(_)
            | Stmt::Expr(_)
            | Stmt::Pass(_)
            | Stmt::Break(_)
            | Stmt::Continue(_)
            | Stmt::IpyEscapeCommand(_) => {
                // No op
            }
        }
    }

    fn visit_expr(&mut self, _: &'_ Expr) {}

    fn visit_decorator(&mut self, _decorator: &'_ Decorator) {}

    fn visit_except_handler(&mut self, except_handler: &'_ ExceptHandler) {
        match except_handler {
            ExceptHandler::ExceptHandler(except_handler) => {
                self.lower_except_handler(except_handler);
            }
        }
    }

    fn visit_with_item(&mut self, with_item: &'_ WithItem) {
        self.lower_with_item(with_item);
    }

    fn visit_match_case(&mut self, match_case: &'_ MatchCase) {
        self.lower_match_case(match_case);
        self.visit_body(&match_case.body);
    }
}
@@ -1,108 +0,0 @@
use std::fmt::Formatter;
use std::hash::BuildHasherDefault;
use std::ops::Deref;
use std::path::{Path, PathBuf};

use rustc_hash::{FxHashSet, FxHasher};

use crate::files::FileId;

pub mod ast_ids;
pub mod cache;
pub mod cancellation;
pub mod db;
pub mod files;
pub mod hir;
pub mod lint;
pub mod module;
mod parse;
pub mod program;
mod semantic;
pub mod source;
pub mod watch;

pub(crate) type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
#[allow(unused)]
pub(crate) type FxDashSet<V> = dashmap::DashSet<V, BuildHasherDefault<FxHasher>>;
pub(crate) type FxIndexSet<V> = indexmap::set::IndexSet<V, BuildHasherDefault<FxHasher>>;

#[derive(Debug, Clone)]
pub struct Workspace {
    /// TODO this should be a resolved path. We should probably use a newtype wrapper that guarantees that
    /// the path is a UTF-8 path and is normalized.
    root: PathBuf,
    /// The files that are open in the workspace.
    ///
    /// * Editor: The files that are actively being edited in the editor (the user has a tab open with the file).
    /// * CLI: The resolved files passed as arguments to the CLI.
    open_files: FxHashSet<FileId>,
}

impl Workspace {
    pub fn new(root: PathBuf) -> Self {
        Self {
            root,
            open_files: FxHashSet::default(),
        }
    }

    pub fn root(&self) -> &Path {
        self.root.as_path()
    }

    // TODO having the content in workspace feels wrong.
    pub fn open_file(&mut self, file_id: FileId) {
        self.open_files.insert(file_id);
    }

    pub fn close_file(&mut self, file_id: FileId) {
        self.open_files.remove(&file_id);
    }

    // TODO introduce an `OpenFile` type instead of using an anonymous tuple.
    pub fn open_files(&self) -> impl Iterator<Item = FileId> + '_ {
        self.open_files.iter().copied()
    }

    pub fn is_file_open(&self, file_id: FileId) -> bool {
        self.open_files.contains(&file_id)
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Name(smol_str::SmolStr);

impl Name {
    #[inline]
    pub fn new(name: &str) -> Self {
        Self(smol_str::SmolStr::new(name))
    }

    pub fn as_str(&self) -> &str {
        self.0.as_str()
    }
}

impl Deref for Name {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

impl<T> From<T> for Name
where
    T: Into<smol_str::SmolStr>,
{
    fn from(value: T) -> Self {
        Self(value.into())
    }
}

impl std::fmt::Display for Name {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
@@ -1,332 +0,0 @@
use std::cell::RefCell;
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
use std::time::Duration;

use ruff_python_ast::visitor::Visitor;
use ruff_python_ast::{ModModule, StringLiteral};
use ruff_python_parser::Parsed;

use crate::cache::KeyValueCache;
use crate::db::{LintDb, LintJar, QueryResult};
use crate::files::FileId;
use crate::module::{resolve_module, ModuleName};
use crate::parse::parse;
use crate::semantic::{infer_definition_type, infer_symbol_public_type, Type};
use crate::semantic::{
    resolve_global_symbol, semantic_index, Definition, GlobalSymbolId, SemanticIndex, SymbolId,
};
use crate::source::{source_text, Source};

#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn lint_syntax(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
    let lint_jar: &LintJar = db.jar()?;
    let storage = &lint_jar.lint_syntax;

    #[allow(clippy::print_stdout)]
    if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
        for i in 0..10 {
            db.cancelled()?;
            println!("RED_KNOT_SLOW_LINT is set, sleeping for {i}/10 seconds");
            std::thread::sleep(Duration::from_secs(1));
        }
    }

    storage.get(&file_id, |file_id| {
        let mut diagnostics = Vec::new();

        let source = source_text(db.upcast(), *file_id)?;
        lint_lines(source.text(), &mut diagnostics);

        let parsed = parse(db.upcast(), *file_id)?;

        if parsed.errors().is_empty() {
            let ast = parsed.syntax();

            let mut visitor = SyntaxLintVisitor {
                diagnostics,
                source: source.text(),
            };
            visitor.visit_body(&ast.body);
            diagnostics = visitor.diagnostics;
        } else {
            diagnostics.extend(parsed.errors().iter().map(std::string::ToString::to_string));
        }

        Ok(Diagnostics::from(diagnostics))
    })
}

fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
    for (line_number, line) in source.lines().enumerate() {
        if line.len() < 88 {
            continue;
        }

        let char_count = line.chars().count();
        if char_count > 88 {
            diagnostics.push(format!(
                "Line {} is too long ({} characters)",
                line_number + 1,
                char_count
            ));
        }
    }
}
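`lint_lines` compares `line.len()` (bytes) against the limit before paying for `chars().count()`: since every character occupies at least one byte in UTF-8, a line under 88 bytes can never exceed 88 characters. A standalone sketch of the effect, assuming the `lint_lines` above is in scope:

fn main() {
    let mut diagnostics = Vec::new();

    // 90 ASCII bytes == 90 characters: flagged.
    lint_lines(&"x".repeat(90), &mut diagnostics);

    // 30 crab emoji are 120 bytes but only 30 characters: the byte check
    // doesn't skip the line, but the character count keeps it clean.
    lint_lines(&"🦀".repeat(30), &mut diagnostics);

    assert_eq!(diagnostics.len(), 1);
}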

#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn lint_semantic(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
    let lint_jar: &LintJar = db.jar()?;
    let storage = &lint_jar.lint_semantic;

    storage.get(&file_id, |file_id| {
        let source = source_text(db.upcast(), *file_id)?;
        let parsed = parse(db.upcast(), *file_id)?;
        let semantic_index = semantic_index(db.upcast(), *file_id)?;

        let context = SemanticLintContext {
            file_id: *file_id,
            source,
            parsed: &parsed,
            semantic_index,
            db,
            diagnostics: RefCell::new(Vec::new()),
        };

        lint_unresolved_imports(&context)?;
        lint_bad_overrides(&context)?;

        Ok(Diagnostics::from(context.diagnostics.take()))
    })
}

fn lint_unresolved_imports(context: &SemanticLintContext) -> QueryResult<()> {
    // TODO: Consider iterating over the dependencies (imports) only instead of all definitions.
    for (symbol, definition) in context.semantic_index().symbol_table().all_definitions() {
        match definition {
            Definition::Import(import) => {
                let ty = context.infer_symbol_public_type(symbol)?;

                if ty.is_unknown() {
                    context.push_diagnostic(format!("Unresolved module {}", import.module));
                }
            }
            Definition::ImportFrom(import) => {
                let ty = context.infer_symbol_public_type(symbol)?;

                if ty.is_unknown() {
                    let module_name = import.module().map(Deref::deref).unwrap_or_default();
                    let message = if import.level() > 0 {
                        format!(
                            "Unresolved relative import '{}' from {}{}",
                            import.name(),
                            ".".repeat(import.level() as usize),
                            module_name
                        )
                    } else {
                        format!(
                            "Unresolved import '{}' from '{}'",
                            import.name(),
                            module_name
                        )
                    };

                    context.push_diagnostic(message);
                }
            }
            _ => {}
        }
    }

    Ok(())
}

fn lint_bad_overrides(context: &SemanticLintContext) -> QueryResult<()> {
    // TODO we should have a special marker on the real typing module (from typeshed) so if you
    // have your own "typing" module in your project, we don't consider it THE typing module (and
    // same for other stdlib modules that our lint rules care about)
    let Some(typing_override) = context.resolve_global_symbol("typing", "override")? else {
        // TODO once we bundle typeshed, this should be unreachable!()
        return Ok(());
    };

    // TODO we should maybe index definitions by type instead of iterating all, or else iterate all
    // just once, match, and branch to all lint rules that care about a type of definition
    for (symbol, definition) in context.semantic_index().symbol_table().all_definitions() {
        if !matches!(definition, Definition::FunctionDef(_)) {
            continue;
        }
        let ty = infer_definition_type(
            context.db.upcast(),
            GlobalSymbolId {
                file_id: context.file_id,
                symbol_id: symbol,
            },
            definition.clone(),
        )?;
        let Type::Function(func) = ty else {
            unreachable!("type of a FunctionDef should always be a Function");
        };
        let Some(class) = func.get_containing_class(context.db.upcast())? else {
            // Not a method of a class.
            continue;
        };
        if func.has_decorator(context.db.upcast(), typing_override)? {
            let method_name = func.name(context.db.upcast())?;
            if class
                .get_super_class_member(context.db.upcast(), &method_name)?
                .is_none()
            {
                // TODO should have a qualname() method to support nested classes
                context.push_diagnostic(
                    format!(
                        "Method {}.{} is decorated with `typing.override` but does not override any base class method",
                        class.name(context.db.upcast())?,
                        method_name,
                    ));
            }
        }
    }
    Ok(())
}

pub struct SemanticLintContext<'a> {
    file_id: FileId,
    source: Source,
    parsed: &'a Parsed<ModModule>,
    semantic_index: Arc<SemanticIndex>,
    db: &'a dyn LintDb,
    diagnostics: RefCell<Vec<String>>,
}

impl<'a> SemanticLintContext<'a> {
    pub fn source_text(&self) -> &str {
        self.source.text()
    }

    pub fn file_id(&self) -> FileId {
        self.file_id
    }

    pub fn ast(&self) -> &'a ModModule {
        self.parsed.syntax()
    }

    pub fn semantic_index(&self) -> &SemanticIndex {
        &self.semantic_index
    }

    pub fn infer_symbol_public_type(&self, symbol_id: SymbolId) -> QueryResult<Type> {
        infer_symbol_public_type(
            self.db.upcast(),
            GlobalSymbolId {
                file_id: self.file_id,
                symbol_id,
            },
        )
    }

    pub fn push_diagnostic(&self, diagnostic: String) {
        self.diagnostics.borrow_mut().push(diagnostic);
    }

    pub fn extend_diagnostics(&mut self, diagnostics: impl IntoIterator<Item = String>) {
        self.diagnostics.get_mut().extend(diagnostics);
    }

    pub fn resolve_global_symbol(
        &self,
        module: &str,
        symbol_name: &str,
    ) -> QueryResult<Option<GlobalSymbolId>> {
        let Some(module) = resolve_module(self.db.upcast(), ModuleName::new(module))? else {
            return Ok(None);
        };

        resolve_global_symbol(self.db.upcast(), module, symbol_name)
    }
}

#[derive(Debug)]
struct SyntaxLintVisitor<'a> {
    diagnostics: Vec<String>,
    source: &'a str,
}

impl Visitor<'_> for SyntaxLintVisitor<'_> {
    fn visit_string_literal(&mut self, string_literal: &'_ StringLiteral) {
        // A very naive implementation of "use double quotes"
        let text = &self.source[string_literal.range];

        if text.starts_with('\'') {
            self.diagnostics
                .push("Use double quotes for strings".to_string());
        }
    }
}

#[derive(Debug, Clone)]
pub enum Diagnostics {
    Empty,
    List(Arc<Vec<String>>),
}

impl Diagnostics {
    pub fn as_slice(&self) -> &[String] {
        match self {
            Diagnostics::Empty => &[],
            Diagnostics::List(list) => list.as_slice(),
        }
    }
}

impl Deref for Diagnostics {
    type Target = [String];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl From<Vec<String>> for Diagnostics {
    fn from(value: Vec<String>) -> Self {
        if value.is_empty() {
            Diagnostics::Empty
        } else {
            Diagnostics::List(Arc::new(value))
        }
    }
}
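Because `Diagnostics` implements `Deref<Target = [String]>` and `From<Vec<String>>`, callers can build it from a plain vector and read it like a slice, while the `Empty` variant avoids an `Arc` allocation for the common clean-file case. A small usage sketch, assuming the types above are in scope:

fn main() {
    let clean = Diagnostics::from(Vec::new());
    assert!(clean.is_empty()); // deref to `[String]` gives slice methods

    let dirty = Diagnostics::from(vec!["Line 3 is too long (91 characters)".to_string()]);
    for message in dirty.iter() {
        println!("{message}");
    }
}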

#[derive(Default, Debug)]
pub struct LintSyntaxStorage(KeyValueCache<FileId, Diagnostics>);

impl Deref for LintSyntaxStorage {
    type Target = KeyValueCache<FileId, Diagnostics>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for LintSyntaxStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

#[derive(Default, Debug)]
pub struct LintSemanticStorage(KeyValueCache<FileId, Diagnostics>);

impl Deref for LintSemanticStorage {
    type Target = KeyValueCache<FileId, Diagnostics>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for LintSemanticStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
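`LintSyntaxStorage` and `LintSemanticStorage` are instances of the same newtype-plus-`Deref` pattern: a zero-cost wrapper that gives each query its own distinctly-typed cache while exposing the inner cache's full API. A toy version over a plain `HashMap` (a stand-in for `KeyValueCache`, whose API isn't shown here):

use std::collections::HashMap;
use std::ops::{Deref, DerefMut};

#[derive(Default, Debug)]
struct Storage(HashMap<u32, String>);

impl Deref for Storage {
    type Target = HashMap<u32, String>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for Storage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

fn main() {
    let mut storage = Storage::default();
    storage.insert(1, "Use double quotes for strings".to_string()); // via DerefMut
    assert_eq!(storage.len(), 1); // via Deref
}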
crates/red_knot/src/logging.rs (new file)
@@ -0,0 +1,254 @@
//! Sets up logging for Red Knot

use anyhow::Context;
use colored::Colorize;
use std::fmt;
use std::fs::File;
use std::io::BufWriter;
use tracing::{Event, Subscriber};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::fmt::format::Writer;
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::registry::LookupSpan;
use tracing_subscriber::EnvFilter;

/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
    #[arg(
        long,
        short = 'v',
        help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
        action = clap::ArgAction::Count,
        global = true,
    )]
    verbose: u8,
}

impl Verbosity {
    /// Returns the verbosity level based on the number of `-v` flags.
    ///
    /// Returns [`VerbosityLevel::Default`] if the user did not specify any verbosity flags.
    pub(crate) fn level(&self) -> VerbosityLevel {
        match self.verbose {
            0 => VerbosityLevel::Default,
            1 => VerbosityLevel::Verbose,
            2 => VerbosityLevel::ExtraVerbose,
            _ => VerbosityLevel::Trace,
        }
    }
}

#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
    /// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN) level.
    Default,

    /// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO) level.
    /// Corresponds to `-v`.
    Verbose,

    /// Enables a more verbose tracing format and emits Ruff and Red Knot events up to the [`DEBUG`](tracing::Level::DEBUG) level.
    /// Corresponds to `-vv`.
    ExtraVerbose,

    /// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
    Trace,
}

impl VerbosityLevel {
    const fn level_filter(self) -> LevelFilter {
        match self {
            VerbosityLevel::Default => LevelFilter::WARN,
            VerbosityLevel::Verbose => LevelFilter::INFO,
            VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
            VerbosityLevel::Trace => LevelFilter::TRACE,
        }
    }

    pub(crate) const fn is_trace(self) -> bool {
        matches!(self, VerbosityLevel::Trace)
    }

    pub(crate) const fn is_extra_verbose(self) -> bool {
        matches!(self, VerbosityLevel::ExtraVerbose)
    }
}
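Taken together, `Verbosity::level` and `VerbosityLevel::level_filter` form a direct mapping from the `-v` count to a tracing filter. A few asserts capture the contract (a sketch that assumes the items above are in scope):

fn main() {
    assert_eq!(VerbosityLevel::Default.level_filter(), LevelFilter::WARN);
    assert_eq!(VerbosityLevel::Verbose.level_filter(), LevelFilter::INFO); // -v
    assert_eq!(VerbosityLevel::ExtraVerbose.level_filter(), LevelFilter::DEBUG); // -vv
    assert_eq!(VerbosityLevel::Trace.level_filter(), LevelFilter::TRACE); // -vvv
    assert!(VerbosityLevel::Trace.is_trace());
}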

pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
    use tracing_subscriber::prelude::*;

    // The `RED_KNOT_LOG` environment variable overrides the default log level.
    let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
        EnvFilter::builder()
            .parse(log_env_variable)
            .context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
    } else {
        match level {
            VerbosityLevel::Default => {
                // Show warning traces
                EnvFilter::default().add_directive(LevelFilter::WARN.into())
            }
            level => {
                let level_filter = level.level_filter();

                // Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
                let filter = EnvFilter::default().add_directive(
                    format!("red_knot={level_filter}")
                        .parse()
                        .expect("Hardcoded directive to be valid"),
                );

                filter.add_directive(
                    format!("ruff={level_filter}")
                        .parse()
                        .expect("Hardcoded directive to be valid"),
                )
            }
        }
    };

    let (profiling_layer, guard) = setup_profile();

    let registry = tracing_subscriber::registry()
        .with(filter)
        .with(profiling_layer);

    if level.is_trace() {
        let subscriber = registry.with(
            tracing_tree::HierarchicalLayer::default()
                .with_indent_lines(true)
                .with_indent_amount(2)
                .with_bracketed_fields(true)
                .with_thread_ids(true)
                .with_targets(true)
                .with_writer(std::io::stderr)
                .with_timer(tracing_tree::time::Uptime::default()),
        );

        subscriber.init();
    } else {
        let subscriber = registry.with(
            tracing_subscriber::fmt::layer()
                .event_format(RedKnotFormat {
                    display_level: true,
                    display_timestamp: level.is_extra_verbose(),
                    show_spans: false,
                })
                .with_writer(std::io::stderr),
        );

        subscriber.init();
    }

    Ok(TracingGuard {
        _flame_guard: guard,
    })
}
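`RED_KNOT_LOG` accepts the standard `tracing_subscriber::EnvFilter` directive syntax, so per-target levels can be mixed in one string. A quick standalone check that such a directive parses (the directive string is an invented example):

use tracing_subscriber::EnvFilter;

fn main() {
    // Same builder + parse path as `setup_tracing` above.
    let filter = EnvFilter::builder()
        .parse("red_knot=debug,ruff=info,warn")
        .expect("directive should be valid");
    println!("{filter}");
}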

#[allow(clippy::type_complexity)]
fn setup_profile<S>() -> (
    Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
    Option<tracing_flame::FlushGuard<BufWriter<File>>>,
)
where
    S: Subscriber + for<'span> LookupSpan<'span>,
{
    if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
        let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
            .expect("Flame layer to be created");
        (Some(layer), Some(guard))
    } else {
        (None, None)
    }
}

pub(crate) struct TracingGuard {
    _flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
}

struct RedKnotFormat {
    display_timestamp: bool,
    display_level: bool,
    show_spans: bool,
}

/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
impl<S, N> FormatEvent<S, N> for RedKnotFormat
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    fn format_event(
        &self,
        ctx: &FmtContext<'_, S, N>,
        mut writer: Writer<'_>,
        event: &Event<'_>,
    ) -> fmt::Result {
        let meta = event.metadata();
        let ansi = writer.has_ansi_escapes();

        if self.display_timestamp {
            let timestamp = chrono::Local::now()
                .format("%Y-%m-%d %H:%M:%S.%f")
                .to_string();
            if ansi {
                write!(writer, "{} ", timestamp.dimmed())?;
            } else {
                write!(writer, "{timestamp} ")?;
            }
        }

        if self.display_level {
            let level = meta.level();
            // Same colors as tracing
            if ansi {
                let formatted_level = level.to_string();
                match *level {
                    tracing::Level::TRACE => {
                        write!(writer, "{} ", formatted_level.purple().bold())?;
                    }
                    tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
                    tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
                    tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
                    tracing::Level::ERROR => write!(writer, "{} ", formatted_level.red().bold())?,
                }
            } else {
                write!(writer, "{level} ")?;
            }
        }

        if self.show_spans {
            let span = event.parent();
            let mut seen = false;

            let span = span
                .and_then(|id| ctx.span(id))
                .or_else(|| ctx.lookup_current());

            let scope = span.into_iter().flat_map(|span| span.scope().from_root());

            for span in scope {
                seen = true;
                if ansi {
                    write!(writer, "{}:", span.metadata().name().bold())?;
                } else {
                    write!(writer, "{}:", span.metadata().name())?;
                }
            }

            if seen {
                writer.write_char(' ')?;
            }
        }

        ctx.field_format().format_fields(writer.by_ref(), event)?;

        writeln!(writer)
    }
}
@@ -1,65 +1,199 @@
#![allow(clippy::dbg_macro)]

use std::path::Path;
use std::process::{ExitCode, Termination};
use std::sync::Mutex;

use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use tracing::subscriber::Interest;
use tracing::{Level, Metadata};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
use tracing_subscriber::{Layer, Registry};
use tracing_tree::time::Uptime;
use salsa::plumbing::ZalsaDatabase;

use red_knot::db::{HasJar, ParallelDatabase, QueryError, SourceDb, SourceJar};
use red_knot::module::{set_module_search_paths, ModuleResolutionInputs};
use red_knot::program::check::ExecutionMode;
use red_knot::program::{FileWatcherChange, Program};
use red_knot::watch::FileWatcher;
use red_knot::Workspace;
use red_knot_python_semantic::SitePackages;
use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use target_version::TargetVersion;

use crate::logging::{setup_tracing, Verbosity};

mod logging;
mod target_version;
mod verbosity;

#[derive(Debug, Parser)]
#[command(
    author,
    name = "red-knot",
    about = "An extremely fast Python type checker."
)]
#[command(version)]
struct Args {
    #[command(subcommand)]
    pub(crate) command: Option<Command>,

    #[arg(
        long,
        help = "Changes the current working directory.",
        long_help = "Changes the current working directory before any specified operations. This affects the workspace and configuration discovery.",
        value_name = "PATH"
    )]
    current_directory: Option<SystemPathBuf>,

    #[arg(
        long,
        help = "Path to the virtual environment the project uses",
        long_help = "\
Path to the virtual environment the project uses. \
If provided, red-knot will use the `site-packages` directory of this virtual environment \
to resolve type information for the project's third-party dependencies.",
        value_name = "PATH"
    )]
    venv_path: Option<SystemPathBuf>,

    #[arg(
        long,
        value_name = "DIRECTORY",
        help = "Custom directory to use for stdlib typeshed stubs"
    )]
    custom_typeshed_dir: Option<SystemPathBuf>,

    #[arg(
        long,
        value_name = "PATH",
        help = "Additional path to use as a module-resolution source (can be passed multiple times)"
    )]
    extra_search_path: Option<Vec<SystemPathBuf>>,

    #[arg(
        long,
        help = "Python version to assume when resolving types",
        value_name = "VERSION"
    )]
    target_version: Option<TargetVersion>,

    #[clap(flatten)]
    verbosity: Verbosity,

    #[arg(
        long,
        help = "Run in watch mode by re-running whenever files change",
        short = 'W'
    )]
    watch: bool,
}

impl Args {
    fn to_configuration(&self, cli_cwd: &SystemPath) -> Configuration {
        let mut configuration = Configuration::default();

        if let Some(target_version) = self.target_version {
            configuration.target_version = Some(target_version.into());
        }

        if let Some(venv_path) = &self.venv_path {
            configuration.search_paths.site_packages = Some(SitePackages::Derived {
                venv_path: SystemPath::absolute(venv_path, cli_cwd),
            });
        }

        if let Some(custom_typeshed_dir) = &self.custom_typeshed_dir {
            configuration.search_paths.custom_typeshed =
                Some(SystemPath::absolute(custom_typeshed_dir, cli_cwd));
        }

        if let Some(extra_search_paths) = &self.extra_search_path {
            configuration.search_paths.extra_paths = extra_search_paths
                .iter()
                .map(|path| Some(SystemPath::absolute(path, cli_cwd)))
                .collect();
        }

        configuration
    }
}

#[derive(Debug, clap::Subcommand)]
pub enum Command {
    /// Start the language server
    Server,
}

#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
fn main() -> anyhow::Result<()> {
    setup_tracing();
pub fn main() -> ExitStatus {
    run().unwrap_or_else(|error| {
        use std::io::Write;

        let arguments: Vec<_> = std::env::args().collect();
        // Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
        let mut stderr = std::io::stderr().lock();

        if arguments.len() < 2 {
            eprintln!("Usage: red_knot <path>");
            return Err(anyhow::anyhow!("Invalid arguments"));
        // This communicates that this isn't a linter error but Red Knot itself hard-errored for
        // some reason (e.g. failed to resolve the configuration)
        writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
        // Currently we generally only see one error, but e.g. with io errors when resolving
        // the configuration it is helpful to chain errors ("resolving configuration failed" ->
        // "failed to read file: subdir/pyproject.toml")
        for cause in error.chain() {
            writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
        }

        ExitStatus::Error
    })
}

fn run() -> anyhow::Result<ExitStatus> {
    let args = Args::parse_from(std::env::args().collect::<Vec<_>>());

    if matches!(args.command, Some(Command::Server)) {
        return run_server().map(|()| ExitStatus::Success);
    }

    let entry_point = Path::new(&arguments[1]);
    let verbosity = args.verbosity.level();
    countme::enable(verbosity.is_trace());
    let _guard = setup_tracing(verbosity)?;

    if !entry_point.exists() {
        eprintln!("The entry point does not exist.");
        return Err(anyhow::anyhow!("Invalid arguments"));
    }

    if !entry_point.is_file() {
        eprintln!("The entry point is not a file.");
        return Err(anyhow::anyhow!("Invalid arguments"));
    }

    let workspace_folder = entry_point.parent().unwrap();
    let workspace = Workspace::new(workspace_folder.to_path_buf());

    let workspace_search_path = workspace.root().to_path_buf();

    let search_paths = ModuleResolutionInputs {
        extra_paths: vec![],
        workspace_root: workspace_search_path,
        site_packages: None,
        custom_typeshed: None,
    // The base path to which all CLI arguments are relative.
    let cli_base_path = {
        let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
        SystemPathBuf::from_path_buf(cwd)
            .map_err(|path| {
                anyhow!(
                    "The current working directory '{}' contains non-unicode characters. Red Knot only supports unicode paths.",
                    path.display()
                )
            })?
    };

    let mut program = Program::new(workspace);
    set_module_search_paths(&mut program, search_paths);
    let cwd = args
        .current_directory
        .as_ref()
        .map(|cwd| {
            if cwd.as_std_path().is_dir() {
                Ok(SystemPath::absolute(cwd, &cli_base_path))
            } else {
                Err(anyhow!(
                    "Provided current-directory path '{cwd}' is not a directory."
                ))
            }
        })
        .transpose()?
        .unwrap_or_else(|| cli_base_path.clone());

    let entry_id = program.file_id(entry_point);
    program.workspace_mut().open_file(entry_id);
    let system = OsSystem::new(cwd.clone());
    let cli_configuration = args.to_configuration(&cwd);
    let workspace_metadata = WorkspaceMetadata::from_path(
        system.current_directory(),
        &system,
        Some(cli_configuration.clone()),
    )?;

    let (main_loop, main_loop_cancellation_token) = MainLoop::new();
    // TODO: Use the `program_settings` to compute the key for the database's persistent
    // cache and load the cache if it exists.
    let mut db = RootDatabase::new(workspace_metadata, system)?;

    let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration);

    // Listen to Ctrl+C and abort the watch mode.
    let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -71,122 +205,161 @@ fn main() -> anyhow::Result<()> {
        }
    })?;

    let file_changes_notifier = main_loop.file_changes_notifier();
    let exit_status = if args.watch {
        main_loop.watch(&mut db)?
    } else {
        main_loop.run(&mut db)
    };

    // Watch for file changes and re-trigger the analysis.
    let mut file_watcher = FileWatcher::new(move |changes| {
        file_changes_notifier.notify(changes);
    })?;
    tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());

    file_watcher.watch_folder(workspace_folder)?;
    std::mem::forget(db);

    main_loop.run(&mut program);
    Ok(exit_status)
}

    let source_jar: &SourceJar = program.jar().unwrap();
#[derive(Copy, Clone)]
pub enum ExitStatus {
    /// Checking was successful and there were no errors.
    Success = 0,

    dbg!(source_jar.parsed.statistics());
    dbg!(source_jar.sources.statistics());
    /// Checking was successful but there were errors.
    Failure = 1,

    Ok(())
    /// Checking failed.
    Error = 2,
}

impl Termination for ExitStatus {
    fn report(self) -> ExitCode {
        ExitCode::from(self as u8)
    }
}

struct MainLoop {
    orchestrator_sender: crossbeam_channel::Sender<OrchestratorMessage>,
    main_loop_receiver: crossbeam_channel::Receiver<MainLoopMessage>,
    /// Sender that can be used to send messages to the main loop.
    sender: crossbeam_channel::Sender<MainLoopMessage>,

    /// Receiver for the messages sent **to** the main loop.
    receiver: crossbeam_channel::Receiver<MainLoopMessage>,

    /// The file system watcher, if running in watch mode.
    watcher: Option<WorkspaceWatcher>,

    cli_configuration: Configuration,
}

impl MainLoop {
    fn new() -> (Self, MainLoopCancellationToken) {
        let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
        let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);

        let mut orchestrator = Orchestrator {
            receiver: orchestrator_receiver,
            sender: main_loop_sender.clone(),
            revision: 0,
        };

        std::thread::spawn(move || {
            orchestrator.run();
        });
    fn new(cli_configuration: Configuration) -> (Self, MainLoopCancellationToken) {
        let (sender, receiver) = crossbeam_channel::bounded(10);

        (
            Self {
                orchestrator_sender,
                main_loop_receiver,
            },
            MainLoopCancellationToken {
                sender: main_loop_sender,
                sender: sender.clone(),
                receiver,
                watcher: None,
                cli_configuration,
            },
            MainLoopCancellationToken { sender },
        )
    }

    fn file_changes_notifier(&self) -> FileChangesNotifier {
        FileChangesNotifier {
            sender: self.orchestrator_sender.clone(),
        }
    fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> {
        tracing::debug!("Starting watch mode");
        let sender = self.sender.clone();
        let watcher = watch::directory_watcher(move |event| {
            sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
        })?;

        self.watcher = Some(WorkspaceWatcher::new(watcher, db));

        self.run(db);

        Ok(ExitStatus::Success)
    }

    fn run(self, program: &mut Program) {
        self.orchestrator_sender
            .send(OrchestratorMessage::Run)
            .unwrap();
    fn run(mut self, db: &mut RootDatabase) -> ExitStatus {
        self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();

        for message in &self.main_loop_receiver {
            tracing::trace!("Main Loop: Tick");
        let result = self.main_loop(db);

        tracing::debug!("Exiting main loop");

        result
    }

    fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus {
        // Schedule the first check.
        tracing::debug!("Starting main loop");

        let mut revision = 0u64;

        while let Ok(message) = self.receiver.recv() {
            match message {
                MainLoopMessage::CheckProgram { revision } => {
                    let program = program.snapshot();
                    let sender = self.orchestrator_sender.clone();
                MainLoopMessage::CheckWorkspace => {
                    let db = db.snapshot();
                    let sender = self.sender.clone();

                    // Spawn a new task that checks the program. This needs to be done in a separate thread
                    // Spawn a new task that checks the workspace. This needs to be done in a separate thread
                    // to prevent blocking the main loop here.
                    rayon::spawn(move || match program.check(ExecutionMode::ThreadPool) {
                        Ok(result) => {
                    rayon::spawn(move || {
                        if let Ok(result) = db.check() {
                            // Send the result back to the main loop for printing.
                            sender
                                .send(OrchestratorMessage::CheckProgramCompleted {
                                    diagnostics: result,
                                    revision,
                                })
                                .send(MainLoopMessage::CheckCompleted { result, revision })
                                .unwrap();
                        }
                        Err(QueryError::Cancelled) => {}
                    });
                }
                MainLoopMessage::ApplyChanges(changes) => {
                    // Automatically cancels any pending queries and waits for them to complete.
                    program.apply_changes(changes);

                MainLoopMessage::CheckCompleted {
                    result,
                    revision: check_revision,
                } => {
                    let has_diagnostics = !result.is_empty();
                    if check_revision == revision {
                        for diagnostic in result {
                            tracing::error!("{}", diagnostic);
                        }
                    } else {
                        tracing::debug!(
                            "Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
                        );
                    }

                    if self.watcher.is_none() {
                        return if has_diagnostics {
                            ExitStatus::Failure
                        } else {
                            ExitStatus::Success
                        };
                    }

                    tracing::trace!("Counts after last check:\n{}", countme::get_all());
                }
                MainLoopMessage::CheckCompleted(diagnostics) => {
                    dbg!(diagnostics);

                MainLoopMessage::ApplyChanges(changes) => {
                    revision += 1;
                    // Automatically cancels any pending queries and waits for them to complete.
                    db.apply_changes(changes, Some(&self.cli_configuration));
                    if let Some(watcher) = self.watcher.as_mut() {
                        watcher.update(db);
                    }
                    self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
                }
                MainLoopMessage::Exit => {
                    return;
                    // Cancel any pending queries and wait for them to complete.
                    // TODO: Don't use Salsa internal APIs
                    // [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
                    let _ = db.zalsa_mut();
                    return ExitStatus::Success;
                }
            }

            tracing::debug!("Waiting for next main loop message.");
        }
    }
}

impl Drop for MainLoop {
    fn drop(&mut self) {
        self.orchestrator_sender
            .send(OrchestratorMessage::Shutdown)
            .unwrap();
    }
}

#[derive(Debug, Clone)]
struct FileChangesNotifier {
    sender: crossbeam_channel::Sender<OrchestratorMessage>,
}

impl FileChangesNotifier {
    fn notify(&self, changes: Vec<FileWatcherChange>) {
        self.sender
            .send(OrchestratorMessage::FileChanges(changes))
            .unwrap();
        ExitStatus::Success
    }
}
@@ -201,164 +374,11 @@ impl MainLoopCancellationToken {
    }
}

struct Orchestrator {
    /// Sends messages to the main loop.
    sender: crossbeam_channel::Sender<MainLoopMessage>,
    /// Receives messages from the main loop.
    receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
    revision: usize,
}

impl Orchestrator {
    fn run(&mut self) {
        while let Ok(message) = self.receiver.recv() {
            match message {
                OrchestratorMessage::Run => {
                    self.sender
                        .send(MainLoopMessage::CheckProgram {
                            revision: self.revision,
                        })
                        .unwrap();
                }

                OrchestratorMessage::CheckProgramCompleted {
                    diagnostics,
                    revision,
                } => {
                    // Only take the diagnostics if they are for the latest revision.
                    if self.revision == revision {
                        self.sender
                            .send(MainLoopMessage::CheckCompleted(diagnostics))
                            .unwrap();
                    } else {
                        tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
                    }
                }

                OrchestratorMessage::FileChanges(changes) => {
                    // Request cancellation, but wait until all analysis tasks have completed to
                    // avoid stale messages in the next main loop.

                    self.revision += 1;
                    self.debounce_changes(changes);
                }
                OrchestratorMessage::Shutdown => {
                    return self.shutdown();
                }
            }
        }
    }

    fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
        loop {
            // Keep consuming incoming file change messages before running a new analysis,
            // but wait at most 10 ms for the next one.
            crossbeam_channel::select! {
                recv(self.receiver) -> message => {
                    match message {
                        Ok(OrchestratorMessage::Shutdown) => {
                            return self.shutdown();
                        }
                        Ok(OrchestratorMessage::FileChanges(file_changes)) => {
                            changes.extend(file_changes);
                        }

                        Ok(OrchestratorMessage::CheckProgramCompleted { .. }) => {
                            // Disregard any outdated completion message.
                        }
                        Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),

                        Err(_) => {
                            // There are no more senders, no point in waiting for more messages.
                            return;
                        }
                    }
                },
                default(std::time::Duration::from_millis(10)) => {
                    // No more file changes after 10 ms, send the changes and schedule a new analysis.
                    self.sender.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
                    self.sender.send(MainLoopMessage::CheckProgram { revision: self.revision }).unwrap();
                    return;
                }
            }
        }
    }
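The `select!` loop above is a classic channel debounce: keep draining change events as long as they arrive within the quiet period, then flush one batch. A self-contained version of the same pattern (toy payloads; 10 ms window as in the code above):

use std::time::Duration;

use crossbeam_channel::{unbounded, Receiver};

fn debounce(receiver: &Receiver<String>, mut batch: Vec<String>) -> Vec<String> {
    loop {
        crossbeam_channel::select! {
            recv(receiver) -> message => match message {
                Ok(change) => batch.push(change),
                // All senders dropped: flush what we have.
                Err(_) => return batch,
            },
            // No new change within 10 ms: the batch is complete.
            default(Duration::from_millis(10)) => return batch,
        }
    }
}

fn main() {
    let (sender, receiver) = unbounded();
    sender.send("a.py".to_string()).unwrap();
    sender.send("b.py".to_string()).unwrap();

    let batch = debounce(&receiver, vec!["initial.py".to_string()]);
    assert_eq!(batch.len(), 3);
}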

    #[allow(clippy::unused_self)]
    fn shutdown(&self) {
        tracing::trace!("Shutting down orchestrator.");
    }
}

/// Message sent from the orchestrator to the main loop.
#[derive(Debug)]
enum MainLoopMessage {
    CheckProgram { revision: usize },
    CheckCompleted(Vec<String>),
    ApplyChanges(Vec<FileWatcherChange>),
    CheckWorkspace,
    CheckCompleted { result: Vec<String>, revision: u64 },
    ApplyChanges(Vec<watch::ChangeEvent>),
    Exit,
}

#[derive(Debug)]
enum OrchestratorMessage {
    Run,
    Shutdown,

    CheckProgramCompleted {
        diagnostics: Vec<String>,
        revision: usize,
    },

    FileChanges(Vec<FileWatcherChange>),
}

fn setup_tracing() {
    let subscriber = Registry::default().with(
        tracing_tree::HierarchicalLayer::default()
            .with_indent_lines(true)
            .with_indent_amount(2)
            .with_bracketed_fields(true)
            .with_thread_ids(true)
            .with_targets(true)
            .with_writer(|| Box::new(std::io::stderr()))
            .with_timer(Uptime::default())
            .with_filter(LoggingFilter {
                trace_level: Level::TRACE,
            }),
    );

    tracing::subscriber::set_global_default(subscriber).unwrap();
}

struct LoggingFilter {
    trace_level: Level,
}

impl LoggingFilter {
    fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
        let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
            self.trace_level
        } else {
            Level::INFO
        };

        meta.level() <= &filter
    }
}

impl<S> Filter<S> for LoggingFilter {
    fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
        self.is_enabled(meta)
    }

    fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
        if self.is_enabled(meta) {
            Interest::always()
        } else {
            Interest::never()
        }
    }

    fn max_level_hint(&self) -> Option<LevelFilter> {
        Some(LevelFilter::from_level(self.trace_level))
    }
}
File diff suppressed because it is too large
@@ -1,41 +0,0 @@
use std::ops::{Deref, DerefMut};
use std::sync::Arc;

use ruff_python_ast::ModModule;
use ruff_python_parser::Parsed;

use crate::cache::KeyValueCache;
use crate::db::{QueryResult, SourceDb};
use crate::files::FileId;
use crate::source::source_text;

#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn parse(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Arc<Parsed<ModModule>>> {
    let jar = db.jar()?;

    jar.parsed.get(&file_id, |file_id| {
        let source = source_text(db, *file_id)?;

        Ok(Arc::new(ruff_python_parser::parse_unchecked_source(
            source.text(),
            source.kind().into(),
        )))
    })
}

#[derive(Debug, Default)]
pub struct ParsedStorage(KeyValueCache<FileId, Arc<Parsed<ModModule>>>);

impl Deref for ParsedStorage {
    type Target = KeyValueCache<FileId, Arc<Parsed<ModModule>>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for ParsedStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
@@ -1,413 +0,0 @@
use rayon::{current_num_threads, yield_local};
use rustc_hash::FxHashSet;

use crate::db::{Database, QueryError, QueryResult};
use crate::files::FileId;
use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
use crate::module::{file_to_module, resolve_module};
use crate::program::Program;
use crate::semantic::{semantic_index, Dependency};

impl Program {
    /// Checks all open files in the workspace and its dependencies.
    #[tracing::instrument(level = "debug", skip_all)]
    pub fn check(&self, mode: ExecutionMode) -> QueryResult<Vec<String>> {
        self.cancelled()?;

        let mut context = CheckContext::new(self);

        match mode {
            ExecutionMode::SingleThreaded => SingleThreadedExecutor.run(&mut context)?,
            ExecutionMode::ThreadPool => ThreadPoolExecutor.run(&mut context)?,
        };

        Ok(context.finish())
    }

    #[tracing::instrument(level = "debug", skip(self, context))]
    fn check_file(&self, file: FileId, context: &CheckFileContext) -> QueryResult<Diagnostics> {
        self.cancelled()?;

        let index = semantic_index(self, file)?;
        let dependencies = index.symbol_table().dependencies();

        if !dependencies.is_empty() {
            let module = file_to_module(self, file)?;

            // TODO scheduling all dependencies here is wasteful if we don't infer any types on them
            // but I think that's unlikely, so it is okay?
            // Anyway, we need to figure out a way to retrieve the dependencies of a module
            // from the persistent cache. So maybe it should be a separate query after all.
            for dependency in dependencies {
                let dependency_name = match dependency {
                    Dependency::Module(name) => Some(name.clone()),
                    Dependency::Relative { .. } => match &module {
                        Some(module) => module.resolve_dependency(self, dependency)?,
                        None => None,
                    },
                };

                if let Some(dependency_name) = dependency_name {
                    // TODO We may want to have different check functions for non-first-party
                    // files because we only need to index them and not check them.
                    // Supporting non-first-party code also requires supporting typing stubs.
                    if let Some(dependency) = resolve_module(self, dependency_name)? {
                        if dependency.path(self)?.root().kind().is_first_party() {
                            context.schedule_dependency(dependency.path(self)?.file());
                        }
                    }
                }
            }
        }

        let mut diagnostics = Vec::new();

        if self.workspace().is_file_open(file) {
            diagnostics.extend_from_slice(&lint_syntax(self, file)?);
            diagnostics.extend_from_slice(&lint_semantic(self, file)?);
        }

        Ok(Diagnostics::from(diagnostics))
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ExecutionMode {
    SingleThreaded,
    ThreadPool,
}

/// Context that stores state information about the entire check operation.
struct CheckContext<'a> {
    /// IDs of the files that have been queued for checking.
    ///
    /// Used to avoid queuing the same file twice.
    scheduled_files: FxHashSet<FileId>,

    /// Reference to the program that is checked.
    program: &'a Program,

    /// The aggregated diagnostics
    diagnostics: Vec<String>,
}

impl<'a> CheckContext<'a> {
    fn new(program: &'a Program) -> Self {
        Self {
            scheduled_files: FxHashSet::default(),
            program,
            diagnostics: Vec::new(),
        }
    }

    /// Returns the tasks to check all open files in the workspace.
    fn check_open_files(&mut self) -> Vec<CheckOpenFileTask> {
        self.scheduled_files
            .extend(self.program.workspace().open_files());

        self.program
            .workspace()
            .open_files()
            .map(|file_id| CheckOpenFileTask { file_id })
            .collect()
    }

    /// Returns the task to check a dependency.
    fn check_dependency(&mut self, file_id: FileId) -> Option<CheckDependencyTask> {
        if self.scheduled_files.insert(file_id) {
            Some(CheckDependencyTask { file_id })
        } else {
            None
        }
    }

    /// Pushes the result for a single file check operation.
    fn push_diagnostics(&mut self, diagnostics: &Diagnostics) {
        self.diagnostics.extend_from_slice(diagnostics);
    }

    /// Returns a reference to the program that is being checked.
    fn program(&self) -> &'a Program {
        self.program
    }

    /// Creates a task context that is used to check a single file.
    fn task_context<'b, S>(&self, dependency_scheduler: &'b S) -> CheckTaskContext<'a, 'b, S>
    where
        S: ScheduleDependency,
    {
        CheckTaskContext {
            program: self.program,
            dependency_scheduler,
        }
    }

    fn finish(self) -> Vec<String> {
        self.diagnostics
    }
}

/// Trait that abstracts away how a dependency of a file gets scheduled for checking.
trait ScheduleDependency {
    /// Schedules the file with the given ID for checking.
    fn schedule(&self, file_id: FileId);
}

impl<T> ScheduleDependency for T
where
    T: Fn(FileId),
{
    fn schedule(&self, file_id: FileId) {
        let f = self;
        f(file_id);
    }
}
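The blanket `impl<T: Fn(FileId)> ScheduleDependency for T` is what lets the single-threaded executor pass a bare no-op closure where the thread-pool executor passes a channel-backed one. A self-contained miniature of the pattern, with `FileId` replaced by `u32` purely for illustration:

type FileId = u32; // stand-in for the crate's `FileId`

trait ScheduleDependency {
    fn schedule(&self, file_id: FileId);
}

impl<T> ScheduleDependency for T
where
    T: Fn(FileId),
{
    fn schedule(&self, file_id: FileId) {
        self(file_id);
    }
}

fn check_with(scheduler: &dyn ScheduleDependency) {
    scheduler.schedule(42);
}

fn main() {
    let noop = |_: FileId| {};
    let log = |id: FileId| println!("scheduling file {id}");

    check_with(&noop); // single-threaded: dependencies are ignored
    check_with(&log);  // thread pool: dependencies get queued
}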

/// Context that is used to run a single file check task.
///
/// The task is generic over `S` because it is passed across thread boundaries and
/// we don't want to add the requirement that [`ScheduleDependency`] must be [`Send`].
struct CheckTaskContext<'a, 'scheduler, S>
where
    S: ScheduleDependency,
{
    dependency_scheduler: &'scheduler S,
    program: &'a Program,
}

impl<'a, 'scheduler, S> CheckTaskContext<'a, 'scheduler, S>
where
    S: ScheduleDependency,
{
    fn as_file_context(&self) -> CheckFileContext<'scheduler> {
        CheckFileContext {
            dependency_scheduler: self.dependency_scheduler,
        }
    }
}

/// Context passed when checking a single file.
///
/// This is a trimmed down version of [`CheckTaskContext`] with the type parameter `S` erased
/// to avoid monomorphization of [`Program::check_file`].
struct CheckFileContext<'a> {
    dependency_scheduler: &'a dyn ScheduleDependency,
}

impl<'a> CheckFileContext<'a> {
    fn schedule_dependency(&self, file_id: FileId) {
        self.dependency_scheduler.schedule(file_id);
    }
}

#[derive(Debug)]
enum CheckFileTask {
    OpenFile(CheckOpenFileTask),
    Dependency(CheckDependencyTask),
}

impl CheckFileTask {
    /// Runs the task and returns the results for checking this file.
    fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
    where
        S: ScheduleDependency,
    {
        match self {
            Self::OpenFile(task) => task.run(context),
            Self::Dependency(task) => task.run(context),
        }
    }

    fn file_id(&self) -> FileId {
        match self {
            CheckFileTask::OpenFile(task) => task.file_id,
            CheckFileTask::Dependency(task) => task.file_id,
        }
    }
}

/// Task to check an open file.
#[derive(Debug)]
struct CheckOpenFileTask {
    file_id: FileId,
}

impl CheckOpenFileTask {
    fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
    where
        S: ScheduleDependency,
    {
        context
            .program
            .check_file(self.file_id, &context.as_file_context())
    }
}

/// Task to check a dependency file.
#[derive(Debug)]
struct CheckDependencyTask {
    file_id: FileId,
}

impl CheckDependencyTask {
    fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
    where
        S: ScheduleDependency,
    {
        context
            .program
            .check_file(self.file_id, &context.as_file_context())
    }
}

/// Executor that schedules the checking of individual program files.
trait CheckExecutor {
    fn run(self, context: &mut CheckContext) -> QueryResult<()>;
}

/// Executor that runs all check operations on the current thread.
///
/// The executor does not schedule dependencies for checking.
/// The main motivation for scheduling dependencies
/// in a multithreaded environment is to parse and index the dependencies concurrently.
/// However, that doesn't make sense in a single-threaded environment, because checking the
/// dependencies then competes with checking the open files. Checking dependencies in a
/// single-threaded environment is more likely
/// to hurt performance because we end up analyzing files in their entirety, even if we only need to type check parts of them.
#[derive(Debug, Default)]
struct SingleThreadedExecutor;

impl CheckExecutor for SingleThreadedExecutor {
    fn run(self, context: &mut CheckContext) -> QueryResult<()> {
        let mut queue = context.check_open_files();

        let noop_schedule_dependency = |_| {};

        while let Some(file) = queue.pop() {
            context.program().cancelled()?;

            let task_context = context.task_context(&noop_schedule_dependency);
            context.push_diagnostics(&file.run(&task_context)?);
        }

        Ok(())
    }
}

/// Executor that runs the check operations on a thread pool.
///
/// The executor runs each check operation as its own task using a thread pool.
///
/// Unlike [`SingleThreadedExecutor`], this executor schedules dependencies for checking. It
/// even schedules dependencies for checking when the thread pool size is 1, for a better debugging experience.
#[derive(Debug, Default)]
struct ThreadPoolExecutor;

impl CheckExecutor for ThreadPoolExecutor {
    fn run(self, context: &mut CheckContext) -> QueryResult<()> {
        let num_threads = current_num_threads();
        let single_threaded = num_threads == 1;
        let span = tracing::trace_span!("ThreadPoolExecutor::run", num_threads);
        let _ = span.enter();

        let mut queue: Vec<_> = context
            .check_open_files()
            .into_iter()
            .map(CheckFileTask::OpenFile)
            .collect();

        let (sender, receiver) = if single_threaded {
            // Use an unbounded queue for single threaded execution to prevent deadlocks
            // when a single file schedules multiple dependencies.
            crossbeam::channel::unbounded()
        } else {
            // Use a bounded queue to apply backpressure when the orchestration thread isn't able to keep
            // up processing messages from the worker threads.
            crossbeam::channel::bounded(num_threads)
        };

        let schedule_sender = sender.clone();
        let schedule_dependency = move |file_id| {
            schedule_sender
                .send(ThreadPoolMessage::ScheduleDependency(file_id))
                .unwrap();
        };

        let result = rayon::in_place_scope(|scope| {
            let mut pending = 0usize;

            loop {
                context.program().cancelled()?;

                // 1. Try to get a queued message to ensure that we always have remaining space in the channel to prevent blocking the worker threads.
                // 2. Try to process a queued file.
                // 3. If there's no queued file, wait for the next incoming message.
                // 4. Exit if there are no more messages and no senders.
                let message = if let Ok(message) = receiver.try_recv() {
                    message
                } else if let Some(task) = queue.pop() {
                    pending += 1;

                    let task_context = context.task_context(&schedule_dependency);
                    let sender = sender.clone();
                    let task_span = tracing::trace_span!(
                        parent: &span,
                        "CheckFileTask::run",
                        file_id = task.file_id().as_u32(),
                    );

                    scope.spawn(move |_| {
                        task_span.in_scope(|| match task.run(&task_context) {
                            Ok(result) => {
                                sender.send(ThreadPoolMessage::Completed(result)).unwrap();
                            }
                            Err(err) => sender.send(ThreadPoolMessage::Errored(err)).unwrap(),
                        });
                    });

                    // If this is a single threaded rayon thread pool, yield the current thread
                    // or we never start processing the work items.
                    if single_threaded {
                        yield_local();
                    }

                    continue;
                } else if let Ok(message) = receiver.recv() {
                    message
                } else {
                    break;
                };

                match message {
                    ThreadPoolMessage::ScheduleDependency(dependency) => {
                        if let Some(task) = context.check_dependency(dependency) {
                            queue.push(CheckFileTask::Dependency(task));
                        }
                    }
                    ThreadPoolMessage::Completed(diagnostics) => {
                        context.push_diagnostics(&diagnostics);
                        pending -= 1;

                        if pending == 0 && queue.is_empty() {
                            break;
                        }
                    }
                    ThreadPoolMessage::Errored(err) => {
                        return Err(err);
                    }
                }
            }

            Ok(())
        });

        result
    }
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ThreadPoolMessage {
|
||||
ScheduleDependency(FileId),
|
||||
Completed(Diagnostics),
|
||||
Errored(QueryError),
|
||||
}
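As a standalone illustration of the channel choice above (assuming only the `crossbeam` crate), a bounded channel is what gives the orchestration loop its backpressure: `send` blocks once the buffer is full, while an unbounded channel never blocks the sender, which is why single-threaded execution needs it to avoid deadlock. The worker/orchestrator naming here is illustrative:

// Minimal backpressure demo: the spawned "worker" can run at most two
// messages ahead of the receiving "orchestrator" because the channel is
// bounded; swap in `unbounded()` and `send` never blocks.
fn backpressure_demo() {
    let (tx, rx) = crossbeam::channel::bounded::<u32>(2);
    std::thread::spawn(move || {
        for i in 0..10 {
            // Blocks whenever the orchestrator hasn't drained the channel.
            tx.send(i).unwrap();
        }
    });
    // Ends once the sender is dropped and the channel is drained.
    while let Ok(message) = rx.recv() {
        println!("processed {message}");
    }
}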
@@ -1,275 +0,0 @@
use std::collections::hash_map::Entry;
use std::path::{Path, PathBuf};
use std::sync::Arc;

use rustc_hash::FxHashMap;

use crate::db::{
    Database, Db, DbRuntime, DbWithJar, HasJar, HasJars, JarsStorage, LintDb, LintJar,
    ParallelDatabase, QueryResult, SemanticDb, SemanticJar, Snapshot, SourceDb, SourceJar, Upcast,
};
use crate::files::{FileId, Files};
use crate::Workspace;

pub mod check;

#[derive(Debug)]
pub struct Program {
    jars: JarsStorage<Program>,
    files: Files,
    workspace: Workspace,
}

impl Program {
    pub fn new(workspace: Workspace) -> Self {
        Self {
            jars: JarsStorage::default(),
            files: Files::default(),
            workspace,
        }
    }

    pub fn apply_changes<I>(&mut self, changes: I)
    where
        I: IntoIterator<Item = FileWatcherChange>,
    {
        let mut aggregated_changes = AggregatedChanges::default();

        aggregated_changes.extend(changes.into_iter().map(|change| FileChange {
            id: self.files.intern(&change.path),
            kind: change.kind,
        }));

        let (source, semantic, lint) = self.jars_mut();
        for change in aggregated_changes.iter() {
            semantic.module_resolver.remove_module_by_file(change.id);
            semantic.semantic_indices.remove(&change.id);
            source.sources.remove(&change.id);
            source.parsed.remove(&change.id);
            // TODO: remove all dependent modules as well
            semantic.type_store.remove_module(change.id);
            lint.lint_syntax.remove(&change.id);
            lint.lint_semantic.remove(&change.id);
        }
    }

    pub fn files(&self) -> &Files {
        &self.files
    }

    pub fn workspace(&self) -> &Workspace {
        &self.workspace
    }

    pub fn workspace_mut(&mut self) -> &mut Workspace {
        &mut self.workspace
    }
}

impl SourceDb for Program {
    fn file_id(&self, path: &Path) -> FileId {
        self.files.intern(path)
    }

    fn file_path(&self, file_id: FileId) -> Arc<Path> {
        self.files.path(file_id)
    }
}

impl DbWithJar<SourceJar> for Program {}

impl SemanticDb for Program {}

impl DbWithJar<SemanticJar> for Program {}

impl LintDb for Program {}

impl DbWithJar<LintJar> for Program {}

impl Upcast<dyn SemanticDb> for Program {
    fn upcast(&self) -> &(dyn SemanticDb + 'static) {
        self
    }
}

impl Upcast<dyn SourceDb> for Program {
    fn upcast(&self) -> &(dyn SourceDb + 'static) {
        self
    }
}

impl Upcast<dyn LintDb> for Program {
    fn upcast(&self) -> &(dyn LintDb + 'static) {
        self
    }
}

impl Db for Program {}

impl Database for Program {
    fn runtime(&self) -> &DbRuntime {
        self.jars.runtime()
    }

    fn runtime_mut(&mut self) -> &mut DbRuntime {
        self.jars.runtime_mut()
    }
}

impl ParallelDatabase for Program {
    fn snapshot(&self) -> Snapshot<Self> {
        Snapshot::new(Self {
            jars: self.jars.snapshot(),
            files: self.files.snapshot(),
            workspace: self.workspace.clone(),
        })
    }
}

impl HasJars for Program {
    type Jars = (SourceJar, SemanticJar, LintJar);

    fn jars(&self) -> QueryResult<&Self::Jars> {
        self.jars.jars()
    }

    fn jars_mut(&mut self) -> &mut Self::Jars {
        self.jars.jars_mut()
    }
}

impl HasJar<SourceJar> for Program {
    fn jar(&self) -> QueryResult<&SourceJar> {
        Ok(&self.jars()?.0)
    }

    fn jar_mut(&mut self) -> &mut SourceJar {
        &mut self.jars_mut().0
    }
}

impl HasJar<SemanticJar> for Program {
    fn jar(&self) -> QueryResult<&SemanticJar> {
        Ok(&self.jars()?.1)
    }

    fn jar_mut(&mut self) -> &mut SemanticJar {
        &mut self.jars_mut().1
    }
}

impl HasJar<LintJar> for Program {
    fn jar(&self) -> QueryResult<&LintJar> {
        Ok(&self.jars()?.2)
    }

    fn jar_mut(&mut self) -> &mut LintJar {
        &mut self.jars_mut().2
    }
}

#[derive(Clone, Debug)]
pub struct FileWatcherChange {
    path: PathBuf,
    kind: FileChangeKind,
}

impl FileWatcherChange {
    pub fn new(path: PathBuf, kind: FileChangeKind) -> Self {
        Self { path, kind }
    }
}

#[derive(Copy, Clone, Debug)]
struct FileChange {
    id: FileId,
    kind: FileChangeKind,
}

impl FileChange {
    fn file_id(self) -> FileId {
        self.id
    }

    fn kind(self) -> FileChangeKind {
        self.kind
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileChangeKind {
    Created,
    Modified,
    Deleted,
}

#[derive(Default, Debug)]
struct AggregatedChanges {
    changes: FxHashMap<FileId, FileChangeKind>,
}

impl AggregatedChanges {
    fn add(&mut self, change: FileChange) {
        match self.changes.entry(change.file_id()) {
            Entry::Occupied(mut entry) => {
                let merged = entry.get_mut();

                match (merged, change.kind()) {
                    (FileChangeKind::Created, FileChangeKind::Deleted) => {
                        // Deletion after creation means that ruff never saw the file.
                        entry.remove();
                    }
                    (FileChangeKind::Created, FileChangeKind::Modified) => {
                        // No-op: for ruff, modifying a file that it doesn't yet know exists is still considered a creation.
                    }

                    (FileChangeKind::Modified, FileChangeKind::Created) => {
                        // Uhh, that should probably not happen. Continue considering it a modification.
                    }

                    (FileChangeKind::Modified, FileChangeKind::Deleted) => {
                        *entry.get_mut() = FileChangeKind::Deleted;
                    }

                    (FileChangeKind::Deleted, FileChangeKind::Created) => {
                        *entry.get_mut() = FileChangeKind::Modified;
                    }

                    (FileChangeKind::Deleted, FileChangeKind::Modified) => {
                        // That's weird, but let's consider it a modification.
                        *entry.get_mut() = FileChangeKind::Modified;
                    }

                    (FileChangeKind::Created, FileChangeKind::Created)
                    | (FileChangeKind::Modified, FileChangeKind::Modified)
                    | (FileChangeKind::Deleted, FileChangeKind::Deleted) => {
                        // No-op transitions. Some of them should be impossible but we handle them anyway.
                    }
                }
            }
            Entry::Vacant(entry) => {
                entry.insert(change.kind());
            }
        }
    }

    fn extend<I>(&mut self, changes: I)
    where
        I: IntoIterator<Item = FileChange>,
    {
        let iter = changes.into_iter();
        let (lower, _) = iter.size_hint();
        self.changes.reserve(lower);

        for change in iter {
            self.add(change);
        }
    }

    fn iter(&self) -> impl Iterator<Item = FileChange> + '_ {
        self.changes.iter().map(|(id, kind)| FileChange {
            id: *id,
            kind: *kind,
        })
    }
}
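A minimal, runnable sketch of the merge table in `AggregatedChanges::add`, using a plain `HashMap`, a `u32` file id, and a stand-in `Kind` enum so it compiles outside this crate:

// Standalone replay of the merge rules above: Created+Deleted drops the
// entry entirely (the consumer never needs to see the file), Deleted+Created
// collapses to Modified, and repeats are no-ops.
use std::collections::HashMap;

#[derive(Copy, Clone, Debug, PartialEq)]
enum Kind { Created, Modified, Deleted }

fn merge(changes: &[(u32, Kind)]) -> HashMap<u32, Kind> {
    let mut merged: HashMap<u32, Kind> = HashMap::new();
    for &(file, kind) in changes {
        match (merged.get(&file).copied(), kind) {
            // Deletion right after creation: nobody ever saw the file.
            (Some(Kind::Created), Kind::Deleted) => { merged.remove(&file); }
            // Modifying a not-yet-seen file is still just a creation.
            (Some(Kind::Created), Kind::Modified) => {}
            // Deleted then recreated (or "modified") is a modification.
            (Some(Kind::Deleted), Kind::Created | Kind::Modified) => {
                merged.insert(file, Kind::Modified);
            }
            // Modified then deleted is a deletion.
            (Some(Kind::Modified), Kind::Deleted) => { merged.insert(file, Kind::Deleted); }
            // Remaining occupied transitions are no-ops.
            (Some(_), _) => {}
            (None, _) => { merged.insert(file, kind); }
        }
    }
    merged
}

fn main() {
    let merged = merge(&[(1, Kind::Created), (1, Kind::Deleted), (2, Kind::Modified)]);
    assert!(!merged.contains_key(&1));
    assert_eq!(merged.get(&2).copied(), Some(Kind::Modified));
}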
@@ -1,882 +0,0 @@
use std::num::NonZeroU32;

use ruff_python_ast as ast;
use ruff_python_ast::visitor::source_order::SourceOrderVisitor;
use ruff_python_ast::AstNode;

use crate::ast_ids::{NodeKey, TypedNodeKey};
use crate::cache::KeyValueCache;
use crate::db::{QueryResult, SemanticDb, SemanticJar};
use crate::files::FileId;
use crate::module::Module;
use crate::module::ModuleName;
use crate::parse::parse;
use crate::Name;
pub(crate) use definitions::Definition;
use definitions::{ImportDefinition, ImportFromDefinition};
pub(crate) use flow_graph::ConstrainedDefinition;
use flow_graph::{FlowGraph, FlowGraphBuilder, FlowNodeId, ReachableDefinitionsIterator};
use ruff_index::{newtype_index, IndexVec};
use rustc_hash::FxHashMap;
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
pub(crate) use symbol_table::{Dependency, SymbolId};
use symbol_table::{ScopeId, ScopeKind, SymbolFlags, SymbolTable, SymbolTableBuilder};
pub(crate) use types::{infer_definition_type, infer_symbol_public_type, Type, TypeStore};

mod definitions;
mod flow_graph;
mod symbol_table;
mod types;

#[tracing::instrument(level = "debug", skip(db))]
pub fn semantic_index(db: &dyn SemanticDb, file_id: FileId) -> QueryResult<Arc<SemanticIndex>> {
    let jar: &SemanticJar = db.jar()?;

    jar.semantic_indices.get(&file_id, |_| {
        let parsed = parse(db.upcast(), file_id)?;
        Ok(Arc::from(SemanticIndex::from_ast(parsed.syntax())))
    })
}

#[tracing::instrument(level = "debug", skip(db))]
pub fn resolve_global_symbol(
    db: &dyn SemanticDb,
    module: Module,
    name: &str,
) -> QueryResult<Option<GlobalSymbolId>> {
    let file_id = module.path(db)?.file();
    let symbol_table = &semantic_index(db, file_id)?.symbol_table;
    let Some(symbol_id) = symbol_table.root_symbol_id_by_name(name) else {
        return Ok(None);
    };
    Ok(Some(GlobalSymbolId { file_id, symbol_id }))
}

#[newtype_index]
pub struct ExpressionId;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct GlobalSymbolId {
    pub(crate) file_id: FileId,
    pub(crate) symbol_id: SymbolId,
}

#[derive(Debug)]
pub struct SemanticIndex {
    symbol_table: SymbolTable,
    flow_graph: FlowGraph,
    expressions: FxHashMap<NodeKey, ExpressionId>,
    expressions_by_id: IndexVec<ExpressionId, NodeKey>,
}

impl SemanticIndex {
    pub fn from_ast(module: &ast::ModModule) -> Self {
        let root_scope_id = SymbolTable::root_scope_id();
        let mut indexer = SemanticIndexer {
            symbol_table_builder: SymbolTableBuilder::new(),
            flow_graph_builder: FlowGraphBuilder::new(),
            scopes: vec![ScopeState {
                scope_id: root_scope_id,
                current_flow_node_id: FlowGraph::start(),
            }],
            expressions: FxHashMap::default(),
            expressions_by_id: IndexVec::default(),
            current_definition: None,
        };
        indexer.visit_body(&module.body);
        indexer.finish()
    }

    fn resolve_expression_id<'a>(
        &self,
        ast: &'a ast::ModModule,
        expression_id: ExpressionId,
    ) -> ast::AnyNodeRef<'a> {
        let node_key = self.expressions_by_id[expression_id];
        node_key
            .resolve(ast.as_any_node_ref())
            .expect("node to resolve")
    }

    /// Return an iterator over all definitions of `symbol_id` reachable from `use_expr`. The value
    /// of `symbol_id` in `use_expr` must originate from one of the iterated definitions (or from
    /// an external reassignment of the name outside of this scope).
    pub fn reachable_definitions(
        &self,
        symbol_id: SymbolId,
        use_expr: &ast::Expr,
    ) -> ReachableDefinitionsIterator {
        let expression_id = self.expression_id(use_expr);
        ReachableDefinitionsIterator::new(
            &self.flow_graph,
            symbol_id,
            self.flow_graph.for_expr(expression_id),
        )
    }

    pub fn expression_id(&self, expression: &ast::Expr) -> ExpressionId {
        self.expressions[&NodeKey::from_node(expression.into())]
    }

    pub fn symbol_table(&self) -> &SymbolTable {
        &self.symbol_table
    }
}

#[derive(Debug)]
struct ScopeState {
    scope_id: ScopeId,
    current_flow_node_id: FlowNodeId,
}

#[derive(Debug)]
struct SemanticIndexer {
    symbol_table_builder: SymbolTableBuilder,
    flow_graph_builder: FlowGraphBuilder,
    scopes: Vec<ScopeState>,
    /// the definition whose target(s) we are currently walking
    current_definition: Option<Definition>,
    expressions: FxHashMap<NodeKey, ExpressionId>,
    expressions_by_id: IndexVec<ExpressionId, NodeKey>,
}

impl SemanticIndexer {
    pub(crate) fn finish(mut self) -> SemanticIndex {
        self.expressions.shrink_to_fit();
        self.expressions_by_id.shrink_to_fit();
        let SemanticIndexer {
            flow_graph_builder,
            symbol_table_builder,
            expressions,
            expressions_by_id,
            ..
        } = self;
        SemanticIndex {
            flow_graph: flow_graph_builder.finish(),
            symbol_table: symbol_table_builder.finish(),
            expressions,
            expressions_by_id,
        }
    }

    fn set_current_flow_node(&mut self, new_flow_node_id: FlowNodeId) {
        let scope_state = self.scopes.last_mut().expect("scope stack is never empty");
        scope_state.current_flow_node_id = new_flow_node_id;
    }

    fn current_flow_node(&self) -> FlowNodeId {
        self.scopes
            .last()
            .expect("scope stack is never empty")
            .current_flow_node_id
    }

    fn add_or_update_symbol(&mut self, identifier: &str, flags: SymbolFlags) -> SymbolId {
        self.symbol_table_builder
            .add_or_update_symbol(self.cur_scope(), identifier, flags)
    }

    fn add_or_update_symbol_with_def(
        &mut self,
        identifier: &str,
        definition: Definition,
    ) -> SymbolId {
        let symbol_id = self.add_or_update_symbol(identifier, SymbolFlags::IS_DEFINED);
        self.symbol_table_builder
            .add_definition(symbol_id, definition.clone());
        let new_flow_node_id =
            self.flow_graph_builder
                .add_definition(symbol_id, definition, self.current_flow_node());
        self.set_current_flow_node(new_flow_node_id);
        symbol_id
    }

    fn push_scope(
        &mut self,
        name: &str,
        kind: ScopeKind,
        definition: Option<Definition>,
        defining_symbol: Option<SymbolId>,
    ) -> ScopeId {
        let scope_id = self.symbol_table_builder.add_child_scope(
            self.cur_scope(),
            name,
            kind,
            definition,
            defining_symbol,
        );
        self.scopes.push(ScopeState {
            scope_id,
            current_flow_node_id: FlowGraph::start(),
        });
        scope_id
    }

    fn pop_scope(&mut self) -> ScopeId {
        self.scopes
            .pop()
            .expect("Scope stack should never be empty")
            .scope_id
    }

    fn cur_scope(&self) -> ScopeId {
        self.scopes
            .last()
            .expect("Scope stack should never be empty")
            .scope_id
    }

    fn record_scope_for_node(&mut self, node_key: NodeKey, scope_id: ScopeId) {
        self.symbol_table_builder
            .record_scope_for_node(node_key, scope_id);
    }

    fn insert_constraint(&mut self, expr: &ast::Expr) {
        let node_key = NodeKey::from_node(expr.into());
        let expression_id = self.expressions[&node_key];
        let constraint = self
            .flow_graph_builder
            .add_constraint(self.current_flow_node(), expression_id);
        self.set_current_flow_node(constraint);
    }

    fn with_type_params(
        &mut self,
        name: &str,
        params: &Option<Box<ast::TypeParams>>,
        definition: Option<Definition>,
        defining_symbol: Option<SymbolId>,
        nested: impl FnOnce(&mut Self) -> ScopeId,
    ) -> ScopeId {
        if let Some(type_params) = params {
            self.push_scope(name, ScopeKind::Annotation, definition, defining_symbol);
            for type_param in &type_params.type_params {
                let name = match type_param {
                    ast::TypeParam::TypeVar(ast::TypeParamTypeVar { name, .. }) => name,
                    ast::TypeParam::ParamSpec(ast::TypeParamParamSpec { name, .. }) => name,
                    ast::TypeParam::TypeVarTuple(ast::TypeParamTypeVarTuple { name, .. }) => name,
                };
                self.add_or_update_symbol(name, SymbolFlags::IS_DEFINED);
            }
        }
        let scope_id = nested(self);
        if params.is_some() {
            self.pop_scope();
        }
        scope_id
    }
}

impl SourceOrderVisitor<'_> for SemanticIndexer {
    fn visit_expr(&mut self, expr: &ast::Expr) {
        let node_key = NodeKey::from_node(expr.into());
        let expression_id = self.expressions_by_id.push(node_key);

        let flow_expression_id = self
            .flow_graph_builder
            .record_expr(self.current_flow_node());
        debug_assert_eq!(expression_id, flow_expression_id);

        let symbol_expression_id = self
            .symbol_table_builder
            .record_expression(self.cur_scope());

        debug_assert_eq!(expression_id, symbol_expression_id);

        self.expressions.insert(node_key, expression_id);

        match expr {
            ast::Expr::Name(ast::ExprName { id, ctx, .. }) => {
                let flags = match ctx {
                    ast::ExprContext::Load => SymbolFlags::IS_USED,
                    ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
                    ast::ExprContext::Del => SymbolFlags::IS_DEFINED,
                    ast::ExprContext::Invalid => SymbolFlags::empty(),
                };
                self.add_or_update_symbol(id, flags);
                if flags.contains(SymbolFlags::IS_DEFINED) {
                    if let Some(curdef) = self.current_definition.clone() {
                        self.add_or_update_symbol_with_def(id, curdef);
                    }
                }
                ast::visitor::source_order::walk_expr(self, expr);
            }
            ast::Expr::Named(node) => {
                debug_assert!(self.current_definition.is_none());
                self.current_definition =
                    Some(Definition::NamedExpr(TypedNodeKey::from_node(node)));
                // TODO walrus in comprehensions is implicitly nonlocal
                self.visit_expr(&node.target);
                self.current_definition = None;
                self.visit_expr(&node.value);
            }
            ast::Expr::If(ast::ExprIf {
                body, test, orelse, ..
            }) => {
                // TODO detect statically known truthy or falsy test (via type inference, not naive
                // AST inspection, so we can't simplify here, need to record test expression in CFG
                // for later checking)

                self.visit_expr(test);

                let if_branch = self.flow_graph_builder.add_branch(self.current_flow_node());

                self.set_current_flow_node(if_branch);
                self.insert_constraint(test);
                self.visit_expr(body);

                let post_body = self.current_flow_node();

                self.set_current_flow_node(if_branch);
                self.visit_expr(orelse);

                let post_else = self
                    .flow_graph_builder
                    .add_phi(self.current_flow_node(), post_body);

                self.set_current_flow_node(post_else);
            }
            _ => {
                ast::visitor::source_order::walk_expr(self, expr);
            }
        }
    }

    fn visit_stmt(&mut self, stmt: &ast::Stmt) {
        // TODO need to capture more definition statements here
        match stmt {
            ast::Stmt::ClassDef(node) => {
                let node_key = TypedNodeKey::from_node(node);
                let def = Definition::ClassDef(node_key.clone());
                let symbol_id = self.add_or_update_symbol_with_def(&node.name, def.clone());
                for decorator in &node.decorator_list {
                    self.visit_decorator(decorator);
                }
                let scope_id = self.with_type_params(
                    &node.name,
                    &node.type_params,
                    Some(def.clone()),
                    Some(symbol_id),
                    |indexer| {
                        if let Some(arguments) = &node.arguments {
                            indexer.visit_arguments(arguments);
                        }
                        let scope_id = indexer.push_scope(
                            &node.name,
                            ScopeKind::Class,
                            Some(def.clone()),
                            Some(symbol_id),
                        );
                        indexer.visit_body(&node.body);
                        indexer.pop_scope();
                        scope_id
                    },
                );
                self.record_scope_for_node(*node_key.erased(), scope_id);
            }
            ast::Stmt::FunctionDef(node) => {
                let node_key = TypedNodeKey::from_node(node);
                let def = Definition::FunctionDef(node_key.clone());
                let symbol_id = self.add_or_update_symbol_with_def(&node.name, def.clone());
                for decorator in &node.decorator_list {
                    self.visit_decorator(decorator);
                }
                let scope_id = self.with_type_params(
                    &node.name,
                    &node.type_params,
                    Some(def.clone()),
                    Some(symbol_id),
                    |indexer| {
                        indexer.visit_parameters(&node.parameters);
                        for expr in &node.returns {
                            indexer.visit_annotation(expr);
                        }
                        let scope_id = indexer.push_scope(
                            &node.name,
                            ScopeKind::Function,
                            Some(def.clone()),
                            Some(symbol_id),
                        );
                        indexer.visit_body(&node.body);
                        indexer.pop_scope();
                        scope_id
                    },
                );
                self.record_scope_for_node(*node_key.erased(), scope_id);
            }
            ast::Stmt::Import(ast::StmtImport { names, .. }) => {
                for alias in names {
                    let symbol_name = if let Some(asname) = &alias.asname {
                        asname.id.as_str()
                    } else {
                        alias.name.id.split('.').next().unwrap()
                    };

                    let module = ModuleName::new(&alias.name.id);

                    let def = Definition::Import(ImportDefinition {
                        module: module.clone(),
                    });
                    self.add_or_update_symbol_with_def(symbol_name, def);
                    self.symbol_table_builder
                        .add_dependency(Dependency::Module(module));
                }
            }
            ast::Stmt::ImportFrom(ast::StmtImportFrom {
                module,
                names,
                level,
                ..
            }) => {
                let module = module.as_ref().map(|m| ModuleName::new(&m.id));

                for alias in names {
                    let symbol_name = if let Some(asname) = &alias.asname {
                        asname.id.as_str()
                    } else {
                        alias.name.id.as_str()
                    };
                    let def = Definition::ImportFrom(ImportFromDefinition {
                        module: module.clone(),
                        name: Name::new(&alias.name.id),
                        level: *level,
                    });
                    self.add_or_update_symbol_with_def(symbol_name, def);
                }

                let dependency = if let Some(module) = module {
                    match NonZeroU32::new(*level) {
                        Some(level) => Dependency::Relative {
                            level,
                            module: Some(module),
                        },
                        None => Dependency::Module(module),
                    }
                } else {
                    Dependency::Relative {
                        level: NonZeroU32::new(*level)
                            .expect("relative import without a module must have a level > 0"),
                        module,
                    }
                };

                self.symbol_table_builder.add_dependency(dependency);
            }
            ast::Stmt::Assign(node) => {
                debug_assert!(self.current_definition.is_none());
                self.visit_expr(&node.value);
                self.current_definition =
                    Some(Definition::Assignment(TypedNodeKey::from_node(node)));
                for expr in &node.targets {
                    self.visit_expr(expr);
                }

                self.current_definition = None;
            }
            ast::Stmt::If(node) => {
                // TODO detect statically known truthy or falsy test (via type inference, not naive
                // AST inspection, so we can't simplify here, need to record test expression in CFG
                // for later checking)

                // we visit the if "test" condition first regardless
                self.visit_expr(&node.test);

                // create branch node: does the if test pass or not?
                let if_branch = self.flow_graph_builder.add_branch(self.current_flow_node());

                // visit the body of the `if` clause
                self.set_current_flow_node(if_branch);
                self.insert_constraint(&node.test);
                self.visit_body(&node.body);

                // Flow node for the last if/elif condition branch; represents the "no branch
                // taken yet" possibility (where "taking a branch" means that the condition in an
                // if or elif evaluated to true and control flow went into that clause).
                let mut prior_branch = if_branch;

                // Flow node for the state after the prior if/elif/else clause; represents "we have
                // taken one of the branches up to this point." Initially set to the post-if-clause
                // state, later will be set to the phi node joining that possible path with the
                // possibility that we took a later if/elif/else clause instead.
                let mut post_prior_clause = self.current_flow_node();

                // Flag to mark if the final clause is an "else" -- if so, that means the "match no
                // clauses" path is not possible, we have to go through one of the clauses.
                let mut last_branch_is_else = false;

                for clause in &node.elif_else_clauses {
                    if let Some(test) = &clause.test {
                        self.visit_expr(test);
                        // This is an elif clause. Create a new branch node. Its predecessor is the
                        // previous branch node, because we can only take one branch in an entire
                        // if/elif/else chain, so if we take this branch, it can only be because we
                        // didn't take the previous one.
                        prior_branch = self.flow_graph_builder.add_branch(prior_branch);
                        self.set_current_flow_node(prior_branch);
                        self.insert_constraint(test);
                    } else {
                        // This is an else clause. No need to create a branch node; there's no
                        // branch here, if we haven't taken any previous branch, we definitely go
                        // into the "else" clause.
                        self.set_current_flow_node(prior_branch);
                        last_branch_is_else = true;
                    }
                    self.visit_elif_else_clause(clause);
                    // Update `post_prior_clause` to a new phi node joining the possibility that we
                    // took any of the previous branches with the possibility that we took the one
                    // just visited.
                    post_prior_clause = self
                        .flow_graph_builder
                        .add_phi(self.current_flow_node(), post_prior_clause);
                }

                if !last_branch_is_else {
                    // Final branch was not an "else", which means it's possible we took zero
                    // branches in the entire if/elif chain, so we need one more phi node to join
                    // the "no branches taken" possibility.
                    post_prior_clause = self
                        .flow_graph_builder
                        .add_phi(post_prior_clause, prior_branch);
                }

                // Onward, with current flow node set to our final Phi node.
                self.set_current_flow_node(post_prior_clause);
            }
            _ => {
                ast::visitor::source_order::walk_stmt(self, stmt);
            }
        }
    }
}
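For concreteness, here is a hedged, hand-written replay of the builder calls the `ast::Stmt::If` arm above makes for `if a: x = 1` followed by `elif b: x = 2`, with no trailing `else`. The symbol, expression, and definition values are illustrative parameters rather than anything the real indexer exposes:

// Hand-written trace of the flow graph built by the visitor for an
// if/elif chain without an `else`; ids come in as parameters because
// they normally originate from the symbol table and expression map.
fn sketch_if_elif(
    builder: &mut FlowGraphBuilder,
    x_sym: SymbolId,
    (a_id, b_id): (ExpressionId, ExpressionId),
    (x1_def, x2_def): (Definition, Definition),
) -> FlowNodeId {
    let if_branch = builder.add_branch(FlowGraph::start());
    // `if a:` clause: constraint on `a`, then the definition `x = 1`.
    let c_a = builder.add_constraint(if_branch, a_id);
    let d1 = builder.add_definition(x_sym, x1_def, c_a);
    // `elif b:` chains off the prior *branch*, not off the then-clause.
    let elif_branch = builder.add_branch(if_branch);
    let c_b = builder.add_constraint(elif_branch, b_id);
    let d2 = builder.add_definition(x_sym, x2_def, c_b);
    // One phi per visited clause...
    let clauses = builder.add_phi(d2, d1);
    // ...plus a final phi for "no branch taken", since there is no `else`.
    builder.add_phi(clauses, elif_branch)
}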
#[derive(Debug, Default)]
pub struct SemanticIndexStorage(KeyValueCache<FileId, Arc<SemanticIndex>>);

impl Deref for SemanticIndexStorage {
    type Target = KeyValueCache<FileId, Arc<SemanticIndex>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for SemanticIndexStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

#[cfg(test)]
mod tests {
    use crate::semantic::symbol_table::{Symbol, SymbolIterator};
    use ruff_python_ast as ast;
    use ruff_python_ast::ModModule;
    use ruff_python_parser::{Mode, Parsed};

    use super::{Definition, ScopeKind, SemanticIndex, SymbolId};

    fn parse(code: &str) -> Parsed<ModModule> {
        ruff_python_parser::parse_unchecked(code, Mode::Module)
            .try_into_module()
            .unwrap()
    }

    fn names<I>(it: SymbolIterator<I>) -> Vec<&str>
    where
        I: Iterator<Item = SymbolId>,
    {
        let mut symbols: Vec<_> = it.map(Symbol::name).collect();
        symbols.sort_unstable();
        symbols
    }

    #[test]
    fn empty() {
        let parsed = parse("");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()).len(), 0);
    }

    #[test]
    fn simple() {
        let parsed = parse("x");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["x"]);
        assert_eq!(
            table
                .definitions(table.root_symbol_id_by_name("x").unwrap())
                .len(),
            0
        );
    }

    #[test]
    fn annotation_only() {
        let parsed = parse("x: int");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["int", "x"]);
        // TODO record definition
    }

    #[test]
    fn import() {
        let parsed = parse("import foo");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["foo"]);
        assert_eq!(
            table
                .definitions(table.root_symbol_id_by_name("foo").unwrap())
                .len(),
            1
        );
    }

    #[test]
    fn import_sub() {
        let parsed = parse("import foo.bar");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["foo"]);
    }

    #[test]
    fn import_as() {
        let parsed = parse("import foo.bar as baz");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["baz"]);
    }

    #[test]
    fn import_from() {
        let parsed = parse("from bar import foo");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["foo"]);
        assert_eq!(
            table
                .definitions(table.root_symbol_id_by_name("foo").unwrap())
                .len(),
            1
        );
        assert!(
            table.root_symbol_id_by_name("foo").is_some_and(|sid| {
                let s = sid.symbol(&table);
                s.is_defined() || !s.is_used()
            }),
            "symbols that are defined get the defined flag"
        );
    }

    #[test]
    fn assign() {
        let parsed = parse("x = foo");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["foo", "x"]);
        assert_eq!(
            table
                .definitions(table.root_symbol_id_by_name("x").unwrap())
                .len(),
            1
        );
        assert!(
            table.root_symbol_id_by_name("foo").is_some_and(|sid| {
                let s = sid.symbol(&table);
                !s.is_defined() && s.is_used()
            }),
            "a symbol used but not defined in a scope should have only the used flag"
        );
    }

    #[test]
    fn class_scope() {
        let parsed = parse(
            "
class C:
    x = 1
y = 2
",
        );
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["C", "y"]);
        let scopes = table.root_child_scope_ids();
        assert_eq!(scopes.len(), 1);
        let c_scope = scopes[0].scope(&table);
        assert_eq!(c_scope.kind(), ScopeKind::Class);
        assert_eq!(c_scope.name(), "C");
        assert_eq!(names(table.symbols_for_scope(scopes[0])), vec!["x"]);
        assert_eq!(
            table
                .definitions(table.root_symbol_id_by_name("C").unwrap())
                .len(),
            1
        );
    }

    #[test]
    fn func_scope() {
        let parsed = parse(
            "
def func():
    x = 1
y = 2
",
        );
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["func", "y"]);
        let scopes = table.root_child_scope_ids();
        assert_eq!(scopes.len(), 1);
        let func_scope = scopes[0].scope(&table);
        assert_eq!(func_scope.kind(), ScopeKind::Function);
        assert_eq!(func_scope.name(), "func");
        assert_eq!(names(table.symbols_for_scope(scopes[0])), vec!["x"]);
        assert_eq!(
            table
                .definitions(table.root_symbol_id_by_name("func").unwrap())
                .len(),
            1
        );
    }

    #[test]
    fn dupes() {
        let parsed = parse(
            "
def func():
    x = 1
def func():
    y = 2
",
        );
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["func"]);
        let scopes = table.root_child_scope_ids();
        assert_eq!(scopes.len(), 2);
        let func_scope_1 = scopes[0].scope(&table);
        let func_scope_2 = scopes[1].scope(&table);
        assert_eq!(func_scope_1.kind(), ScopeKind::Function);
        assert_eq!(func_scope_1.name(), "func");
        assert_eq!(func_scope_2.kind(), ScopeKind::Function);
        assert_eq!(func_scope_2.name(), "func");
        assert_eq!(names(table.symbols_for_scope(scopes[0])), vec!["x"]);
        assert_eq!(names(table.symbols_for_scope(scopes[1])), vec!["y"]);
        assert_eq!(
            table
                .definitions(table.root_symbol_id_by_name("func").unwrap())
                .len(),
            2
        );
    }

    #[test]
    fn generic_func() {
        let parsed = parse(
            "
def func[T]():
    x = 1
",
        );
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["func"]);
        let scopes = table.root_child_scope_ids();
        assert_eq!(scopes.len(), 1);
        let ann_scope_id = scopes[0];
        let ann_scope = ann_scope_id.scope(&table);
        assert_eq!(ann_scope.kind(), ScopeKind::Annotation);
        assert_eq!(ann_scope.name(), "func");
        assert_eq!(names(table.symbols_for_scope(ann_scope_id)), vec!["T"]);
        let scopes = table.child_scope_ids_of(ann_scope_id);
        assert_eq!(scopes.len(), 1);
        let func_scope_id = scopes[0];
        let func_scope = func_scope_id.scope(&table);
        assert_eq!(func_scope.kind(), ScopeKind::Function);
        assert_eq!(func_scope.name(), "func");
        assert_eq!(names(table.symbols_for_scope(func_scope_id)), vec!["x"]);
    }

    #[test]
    fn generic_class() {
        let parsed = parse(
            "
class C[T]:
    x = 1
",
        );
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["C"]);
        let scopes = table.root_child_scope_ids();
        assert_eq!(scopes.len(), 1);
        let ann_scope_id = scopes[0];
        let ann_scope = ann_scope_id.scope(&table);
        assert_eq!(ann_scope.kind(), ScopeKind::Annotation);
        assert_eq!(ann_scope.name(), "C");
        assert_eq!(names(table.symbols_for_scope(ann_scope_id)), vec!["T"]);
        assert!(
            table
                .symbol_by_name(ann_scope_id, "T")
                .is_some_and(|s| s.is_defined() && !s.is_used()),
            "type parameters are defined by the scope that introduces them"
        );
        let scopes = table.child_scope_ids_of(ann_scope_id);
        assert_eq!(scopes.len(), 1);
        let func_scope_id = scopes[0];
        let func_scope = func_scope_id.scope(&table);
        assert_eq!(func_scope.kind(), ScopeKind::Class);
        assert_eq!(func_scope.name(), "C");
        assert_eq!(names(table.symbols_for_scope(func_scope_id)), vec!["x"]);
    }

    #[test]
    fn reachability_trivial() {
        let parsed = parse("x = 1; x");
        let ast = parsed.syntax();
        let index = SemanticIndex::from_ast(ast);
        let table = &index.symbol_table;
        let x_sym = table
            .root_symbol_id_by_name("x")
            .expect("x symbol should exist");
        let ast::Stmt::Expr(ast::StmtExpr { value: x_use, .. }) = &ast.body[1] else {
            panic!("should be an expr")
        };
        let x_defs: Vec<_> = index
            .reachable_definitions(x_sym, x_use)
            .map(|constrained_definition| constrained_definition.definition)
            .collect();
        assert_eq!(x_defs.len(), 1);
        let Definition::Assignment(node_key) = &x_defs[0] else {
            panic!("def should be an assignment")
        };
        let Some(def_node) = node_key.resolve(ast.into()) else {
            panic!("node key should resolve")
        };
        let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
            value: ast::Number::Int(num),
            ..
        }) = &*def_node.value
        else {
            panic!("should be a number literal")
        };
        assert_eq!(*num, 1);
    }

    #[test]
    fn expression_scope() {
        let parsed = parse("x = 1;\ndef test():\n y = 4");
        let ast = parsed.syntax();
        let index = SemanticIndex::from_ast(ast);
        let table = &index.symbol_table;

        let x_sym = table
            .root_symbol_by_name("x")
            .expect("x symbol should exist");

        let x_stmt = ast.body[0].as_assign_stmt().unwrap();

        let x_id = index.expression_id(&x_stmt.targets[0]);

        assert_eq!(table.scope_of_expression(x_id).kind(), ScopeKind::Module);
        assert_eq!(table.scope_id_of_expression(x_id), x_sym.scope_id());

        let def = ast.body[1].as_function_def_stmt().unwrap();
        let y_stmt = def.body[0].as_assign_stmt().unwrap();
        let y_id = index.expression_id(&y_stmt.targets[0]);

        assert_eq!(table.scope_of_expression(y_id).kind(), ScopeKind::Function);
    }
}
@@ -1,52 +0,0 @@
use crate::ast_ids::TypedNodeKey;
use crate::semantic::ModuleName;
use crate::Name;
use ruff_python_ast as ast;

// TODO storing TypedNodeKey for definitions means we have to search to find them again in the AST;
// this is at best O(log n). If looking up definitions is a bottleneck we should look for
// alternatives here.
// TODO intern Definitions in SymbolTable and reference using IDs?
#[derive(Clone, Debug)]
pub enum Definition {
    // For the import cases, we don't need reference to any arbitrary AST subtrees (annotations,
    // RHS), and referencing just the import statement node is imprecise (a single import statement
    // can assign many symbols, we'd have to re-search for the one we care about), so we just copy
    // the small amount of information we need from the AST.
    Import(ImportDefinition),
    ImportFrom(ImportFromDefinition),
    ClassDef(TypedNodeKey<ast::StmtClassDef>),
    FunctionDef(TypedNodeKey<ast::StmtFunctionDef>),
    Assignment(TypedNodeKey<ast::StmtAssign>),
    AnnotatedAssignment(TypedNodeKey<ast::StmtAnnAssign>),
    NamedExpr(TypedNodeKey<ast::ExprNamed>),
    /// represents the implicit initial definition of every name as "unbound"
    Unbound,
    // TODO with statements, except handlers, function args...
}

#[derive(Clone, Debug)]
pub struct ImportDefinition {
    pub module: ModuleName,
}

#[derive(Clone, Debug)]
pub struct ImportFromDefinition {
    pub module: Option<ModuleName>,
    pub name: Name,
    pub level: u32,
}

impl ImportFromDefinition {
    pub fn module(&self) -> Option<&ModuleName> {
        self.module.as_ref()
    }

    pub fn name(&self) -> &Name {
        &self.name
    }

    pub fn level(&self) -> u32 {
        self.level
    }
}
@@ -1,270 +0,0 @@
use super::symbol_table::SymbolId;
use crate::semantic::{Definition, ExpressionId};
use ruff_index::{newtype_index, IndexVec};
use std::iter::FusedIterator;
use std::ops::Range;

#[newtype_index]
pub struct FlowNodeId;

#[derive(Debug)]
pub(crate) enum FlowNode {
    Start,
    Definition(DefinitionFlowNode),
    Branch(BranchFlowNode),
    Phi(PhiFlowNode),
    Constraint(ConstraintFlowNode),
}

/// A point in control flow where a symbol is defined
#[derive(Debug)]
pub(crate) struct DefinitionFlowNode {
    symbol_id: SymbolId,
    definition: Definition,
    predecessor: FlowNodeId,
}

/// A branch in control flow
#[derive(Debug)]
pub(crate) struct BranchFlowNode {
    predecessor: FlowNodeId,
}

/// A join point where control flow paths come together
#[derive(Debug)]
pub(crate) struct PhiFlowNode {
    first_predecessor: FlowNodeId,
    second_predecessor: FlowNodeId,
}

/// A branch test which may apply constraints to a symbol's type
#[derive(Debug)]
pub(crate) struct ConstraintFlowNode {
    predecessor: FlowNodeId,
    test_expression: ExpressionId,
}

#[derive(Debug)]
pub struct FlowGraph {
    flow_nodes_by_id: IndexVec<FlowNodeId, FlowNode>,
    expression_map: IndexVec<ExpressionId, FlowNodeId>,
}

impl FlowGraph {
    pub fn start() -> FlowNodeId {
        FlowNodeId::from_usize(0)
    }

    pub fn for_expr(&self, expr: ExpressionId) -> FlowNodeId {
        self.expression_map[expr]
    }
}

#[derive(Debug)]
pub(crate) struct FlowGraphBuilder {
    flow_graph: FlowGraph,
}

impl FlowGraphBuilder {
    pub(crate) fn new() -> Self {
        let mut graph = FlowGraph {
            flow_nodes_by_id: IndexVec::default(),
            expression_map: IndexVec::default(),
        };
        graph.flow_nodes_by_id.push(FlowNode::Start);
        Self { flow_graph: graph }
    }

    pub(crate) fn add(&mut self, node: FlowNode) -> FlowNodeId {
        self.flow_graph.flow_nodes_by_id.push(node)
    }

    pub(crate) fn add_definition(
        &mut self,
        symbol_id: SymbolId,
        definition: Definition,
        predecessor: FlowNodeId,
    ) -> FlowNodeId {
        self.add(FlowNode::Definition(DefinitionFlowNode {
            symbol_id,
            definition,
            predecessor,
        }))
    }

    pub(crate) fn add_branch(&mut self, predecessor: FlowNodeId) -> FlowNodeId {
        self.add(FlowNode::Branch(BranchFlowNode { predecessor }))
    }

    pub(crate) fn add_phi(
        &mut self,
        first_predecessor: FlowNodeId,
        second_predecessor: FlowNodeId,
    ) -> FlowNodeId {
        self.add(FlowNode::Phi(PhiFlowNode {
            first_predecessor,
            second_predecessor,
        }))
    }

    pub(crate) fn add_constraint(
        &mut self,
        predecessor: FlowNodeId,
        test_expression: ExpressionId,
    ) -> FlowNodeId {
        self.add(FlowNode::Constraint(ConstraintFlowNode {
            predecessor,
            test_expression,
        }))
    }

    pub(super) fn record_expr(&mut self, node_id: FlowNodeId) -> ExpressionId {
        self.flow_graph.expression_map.push(node_id)
    }

    pub(super) fn finish(mut self) -> FlowGraph {
        self.flow_graph.flow_nodes_by_id.shrink_to_fit();
        self.flow_graph.expression_map.shrink_to_fit();
        self.flow_graph
    }
}

/// A definition, and the set of constraints between a use and the definition
#[derive(Debug, Clone)]
pub struct ConstrainedDefinition {
    pub definition: Definition,
    pub constraints: Vec<ExpressionId>,
}

/// A flow node and the constraints we passed through to reach it
#[derive(Debug)]
struct FlowState {
    node_id: FlowNodeId,
    constraints_range: Range<usize>,
}

#[derive(Debug)]
pub struct ReachableDefinitionsIterator<'a> {
    flow_graph: &'a FlowGraph,
    symbol_id: SymbolId,
    pending: Vec<FlowState>,
    constraints: Vec<ExpressionId>,
}

impl<'a> ReachableDefinitionsIterator<'a> {
    pub fn new(flow_graph: &'a FlowGraph, symbol_id: SymbolId, start_node_id: FlowNodeId) -> Self {
        Self {
            flow_graph,
            symbol_id,
            pending: vec![FlowState {
                node_id: start_node_id,
                constraints_range: 0..0,
            }],
            constraints: vec![],
        }
    }
}

impl<'a> Iterator for ReachableDefinitionsIterator<'a> {
    type Item = ConstrainedDefinition;

    fn next(&mut self) -> Option<Self::Item> {
        let FlowState {
            mut node_id,
            mut constraints_range,
        } = self.pending.pop()?;
        self.constraints.truncate(constraints_range.end);
        loop {
            match &self.flow_graph.flow_nodes_by_id[node_id] {
                FlowNode::Start => {
                    // constraints on unbound are irrelevant
                    return Some(ConstrainedDefinition {
                        definition: Definition::Unbound,
                        constraints: vec![],
                    });
                }
                FlowNode::Definition(def_node) => {
                    if def_node.symbol_id == self.symbol_id {
                        return Some(ConstrainedDefinition {
                            definition: def_node.definition.clone(),
                            constraints: self.constraints[constraints_range].to_vec(),
                        });
                    }
                    node_id = def_node.predecessor;
                }
                FlowNode::Branch(branch_node) => {
                    node_id = branch_node.predecessor;
                }
                FlowNode::Phi(phi_node) => {
                    self.pending.push(FlowState {
                        node_id: phi_node.first_predecessor,
                        constraints_range: constraints_range.clone(),
                    });
                    node_id = phi_node.second_predecessor;
                }
                FlowNode::Constraint(constraint_node) => {
                    node_id = constraint_node.predecessor;
                    self.constraints.push(constraint_node.test_expression);
                    constraints_range.end += 1;
                }
            }
        }
    }
}
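A hedged sketch of the iterator in action on a hand-built graph for `x = 1` followed by a use of `x` inside `if t:`; the `SymbolId`, `ExpressionId`, and `Definition` values are taken as parameters because they normally come from the indexer:

// Hand-built graph: start -> def(x = 1) -> branch(if) -> constraint(t).
// Walking back from the use site inside the `if` body yields the
// assignment, constrained by the test expression `t`.
fn sketch_reachability(x: SymbolId, t: ExpressionId, x1: Definition) {
    let mut builder = FlowGraphBuilder::new();
    let def = builder.add_definition(x, x1, FlowGraph::start());
    let branch = builder.add_branch(def);
    let constraint = builder.add_constraint(branch, t);
    let graph = builder.finish();

    for constrained in ReachableDefinitionsIterator::new(&graph, x, constraint) {
        // Prints the assignment together with the constraint list `[t]`.
        println!("{:?} under {:?}", constrained.definition, constrained.constraints);
    }
}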
impl<'a> FusedIterator for ReachableDefinitionsIterator<'a> {}

impl std::fmt::Display for FlowGraph {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        writeln!(f, "flowchart TD")?;
        for (id, node) in self.flow_nodes_by_id.iter_enumerated() {
            write!(f, " id{}", id.as_u32())?;
            match node {
                FlowNode::Start => writeln!(f, r"[\Start/]")?,
                FlowNode::Definition(def_node) => {
                    writeln!(f, r"(Define symbol {})", def_node.symbol_id.as_u32())?;
                    writeln!(
                        f,
                        r" id{}-->id{}",
                        def_node.predecessor.as_u32(),
                        id.as_u32()
                    )?;
                }
                FlowNode::Branch(branch_node) => {
                    writeln!(f, r"{{Branch}}")?;
                    writeln!(
                        f,
                        r" id{}-->id{}",
                        branch_node.predecessor.as_u32(),
                        id.as_u32()
                    )?;
                }
                FlowNode::Phi(phi_node) => {
                    writeln!(f, r"((Phi))")?;
                    writeln!(
                        f,
                        r" id{}-->id{}",
                        phi_node.second_predecessor.as_u32(),
                        id.as_u32()
                    )?;
                    writeln!(
                        f,
                        r" id{}-->id{}",
                        phi_node.first_predecessor.as_u32(),
                        id.as_u32()
                    )?;
                }
                FlowNode::Constraint(constraint_node) => {
                    writeln!(f, r"((Constraint))")?;
                    writeln!(
                        f,
                        r" id{}-->id{}",
                        constraint_node.predecessor.as_u32(),
                        id.as_u32()
                    )?;
                }
            }
        }
        Ok(())
    }
}
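Hand-derived from the `fmt` body above (so treat the exact spacing as approximate), the graph from the previous reachability sketch would render as Mermaid text roughly like this:

flowchart TD
 id0[\Start/]
 id1(Define symbol 0)
 id0-->id1
 id2{Branch}
 id1-->id2
 id3((Constraint))
 id2-->id3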
@@ -1,560 +0,0 @@
#![allow(dead_code)]

use std::hash::{Hash, Hasher};
use std::iter::{Copied, DoubleEndedIterator, FusedIterator};
use std::num::NonZeroU32;

use bitflags::bitflags;
use hashbrown::hash_map::{Keys, RawEntryMut};
use rustc_hash::{FxHashMap, FxHasher};

use ruff_index::{newtype_index, IndexVec};

use crate::ast_ids::NodeKey;
use crate::module::ModuleName;
use crate::semantic::{Definition, ExpressionId};
use crate::Name;

type Map<K, V> = hashbrown::HashMap<K, V, ()>;

#[newtype_index]
pub struct ScopeId;

impl ScopeId {
    pub fn scope(self, table: &SymbolTable) -> &Scope {
        &table.scopes_by_id[self]
    }
}

#[newtype_index]
pub struct SymbolId;

impl SymbolId {
    pub fn symbol(self, table: &SymbolTable) -> &Symbol {
        &table.symbols_by_id[self]
    }
}

#[derive(Copy, Clone, Debug, PartialEq)]
pub enum ScopeKind {
    Module,
    Annotation,
    Class,
    Function,
}

#[derive(Debug)]
pub struct Scope {
    name: Name,
    kind: ScopeKind,
    parent: Option<ScopeId>,
    children: Vec<ScopeId>,
    /// the definition (e.g. class or function) that created this scope
    definition: Option<Definition>,
    /// the symbol (e.g. class or function) that owns this scope
    defining_symbol: Option<SymbolId>,
    /// symbol IDs, hashed by symbol name
    symbols_by_name: Map<SymbolId, ()>,
}

impl Scope {
    pub fn name(&self) -> &str {
        self.name.as_str()
    }

    pub fn kind(&self) -> ScopeKind {
        self.kind
    }

    pub fn definition(&self) -> Option<Definition> {
        self.definition.clone()
    }

    pub fn defining_symbol(&self) -> Option<SymbolId> {
        self.defining_symbol
    }
}

#[derive(Debug)]
pub(crate) enum Kind {
    FreeVar,
    CellVar,
    CellVarAssigned,
    ExplicitGlobal,
    ImplicitGlobal,
}

bitflags! {
    #[derive(Copy,Clone,Debug)]
    pub struct SymbolFlags: u8 {
        const IS_USED = 1 << 0;
        const IS_DEFINED = 1 << 1;
        /// TODO: This flag is not yet set by anything
        const MARKED_GLOBAL = 1 << 2;
        /// TODO: This flag is not yet set by anything
        const MARKED_NONLOCAL = 1 << 3;
    }
}

#[derive(Debug)]
pub struct Symbol {
    name: Name,
    flags: SymbolFlags,
    scope_id: ScopeId,
    // kind: Kind,
}

impl Symbol {
    pub fn name(&self) -> &str {
        self.name.as_str()
    }

    pub fn scope_id(&self) -> ScopeId {
        self.scope_id
    }

    /// Is the symbol used in its containing scope?
    pub fn is_used(&self) -> bool {
        self.flags.contains(SymbolFlags::IS_USED)
    }

    /// Is the symbol defined in its containing scope?
    pub fn is_defined(&self) -> bool {
        self.flags.contains(SymbolFlags::IS_DEFINED)
    }

    // TODO: implement Symbol.kind 2-pass analysis to categorize as: free-var, cell-var,
    // explicit-global, implicit-global and implement Symbol.kind by modifying the preorder
    // traversal code
}

#[derive(Debug, Clone)]
pub enum Dependency {
    Module(ModuleName),
    Relative {
        level: NonZeroU32,
        module: Option<ModuleName>,
    },
}

/// Table of all symbols in all scopes for a module.
#[derive(Debug)]
pub struct SymbolTable {
    scopes_by_id: IndexVec<ScopeId, Scope>,
    symbols_by_id: IndexVec<SymbolId, Symbol>,
    /// the definitions for each symbol
    defs: FxHashMap<SymbolId, Vec<Definition>>,
    /// map of AST node (e.g. class/function def) to sub-scope it creates
    scopes_by_node: FxHashMap<NodeKey, ScopeId>,
    /// Maps expressions to their enclosing scope.
    expression_scopes: IndexVec<ExpressionId, ScopeId>,
    /// dependencies of this module
    dependencies: Vec<Dependency>,
}

impl SymbolTable {
    pub fn dependencies(&self) -> &[Dependency] {
        &self.dependencies
    }

    pub const fn root_scope_id() -> ScopeId {
        ScopeId::from_usize(0)
    }

    pub fn root_scope(&self) -> &Scope {
        &self.scopes_by_id[SymbolTable::root_scope_id()]
    }

    pub fn symbol_ids_for_scope(&self, scope_id: ScopeId) -> Copied<Keys<SymbolId, ()>> {
        self.scopes_by_id[scope_id].symbols_by_name.keys().copied()
    }

    pub fn symbols_for_scope(
        &self,
        scope_id: ScopeId,
    ) -> SymbolIterator<Copied<Keys<SymbolId, ()>>> {
        SymbolIterator {
            table: self,
            ids: self.symbol_ids_for_scope(scope_id),
        }
    }

    pub fn root_symbol_ids(&self) -> Copied<Keys<SymbolId, ()>> {
        self.symbol_ids_for_scope(SymbolTable::root_scope_id())
    }

    pub fn root_symbols(&self) -> SymbolIterator<Copied<Keys<SymbolId, ()>>> {
        self.symbols_for_scope(SymbolTable::root_scope_id())
    }

    pub fn child_scope_ids_of(&self, scope_id: ScopeId) -> &[ScopeId] {
        &self.scopes_by_id[scope_id].children
    }

    pub fn child_scopes_of(&self, scope_id: ScopeId) -> ScopeIterator<&[ScopeId]> {
        ScopeIterator {
            table: self,
            ids: self.child_scope_ids_of(scope_id),
        }
    }

    pub fn root_child_scope_ids(&self) -> &[ScopeId] {
        self.child_scope_ids_of(SymbolTable::root_scope_id())
    }

    pub fn root_child_scopes(&self) -> ScopeIterator<&[ScopeId]> {
        self.child_scopes_of(SymbolTable::root_scope_id())
    }

    pub fn symbol_id_by_name(&self, scope_id: ScopeId, name: &str) -> Option<SymbolId> {
        let scope = &self.scopes_by_id[scope_id];
        let hash = SymbolTable::hash_name(name);
        let name = Name::new(name);
        Some(
            *scope
                .symbols_by_name
                .raw_entry()
                .from_hash(hash, |symid| self.symbols_by_id[*symid].name == name)?
                .0,
        )
    }

    pub fn symbol_by_name(&self, scope_id: ScopeId, name: &str) -> Option<&Symbol> {
        Some(&self.symbols_by_id[self.symbol_id_by_name(scope_id, name)?])
    }

    pub fn root_symbol_id_by_name(&self, name: &str) -> Option<SymbolId> {
        self.symbol_id_by_name(SymbolTable::root_scope_id(), name)
    }

    pub fn root_symbol_by_name(&self, name: &str) -> Option<&Symbol> {
        self.symbol_by_name(SymbolTable::root_scope_id(), name)
    }

    pub fn scope_id_of_symbol(&self, symbol_id: SymbolId) -> ScopeId {
        self.symbols_by_id[symbol_id].scope_id
    }

    pub fn scope_of_symbol(&self, symbol_id: SymbolId) -> &Scope {
        &self.scopes_by_id[self.scope_id_of_symbol(symbol_id)]
    }

    pub fn scope_id_of_expression(&self, expression: ExpressionId) -> ScopeId {
        self.expression_scopes[expression]
    }

    pub fn scope_of_expression(&self, expr_id: ExpressionId) -> &Scope {
        &self.scopes_by_id[self.scope_id_of_expression(expr_id)]
    }

    pub fn parent_scopes(
        &self,
        scope_id: ScopeId,
    ) -> ScopeIterator<impl Iterator<Item = ScopeId> + '_> {
        ScopeIterator {
            table: self,
            ids: std::iter::successors(Some(scope_id), |scope| self.scopes_by_id[*scope].parent),
        }
    }

    pub fn parent_scope(&self, scope_id: ScopeId) -> Option<ScopeId> {
        self.scopes_by_id[scope_id].parent
    }

    pub fn scope_id_for_node(&self, node_key: &NodeKey) -> ScopeId {
        self.scopes_by_node[node_key]
    }

    pub fn definitions(&self, symbol_id: SymbolId) -> &[Definition] {
        self.defs
            .get(&symbol_id)
            .map(std::vec::Vec::as_slice)
            .unwrap_or_default()
    }

    pub fn all_definitions(&self) -> impl Iterator<Item = (SymbolId, &Definition)> + '_ {
        self.defs
            .iter()
            .flat_map(|(sym_id, defs)| defs.iter().map(move |def| (*sym_id, def)))
    }

    fn hash_name(name: &str) -> u64 {
        let mut hasher = FxHasher::default();
        name.hash(&mut hasher);
        hasher.finish()
    }
}
|
||||
|
||||
pub struct SymbolIterator<'a, I> {
|
||||
table: &'a SymbolTable,
|
||||
ids: I,
|
||||
}
|
||||
|
||||
impl<'a, I> Iterator for SymbolIterator<'a, I>
|
||||
where
|
||||
I: Iterator<Item = SymbolId>,
|
||||
{
|
||||
type Item = &'a Symbol;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let id = self.ids.next()?;
|
||||
Some(&self.table.symbols_by_id[id])
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.ids.size_hint()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, I> FusedIterator for SymbolIterator<'a, I> where
|
||||
I: Iterator<Item = SymbolId> + FusedIterator
|
||||
{
|
||||
}
|
||||
|
||||
impl<'a, I> DoubleEndedIterator for SymbolIterator<'a, I>
|
||||
where
|
||||
I: Iterator<Item = SymbolId> + DoubleEndedIterator,
|
||||
{
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
let id = self.ids.next_back()?;
|
||||
Some(&self.table.symbols_by_id[id])
|
||||
}
|
||||
}
|
||||
|
||||
// TODO maybe get rid of this and just do all data access via methods on ScopeId?
|
||||
pub struct ScopeIterator<'a, I> {
|
||||
table: &'a SymbolTable,
|
||||
ids: I,
|
||||
}
|
||||
|
||||
/// Iterates (`ScopeId`, `Scope`) pairs for a given `ScopeId` iterator.
impl<'a, I> Iterator for ScopeIterator<'a, I>
where
    I: Iterator<Item = ScopeId>,
{
    type Item = (ScopeId, &'a Scope);

    fn next(&mut self) -> Option<Self::Item> {
        let id = self.ids.next()?;
        Some((id, &self.table.scopes_by_id[id]))
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.ids.size_hint()
    }
}

impl<'a, I> FusedIterator for ScopeIterator<'a, I> where I: Iterator<Item = ScopeId> + FusedIterator {}

impl<'a, I> DoubleEndedIterator for ScopeIterator<'a, I>
where
    I: Iterator<Item = ScopeId> + DoubleEndedIterator,
{
    fn next_back(&mut self) -> Option<Self::Item> {
        let id = self.ids.next_back()?;
        Some((id, &self.table.scopes_by_id[id]))
    }
}

#[derive(Debug)]
pub(super) struct SymbolTableBuilder {
    symbol_table: SymbolTable,
}

impl SymbolTableBuilder {
    pub(super) fn new() -> Self {
        let mut table = SymbolTable {
            scopes_by_id: IndexVec::new(),
            symbols_by_id: IndexVec::new(),
            defs: FxHashMap::default(),
            scopes_by_node: FxHashMap::default(),
            expression_scopes: IndexVec::new(),
            dependencies: Vec::new(),
        };
        table.scopes_by_id.push(Scope {
            name: Name::new("<module>"),
            kind: ScopeKind::Module,
            parent: None,
            children: Vec::new(),
            definition: None,
            defining_symbol: None,
            symbols_by_name: Map::default(),
        });
        Self {
            symbol_table: table,
        }
    }

    pub(super) fn finish(self) -> SymbolTable {
        let mut symbol_table = self.symbol_table;
        symbol_table.scopes_by_id.shrink_to_fit();
        symbol_table.symbols_by_id.shrink_to_fit();
        symbol_table.defs.shrink_to_fit();
        symbol_table.scopes_by_node.shrink_to_fit();
        symbol_table.expression_scopes.shrink_to_fit();
        symbol_table.dependencies.shrink_to_fit();
        symbol_table
    }

    pub(super) fn add_or_update_symbol(
        &mut self,
        scope_id: ScopeId,
        name: &str,
        flags: SymbolFlags,
    ) -> SymbolId {
        let hash = SymbolTable::hash_name(name);
        let scope = &mut self.symbol_table.scopes_by_id[scope_id];
        let name = Name::new(name);

        let entry = scope
            .symbols_by_name
            .raw_entry_mut()
            .from_hash(hash, |existing| {
                self.symbol_table.symbols_by_id[*existing].name == name
            });

        match entry {
            RawEntryMut::Occupied(entry) => {
                if let Some(symbol) = self.symbol_table.symbols_by_id.get_mut(*entry.key()) {
                    symbol.flags.insert(flags);
                };
                *entry.key()
            }
            RawEntryMut::Vacant(entry) => {
                let id = self.symbol_table.symbols_by_id.push(Symbol {
                    name,
                    flags,
                    scope_id,
                });
                entry.insert_with_hasher(hash, id, (), |symid| {
                    SymbolTable::hash_name(&self.symbol_table.symbols_by_id[*symid].name)
                });
                id
            }
        }
    }

    pub(super) fn add_definition(&mut self, symbol_id: SymbolId, definition: Definition) {
        self.symbol_table
            .defs
            .entry(symbol_id)
            .or_default()
            .push(definition);
    }

    pub(super) fn add_child_scope(
        &mut self,
        parent_scope_id: ScopeId,
        name: &str,
        kind: ScopeKind,
        definition: Option<Definition>,
        defining_symbol: Option<SymbolId>,
    ) -> ScopeId {
        let new_scope_id = self.symbol_table.scopes_by_id.push(Scope {
            name: Name::new(name),
            kind,
            parent: Some(parent_scope_id),
            children: Vec::new(),
            definition,
            defining_symbol,
            symbols_by_name: Map::default(),
        });
        let parent_scope = &mut self.symbol_table.scopes_by_id[parent_scope_id];
        parent_scope.children.push(new_scope_id);
        new_scope_id
    }

    pub(super) fn record_scope_for_node(&mut self, node_key: NodeKey, scope_id: ScopeId) {
        self.symbol_table.scopes_by_node.insert(node_key, scope_id);
    }

    pub(super) fn add_dependency(&mut self, dependency: Dependency) {
        self.symbol_table.dependencies.push(dependency);
    }

    /// Records the scope for the current expression
    pub(super) fn record_expression(&mut self, scope: ScopeId) -> ExpressionId {
        self.symbol_table.expression_scopes.push(scope)
    }
}

#[cfg(test)]
mod tests {
    use super::{ScopeKind, SymbolFlags, SymbolTable, SymbolTableBuilder};

    #[test]
    fn insert_same_name_symbol_twice() {
        let mut builder = SymbolTableBuilder::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let symbol_id_1 =
            builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::IS_DEFINED);
        let symbol_id_2 = builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::IS_USED);
        let table = builder.finish();

        assert_eq!(symbol_id_1, symbol_id_2);
        assert!(symbol_id_1.symbol(&table).is_used(), "flags must merge");
        assert!(symbol_id_1.symbol(&table).is_defined(), "flags must merge");
    }

    #[test]
    fn insert_different_named_symbols() {
        let mut builder = SymbolTableBuilder::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let symbol_id_1 = builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
        let symbol_id_2 = builder.add_or_update_symbol(root_scope_id, "bar", SymbolFlags::empty());

        assert_ne!(symbol_id_1, symbol_id_2);
    }

    #[test]
    fn add_child_scope_with_symbol() {
        let mut builder = SymbolTableBuilder::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let foo_symbol_top =
            builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
        let c_scope = builder.add_child_scope(root_scope_id, "C", ScopeKind::Class, None, None);
        let foo_symbol_inner = builder.add_or_update_symbol(c_scope, "foo", SymbolFlags::empty());

        assert_ne!(foo_symbol_top, foo_symbol_inner);
    }

    #[test]
    fn scope_from_id() {
        let table = SymbolTableBuilder::new().finish();
        let root_scope_id = SymbolTable::root_scope_id();
        let scope = root_scope_id.scope(&table);

        assert_eq!(scope.name.as_str(), "<module>");
        assert_eq!(scope.kind, ScopeKind::Module);
    }

    #[test]
    fn symbol_from_id() {
        let mut builder = SymbolTableBuilder::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let foo_symbol_id =
            builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
        let table = builder.finish();
        let symbol = foo_symbol_id.symbol(&table);

        assert_eq!(symbol.name(), "foo");
    }

    #[test]
    fn bigger_symbol_table() {
        let mut builder = SymbolTableBuilder::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let foo_symbol_id =
            builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
        builder.add_or_update_symbol(root_scope_id, "bar", SymbolFlags::empty());
        builder.add_or_update_symbol(root_scope_id, "baz", SymbolFlags::empty());
        builder.add_or_update_symbol(root_scope_id, "qux", SymbolFlags::empty());
        let table = builder.finish();

        let foo_symbol_id_2 = table
            .root_symbol_id_by_name("foo")
            .expect("foo symbol to be found");

        assert_eq!(foo_symbol_id_2, foo_symbol_id);
    }
}
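The `raw_entry` calls above are the interesting part of `add_or_update_symbol`: the per-scope map stores only `SymbolId` keys, and every hash is computed through the symbol side table, so each name string is stored exactly once. Below is a self-contained sketch of the same trick, assuming a `hashbrown` version that still exposes the raw-entry API; the `hash_name`/`intern`/`names`/`index` names are illustrative, not part of the crate.

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

use hashbrown::hash_map::RawEntryMut;
use hashbrown::HashMap;

// Mirrors `SymbolTable::hash_name` above (which uses `FxHasher`); any stable
// hash works as long as lookups and the rehash callback agree.
fn hash_name(name: &str) -> u64 {
    let mut hasher = DefaultHasher::new();
    name.hash(&mut hasher);
    hasher.finish()
}

// Interns `name`: the map stores only a numeric id, and hashes are always
// recomputed through the `names` side table, so each string exists once.
fn intern(names: &mut Vec<String>, index: &mut HashMap<usize, ()>, name: &str) -> usize {
    let hash = hash_name(name);
    match index
        .raw_entry_mut()
        .from_hash(hash, |&id| names[id] == name)
    {
        RawEntryMut::Occupied(entry) => *entry.key(),
        RawEntryMut::Vacant(entry) => {
            let id = names.len();
            names.push(name.to_owned());
            // The rehash callback recomputes through the side table, so the
            // map can grow without ever storing the string key itself.
            entry.insert_with_hasher(hash, id, (), |&id| hash_name(&names[id]));
            id
        }
    }
}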
File diff suppressed because it is too large
@@ -1,762 +0,0 @@
#![allow(dead_code)]

use ruff_python_ast as ast;
use ruff_python_ast::AstNode;
use std::fmt::Debug;

use crate::db::{QueryResult, SemanticDb, SemanticJar};

use crate::module::{resolve_module, ModuleName};
use crate::parse::parse;
use crate::semantic::types::{ModuleTypeId, Type};
use crate::semantic::{
    resolve_global_symbol, semantic_index, ConstrainedDefinition, Definition, GlobalSymbolId,
    ImportDefinition, ImportFromDefinition,
};
use crate::{FileId, Name};

// FIXME: Figure out proper dead-lock free synchronisation now that this takes `&db` instead of `&mut db`.
/// Resolve the public-facing type for a symbol (the type seen by other scopes: other modules, or
/// nested functions). Because calls to nested functions and imports can occur anywhere in control
/// flow, this type must be conservative and consider all definitions of the symbol that could
/// possibly be seen by another scope. Currently we take the most conservative approach, which is
/// the union of all definitions. We may be able to narrow this in future to eliminate definitions
/// which can't possibly (or at least likely) be seen by any other scope, so that e.g. we could
/// infer `Literal["1"]` instead of `Literal[1] | Literal["1"]` for `x` in `x = x; x = str(x);`.
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_symbol_public_type(db: &dyn SemanticDb, symbol: GlobalSymbolId) -> QueryResult<Type> {
    let index = semantic_index(db, symbol.file_id)?;
    let defs = index.symbol_table().definitions(symbol.symbol_id).to_vec();
    let jar: &SemanticJar = db.jar()?;

    if let Some(ty) = jar.type_store.get_cached_symbol_public_type(symbol) {
        return Ok(ty);
    }

    let ty = infer_type_from_definitions(db, symbol, defs.iter().cloned())?;

    jar.type_store.cache_symbol_public_type(symbol, ty);

    // TODO record dependencies
    Ok(ty)
}

/// Infer type of a symbol as union of the given `Definitions`.
fn infer_type_from_definitions<T>(
    db: &dyn SemanticDb,
    symbol: GlobalSymbolId,
    definitions: T,
) -> QueryResult<Type>
where
    T: Debug + IntoIterator<Item = Definition>,
{
    infer_type_from_constrained_definitions(
        db,
        symbol,
        definitions
            .into_iter()
            .map(|definition| ConstrainedDefinition {
                definition,
                constraints: vec![],
            }),
    )
}

/// Infer type of a symbol as union of the given `ConstrainedDefinitions`.
fn infer_type_from_constrained_definitions<T>(
    db: &dyn SemanticDb,
    symbol: GlobalSymbolId,
    constrained_definitions: T,
) -> QueryResult<Type>
where
    T: IntoIterator<Item = ConstrainedDefinition>,
{
    let jar: &SemanticJar = db.jar()?;
    let mut tys = constrained_definitions
        .into_iter()
        .map(|def| infer_constrained_definition_type(db, symbol, def.clone()))
        .peekable();
    if let Some(first) = tys.next() {
        if tys.peek().is_some() {
            Ok(jar.type_store.add_union(
                symbol.file_id,
                &Iterator::chain(std::iter::once(first), tys).collect::<QueryResult<Vec<_>>>()?,
            ))
        } else {
            first
        }
    } else {
        Ok(Type::Unknown)
    }
}

/// Infer type for a ConstrainedDefinition (intersection of the definition type and the
/// constraints)
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_constrained_definition_type(
    db: &dyn SemanticDb,
    symbol: GlobalSymbolId,
    constrained_definition: ConstrainedDefinition,
) -> QueryResult<Type> {
    let ConstrainedDefinition {
        definition,
        constraints,
    } = constrained_definition;
    let index = semantic_index(db, symbol.file_id)?;
    let parsed = parse(db.upcast(), symbol.file_id)?;
    let mut intersected_types = vec![infer_definition_type(db, symbol, definition)?];
    for constraint in constraints {
        if let Some(constraint_type) = infer_constraint_type(
            db,
            symbol,
            index.resolve_expression_id(parsed.syntax(), constraint),
        )? {
            intersected_types.push(constraint_type);
        }
    }
    let jar: &SemanticJar = db.jar()?;
    Ok(jar
        .type_store
        .add_intersection(symbol.file_id, &intersected_types, &[]))
}

/// Infer a type for a Definition
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_definition_type(
    db: &dyn SemanticDb,
    symbol: GlobalSymbolId,
    definition: Definition,
) -> QueryResult<Type> {
    let jar: &SemanticJar = db.jar()?;
    let type_store = &jar.type_store;
    let file_id = symbol.file_id;

    match definition {
        Definition::Unbound => Ok(Type::Unbound),
        Definition::Import(ImportDefinition {
            module: module_name,
        }) => {
            if let Some(module) = resolve_module(db, module_name.clone())? {
                Ok(Type::Module(ModuleTypeId { module, file_id }))
            } else {
                Ok(Type::Unknown)
            }
        }
        Definition::ImportFrom(ImportFromDefinition {
            module,
            name,
            level,
        }) => {
            // TODO relative imports
            assert!(matches!(level, 0));
            let module_name = ModuleName::new(module.as_ref().expect("TODO relative imports"));
            let Some(module) = resolve_module(db, module_name.clone())? else {
                return Ok(Type::Unknown);
            };

            if let Some(remote_symbol) = resolve_global_symbol(db, module, &name)? {
                infer_symbol_public_type(db, remote_symbol)
            } else {
                Ok(Type::Unknown)
            }
        }
        Definition::ClassDef(node_key) => {
            if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
                Ok(ty)
            } else {
                let parsed = parse(db.upcast(), file_id)?;
                let ast = parsed.syntax();
                let index = semantic_index(db, file_id)?;
                let node = node_key.resolve_unwrap(ast.as_any_node_ref());

                let mut bases = Vec::with_capacity(node.bases().len());

                for base in node.bases() {
                    bases.push(infer_expr_type(db, file_id, base)?);
                }
                let scope_id = index.symbol_table().scope_id_for_node(node_key.erased());
                let ty = type_store.add_class(file_id, &node.name.id, scope_id, bases);
                type_store.cache_node_type(file_id, *node_key.erased(), ty);
                Ok(ty)
            }
        }
        Definition::FunctionDef(node_key) => {
            if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
                Ok(ty)
            } else {
                let parsed = parse(db.upcast(), file_id)?;
                let ast = parsed.syntax();
                let index = semantic_index(db, file_id)?;
                let node = node_key
                    .resolve(ast.as_any_node_ref())
                    .expect("node key should resolve");

                let decorator_tys = node
                    .decorator_list
                    .iter()
                    .map(|decorator| infer_expr_type(db, file_id, &decorator.expression))
                    .collect::<QueryResult<_>>()?;
                let scope_id = index.symbol_table().scope_id_for_node(node_key.erased());
                let ty = type_store.add_function(
                    file_id,
                    &node.name.id,
                    symbol.symbol_id,
                    scope_id,
                    decorator_tys,
                );
                type_store.cache_node_type(file_id, *node_key.erased(), ty);
                Ok(ty)
            }
        }
        Definition::Assignment(node_key) => {
            let parsed = parse(db.upcast(), file_id)?;
            let ast = parsed.syntax();
            let node = node_key.resolve_unwrap(ast.as_any_node_ref());
            // TODO handle unpacking assignment
            infer_expr_type(db, file_id, &node.value)
        }
        Definition::AnnotatedAssignment(node_key) => {
            let parsed = parse(db.upcast(), file_id)?;
            let ast = parsed.syntax();
            let node = node_key.resolve_unwrap(ast.as_any_node_ref());
            // TODO actually look at the annotation
            let Some(value) = &node.value else {
                return Ok(Type::Unknown);
            };
            // TODO handle unpacking assignment
            infer_expr_type(db, file_id, value)
        }
        Definition::NamedExpr(node_key) => {
            let parsed = parse(db.upcast(), file_id)?;
            let ast = parsed.syntax();
            let node = node_key.resolve_unwrap(ast.as_any_node_ref());
            infer_expr_type(db, file_id, &node.value)
        }
    }
}

/// Return the type that the given constraint (an expression from a control-flow test) requires the
/// given symbol to have. For example, returns the Type "~None" as the constraint type if given the
/// symbol ID for x and the expression ID for `x is not None`. Returns (Rust) None if the given
/// expression applies no constraints on the given symbol.
#[tracing::instrument(level = "trace", skip(db))]
fn infer_constraint_type(
    db: &dyn SemanticDb,
    symbol_id: GlobalSymbolId,
    // TODO this should preferably take an &ast::Expr instead of AnyNodeRef
    expression: ast::AnyNodeRef,
) -> QueryResult<Option<Type>> {
    let file_id = symbol_id.file_id;
    let index = semantic_index(db, file_id)?;
    let jar: &SemanticJar = db.jar()?;
    let symbol_name = symbol_id.symbol_id.symbol(&index.symbol_table).name();
    // TODO narrowing attributes
    // TODO narrowing dict keys
    // TODO isinstance, ==/!=, type(...), literals, bools...
    match expression {
        ast::AnyNodeRef::ExprCompare(ast::ExprCompare {
            left,
            ops,
            comparators,
            ..
        }) => {
            // TODO chained comparisons
            match left.as_ref() {
                ast::Expr::Name(ast::ExprName { id, .. }) if id == symbol_name => match ops[0] {
                    ast::CmpOp::Is | ast::CmpOp::IsNot => {
                        Ok(match infer_expr_type(db, file_id, &comparators[0])? {
                            Type::None => Some(Type::None),
                            _ => None,
                        }
                        .map(|ty| {
                            if matches!(ops[0], ast::CmpOp::IsNot) {
                                jar.type_store.add_intersection(file_id, &[], &[ty])
                            } else {
                                ty
                            }
                        }))
                    }
                    _ => Ok(None),
                },
                _ => Ok(None),
            }
        }
        _ => Ok(None),
    }
}

/// Infer type of the given expression.
fn infer_expr_type(db: &dyn SemanticDb, file_id: FileId, expr: &ast::Expr) -> QueryResult<Type> {
    // TODO cache the resolution of the type on the node
    let index = semantic_index(db, file_id)?;
    match expr {
        ast::Expr::NoneLiteral(_) => Ok(Type::None),
        ast::Expr::NumberLiteral(ast::ExprNumberLiteral { value, .. }) => {
            match value {
                ast::Number::Int(n) => {
                    // TODO support big int literals
                    Ok(n.as_i64().map(Type::IntLiteral).unwrap_or(Type::Unknown))
                }
                // TODO builtins.float or builtins.complex
                _ => Ok(Type::Unknown),
            }
        }
        ast::Expr::Name(name) => {
            // TODO look up in the correct scope, don't assume global
            if let Some(symbol_id) = index.symbol_table().root_symbol_id_by_name(&name.id) {
                infer_type_from_constrained_definitions(
                    db,
                    GlobalSymbolId { file_id, symbol_id },
                    index.reachable_definitions(symbol_id, expr),
                )
            } else {
                Ok(Type::Unknown)
            }
        }
        ast::Expr::Attribute(ast::ExprAttribute { value, attr, .. }) => {
            let value_type = infer_expr_type(db, file_id, value)?;
            let attr_name = &Name::new(&attr.id);
            value_type
                .get_member(db, attr_name)
                .map(|ty| ty.unwrap_or(Type::Unknown))
        }
        ast::Expr::BinOp(ast::ExprBinOp {
            left, op, right, ..
        }) => {
            let left_ty = infer_expr_type(db, file_id, left)?;
            let right_ty = infer_expr_type(db, file_id, right)?;
            // TODO add reverse bin op support if right <: left
            left_ty.resolve_bin_op(db, *op, right_ty)
        }
        ast::Expr::Named(ast::ExprNamed { value, .. }) => infer_expr_type(db, file_id, value),
        ast::Expr::If(ast::ExprIf { body, orelse, .. }) => {
            // TODO detect statically known truthy or falsy test
            let body_ty = infer_expr_type(db, file_id, body)?;
            let else_ty = infer_expr_type(db, file_id, orelse)?;
            let jar: &SemanticJar = db.jar()?;
            Ok(jar.type_store.add_union(file_id, &[body_ty, else_ty]))
        }
        _ => todo!("expression type resolution for {:?}", expr),
    }
}

#[cfg(test)]
mod tests {

    use std::path::PathBuf;

    use crate::db::tests::TestDb;
    use crate::db::{HasJar, SemanticJar};
    use crate::module::{
        resolve_module, set_module_search_paths, ModuleName, ModuleResolutionInputs,
    };
    use crate::semantic::{infer_symbol_public_type, resolve_global_symbol, Type};
    use crate::Name;

    // TODO with virtual filesystem we shouldn't have to write files to disk for these
    // tests

    struct TestCase {
        temp_dir: tempfile::TempDir,
        db: TestDb,

        src: PathBuf,
    }

    fn create_test() -> std::io::Result<TestCase> {
        let temp_dir = tempfile::tempdir()?;

        let src = temp_dir.path().join("src");
        std::fs::create_dir(&src)?;
        let src = src.canonicalize()?;

        let search_paths = ModuleResolutionInputs {
            extra_paths: vec![],
            workspace_root: src.clone(),
            site_packages: None,
            custom_typeshed: None,
        };

        let mut db = TestDb::default();
        set_module_search_paths(&mut db, search_paths);

        Ok(TestCase { temp_dir, db, src })
    }

    fn write_to_path(case: &TestCase, relative_path: &str, contents: &str) -> anyhow::Result<()> {
        let path = case.src.join(relative_path);
        std::fs::write(path, contents)?;
        Ok(())
    }

    fn get_public_type(
        case: &TestCase,
        module_name: &str,
        variable_name: &str,
    ) -> anyhow::Result<Type> {
        let db = &case.db;
        let module = resolve_module(db, ModuleName::new(module_name))?.expect("Module to exist");
        let symbol = resolve_global_symbol(db, module, variable_name)?.expect("symbol to exist");

        Ok(infer_symbol_public_type(db, symbol)?)
    }

    fn assert_public_type(
        case: &TestCase,
        module_name: &str,
        variable_name: &str,
        type_name: &str,
    ) -> anyhow::Result<()> {
        let ty = get_public_type(case, module_name, variable_name)?;

        let jar = HasJar::<SemanticJar>::jar(&case.db)?;
        assert_eq!(format!("{}", ty.display(&jar.type_store)), type_name);
        Ok(())
    }

    #[test]
    fn follow_import_to_class() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(&case, "a.py", "from b import C as D; E = D")?;
        write_to_path(&case, "b.py", "class C: pass")?;

        assert_public_type(&case, "a", "E", "Literal[C]")
    }

    #[test]
    fn resolve_base_class_by_name() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "mod.py",
            "
class Base: pass
class Sub(Base): pass
",
        )?;

        let ty = get_public_type(&case, "mod", "Sub")?;

        let Type::Class(class_id) = ty else {
            panic!("Sub is not a Class")
        };
        let jar = HasJar::<SemanticJar>::jar(&case.db)?;
        let base_names: Vec<_> = jar
            .type_store
            .get_class(class_id)
            .bases()
            .iter()
            .map(|base_ty| format!("{}", base_ty.display(&jar.type_store)))
            .collect();

        assert_eq!(base_names, vec!["Literal[Base]"]);

        Ok(())
    }

    #[test]
    fn resolve_method() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "mod.py",
            "
class C:
    def f(self): pass
",
        )?;

        let ty = get_public_type(&case, "mod", "C")?;

        let Type::Class(class_id) = ty else {
            panic!("C is not a Class");
        };

        let member_ty = class_id
            .get_own_class_member(&case.db, &Name::new("f"))
            .expect("C.f to resolve");

        let Some(Type::Function(func_id)) = member_ty else {
            panic!("C.f is not a Function");
        };

        let jar = HasJar::<SemanticJar>::jar(&case.db)?;
        let function = jar.type_store.get_function(func_id);
        assert_eq!(function.name(), "f");

        Ok(())
    }

    #[test]
    fn resolve_module_member() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(&case, "a.py", "import b; D = b.C")?;
        write_to_path(&case, "b.py", "class C: pass")?;

        assert_public_type(&case, "a", "D", "Literal[C]")
    }

    #[test]
    fn resolve_literal() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(&case, "a.py", "x = 1")?;

        assert_public_type(&case, "a", "x", "Literal[1]")
    }

    #[test]
    fn resolve_union() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
if flag:
    x = 1
else:
    x = 2
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[1, 2]")
    }

    #[test]
    fn resolve_visible_def() -> anyhow::Result<()> {
        let case = create_test()?;
        write_to_path(&case, "a.py", "y = 1; y = 2; x = y")?;
        assert_public_type(&case, "a", "x", "Literal[2]")
    }

    #[test]
    fn join_paths() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
y = 1
y = 2
if flag:
    y = 3
x = y
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[2, 3]")
    }

    #[test]
    fn maybe_unbound() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
if flag:
    y = 1
x = y
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[1] | Unbound")
    }

    #[test]
    fn if_elif_else() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
y = 1
y = 2
if flag:
    y = 3
elif flag2:
    y = 4
else:
    r = y
    y = 5
    s = y
x = y
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[3, 4, 5]")?;
        assert_public_type(&case, "a", "r", "Literal[2]")?;
        assert_public_type(&case, "a", "s", "Literal[5]")
    }

    #[test]
    fn if_elif() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
y = 1
y = 2
if flag:
    y = 3
elif flag2:
    y = 4
x = y
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[2, 3, 4]")
    }

    #[test]
    fn literal_int_arithmetic() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
a = 2 + 1
b = a - 4
c = a * b
d = c / 3
e = 5 % 3
",
        )?;

        assert_public_type(&case, "a", "a", "Literal[3]")?;
        assert_public_type(&case, "a", "b", "Literal[-1]")?;
        assert_public_type(&case, "a", "c", "Literal[-3]")?;
        assert_public_type(&case, "a", "d", "Literal[-1]")?;
        assert_public_type(&case, "a", "e", "Literal[2]")
    }

    #[test]
    fn walrus() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
x = (y := 1) + 1
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[2]")?;
        assert_public_type(&case, "a", "y", "Literal[1]")
    }

    #[test]
    fn ifexpr() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
x = 1 if flag else 2
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[1, 2]")
    }

    #[test]
    fn ifexpr_walrus() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
y = z = 0
x = (y := 1) if flag else (z := 2)
a = y
b = z
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[1, 2]")?;
        assert_public_type(&case, "a", "a", "Literal[0, 1]")?;
        assert_public_type(&case, "a", "b", "Literal[0, 2]")
    }

    #[test]
    fn ifexpr_walrus_2() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
y = 0
(y := 1) if flag else (y := 2)
a = y
",
        )?;

        assert_public_type(&case, "a", "a", "Literal[1, 2]")
    }

    #[test]
    fn ifexpr_nested() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
x = 1 if flag else 2 if flag2 else 3
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[1, 2, 3]")
    }

    #[test]
    fn none() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
x = 1 if flag else None
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[1] | None")
    }

    #[test]
    fn narrow_none() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
x = 1 if flag else None
y = 0
if x is not None:
    y = x
z = y
",
        )?;

        // TODO normalization of unions and intersections: this type is technically correct but
        // begging for normalization
        assert_public_type(&case, "a", "z", "Literal[0] | Literal[1] | None & ~None")
    }
}
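For orientation, the expected type in the `narrow_none` test can be reproduced with a toy model. `Ty` below is a hypothetical stand-in for the interned `Type` above (not the real representation), and `narrow_not_none` mirrors what `infer_constraint_type` contributes for `x is not None`: an intersection with `~None` on the negative side.

// Toy model only; real types are interned ids managed by a TypeStore.
#[derive(Clone, Debug, PartialEq)]
enum Ty {
    None,
    IntLiteral(i64),
    Union(Vec<Ty>),
    // `positive` types must all hold, `negative` types must all be excluded.
    Intersection { positive: Vec<Ty>, negative: Vec<Ty> },
}

// `x is not None` narrows by intersecting with `~None`.
fn narrow_not_none(ty: Ty) -> Ty {
    Ty::Intersection {
        positive: vec![ty],
        negative: vec![Ty::None],
    }
}

fn main() {
    // x = 1 if flag else None  =>  Literal[1] | None
    let x = Ty::Union(vec![Ty::IntLiteral(1), Ty::None]);
    // Inside `if x is not None:` this becomes `(Literal[1] | None) & ~None`,
    // which a normalizer would shrink to `Literal[1]`.
    println!("{:?}", narrow_not_none(x));
}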
@@ -1,105 +0,0 @@
use std::ops::{Deref, DerefMut};
use std::sync::Arc;

use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;

use crate::cache::KeyValueCache;
use crate::db::{QueryResult, SourceDb};
use crate::files::FileId;

#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn source_text(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Source> {
    let jar = db.jar()?;
    let sources = &jar.sources;

    sources.get(&file_id, |file_id| {
        let path = db.file_path(*file_id);

        let source_text = std::fs::read_to_string(&path).unwrap_or_else(|err| {
            tracing::error!("Failed to read file '{path:?}': {err}. Falling back to empty text");
            String::new()
        });

        let python_ty = PySourceType::from(&path);

        let kind = match python_ty {
            PySourceType::Python => SourceKind::Python(Arc::from(source_text)),
            PySourceType::Stub => SourceKind::Stub(Arc::from(source_text)),
            PySourceType::Ipynb => {
                let notebook = Notebook::from_source_code(&source_text).unwrap_or_else(|err| {
                    // TODO should this be changed to never fail?
                    // or should we instead add a diagnostic somewhere? But what would we return in this case?
                    tracing::error!(
                        "Failed to parse notebook '{path:?}': {err}. Falling back to an empty notebook"
                    );
                    Notebook::from_source_code("").unwrap()
                });

                SourceKind::IpyNotebook(Arc::new(notebook))
            }
        };

        Ok(Source { kind })
    })
}

#[derive(Debug, Clone, PartialEq)]
pub enum SourceKind {
    Python(Arc<str>),
    Stub(Arc<str>),
    IpyNotebook(Arc<Notebook>),
}

impl<'a> From<&'a SourceKind> for PySourceType {
    fn from(value: &'a SourceKind) -> Self {
        match value {
            SourceKind::Python(_) => PySourceType::Python,
            SourceKind::Stub(_) => PySourceType::Stub,
            SourceKind::IpyNotebook(_) => PySourceType::Ipynb,
        }
    }
}

#[derive(Debug, Clone, PartialEq)]
pub struct Source {
    kind: SourceKind,
}

impl Source {
    pub fn python<T: Into<Arc<str>>>(source: T) -> Self {
        Self {
            kind: SourceKind::Python(source.into()),
        }
    }

    pub fn kind(&self) -> &SourceKind {
        &self.kind
    }

    pub fn text(&self) -> &str {
        match &self.kind {
            SourceKind::Python(text) => text,
            SourceKind::Stub(text) => text,
            SourceKind::IpyNotebook(notebook) => notebook.source_code(),
        }
    }
}

#[derive(Debug, Default)]
pub struct SourceStorage(pub(crate) KeyValueCache<FileId, Source>);

impl Deref for SourceStorage {
    type Target = KeyValueCache<FileId, Source>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for SourceStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
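A small usage sketch of the `Source` API above; the assertions only restate what the shown code guarantees:

fn demo() {
    // `python` accepts anything convertible into `Arc<str>`, including `&str`.
    let source = Source::python("x = 1");
    assert_eq!(source.text(), "x = 1");
    assert!(matches!(source.kind(), SourceKind::Python(_)));
    // The `PySourceType` can be recovered from the kind via the `From` impl.
    assert!(matches!(
        PySourceType::from(source.kind()),
        PySourceType::Python
    ));
}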
48
crates/red_knot/src/target_version.rs
Normal file
@@ -0,0 +1,48 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl TargetVersion {
    const fn as_str(self) -> &'static str {
        match self {
            Self::Py37 => "py37",
            Self::Py38 => "py38",
            Self::Py39 => "py39",
            Self::Py310 => "py310",
            Self::Py311 => "py311",
            Self::Py312 => "py312",
            Self::Py313 => "py313",
        }
    }
}

impl std::fmt::Display for TargetVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}

impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => Self::PY37,
            TargetVersion::Py38 => Self::PY38,
            TargetVersion::Py39 => Self::PY39,
            TargetVersion::Py310 => Self::PY310,
            TargetVersion::Py311 => Self::PY311,
            TargetVersion::Py312 => Self::PY312,
            TargetVersion::Py313 => Self::PY313,
        }
    }
}
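A quick usage sketch of `TargetVersion` as defined above:

fn demo() {
    // `Display` goes through `as_str`, so output matches the ValueEnum names.
    assert_eq!(TargetVersion::Py38.to_string(), "py38");
    // `#[default]` on `Py38` makes it the `Default::default()` value.
    assert_eq!(TargetVersion::default().to_string(), "py38");
    // Conversion into the semantic crate's version type via the `From` impl.
    let _version: red_knot_python_semantic::PythonVersion = TargetVersion::Py310.into();
}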
1
crates/red_knot/src/verbosity.rs
Normal file
@@ -0,0 +1 @@

@@ -1,77 +0,0 @@
use std::path::Path;

use anyhow::Context;
use notify::event::{CreateKind, RemoveKind};
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};

use crate::program::{FileChangeKind, FileWatcherChange};

pub struct FileWatcher {
    watcher: RecommendedWatcher,
}

pub trait EventHandler: Send + 'static {
    fn handle(&self, changes: Vec<FileWatcherChange>);
}

impl<F> EventHandler for F
where
    F: Fn(Vec<FileWatcherChange>) + Send + 'static,
{
    fn handle(&self, changes: Vec<FileWatcherChange>) {
        let f = self;
        f(changes);
    }
}

impl FileWatcher {
    pub fn new<E>(handler: E) -> anyhow::Result<Self>
    where
        E: EventHandler,
    {
        Self::from_handler(Box::new(handler))
    }

    fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
        let watcher = recommended_watcher(move |changes: notify::Result<Event>| {
            match changes {
                Ok(event) => {
                    // TODO verify that this handles all events correctly
                    let change_kind = match event.kind {
                        EventKind::Create(CreateKind::File) => FileChangeKind::Created,
                        EventKind::Modify(_) => FileChangeKind::Modified,
                        EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
                        _ => {
                            return;
                        }
                    };

                    let mut changes = Vec::new();

                    for path in event.paths {
                        if path.is_file() {
                            changes.push(FileWatcherChange::new(path, change_kind));
                        }
                    }

                    if !changes.is_empty() {
                        handler.handle(changes);
                    }
                }
                // TODO proper error handling
                Err(err) => {
                    panic!("Error: {err}");
                }
            }
        })
        .context("Failed to create file watcher.")?;

        Ok(Self { watcher })
    }

    pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
        self.watcher.watch(path, RecursiveMode::Recursive)?;

        Ok(())
    }
}
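A hedged usage sketch of the watcher API above: any `Fn(Vec<FileWatcherChange>) + Send + 'static` closure satisfies the blanket `EventHandler` impl, and the watched path here is purely illustrative.

use std::path::Path;

fn demo() -> anyhow::Result<()> {
    // The closure is invoked from the notify background thread with batches
    // of filtered file changes.
    let mut watcher = FileWatcher::new(|changes: Vec<FileWatcherChange>| {
        eprintln!("{} file change(s) observed", changes.len());
    })?;
    // Recursively watch a directory (illustrative path).
    watcher.watch_folder(Path::new("./src"))?;
    Ok(())
}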
1282
crates/red_knot/tests/file_watching.rs
Normal file
File diff suppressed because it is too large
@@ -1,35 +0,0 @@
[package]
name = "red_knot_module_resolver"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff_db = { workspace = true }
ruff_python_stdlib = { workspace = true }

rustc-hash = { workspace = true }
salsa = { workspace = true }
smol_str = { workspace = true }
tracing = { workspace = true }
zip = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }

[dev-dependencies]
anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }

[lints]
workspace = true
@@ -1,156 +0,0 @@
use ruff_db::Upcast;

use crate::resolver::{
    file_to_module,
    internal::{ModuleNameIngredient, ModuleResolverSearchPaths},
    resolve_module_query,
};

#[salsa::jar(db=Db)]
pub struct Jar(
    ModuleNameIngredient<'_>,
    ModuleResolverSearchPaths,
    resolve_module_query,
    file_to_module,
);

pub trait Db: salsa::DbWithJar<Jar> + ruff_db::Db + Upcast<dyn ruff_db::Db> {}

pub(crate) mod tests {
    use std::sync;

    use salsa::DebugWithDb;

    use ruff_db::file_system::{FileSystem, MemoryFileSystem, OsFileSystem};
    use ruff_db::vfs::Vfs;

    use super::*;

    #[salsa::db(Jar, ruff_db::Jar)]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        file_system: TestFileSystem,
        events: sync::Arc<sync::Mutex<Vec<salsa::Event>>>,
        vfs: Vfs,
    }

    impl TestDb {
        #[allow(unused)]
        pub(crate) fn new() -> Self {
            Self {
                storage: salsa::Storage::default(),
                file_system: TestFileSystem::Memory(MemoryFileSystem::default()),
                events: sync::Arc::default(),
                vfs: Vfs::with_stubbed_vendored(),
            }
        }

        /// Returns the memory file system.
        ///
        /// ## Panics
        /// If this test db isn't using a memory file system.
        #[allow(unused)]
        pub(crate) fn memory_file_system(&self) -> &MemoryFileSystem {
            if let TestFileSystem::Memory(fs) = &self.file_system {
                fs
            } else {
                panic!("The test db is not using a memory file system");
            }
        }

        /// Uses the real file system instead of the memory file system.
        ///
        /// This is useful for testing advanced file system features like permissions, symlinks, etc.
        ///
        /// Note that any files written to the memory file system won't be copied over.
        #[allow(unused)]
        pub(crate) fn with_os_file_system(&mut self) {
            self.file_system = TestFileSystem::Os(OsFileSystem);
        }

        #[allow(unused)]
        pub(crate) fn vfs_mut(&mut self) -> &mut Vfs {
            &mut self.vfs
        }

        /// Takes the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        #[allow(unused)]
        pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
            let inner = sync::Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");

            let events = inner.get_mut().unwrap();
            std::mem::take(&mut *events)
        }

        /// Clears the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        #[allow(unused)]
        pub(crate) fn clear_salsa_events(&mut self) {
            self.take_salsa_events();
        }
    }

    impl Upcast<dyn ruff_db::Db> for TestDb {
        fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
            self
        }
    }

    impl ruff_db::Db for TestDb {
        fn file_system(&self) -> &dyn ruff_db::file_system::FileSystem {
            self.file_system.inner()
        }

        fn vfs(&self) -> &ruff_db::vfs::Vfs {
            &self.vfs
        }
    }

    impl Db for TestDb {}

    impl salsa::Database for TestDb {
        fn salsa_event(&self, event: salsa::Event) {
            tracing::trace!("event: {:?}", event.debug(self));
            let mut events = self.events.lock().unwrap();
            events.push(event);
        }
    }

    impl salsa::ParallelDatabase for TestDb {
        fn snapshot(&self) -> salsa::Snapshot<Self> {
            salsa::Snapshot::new(Self {
                storage: self.storage.snapshot(),
                file_system: self.file_system.snapshot(),
                events: self.events.clone(),
                vfs: self.vfs.snapshot(),
            })
        }
    }

    enum TestFileSystem {
        Memory(MemoryFileSystem),
        #[allow(unused)]
        Os(OsFileSystem),
    }

    impl TestFileSystem {
        fn inner(&self) -> &dyn FileSystem {
            match self {
                Self::Memory(inner) => inner,
                Self::Os(inner) => inner,
            }
        }

        fn snapshot(&self) -> Self {
            match self {
                Self::Memory(inner) => Self::Memory(inner.snapshot()),
                Self::Os(inner) => Self::Os(inner.snapshot()),
            }
        }
    }
}
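The `Upcast<dyn ruff_db::Db>` bound on `Db` works around the lack of stable trait-object upcasting: a database that implements both traits hands out a `&dyn ruff_db::Db` view of itself explicitly, as `TestDb` does above. A self-contained sketch of the pattern, with illustrative trait names rather than the real ones:

// Stand-in for the base database trait (`ruff_db::Db`).
trait BaseDb {
    fn file_count(&self) -> usize;
}

// Stand-in for `ruff_db::Upcast`.
trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

// Mirrors `pub trait Db: ... + ruff_db::Db + Upcast<dyn ruff_db::Db>`.
trait ResolverDb: BaseDb + Upcast<dyn BaseDb> {}

struct Database {
    files: usize,
}

impl BaseDb for Database {
    fn file_count(&self) -> usize {
        self.files
    }
}

impl Upcast<dyn BaseDb> for Database {
    fn upcast(&self) -> &(dyn BaseDb + 'static) {
        self
    }
}

impl ResolverDb for Database {}

fn takes_base(db: &dyn BaseDb) -> usize {
    db.file_count()
}

fn demo(db: &dyn ResolverDb) -> usize {
    // `&dyn ResolverDb` can't coerce to `&dyn BaseDb` directly without
    // trait-object upcasting, so the explicit `upcast` does the conversion.
    takes_base(db.upcast())
}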
@@ -1,9 +0,0 @@
mod db;
mod module;
mod resolver;
mod typeshed;

pub use db::{Db, Jar};
pub use module::{ModuleKind, ModuleName};
pub use resolver::{resolve_module, set_module_resolution_settings, ModuleResolutionSettings};
pub use typeshed::versions::TypeshedVersions;
@@ -1,346 +0,0 @@
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;

use ruff_db::file_system::FileSystemPath;
use ruff_db::vfs::{VfsFile, VfsPath};
use ruff_python_stdlib::identifiers::is_identifier;

use crate::Db;

/// A module name, e.g. `foo.bar`.
///
/// Always normalized to the absolute form (never a relative module name, i.e., never `.foo`).
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ModuleName(smol_str::SmolStr);

impl ModuleName {
    /// Creates a new module name for `name`. Returns `Some` if `name` is a valid, absolute
    /// module name and `None` otherwise.
    ///
    /// The module name is invalid if:
    ///
    /// * The name is empty
    /// * The name is relative
    /// * The name ends with a `.`
    /// * The name contains a sequence of multiple dots
    /// * A component of a name (the part between two dots) isn't a valid python identifier.
    #[inline]
    pub fn new(name: &str) -> Option<Self> {
        Self::new_from_smol(smol_str::SmolStr::new(name))
    }

    /// Creates a new module name for `name` where `name` is a static string.
    /// Returns `Some` if `name` is a valid, absolute module name and `None` otherwise.
    ///
    /// The module name is invalid if:
    ///
    /// * The name is empty
    /// * The name is relative
    /// * The name ends with a `.`
    /// * The name contains a sequence of multiple dots
    /// * A component of a name (the part between two dots) isn't a valid python identifier.
    ///
    /// ## Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
    /// assert_eq!(ModuleName::new_static(""), None);
    /// assert_eq!(ModuleName::new_static("..foo"), None);
    /// assert_eq!(ModuleName::new_static(".foo"), None);
    /// assert_eq!(ModuleName::new_static("foo."), None);
    /// assert_eq!(ModuleName::new_static("foo..bar"), None);
    /// assert_eq!(ModuleName::new_static("2000"), None);
    /// ```
    #[inline]
    pub fn new_static(name: &'static str) -> Option<Self> {
        Self::new_from_smol(smol_str::SmolStr::new_static(name))
    }

    fn new_from_smol(name: smol_str::SmolStr) -> Option<Self> {
        if name.is_empty() {
            return None;
        }

        if name.split('.').all(is_identifier) {
            Some(Self(name))
        } else {
            None
        }
    }

    /// An iterator over the components of the module name:
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
    /// ```
    pub fn components(&self) -> impl DoubleEndedIterator<Item = &str> {
        self.0.split('.')
    }

    /// The name of this module's immediate parent, if it has a parent.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
    /// assert_eq!(ModuleName::new_static("root").unwrap().parent(), None);
    /// ```
    pub fn parent(&self) -> Option<ModuleName> {
        let (parent, _) = self.0.rsplit_once('.')?;

        Some(Self(smol_str::SmolStr::new(parent)))
    }

    /// Returns `true` if the name starts with `other`.
    ///
    /// This is equivalent to checking if `self` is a sub-module of `other`.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    ///
    /// assert!(!ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("bar").unwrap()));
    /// assert!(!ModuleName::new_static("foo_bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    /// ```
    pub fn starts_with(&self, other: &ModuleName) -> bool {
        let mut self_components = self.components();
        let other_components = other.components();

        for other_component in other_components {
            if self_components.next() != Some(other_component) {
                return false;
            }
        }

        true
    }

    #[inline]
    pub fn as_str(&self) -> &str {
        &self.0
    }

    pub(crate) fn from_relative_path(path: &FileSystemPath) -> Option<Self> {
        let path = if path.ends_with("__init__.py") || path.ends_with("__init__.pyi") {
            path.parent()?
        } else {
            path
        };

        let name = if let Some(parent) = path.parent() {
            let mut name = String::with_capacity(path.as_str().len());

            for component in parent.components() {
                name.push_str(component.as_os_str().to_str()?);
                name.push('.');
            }

            // SAFETY: Unwrap is safe here or `parent` would have returned `None`.
            name.push_str(path.file_stem().unwrap());

            smol_str::SmolStr::from(name)
        } else {
            smol_str::SmolStr::new(path.file_stem()?)
        };

        Some(Self(name))
    }
}

impl Deref for ModuleName {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

impl PartialEq<str> for ModuleName {
    fn eq(&self, other: &str) -> bool {
        self.as_str() == other
    }
}

impl PartialEq<ModuleName> for str {
    fn eq(&self, other: &ModuleName) -> bool {
        self == other.as_str()
    }
}

impl std::fmt::Display for ModuleName {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.0)
    }
}

/// Representation of a Python module.
#[derive(Clone, PartialEq, Eq)]
pub struct Module {
    inner: Arc<ModuleInner>,
}

impl Module {
    pub(crate) fn new(
        name: ModuleName,
        kind: ModuleKind,
        search_path: ModuleSearchPath,
        file: VfsFile,
    ) -> Self {
        Self {
            inner: Arc::new(ModuleInner {
                name,
                kind,
                search_path,
                file,
            }),
        }
    }

    /// The absolute name of the module (e.g. `foo.bar`)
    pub fn name(&self) -> &ModuleName {
        &self.inner.name
    }

    /// The file containing the source code that defines this module
    pub fn file(&self) -> VfsFile {
        self.inner.file
    }

    /// The search path from which the module was resolved.
    pub fn search_path(&self) -> &ModuleSearchPath {
        &self.inner.search_path
    }

    /// Determine whether this module is a single-file module or a package
    pub fn kind(&self) -> ModuleKind {
        self.inner.kind
    }
}

impl std::fmt::Debug for Module {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("name", &self.name())
            .field("kind", &self.kind())
            .field("file", &self.file())
            .field("search_path", &self.search_path())
            .finish()
    }
}

impl salsa::DebugWithDb<dyn Db> for Module {
    fn fmt(&self, f: &mut Formatter<'_>, db: &dyn Db) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("name", &self.name())
            .field("kind", &self.kind())
            .field("file", &self.file().debug(db.upcast()))
            .field("search_path", &self.search_path())
            .finish()
    }
}

#[derive(PartialEq, Eq)]
struct ModuleInner {
    name: ModuleName,
    kind: ModuleKind,
    search_path: ModuleSearchPath,
    file: VfsFile,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ModuleKind {
    /// A single-file module (e.g. `foo.py` or `foo.pyi`)
    Module,

    /// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
    Package,
}

/// A search path in which to search for modules.
/// Corresponds to a path in [`sys.path`](https://docs.python.org/3/library/sys_path_init.html) at runtime.
///
/// Cloning a search path is cheap because it's an `Arc`.
#[derive(Clone, PartialEq, Eq)]
pub struct ModuleSearchPath {
    inner: Arc<ModuleSearchPathInner>,
}

impl ModuleSearchPath {
    pub fn new<P>(path: P, kind: ModuleSearchPathKind) -> Self
    where
        P: Into<VfsPath>,
    {
        Self {
            inner: Arc::new(ModuleSearchPathInner {
                path: path.into(),
                kind,
            }),
        }
    }

    /// Determine whether this is a first-party, third-party or standard-library search path
    pub fn kind(&self) -> ModuleSearchPathKind {
        self.inner.kind
    }

    /// Return the location of the search path on the file system
    pub fn path(&self) -> &VfsPath {
        &self.inner.path
    }
}

impl std::fmt::Debug for ModuleSearchPath {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ModuleSearchPath")
            .field("path", &self.inner.path)
            .field("kind", &self.kind())
            .finish()
    }
}

#[derive(Eq, PartialEq)]
struct ModuleSearchPathInner {
    path: VfsPath,
    kind: ModuleSearchPathKind,
}

/// Enumeration of the different kinds of search paths type checkers are expected to support.
///
/// N.B. Although we don't implement `Ord` for this enum, they are ordered in terms of the
/// priority that we want to give these modules when resolving them.
/// This is roughly [the order given in the typing spec], but typeshed's stubs
/// for the standard library are moved higher up to match Python's semantics at runtime.
///
/// [the order given in the typing spec]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ModuleSearchPathKind {
    /// "Extra" paths provided by the user in a config file, env var or CLI flag.
    /// E.g. mypy's `MYPYPATH` env var, or pyright's `stubPath` configuration setting
    Extra,

    /// Files in the project we're directly being invoked on
    FirstParty,

    /// The `stdlib` directory of typeshed (either vendored or custom)
    StandardLibrary,

    /// Stubs or runtime modules installed in site-packages
    SitePackagesThirdParty,

    /// Vendored third-party stubs from typeshed
    VendoredThirdParty,
}
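Because `parent` returns an owned `ModuleName`, ancestor walks compose directly with `std::iter::successors`; `ancestors` below is a hypothetical helper, not part of the crate:

// Hypothetical helper: yields "a.b.c", "a.b", "a" for the name "a.b.c".
fn ancestors(name: ModuleName) -> impl Iterator<Item = ModuleName> {
    std::iter::successors(Some(name), ModuleName::parent)
}

fn demo() {
    let name = ModuleName::new_static("foo.bar.baz").unwrap();
    let all: Vec<_> = ancestors(name).map(|n| n.to_string()).collect();
    assert_eq!(all, ["foo.bar.baz", "foo.bar", "foo"]);
}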
@@ -1,938 +0,0 @@
use salsa::DebugWithDb;
use std::ops::Deref;

use ruff_db::file_system::{FileSystem, FileSystemPath, FileSystemPathBuf};
use ruff_db::vfs::{system_path_to_file, vfs_path_to_file, VfsFile, VfsPath};

use crate::module::{Module, ModuleKind, ModuleName, ModuleSearchPath, ModuleSearchPathKind};
use crate::resolver::internal::ModuleResolverSearchPaths;
use crate::Db;

const TYPESHED_STDLIB_DIRECTORY: &str = "stdlib";

/// Configures the module search paths for the module resolver.
///
/// Must be called before calling any other module resolution functions.
pub fn set_module_resolution_settings(db: &mut dyn Db, config: ModuleResolutionSettings) {
    // There's no concurrency issue here because we hold a `&mut dyn Db` reference. No other
    // thread can mutate the `Db` while we're in this call, so using `try_get` to test if
    // the settings have already been set is safe.
    if let Some(existing) = ModuleResolverSearchPaths::try_get(db) {
        existing
            .set_search_paths(db)
            .to(config.into_ordered_search_paths());
    } else {
        ModuleResolverSearchPaths::new(db, config.into_ordered_search_paths());
    }
}

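// Editorial sketch (not part of the original diff): how a host wires the resolver up
// before issuing queries, mirroring `create_resolver` in the tests below. `TestDb` stands
// in for any type implementing `Db`; the paths are illustrative.
//
//     let mut db = TestDb::new();
//     set_module_resolution_settings(
//         &mut db,
//         ModuleResolutionSettings {
//             extra_paths: vec![],
//             workspace_root: FileSystemPath::new("src").to_path_buf(),
//             site_packages: None,
//             custom_typeshed: None,
//         },
//     );
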
/// Resolves a module name to a module.
pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
    let interned_name = internal::ModuleNameIngredient::new(db, module_name);

    resolve_module_query(db, interned_name)
}

/// Salsa query that resolves an interned [`ModuleNameIngredient`] to a module.
///
/// This query should not be called directly. Instead, use [`resolve_module`]. It only exists
/// because Salsa requires the module name to be an ingredient.
#[salsa::tracked]
pub(crate) fn resolve_module_query<'db>(
    db: &'db dyn Db,
    module_name: internal::ModuleNameIngredient<'db>,
) -> Option<Module> {
    // Use `entered()` so the guard lives for the whole query; `let _ = ...enter()` would
    // drop the guard (and close the span) immediately.
    let _span = tracing::trace_span!("resolve_module", module_name = ?module_name.debug(db)).entered();

    let name = module_name.name(db);

    let (search_path, module_file, kind) = resolve_name(db, name)?;

    let module = Module::new(name.clone(), kind, search_path, module_file);

    Some(module)
}

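// Editorial sketch (not part of the original diff): a typical lookup sequence. Because
// `resolve_module` funnels into the tracked `resolve_module_query`, repeating a lookup
// with the same name is answered from Salsa's cache instead of re-probing the file
// system (the "deleting an unrelated file" test below asserts exactly this).
//
//     let name = ModuleName::new_static("foo").unwrap();
//     let first = resolve_module(&db, name.clone());
//     let second = resolve_module(&db, name);
//     assert_eq!(first, second);
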
/// Resolves the module for the given path.
///
/// Returns `None` if the path is not a module locatable via `sys.path`.
#[tracing::instrument(level = "debug", skip(db))]
pub fn path_to_module(db: &dyn Db, path: &VfsPath) -> Option<Module> {
    // It's not entirely clear at first sight why this method calls `file_to_module`
    // instead of the other way round, considering that the first thing `file_to_module`
    // does is retrieve the file's path.
    //
    // The reason is that `file_to_module` is a tracked Salsa query, and Salsa queries
    // require that all arguments are Salsa ingredients (something stored in Salsa).
    // `Path`s aren't Salsa ingredients, but `VfsFile` is. So what we do here is retrieve
    // the `path`'s `VfsFile` so that we can make use of Salsa's caching and invalidation.
    let file = vfs_path_to_file(db.upcast(), path)?;
    file_to_module(db, file)
}

/// Resolves the module for the file with the given id.
///
/// Returns `None` if the file is not a module locatable via `sys.path`.
#[salsa::tracked]
#[allow(unused)]
pub(crate) fn file_to_module(db: &dyn Db, file: VfsFile) -> Option<Module> {
    let _span = tracing::trace_span!("file_to_module", file = ?file.debug(db.upcast())).entered();

    let path = file.path(db.upcast());

    let search_paths = module_search_paths(db);

    let relative_path = search_paths
        .iter()
        .find_map(|root| match (root.path(), path) {
            (VfsPath::FileSystem(root_path), VfsPath::FileSystem(path)) => {
                let relative_path = path.strip_prefix(root_path).ok()?;
                Some(relative_path)
            }
            (VfsPath::Vendored(_), VfsPath::Vendored(_)) => {
                todo!("Add support for vendored modules")
            }
            (VfsPath::Vendored(_), VfsPath::FileSystem(_))
            | (VfsPath::FileSystem(_), VfsPath::Vendored(_)) => None,
        })?;

    let module_name = ModuleName::from_relative_path(relative_path)?;

    // Resolve the module name to see if Python would resolve the name to the same path.
    // If it doesn't, then that means that multiple modules have the same name in different
    // root paths, but that the module corresponding to `path` is in a lower priority search path,
    // in which case we ignore it.
    let module = resolve_module(db, module_name)?;

    if file == module.file() {
        Some(module)
    } else {
        // This path is for a module with the same name but with a different precedence. For example:
        // ```
        // src/foo.py
        // src/foo/__init__.py
        // ```
        // The module name of `src/foo.py` is `foo`, but the module loaded by Python is `src/foo/__init__.py`.
        // That means we need to ignore `src/foo.py` even though it resolves to the same module name.
        None
    }
}

/// Configures the search paths that are used to resolve modules.
#[derive(Eq, PartialEq, Debug)]
pub struct ModuleResolutionSettings {
    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's MYPYPATH environment variable,
    /// or pyright's stubPath configuration setting.
    pub extra_paths: Vec<FileSystemPathBuf>,

    /// The root of the workspace, used for finding first-party modules.
    pub workspace_root: FileSystemPathBuf,

    /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
    pub site_packages: Option<FileSystemPathBuf>,

    /// Optional path to standard-library typeshed stubs.
    /// Currently this has to be a directory that exists on disk.
    ///
    /// (TODO: fall back to vendored stubs if no custom directory is provided.)
    pub custom_typeshed: Option<FileSystemPathBuf>,
}

impl ModuleResolutionSettings {
    /// Implementation of PEP 561's module resolution order
    /// (with some small, deliberate differences)
    fn into_ordered_search_paths(self) -> OrderedSearchPaths {
        let ModuleResolutionSettings {
            extra_paths,
            workspace_root,
            site_packages,
            custom_typeshed,
        } = self;

        let mut paths: Vec<_> = extra_paths
            .into_iter()
            .map(|path| ModuleSearchPath::new(path, ModuleSearchPathKind::Extra))
            .collect();

        paths.push(ModuleSearchPath::new(
            workspace_root,
            ModuleSearchPathKind::FirstParty,
        ));

        // TODO: fall back to vendored typeshed stubs if no custom typeshed directory is provided by the user
        if let Some(custom_typeshed) = custom_typeshed {
            paths.push(ModuleSearchPath::new(
                custom_typeshed.join(TYPESHED_STDLIB_DIRECTORY),
                ModuleSearchPathKind::StandardLibrary,
            ));
        }

        // TODO: vendor typeshed's third-party stubs as well as the stdlib, and fall back to them as a final step
        if let Some(site_packages) = site_packages {
            paths.push(ModuleSearchPath::new(
                site_packages,
                ModuleSearchPathKind::SitePackagesThirdParty,
            ));
        }

        OrderedSearchPaths(paths)
    }
}

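// Editorial sketch (not part of the original diff): the concrete order produced by
// `into_ordered_search_paths` for illustrative settings with `extra_paths = ["/extra"]`,
// `workspace_root = "/src"`, `custom_typeshed = Some("/typeshed")`, and
// `site_packages = Some("/site-packages")`:
//
//     /extra            (Extra)
//     /src              (FirstParty)
//     /typeshed/stdlib  (StandardLibrary)
//     /site-packages    (SitePackagesThirdParty)
//
// `VendoredThirdParty` is never pushed yet; per the TODOs above, vendored stubs are not
// wired in at this point.
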
/// A resolved module resolution order, implementing PEP 561
/// (with some small, deliberate differences)
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub(crate) struct OrderedSearchPaths(Vec<ModuleSearchPath>);

impl Deref for OrderedSearchPaths {
    type Target = [ModuleSearchPath];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// The singleton methods generated by Salsa are all `pub` instead of `pub(crate)`, which
// triggers `unreachable_pub`. Work around this by creating a module and allowing
// `unreachable_pub` for it. Salsa also generates uses of `_db` variables for `interned`
// structs, which triggers `clippy::used_underscore_binding`. Suppress that too.
// TODO(micha): Contribute a fix for this upstream where the singleton methods have the same visibility as the struct.
#[allow(unreachable_pub, clippy::used_underscore_binding)]
pub(crate) mod internal {
    use crate::module::ModuleName;
    use crate::resolver::OrderedSearchPaths;

    #[salsa::input(singleton)]
    pub(crate) struct ModuleResolverSearchPaths {
        #[return_ref]
        pub(super) search_paths: OrderedSearchPaths,
    }

    /// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
    ///
    /// This is needed because Salsa requires that all query arguments are salsa ingredients.
    #[salsa::interned]
    pub(crate) struct ModuleNameIngredient<'db> {
        #[return_ref]
        pub(super) name: ModuleName,
    }
}

fn module_search_paths(db: &dyn Db) -> &[ModuleSearchPath] {
    ModuleResolverSearchPaths::get(db).search_paths(db)
}

/// Given a module name and a list of search paths in which to look up modules,
/// attempt to resolve the module name.
fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(ModuleSearchPath, VfsFile, ModuleKind)> {
    let search_paths = module_search_paths(db);

    for search_path in search_paths {
        let mut components = name.components();
        let module_name = components.next_back()?;

        let VfsPath::FileSystem(fs_search_path) = search_path.path() else {
            todo!("Vendored search paths are not yet supported");
        };

        match resolve_package(db.file_system(), fs_search_path, components) {
            Ok(resolved_package) => {
                let mut package_path = resolved_package.path;

                package_path.push(module_name);

                // Must be an `__init__.pyi` or `__init__.py`, or it isn't a package.
                let kind = if db.file_system().is_directory(&package_path) {
                    package_path.push("__init__");
                    ModuleKind::Package
                } else {
                    ModuleKind::Module
                };

                // TODO Implement full https://peps.python.org/pep-0561/#type-checker-module-resolution-order resolution
                let stub = package_path.with_extension("pyi");

                if let Some(stub) = system_path_to_file(db.upcast(), &stub) {
                    return Some((search_path.clone(), stub, kind));
                }

                let module = package_path.with_extension("py");

                if let Some(module) = system_path_to_file(db.upcast(), &module) {
                    return Some((search_path.clone(), module, kind));
                }

                // For regular packages, don't search the next search path. All files of that
                // package must be in the same location
                if resolved_package.kind.is_regular_package() {
                    return None;
                }
            }
            Err(parent_kind) => {
                if parent_kind.is_regular_package() {
                    // For regular packages, don't search the next search path.
                    return None;
                }
            }
        }
    }

    None
}

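// Editorial sketch (not part of the original diff): resolving `foo.bar.baz` against a
// single search path `/src`. `next_back` splits off `baz` as the module name, so
// `resolve_package` only walks `foo` and `bar`. Starting from the resolved package path
// `/src/foo/bar`, the loop above then probes:
//
//     if `/src/foo/bar/baz` is a directory (a package):
//         /src/foo/bar/baz/__init__.pyi, then /src/foo/bar/baz/__init__.py
//     otherwise (a single-file module):
//         /src/foo/bar/baz.pyi, then /src/foo/bar/baz.py
//
// Stubs shadow runtime modules because the `.pyi` probe always happens first.
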
fn resolve_package<'a, I>(
    fs: &dyn FileSystem,
    module_search_path: &FileSystemPath,
    components: I,
) -> Result<ResolvedPackage, PackageKind>
where
    I: Iterator<Item = &'a str>,
{
    let mut package_path = module_search_path.to_path_buf();

    // `true` if inside a folder that is a namespace package (has no `__init__.py`).
    // Namespace packages are special because they can be spread across multiple search paths.
    // https://peps.python.org/pep-0420/
    let mut in_namespace_package = false;

    // `true` if resolving a sub-package. For example, `true` when resolving `bar` of `foo.bar`.
    let mut in_sub_package = false;

    // For `foo.bar.baz`, test that `foo` and `bar` both contain an `__init__.py`.
    for folder in components {
        package_path.push(folder);

        let has_init_py = fs.is_file(&package_path.join("__init__.py"))
            || fs.is_file(&package_path.join("__init__.pyi"));

        if has_init_py {
            in_namespace_package = false;
        } else if fs.is_directory(&package_path) {
            // A directory without an `__init__.py` is a namespace package, continue with the next folder.
            in_namespace_package = true;
        } else if in_namespace_package {
            // Package not found but it is part of a namespace package.
            return Err(PackageKind::Namespace);
        } else if in_sub_package {
            // A regular sub package wasn't found.
            return Err(PackageKind::Regular);
        } else {
            // We couldn't find `foo` for `foo.bar.baz`, search the next search path.
            return Err(PackageKind::Root);
        }

        in_sub_package = true;
    }

    let kind = if in_namespace_package {
        PackageKind::Namespace
    } else if in_sub_package {
        PackageKind::Regular
    } else {
        PackageKind::Root
    };

    Ok(ResolvedPackage {
        kind,
        path: package_path,
    })
}

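// Editorial sketch (not part of the original diff): the possible outcomes of
// `resolve_package` for components `foo`, `bar` under one search-path root, and how
// `resolve_name` reacts to each:
//
//     foo/__init__.py and foo/bar/__init__.py exist -> Ok(Regular): use this root only
//     foo/ and foo/bar/ exist, no __init__.py       -> Ok(Namespace): later roots may
//                                                      still contribute (PEP 420)
//     foo/ has no __init__.py, foo/bar/ is missing  -> Err(Namespace): try the next root
//     foo/__init__.py exists, foo/bar/ is missing   -> Err(Regular): stop searching
//     foo/ is missing entirely                      -> Err(Root): try the next root
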
#[derive(Debug)]
struct ResolvedPackage {
    path: FileSystemPathBuf,
    kind: PackageKind,
}

#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum PackageKind {
    /// A root package or module. E.g. `foo` in `foo.bar.baz` or just `foo`.
    Root,

    /// A regular sub-package where the parent contains an `__init__.py`.
    ///
    /// For example, `bar` in `foo.bar` when the `foo` directory contains an `__init__.py`.
    Regular,

    /// A sub-package in a namespace package. A namespace package is a package without an `__init__.py`.
    ///
    /// For example, `bar` in `foo.bar` if the `foo` directory contains no `__init__.py`.
    Namespace,
}

impl PackageKind {
    const fn is_regular_package(self) -> bool {
        matches!(self, PackageKind::Regular)
    }
}

#[cfg(test)]
mod tests {

    use ruff_db::file_system::{FileSystemPath, FileSystemPathBuf};
    use ruff_db::vfs::{system_path_to_file, VfsFile, VfsPath};

    use crate::db::tests::TestDb;
    use crate::module::{ModuleKind, ModuleName};

    use super::{
        path_to_module, resolve_module, set_module_resolution_settings, ModuleResolutionSettings,
        TYPESHED_STDLIB_DIRECTORY,
    };

    struct TestCase {
        db: TestDb,

        src: FileSystemPathBuf,
        custom_typeshed: FileSystemPathBuf,
        site_packages: FileSystemPathBuf,
    }

    fn create_resolver() -> std::io::Result<TestCase> {
        let mut db = TestDb::new();

        let src = FileSystemPath::new("src").to_path_buf();
        let site_packages = FileSystemPath::new("site_packages").to_path_buf();
        let custom_typeshed = FileSystemPath::new("typeshed").to_path_buf();

        let fs = db.memory_file_system();

        fs.create_directory_all(&src)?;
        fs.create_directory_all(&site_packages)?;
        fs.create_directory_all(&custom_typeshed)?;

        let settings = ModuleResolutionSettings {
            extra_paths: vec![],
            workspace_root: src.clone(),
            site_packages: Some(site_packages.clone()),
            custom_typeshed: Some(custom_typeshed.clone()),
        };

        set_module_resolution_settings(&mut db, settings);

        Ok(TestCase {
            db,
            src,
            custom_typeshed,
            site_packages,
        })
    }

    #[test]
    fn first_party_module() -> anyhow::Result<()> {
        let TestCase { db, src, .. } = create_resolver()?;

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_path = src.join("foo.py");
        db.memory_file_system()
            .write_file(&foo_path, "print('Hello, world!')")?;

        let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();

        assert_eq!(
            Some(&foo_module),
            resolve_module(&db, foo_module_name.clone()).as_ref()
        );

        assert_eq!("foo", foo_module.name());
        assert_eq!(&src, foo_module.search_path().path());
        assert_eq!(ModuleKind::Module, foo_module.kind());
        assert_eq!(&foo_path, foo_module.file().path(&db));

        assert_eq!(
            Some(foo_module),
            path_to_module(&db, &VfsPath::FileSystem(foo_path))
        );

        Ok(())
    }

    #[test]
    fn stdlib() -> anyhow::Result<()> {
        let TestCase {
            db,
            custom_typeshed,
            ..
        } = create_resolver()?;

        let stdlib_dir = custom_typeshed.join(TYPESHED_STDLIB_DIRECTORY);
        let functools_path = stdlib_dir.join("functools.py");
        db.memory_file_system()
            .write_file(&functools_path, "def update_wrapper(): ...")?;

        let functools_module_name = ModuleName::new_static("functools").unwrap();
        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();

        assert_eq!(
            Some(&functools_module),
            resolve_module(&db, functools_module_name).as_ref()
        );

        assert_eq!(&stdlib_dir, functools_module.search_path().path());
        assert_eq!(ModuleKind::Module, functools_module.kind());
        assert_eq!(&functools_path, functools_module.file().path(&db));

        assert_eq!(
            Some(functools_module),
            path_to_module(&db, &VfsPath::FileSystem(functools_path))
        );

        Ok(())
    }

    #[test]
    fn first_party_precedence_over_stdlib() -> anyhow::Result<()> {
        let TestCase {
            db,
            src,
            custom_typeshed,
            ..
        } = create_resolver()?;

        let stdlib_dir = custom_typeshed.join(TYPESHED_STDLIB_DIRECTORY);
        let stdlib_functools_path = stdlib_dir.join("functools.py");
        let first_party_functools_path = src.join("functools.py");

        db.memory_file_system().write_files([
            (&stdlib_functools_path, "def update_wrapper(): ..."),
            (&first_party_functools_path, "def update_wrapper(): ..."),
        ])?;

        let functools_module_name = ModuleName::new_static("functools").unwrap();
        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();

        assert_eq!(
            Some(&functools_module),
            resolve_module(&db, functools_module_name).as_ref()
        );
        assert_eq!(&src, functools_module.search_path().path());
        assert_eq!(ModuleKind::Module, functools_module.kind());
        assert_eq!(
            &first_party_functools_path,
            functools_module.file().path(&db)
        );

        assert_eq!(
            Some(functools_module),
            path_to_module(&db, &VfsPath::FileSystem(first_party_functools_path))
        );

        Ok(())
    }

    // TODO: Port typeshed test case. Porting isn't possible at the moment because the vendored zip
    // is part of the red knot crate.
    // #[test]
    // fn typeshed_zip_created_at_build_time() -> anyhow::Result<()> {
    //     // The file path here is hardcoded in this crate's `build.rs` script.
    //     // Luckily this crate will fail to build if this file isn't available at build time.
    //     const TYPESHED_ZIP_BYTES: &[u8] =
    //         include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
    //     assert!(!TYPESHED_ZIP_BYTES.is_empty());
    //     let mut typeshed_zip_archive = ZipArchive::new(Cursor::new(TYPESHED_ZIP_BYTES))?;
    //
    //     let path_to_functools = Path::new("stdlib").join("functools.pyi");
    //     let mut functools_module_stub = typeshed_zip_archive
    //         .by_name(path_to_functools.to_str().unwrap())
    //         .unwrap();
    //     assert!(functools_module_stub.is_file());
    //
    //     let mut functools_module_stub_source = String::new();
    //     functools_module_stub.read_to_string(&mut functools_module_stub_source)?;
    //
    //     assert!(functools_module_stub_source.contains("def update_wrapper("));
    //     Ok(())
    // }

    #[test]
    fn resolve_package() -> anyhow::Result<()> {
        let TestCase { src, db, .. } = create_resolver()?;

        let foo_dir = src.join("foo");
        let foo_path = foo_dir.join("__init__.py");

        db.memory_file_system()
            .write_file(&foo_path, "print('Hello, world!')")?;

        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();

        assert_eq!("foo", foo_module.name());
        assert_eq!(&src, foo_module.search_path().path());
        assert_eq!(&foo_path, foo_module.file().path(&db));

        assert_eq!(
            Some(&foo_module),
            path_to_module(&db, &VfsPath::FileSystem(foo_path)).as_ref()
        );

        // Resolving by directory doesn't resolve to the init file.
        assert_eq!(None, path_to_module(&db, &VfsPath::FileSystem(foo_dir)));

        Ok(())
    }

    #[test]
    fn package_priority_over_module() -> anyhow::Result<()> {
        let TestCase { db, src, .. } = create_resolver()?;

        let foo_dir = src.join("foo");
        let foo_init = foo_dir.join("__init__.py");

        db.memory_file_system()
            .write_file(&foo_init, "print('Hello, world!')")?;

        let foo_py = src.join("foo.py");
        db.memory_file_system()
            .write_file(&foo_py, "print('Hello, world!')")?;

        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();

        assert_eq!(&src, foo_module.search_path().path());
        assert_eq!(&foo_init, foo_module.file().path(&db));
        assert_eq!(ModuleKind::Package, foo_module.kind());

        assert_eq!(
            Some(foo_module),
            path_to_module(&db, &VfsPath::FileSystem(foo_init))
        );
        assert_eq!(None, path_to_module(&db, &VfsPath::FileSystem(foo_py)));

        Ok(())
    }

    #[test]
    fn typing_stub_over_module() -> anyhow::Result<()> {
        let TestCase { db, src, .. } = create_resolver()?;

        let foo_stub = src.join("foo.pyi");
        let foo_py = src.join("foo.py");
        db.memory_file_system()
            .write_files([(&foo_stub, "x: int"), (&foo_py, "print('Hello, world!')")])?;

        let foo = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();

        assert_eq!(&src, foo.search_path().path());
        assert_eq!(&foo_stub, foo.file().path(&db));

        assert_eq!(
            Some(foo),
            path_to_module(&db, &VfsPath::FileSystem(foo_stub))
        );
        assert_eq!(None, path_to_module(&db, &VfsPath::FileSystem(foo_py)));

        Ok(())
    }

    #[test]
    fn sub_packages() -> anyhow::Result<()> {
        let TestCase { db, src, .. } = create_resolver()?;

        let foo = src.join("foo");
        let bar = foo.join("bar");
        let baz = bar.join("baz.py");

        db.memory_file_system().write_files([
            (&foo.join("__init__.py"), ""),
            (&bar.join("__init__.py"), ""),
            (&baz, "print('Hello, world!')"),
        ])?;

        let baz_module =
            resolve_module(&db, ModuleName::new_static("foo.bar.baz").unwrap()).unwrap();

        assert_eq!(&src, baz_module.search_path().path());
        assert_eq!(&baz, baz_module.file().path(&db));

        assert_eq!(
            Some(baz_module),
            path_to_module(&db, &VfsPath::FileSystem(baz))
        );

        Ok(())
    }

    #[test]
    fn namespace_package() -> anyhow::Result<()> {
        let TestCase {
            db,
            src,
            site_packages,
            ..
        } = create_resolver()?;

        // From [PEP420](https://peps.python.org/pep-0420/#nested-namespace-packages).
        // But uses `src` for `project1` and `site_packages` for `project2`.
        // ```
        // src
        //   parent
        //     child
        //       one.py
        // site_packages
        //   parent
        //     child
        //       two.py
        // ```

        let parent1 = src.join("parent");
        let child1 = parent1.join("child");
        let one = child1.join("one.py");

        let parent2 = site_packages.join("parent");
        let child2 = parent2.join("child");
        let two = child2.join("two.py");

        db.memory_file_system().write_files([
            (&one, "print('Hello, world!')"),
            (&two, "print('Hello, world!')"),
        ])?;

        let one_module =
            resolve_module(&db, ModuleName::new_static("parent.child.one").unwrap()).unwrap();

        assert_eq!(
            Some(one_module),
            path_to_module(&db, &VfsPath::FileSystem(one))
        );

        let two_module =
            resolve_module(&db, ModuleName::new_static("parent.child.two").unwrap()).unwrap();
        assert_eq!(
            Some(two_module),
            path_to_module(&db, &VfsPath::FileSystem(two))
        );

        Ok(())
    }

    #[test]
    fn regular_package_in_namespace_package() -> anyhow::Result<()> {
        let TestCase {
            db,
            src,
            site_packages,
            ..
        } = create_resolver()?;

        // Adapted test case from the [PEP420 examples](https://peps.python.org/pep-0420/#nested-namespace-packages).
        // The `src/parent/child` package is a regular package. Therefore, `site_packages/parent/child/two.py` should not be resolved.
        // ```
        // src
        //   parent
        //     child
        //       one.py
        // site_packages
        //   parent
        //     child
        //       two.py
        // ```

        let parent1 = src.join("parent");
        let child1 = parent1.join("child");
        let one = child1.join("one.py");

        let parent2 = site_packages.join("parent");
        let child2 = parent2.join("child");
        let two = child2.join("two.py");

        db.memory_file_system().write_files([
            (&child1.join("__init__.py"), "print('Hello, world!')"),
            (&one, "print('Hello, world!')"),
            (&two, "print('Hello, world!')"),
        ])?;

        let one_module =
            resolve_module(&db, ModuleName::new_static("parent.child.one").unwrap()).unwrap();

        assert_eq!(
            Some(one_module),
            path_to_module(&db, &VfsPath::FileSystem(one))
        );

        assert_eq!(
            None,
            resolve_module(&db, ModuleName::new_static("parent.child.two").unwrap())
        );
        Ok(())
    }

    #[test]
    fn module_search_path_priority() -> anyhow::Result<()> {
        let TestCase {
            db,
            src,
            site_packages,
            ..
        } = create_resolver()?;

        let foo_src = src.join("foo.py");
        let foo_site_packages = site_packages.join("foo.py");

        db.memory_file_system()
            .write_files([(&foo_src, ""), (&foo_site_packages, "")])?;

        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();

        assert_eq!(&src, foo_module.search_path().path());
        assert_eq!(&foo_src, foo_module.file().path(&db));

        assert_eq!(
            Some(foo_module),
            path_to_module(&db, &VfsPath::FileSystem(foo_src))
        );
        assert_eq!(
            None,
            path_to_module(&db, &VfsPath::FileSystem(foo_site_packages))
        );

        Ok(())
    }

    #[test]
    #[cfg(target_family = "unix")]
    fn symlink() -> anyhow::Result<()> {
        let TestCase {
            mut db,
            src,
            site_packages,
            custom_typeshed,
        } = create_resolver()?;

        db.with_os_file_system();

        let temp_dir = tempfile::tempdir()?;
        let root = FileSystemPath::from_std_path(temp_dir.path()).unwrap();

        let src = root.join(src);
        let site_packages = root.join(site_packages);
        let custom_typeshed = root.join(custom_typeshed);

        let foo = src.join("foo.py");
        let bar = src.join("bar.py");

        std::fs::create_dir_all(src.as_std_path())?;
        std::fs::create_dir_all(site_packages.as_std_path())?;
        std::fs::create_dir_all(custom_typeshed.as_std_path())?;

        std::fs::write(foo.as_std_path(), "")?;
        std::os::unix::fs::symlink(foo.as_std_path(), bar.as_std_path())?;

        let settings = ModuleResolutionSettings {
            extra_paths: vec![],
            workspace_root: src.clone(),
            site_packages: Some(site_packages),
            custom_typeshed: Some(custom_typeshed),
        };

        set_module_resolution_settings(&mut db, settings);

        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
        let bar_module = resolve_module(&db, ModuleName::new_static("bar").unwrap()).unwrap();

        // `foo` and `bar` shouldn't resolve to the same file
        assert_ne!(foo_module, bar_module);

        assert_eq!(&src, foo_module.search_path().path());
        assert_eq!(&foo, foo_module.file().path(&db));

        assert_eq!(&src, bar_module.search_path().path());
        assert_eq!(&bar, bar_module.file().path(&db));

        assert_eq!(
            Some(foo_module),
            path_to_module(&db, &VfsPath::FileSystem(foo))
        );
        assert_eq!(
            Some(bar_module),
            path_to_module(&db, &VfsPath::FileSystem(bar))
        );

        Ok(())
    }

    #[test]
    fn deleting_an_unrelated_file_doesnt_change_module_resolution() -> anyhow::Result<()> {
        let TestCase { mut db, src, .. } = create_resolver()?;

        let foo_path = src.join("foo.py");
        let bar_path = src.join("bar.py");

        db.memory_file_system()
            .write_files([(&foo_path, "x = 1"), (&bar_path, "y = 2")])?;

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();

        let bar = system_path_to_file(&db, &bar_path).expect("bar.py to exist");

        db.clear_salsa_events();

        // Delete `bar.py`
        db.memory_file_system().remove_file(&bar_path)?;
        bar.touch(&mut db);

        // Re-query the foo module. The foo module should still be cached because `bar.py` isn't relevant
        // for resolving `foo`.
        let foo_module2 = resolve_module(&db, foo_module_name);

        assert!(!db
            .take_salsa_events()
            .iter()
            .any(|event| { matches!(event.kind, salsa::EventKind::WillExecute { .. }) }));

        assert_eq!(Some(foo_module), foo_module2);

        Ok(())
    }

    #[test]
    fn adding_a_file_on_which_the_module_resolution_depends_invalidates_the_query(
    ) -> anyhow::Result<()> {
        let TestCase { mut db, src, .. } = create_resolver()?;
        let foo_path = src.join("foo.py");

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        assert_eq!(resolve_module(&db, foo_module_name.clone()), None);

        // Now write the foo file
        db.memory_file_system().write_file(&foo_path, "x = 1")?;
        VfsFile::touch_path(&mut db, &VfsPath::FileSystem(foo_path.clone()));
        let foo_file = system_path_to_file(&db, &foo_path).expect("foo.py to exist");

        let foo_module = resolve_module(&db, foo_module_name).expect("Foo module to resolve");
        assert_eq!(foo_file, foo_module.file());

        Ok(())
    }

    #[test]
    fn removing_a_file_that_the_module_resolution_depends_on_invalidates_the_query(
    ) -> anyhow::Result<()> {
        let TestCase { mut db, src, .. } = create_resolver()?;
        let foo_path = src.join("foo.py");
        let foo_init_path = src.join("foo/__init__.py");

        db.memory_file_system()
            .write_files([(&foo_path, "x = 1"), (&foo_init_path, "x = 2")])?;

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_module = resolve_module(&db, foo_module_name.clone()).expect("foo module to exist");

        assert_eq!(&foo_init_path, foo_module.file().path(&db));

        // Delete `foo/__init__.py` and the `foo` folder. `foo` should now resolve to `foo.py`
        db.memory_file_system().remove_file(&foo_init_path)?;
        db.memory_file_system()
            .remove_directory(foo_init_path.parent().unwrap())?;
        VfsFile::touch_path(&mut db, &VfsPath::FileSystem(foo_init_path.clone()));

        let foo_module = resolve_module(&db, foo_module_name).expect("Foo module to resolve");
        assert_eq!(&foo_path, foo_module.file().path(&db));

        Ok(())
    }
}
@@ -1 +0,0 @@
114409d49b43ba62a179ebb856fa70a5161f751e
@@ -1,117 +0,0 @@
import sys
from typing import Literal

SF_APPEND: Literal[0x00040000]
SF_ARCHIVED: Literal[0x00010000]
SF_IMMUTABLE: Literal[0x00020000]
SF_NOUNLINK: Literal[0x00100000]
SF_SNAPSHOT: Literal[0x00200000]

ST_MODE: Literal[0]
ST_INO: Literal[1]
ST_DEV: Literal[2]
ST_NLINK: Literal[3]
ST_UID: Literal[4]
ST_GID: Literal[5]
ST_SIZE: Literal[6]
ST_ATIME: Literal[7]
ST_MTIME: Literal[8]
ST_CTIME: Literal[9]

S_IFIFO: Literal[0o010000]
S_IFLNK: Literal[0o120000]
S_IFREG: Literal[0o100000]
S_IFSOCK: Literal[0o140000]
S_IFBLK: Literal[0o060000]
S_IFCHR: Literal[0o020000]
S_IFDIR: Literal[0o040000]

# These are 0 on systems that don't support the specific kind of file.
# Example: Linux doesn't support door files, so S_IFDOOR is 0 on Linux.
S_IFDOOR: int
S_IFPORT: int
S_IFWHT: int

S_ISUID: Literal[0o4000]
S_ISGID: Literal[0o2000]
S_ISVTX: Literal[0o1000]

S_IRWXU: Literal[0o0700]
S_IRUSR: Literal[0o0400]
S_IWUSR: Literal[0o0200]
S_IXUSR: Literal[0o0100]

S_IRWXG: Literal[0o0070]
S_IRGRP: Literal[0o0040]
S_IWGRP: Literal[0o0020]
S_IXGRP: Literal[0o0010]

S_IRWXO: Literal[0o0007]
S_IROTH: Literal[0o0004]
S_IWOTH: Literal[0o0002]
S_IXOTH: Literal[0o0001]

S_ENFMT: Literal[0o2000]
S_IREAD: Literal[0o0400]
S_IWRITE: Literal[0o0200]
S_IEXEC: Literal[0o0100]

UF_APPEND: Literal[0x00000004]
UF_COMPRESSED: Literal[0x00000020]  # OS X 10.6+ only
UF_HIDDEN: Literal[0x00008000]  # OS X 10.5+ only
UF_IMMUTABLE: Literal[0x00000002]
UF_NODUMP: Literal[0x00000001]
UF_NOUNLINK: Literal[0x00000010]
UF_OPAQUE: Literal[0x00000008]

def S_IMODE(mode: int, /) -> int: ...
def S_IFMT(mode: int, /) -> int: ...
def S_ISBLK(mode: int, /) -> bool: ...
def S_ISCHR(mode: int, /) -> bool: ...
def S_ISDIR(mode: int, /) -> bool: ...
def S_ISDOOR(mode: int, /) -> bool: ...
def S_ISFIFO(mode: int, /) -> bool: ...
def S_ISLNK(mode: int, /) -> bool: ...
def S_ISPORT(mode: int, /) -> bool: ...
def S_ISREG(mode: int, /) -> bool: ...
def S_ISSOCK(mode: int, /) -> bool: ...
def S_ISWHT(mode: int, /) -> bool: ...
def filemode(mode: int, /) -> str: ...

if sys.platform == "win32":
    IO_REPARSE_TAG_SYMLINK: int
    IO_REPARSE_TAG_MOUNT_POINT: int
    IO_REPARSE_TAG_APPEXECLINK: int

if sys.platform == "win32":
    FILE_ATTRIBUTE_ARCHIVE: Literal[32]
    FILE_ATTRIBUTE_COMPRESSED: Literal[2048]
    FILE_ATTRIBUTE_DEVICE: Literal[64]
    FILE_ATTRIBUTE_DIRECTORY: Literal[16]
    FILE_ATTRIBUTE_ENCRYPTED: Literal[16384]
    FILE_ATTRIBUTE_HIDDEN: Literal[2]
    FILE_ATTRIBUTE_INTEGRITY_STREAM: Literal[32768]
    FILE_ATTRIBUTE_NORMAL: Literal[128]
    FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Literal[8192]
    FILE_ATTRIBUTE_NO_SCRUB_DATA: Literal[131072]
    FILE_ATTRIBUTE_OFFLINE: Literal[4096]
    FILE_ATTRIBUTE_READONLY: Literal[1]
    FILE_ATTRIBUTE_REPARSE_POINT: Literal[1024]
    FILE_ATTRIBUTE_SPARSE_FILE: Literal[512]
    FILE_ATTRIBUTE_SYSTEM: Literal[4]
    FILE_ATTRIBUTE_TEMPORARY: Literal[256]
    FILE_ATTRIBUTE_VIRTUAL: Literal[65536]

if sys.version_info >= (3, 13):
    SF_SETTABLE: Literal[0x3FFF0000]
    # https://github.com/python/cpython/issues/114081#issuecomment-2119017790
    # SF_RESTRICTED: Literal[0x00080000]
    SF_FIRMLINK: Literal[0x00800000]
    SF_DATALESS: Literal[0x40000000]

    SF_SUPPORTED: Literal[0x9F0000]
    SF_SYNTHETIC: Literal[0xC0000000]

    UF_TRACKED: Literal[0x00000040]
    UF_DATAVAULT: Literal[0x00000080]
    UF_SETTABLE: Literal[0x0000FFFF]
@@ -1,20 +0,0 @@
import enum
import sys
from typing import Literal

LOG_THRESHOLD_FOR_CONNLOST_WRITES: Literal[5]
ACCEPT_RETRY_DELAY: Literal[1]
DEBUG_STACK_DEPTH: Literal[10]
SSL_HANDSHAKE_TIMEOUT: float
SENDFILE_FALLBACK_READBUFFER_SIZE: Literal[262144]
if sys.version_info >= (3, 11):
    SSL_SHUTDOWN_TIMEOUT: float
    FLOW_CONTROL_HIGH_WATER_SSL_READ: Literal[256]
    FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Literal[512]
if sys.version_info >= (3, 12):
    THREAD_JOIN_TIMEOUT: Literal[300]

class _SendfileMode(enum.Enum):
    UNSUPPORTED = 1
    TRY_NATIVE = 2
    FALLBACK = 3
@@ -1,20 +0,0 @@
import functools
import traceback
from collections.abc import Iterable
from types import FrameType, FunctionType
from typing import Any, overload
from typing_extensions import TypeAlias

class _HasWrapper:
    __wrapper__: _HasWrapper | FunctionType

_FuncType: TypeAlias = FunctionType | _HasWrapper | functools.partial[Any] | functools.partialmethod[Any]

@overload
def _get_function_source(func: _FuncType) -> tuple[str, int]: ...
@overload
def _get_function_source(func: object) -> tuple[str, int] | None: ...
def _format_callback_source(func: object, args: Iterable[Any]) -> str: ...
def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ...
def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ...
def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ...
@@ -1,196 +0,0 @@
import sys
import types
from abc import ABCMeta, abstractmethod
from collections.abc import Callable
from typing import Literal
from typing_extensions import Self, TypeVarTuple, Unpack, deprecated

from .events import AbstractEventLoop, BaseDefaultEventLoopPolicy
from .selector_events import BaseSelectorEventLoop

_Ts = TypeVarTuple("_Ts")

# This is also technically not available on Win,
# but other parts of typeshed need this definition.
# So, it is special cased.
if sys.version_info >= (3, 12):
    @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
    class AbstractChildWatcher:
        @abstractmethod
        def add_child_handler(
            self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
        ) -> None: ...
        @abstractmethod
        def remove_child_handler(self, pid: int) -> bool: ...
        @abstractmethod
        def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
        @abstractmethod
        def close(self) -> None: ...
        @abstractmethod
        def __enter__(self) -> Self: ...
        @abstractmethod
        def __exit__(
            self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None
        ) -> None: ...
        @abstractmethod
        def is_active(self) -> bool: ...

else:
    class AbstractChildWatcher:
        @abstractmethod
        def add_child_handler(
            self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
        ) -> None: ...
        @abstractmethod
        def remove_child_handler(self, pid: int) -> bool: ...
        @abstractmethod
        def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
        @abstractmethod
        def close(self) -> None: ...
        @abstractmethod
        def __enter__(self) -> Self: ...
        @abstractmethod
        def __exit__(
            self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None
        ) -> None: ...
        @abstractmethod
        def is_active(self) -> bool: ...

if sys.platform != "win32":
    if sys.version_info >= (3, 9):
        __all__ = (
            "SelectorEventLoop",
            "AbstractChildWatcher",
            "SafeChildWatcher",
            "FastChildWatcher",
            "PidfdChildWatcher",
            "MultiLoopChildWatcher",
            "ThreadedChildWatcher",
            "DefaultEventLoopPolicy",
        )
    else:
        __all__ = (
            "SelectorEventLoop",
            "AbstractChildWatcher",
            "SafeChildWatcher",
            "FastChildWatcher",
            "MultiLoopChildWatcher",
            "ThreadedChildWatcher",
            "DefaultEventLoopPolicy",
        )

    # Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub.
    # See discussion in #7412
    class BaseChildWatcher(AbstractChildWatcher, metaclass=ABCMeta):
        def close(self) -> None: ...
        def is_active(self) -> bool: ...
        def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...

    if sys.version_info >= (3, 12):
        @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
        class SafeChildWatcher(BaseChildWatcher):
            def __enter__(self) -> Self: ...
            def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
            def add_child_handler(
                self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
            ) -> None: ...
            def remove_child_handler(self, pid: int) -> bool: ...

        @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
        class FastChildWatcher(BaseChildWatcher):
            def __enter__(self) -> Self: ...
            def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
            def add_child_handler(
                self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
            ) -> None: ...
            def remove_child_handler(self, pid: int) -> bool: ...

    else:
        class SafeChildWatcher(BaseChildWatcher):
            def __enter__(self) -> Self: ...
            def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
            def add_child_handler(
                self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
            ) -> None: ...
            def remove_child_handler(self, pid: int) -> bool: ...

        class FastChildWatcher(BaseChildWatcher):
            def __enter__(self) -> Self: ...
            def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
            def add_child_handler(
                self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
            ) -> None: ...
            def remove_child_handler(self, pid: int) -> bool: ...

    class _UnixSelectorEventLoop(BaseSelectorEventLoop): ...

    class _UnixDefaultEventLoopPolicy(BaseDefaultEventLoopPolicy):
        if sys.version_info >= (3, 12):
            @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
            def get_child_watcher(self) -> AbstractChildWatcher: ...
            @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
            def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ...
        else:
            def get_child_watcher(self) -> AbstractChildWatcher: ...
            def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ...

    SelectorEventLoop = _UnixSelectorEventLoop

    DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy

    if sys.version_info >= (3, 12):
        @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
        class MultiLoopChildWatcher(AbstractChildWatcher):
            def is_active(self) -> bool: ...
            def close(self) -> None: ...
            def __enter__(self) -> Self: ...
            def __exit__(
                self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
            ) -> None: ...
            def add_child_handler(
                self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
            ) -> None: ...
            def remove_child_handler(self, pid: int) -> bool: ...
            def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...

    else:
        class MultiLoopChildWatcher(AbstractChildWatcher):
            def is_active(self) -> bool: ...
            def close(self) -> None: ...
            def __enter__(self) -> Self: ...
            def __exit__(
                self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
            ) -> None: ...
            def add_child_handler(
                self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
            ) -> None: ...
            def remove_child_handler(self, pid: int) -> bool: ...
            def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...

    class ThreadedChildWatcher(AbstractChildWatcher):
        def is_active(self) -> Literal[True]: ...
        def close(self) -> None: ...
        def __enter__(self) -> Self: ...
        def __exit__(
            self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
        ) -> None: ...
        def __del__(self) -> None: ...
        def add_child_handler(
            self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
        ) -> None: ...
        def remove_child_handler(self, pid: int) -> bool: ...
        def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...

    if sys.version_info >= (3, 9):
        class PidfdChildWatcher(AbstractChildWatcher):
            def __enter__(self) -> Self: ...
            def __exit__(
                self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
            ) -> None: ...
            def is_active(self) -> bool: ...
            def close(self) -> None: ...
            def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
            def add_child_handler(
                self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
            ) -> None: ...
            def remove_child_handler(self, pid: int) -> bool: ...
@@ -1,32 +0,0 @@
from ._base import (
    ALL_COMPLETED as ALL_COMPLETED,
    FIRST_COMPLETED as FIRST_COMPLETED,
    FIRST_EXCEPTION as FIRST_EXCEPTION,
    BrokenExecutor as BrokenExecutor,
    CancelledError as CancelledError,
    Executor as Executor,
    Future as Future,
    InvalidStateError as InvalidStateError,
    TimeoutError as TimeoutError,
    as_completed as as_completed,
    wait as wait,
)
from .process import ProcessPoolExecutor as ProcessPoolExecutor
from .thread import ThreadPoolExecutor as ThreadPoolExecutor

__all__ = (
    "FIRST_COMPLETED",
    "FIRST_EXCEPTION",
    "ALL_COMPLETED",
    "CancelledError",
    "TimeoutError",
    "BrokenExecutor",
    "Future",
    "Executor",
    "wait",
    "as_completed",
    "ProcessPoolExecutor",
    "ThreadPoolExecutor",
)

def __dir__() -> tuple[str, ...]: ...
@@ -1,16 +0,0 @@
from typing import Any, TypeVar

__all__ = ["Error", "copy", "deepcopy"]

_T = TypeVar("_T")

# None in CPython but non-None in Jython
PyStringMap: Any

# Note: memo and _nil are internal kwargs.
def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = []) -> _T: ...
def copy(x: _T) -> _T: ...

class Error(Exception): ...

error = Error
@@ -1,99 +0,0 @@
from _typeshed import BytesPath, Incomplete, StrOrBytesPath, StrPath, Unused
from abc import abstractmethod
from collections.abc import Callable, Iterable
from distutils.dist import Distribution
from distutils.file_util import _BytesPathT, _StrPathT
from typing import Any, ClassVar, Literal, overload

class Command:
    distribution: Distribution
    # Any to work around variance issues
    sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]]
    def __init__(self, dist: Distribution) -> None: ...
    @abstractmethod
    def initialize_options(self) -> None: ...
    @abstractmethod
    def finalize_options(self) -> None: ...
    @abstractmethod
    def run(self) -> None: ...
    def announce(self, msg: str, level: int = 1) -> None: ...
    def debug_print(self, msg: str) -> None: ...
    def ensure_string(self, option: str, default: str | None = None) -> None: ...
    def ensure_string_list(self, option: str | list[str]) -> None: ...
    def ensure_filename(self, option: str) -> None: ...
    def ensure_dirname(self, option: str) -> None: ...
    def get_command_name(self) -> str: ...
    def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ...
    def get_finalized_command(self, command: str, create: bool | Literal[0, 1] = 1) -> Command: ...
    def reinitialize_command(self, command: Command | str, reinit_subcommands: bool | Literal[0, 1] = 0) -> Command: ...
    def run_command(self, command: str) -> None: ...
    def get_sub_commands(self) -> list[str]: ...
    def warn(self, msg: str) -> None: ...
    def execute(self, func: Callable[..., object], args: Iterable[Any], msg: str | None = None, level: int = 1) -> None: ...
    def mkpath(self, name: str, mode: int = 0o777) -> None: ...
    @overload
    def copy_file(
        self,
        infile: StrPath,
        outfile: _StrPathT,
        preserve_mode: bool | Literal[0, 1] = 1,
        preserve_times: bool | Literal[0, 1] = 1,
        link: str | None = None,
        level: Unused = 1,
    ) -> tuple[_StrPathT | str, bool]: ...
    @overload
    def copy_file(
        self,
        infile: BytesPath,
        outfile: _BytesPathT,
        preserve_mode: bool | Literal[0, 1] = 1,
        preserve_times: bool | Literal[0, 1] = 1,
        link: str | None = None,
        level: Unused = 1,
    ) -> tuple[_BytesPathT | bytes, bool]: ...
    def copy_tree(
        self,
        infile: StrPath,
        outfile: str,
        preserve_mode: bool | Literal[0, 1] = 1,
        preserve_times: bool | Literal[0, 1] = 1,
        preserve_symlinks: bool | Literal[0, 1] = 0,
        level: Unused = 1,
    ) -> list[str]: ...
    @overload
    def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: ...
    @overload
    def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes: ...
    def spawn(self, cmd: Iterable[str], search_path: bool | Literal[0, 1] = 1, level: Unused = 1) -> None: ...
    @overload
    def make_archive(
        self,
        base_name: str,
        format: str,
        root_dir: StrOrBytesPath | None = None,
        base_dir: str | None = None,
        owner: str | None = None,
        group: str | None = None,
    ) -> str: ...
    @overload
    def make_archive(
        self,
        base_name: StrPath,
        format: str,
        root_dir: StrOrBytesPath,
        base_dir: str | None = None,
        owner: str | None = None,
        group: str | None = None,
    ) -> str: ...
    def make_file(
        self,
        infiles: str | list[str] | tuple[str, ...],
        outfile: StrOrBytesPath,
        func: Callable[..., object],
        args: list[Any],
        exec_msg: str | None = None,
        skip_msg: str | None = None,
        level: Unused = 1,
    ) -> None: ...
    def ensure_finalized(self) -> None: ...
    def dump_options(self, header: Incomplete | None = None, indent: str = "") -> None: ...
@@ -1,25 +0,0 @@
from typing import Any

from ..cmd import Command

def show_formats() -> None: ...

class bdist(Command):
    description: str
    user_options: Any
    boolean_options: Any
    help_options: Any
    no_format_option: Any
    default_format: Any
    format_commands: Any
    format_command: Any
    bdist_base: Any
    plat_name: Any
    formats: Any
    dist_dir: Any
    skip_build: int
    group: Any
    owner: Any
    def initialize_options(self) -> None: ...
    def finalize_options(self) -> None: ...
    def run(self) -> None: ...
@@ -1 +0,0 @@
DEBUG: bool | None
@@ -1,149 +0,0 @@
from _typeshed import Incomplete, StrOrBytesPath, StrPath, SupportsWrite
from collections.abc import Iterable, Mapping
from distutils.cmd import Command
from re import Pattern
from typing import IO, Any, ClassVar, Literal, TypeVar, overload
from typing_extensions import TypeAlias

command_re: Pattern[str]

_OptionsList: TypeAlias = list[tuple[str, str | None, str, int] | tuple[str, str | None, str]]
_CommandT = TypeVar("_CommandT", bound=Command)

class DistributionMetadata:
    def __init__(self, path: StrOrBytesPath | None = None) -> None: ...
    name: str | None
    version: str | None
    author: str | None
    author_email: str | None
    maintainer: str | None
    maintainer_email: str | None
    url: str | None
    license: str | None
    description: str | None
    long_description: str | None
    keywords: str | list[str] | None
    platforms: str | list[str] | None
    classifiers: str | list[str] | None
    download_url: str | None
    provides: list[str] | None
    requires: list[str] | None
    obsoletes: list[str] | None
    def read_pkg_file(self, file: IO[str]) -> None: ...
    def write_pkg_info(self, base_dir: StrPath) -> None: ...
    def write_pkg_file(self, file: SupportsWrite[str]) -> None: ...
    def get_name(self) -> str: ...
    def get_version(self) -> str: ...
    def get_fullname(self) -> str: ...
    def get_author(self) -> str: ...
    def get_author_email(self) -> str: ...
    def get_maintainer(self) -> str: ...
    def get_maintainer_email(self) -> str: ...
    def get_contact(self) -> str: ...
    def get_contact_email(self) -> str: ...
    def get_url(self) -> str: ...
    def get_license(self) -> str: ...
    def get_licence(self) -> str: ...
    def get_description(self) -> str: ...
    def get_long_description(self) -> str: ...
    def get_keywords(self) -> str | list[str]: ...
    def get_platforms(self) -> str | list[str]: ...
    def get_classifiers(self) -> str | list[str]: ...
    def get_download_url(self) -> str: ...
    def get_requires(self) -> list[str]: ...
    def set_requires(self, value: Iterable[str]) -> None: ...
    def get_provides(self) -> list[str]: ...
    def set_provides(self, value: Iterable[str]) -> None: ...
    def get_obsoletes(self) -> list[str]: ...
    def set_obsoletes(self, value: Iterable[str]) -> None: ...

class Distribution:
    cmdclass: dict[str, type[Command]]
    metadata: DistributionMetadata
    def __init__(self, attrs: Mapping[str, Any] | None = None) -> None: ...
    def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ...
    def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ...
    @overload
    def get_command_obj(self, command: str, create: Literal[1, True] = 1) -> Command: ...
    @overload
    def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ...
    global_options: ClassVar[_OptionsList]
    common_usage: ClassVar[str]
    display_options: ClassVar[_OptionsList]
    display_option_names: ClassVar[list[str]]
    negative_opt: ClassVar[dict[str, str]]
    verbose: int
    dry_run: int
    help: int
    command_packages: list[str] | None
    script_name: str | None
    script_args: list[str] | None
    command_options: dict[str, dict[str, tuple[str, str]]]
    dist_files: list[tuple[str, str, str]]
    packages: Incomplete
    package_data: dict[str, list[str]]
    package_dir: Incomplete
    py_modules: Incomplete
    libraries: Incomplete
    headers: Incomplete
    ext_modules: Incomplete
    ext_package: Incomplete
    include_dirs: Incomplete
    extra_path: Incomplete
    scripts: Incomplete
    data_files: Incomplete
    password: str
    command_obj: Incomplete
    have_run: Incomplete
    want_user_cfg: bool
    def dump_option_dicts(
        self, header: Incomplete | None = None, commands: Incomplete | None = None, indent: str = ""
    ) -> None: ...
    def find_config_files(self): ...
    commands: Incomplete
    def parse_command_line(self): ...
    def finalize_options(self) -> None: ...
    def handle_display_options(self, option_order): ...
    def print_command_list(self, commands, header, max_length) -> None: ...
    def print_commands(self) -> None: ...
    def get_command_list(self): ...
    def get_command_packages(self): ...
    def get_command_class(self, command: str) -> type[Command]: ...
    @overload
    def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: ...
    @overload
    def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ...
    def announce(self, msg, level: int = 2) -> None: ...
    def run_commands(self) -> None: ...
    def run_command(self, command: str) -> None: ...
    def has_pure_modules(self) -> bool: ...
    def has_ext_modules(self) -> bool: ...
    def has_c_libraries(self) -> bool: ...
    def has_modules(self) -> bool: ...
    def has_headers(self) -> bool: ...
    def has_scripts(self) -> bool: ...
    def has_data_files(self) -> bool: ...
    def is_pure(self) -> bool: ...

    # Getter methods generated in __init__
    def get_name(self) -> str: ...
    def get_version(self) -> str: ...
    def get_fullname(self) -> str: ...
    def get_author(self) -> str: ...
    def get_author_email(self) -> str: ...
    def get_maintainer(self) -> str: ...
    def get_maintainer_email(self) -> str: ...
    def get_contact(self) -> str: ...
    def get_contact_email(self) -> str: ...
    def get_url(self) -> str: ...
    def get_license(self) -> str: ...
    def get_licence(self) -> str: ...
    def get_description(self) -> str: ...
    def get_long_description(self) -> str: ...
    def get_keywords(self) -> str | list[str]: ...
    def get_platforms(self) -> str | list[str]: ...
    def get_classifiers(self) -> str | list[str]: ...
    def get_download_url(self) -> str: ...
    def get_requires(self) -> list[str]: ...
    def get_provides(self) -> list[str]: ...
    def get_obsoletes(self) -> list[str]: ...
@@ -1,67 +0,0 @@
ENDMARKER: int
NAME: int
NUMBER: int
STRING: int
NEWLINE: int
INDENT: int
DEDENT: int
LPAR: int
RPAR: int
LSQB: int
RSQB: int
COLON: int
COMMA: int
SEMI: int
PLUS: int
MINUS: int
STAR: int
SLASH: int
VBAR: int
AMPER: int
LESS: int
GREATER: int
EQUAL: int
DOT: int
PERCENT: int
BACKQUOTE: int
LBRACE: int
RBRACE: int
EQEQUAL: int
NOTEQUAL: int
LESSEQUAL: int
GREATEREQUAL: int
TILDE: int
CIRCUMFLEX: int
LEFTSHIFT: int
RIGHTSHIFT: int
DOUBLESTAR: int
PLUSEQUAL: int
MINEQUAL: int
STAREQUAL: int
SLASHEQUAL: int
PERCENTEQUAL: int
AMPEREQUAL: int
VBAREQUAL: int
CIRCUMFLEXEQUAL: int
LEFTSHIFTEQUAL: int
RIGHTSHIFTEQUAL: int
DOUBLESTAREQUAL: int
DOUBLESLASH: int
DOUBLESLASHEQUAL: int
OP: int
COMMENT: int
NL: int
RARROW: int
AT: int
ATEQUAL: int
AWAIT: int
ASYNC: int
ERRORTOKEN: int
COLONEQUAL: int
N_TOKENS: int
NT_OFFSET: int
tok_name: dict[int, str]

def ISTERMINAL(x: int) -> bool: ...
def ISNONTERMINAL(x: int) -> bool: ...
def ISEOF(x: int) -> bool: ...
@@ -1,19 +0,0 @@
import sys
from collections.abc import Callable, Iterable
from typing import Literal
from typing_extensions import TypeAlias

if sys.platform != "win32":
    __all__ = ["openpty", "fork", "spawn"]
    _Reader: TypeAlias = Callable[[int], bytes]

    STDIN_FILENO: Literal[0]
    STDOUT_FILENO: Literal[1]
    STDERR_FILENO: Literal[2]

    CHILD: Literal[0]
    def openpty() -> tuple[int, int]: ...
    def master_open() -> tuple[int, str]: ...  # deprecated, use openpty()
    def slave_open(tty_name: str) -> int: ...  # deprecated, use openpty()
    def fork() -> tuple[int, int]: ...
    def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ...
@@ -1,49 +0,0 @@
import sys

codes: dict[str, int]
messages: dict[int, str]

XML_ERROR_ABORTED: str
XML_ERROR_ASYNC_ENTITY: str
XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF: str
XML_ERROR_BAD_CHAR_REF: str
XML_ERROR_BINARY_ENTITY_REF: str
XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING: str
XML_ERROR_DUPLICATE_ATTRIBUTE: str
XML_ERROR_ENTITY_DECLARED_IN_PE: str
XML_ERROR_EXTERNAL_ENTITY_HANDLING: str
XML_ERROR_FEATURE_REQUIRES_XML_DTD: str
XML_ERROR_FINISHED: str
XML_ERROR_INCOMPLETE_PE: str
XML_ERROR_INCORRECT_ENCODING: str
XML_ERROR_INVALID_TOKEN: str
XML_ERROR_JUNK_AFTER_DOC_ELEMENT: str
XML_ERROR_MISPLACED_XML_PI: str
XML_ERROR_NOT_STANDALONE: str
XML_ERROR_NOT_SUSPENDED: str
XML_ERROR_NO_ELEMENTS: str
XML_ERROR_NO_MEMORY: str
XML_ERROR_PARAM_ENTITY_REF: str
XML_ERROR_PARTIAL_CHAR: str
XML_ERROR_PUBLICID: str
XML_ERROR_RECURSIVE_ENTITY_REF: str
XML_ERROR_SUSPENDED: str
XML_ERROR_SUSPEND_PE: str
XML_ERROR_SYNTAX: str
XML_ERROR_TAG_MISMATCH: str
XML_ERROR_TEXT_DECL: str
XML_ERROR_UNBOUND_PREFIX: str
XML_ERROR_UNCLOSED_CDATA_SECTION: str
XML_ERROR_UNCLOSED_TOKEN: str
XML_ERROR_UNDECLARING_PREFIX: str
XML_ERROR_UNDEFINED_ENTITY: str
XML_ERROR_UNEXPECTED_STATE: str
XML_ERROR_UNKNOWN_ENCODING: str
XML_ERROR_XML_DECL: str

if sys.version_info >= (3, 11):
    XML_ERROR_RESERVED_PREFIX_XML: str
    XML_ERROR_RESERVED_PREFIX_XMLNS: str
    XML_ERROR_RESERVED_NAMESPACE_URI: str
    XML_ERROR_INVALID_ARGUMENT: str
    XML_ERROR_NO_BUFFER: str
    XML_ERROR_AMPLIFICATION_LIMIT_BREACH: str
@@ -1,11 +0,0 @@
XML_CTYPE_ANY: int
XML_CTYPE_CHOICE: int
XML_CTYPE_EMPTY: int
XML_CTYPE_MIXED: int
XML_CTYPE_NAME: int
XML_CTYPE_SEQ: int

XML_CQUANT_NONE: int
XML_CQUANT_OPT: int
XML_CQUANT_PLUS: int
XML_CQUANT_REP: int
@@ -1,55 +0,0 @@
import sys
from typing import Literal, overload

if sys.platform != "win32":
    LOG_ALERT: Literal[1]
    LOG_AUTH: Literal[32]
    LOG_AUTHPRIV: Literal[80]
    LOG_CONS: Literal[2]
    LOG_CRIT: Literal[2]
    LOG_CRON: Literal[72]
    LOG_DAEMON: Literal[24]
    LOG_DEBUG: Literal[7]
    LOG_EMERG: Literal[0]
    LOG_ERR: Literal[3]
    LOG_INFO: Literal[6]
    LOG_KERN: Literal[0]
    LOG_LOCAL0: Literal[128]
    LOG_LOCAL1: Literal[136]
    LOG_LOCAL2: Literal[144]
    LOG_LOCAL3: Literal[152]
    LOG_LOCAL4: Literal[160]
    LOG_LOCAL5: Literal[168]
    LOG_LOCAL6: Literal[176]
    LOG_LOCAL7: Literal[184]
    LOG_LPR: Literal[48]
    LOG_MAIL: Literal[16]
    LOG_NDELAY: Literal[8]
    LOG_NEWS: Literal[56]
    LOG_NOTICE: Literal[5]
    LOG_NOWAIT: Literal[16]
    LOG_ODELAY: Literal[4]
    LOG_PERROR: Literal[32]
    LOG_PID: Literal[1]
    LOG_SYSLOG: Literal[40]
    LOG_USER: Literal[8]
    LOG_UUCP: Literal[64]
    LOG_WARNING: Literal[4]

    if sys.version_info >= (3, 13):
        LOG_FTP: Literal[88]
        LOG_INSTALL: Literal[112]
        LOG_LAUNCHD: Literal[192]
        LOG_NETINFO: Literal[96]
        LOG_RAS: Literal[120]
        LOG_REMOTEAUTH: Literal[104]

    def LOG_MASK(pri: int, /) -> int: ...
    def LOG_UPTO(pri: int, /) -> int: ...
    def closelog() -> None: ...
    def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ...
    def setlogmask(maskpri: int, /) -> int: ...
    @overload
    def syslog(priority: int, message: str) -> None: ...
    @overload
    def syslog(message: str) -> None: ...
@@ -1,80 +0,0 @@
from typing import Literal

# These are not actually bools. See #4669
NO: bool
YES: bool
TRUE: bool
FALSE: bool
ON: bool
OFF: bool
N: Literal["n"]
S: Literal["s"]
W: Literal["w"]
E: Literal["e"]
NW: Literal["nw"]
SW: Literal["sw"]
NE: Literal["ne"]
SE: Literal["se"]
NS: Literal["ns"]
EW: Literal["ew"]
NSEW: Literal["nsew"]
CENTER: Literal["center"]
NONE: Literal["none"]
X: Literal["x"]
Y: Literal["y"]
BOTH: Literal["both"]
LEFT: Literal["left"]
TOP: Literal["top"]
RIGHT: Literal["right"]
BOTTOM: Literal["bottom"]
RAISED: Literal["raised"]
SUNKEN: Literal["sunken"]
FLAT: Literal["flat"]
RIDGE: Literal["ridge"]
GROOVE: Literal["groove"]
SOLID: Literal["solid"]
HORIZONTAL: Literal["horizontal"]
VERTICAL: Literal["vertical"]
NUMERIC: Literal["numeric"]
CHAR: Literal["char"]
WORD: Literal["word"]
BASELINE: Literal["baseline"]
INSIDE: Literal["inside"]
OUTSIDE: Literal["outside"]
SEL: Literal["sel"]
SEL_FIRST: Literal["sel.first"]
SEL_LAST: Literal["sel.last"]
END: Literal["end"]
INSERT: Literal["insert"]
CURRENT: Literal["current"]
ANCHOR: Literal["anchor"]
ALL: Literal["all"]
NORMAL: Literal["normal"]
DISABLED: Literal["disabled"]
ACTIVE: Literal["active"]
HIDDEN: Literal["hidden"]
CASCADE: Literal["cascade"]
CHECKBUTTON: Literal["checkbutton"]
COMMAND: Literal["command"]
RADIOBUTTON: Literal["radiobutton"]
SEPARATOR: Literal["separator"]
SINGLE: Literal["single"]
BROWSE: Literal["browse"]
MULTIPLE: Literal["multiple"]
EXTENDED: Literal["extended"]
DOTBOX: Literal["dotbox"]
UNDERLINE: Literal["underline"]
PIESLICE: Literal["pieslice"]
CHORD: Literal["chord"]
ARC: Literal["arc"]
FIRST: Literal["first"]
LAST: Literal["last"]
BUTT: Literal["butt"]
PROJECTING: Literal["projecting"]
ROUND: Literal["round"]
BEVEL: Literal["bevel"]
MITER: Literal["miter"]
MOVETO: Literal["moveto"]
SCROLL: Literal["scroll"]
UNITS: Literal["units"]
PAGES: Literal["pages"]
@@ -1,28 +0,0 @@
import sys
from _typeshed import ReadableBuffer
from typing import Literal, overload

if sys.platform == "win32":
    SND_APPLICATION: Literal[128]
    SND_FILENAME: Literal[131072]
    SND_ALIAS: Literal[65536]
    SND_LOOP: Literal[8]
    SND_MEMORY: Literal[4]
    SND_PURGE: Literal[64]
    SND_ASYNC: Literal[1]
    SND_NODEFAULT: Literal[2]
    SND_NOSTOP: Literal[16]
    SND_NOWAIT: Literal[8192]

    MB_ICONASTERISK: Literal[64]
    MB_ICONEXCLAMATION: Literal[48]
    MB_ICONHAND: Literal[16]
    MB_ICONQUESTION: Literal[32]
    MB_OK: Literal[0]
    def Beep(frequency: int, duration: int) -> None: ...
    # Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible
    @overload
    def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ...
    @overload
    def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ...
    def MessageBeep(type: int = 0) -> None: ...
@@ -11,25 +11,44 @@ repository = { workspace = true }
license = { workspace = true }

[dependencies]
red_knot_module_resolver = { workspace = true }
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
ruff_python_literal = { workspace = true }

anyhow = { workspace = true }
bitflags = { workspace = true }
indexmap = { workspace = true }
camino = { workspace = true }
compact_str = { workspace = true }
countme = { workspace = true }
once_cell = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
smallvec = { workspace = true }
smol_str = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true, features = ["zstd", "deflate"] }

[dev-dependencies]
anyhow = { workspace = true }
ruff_db = { workspace = true, features = ["os", "testing"] }
ruff_python_parser = { workspace = true }

anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }

[lints]
workspace = true
@@ -1,9 +1,9 @@
# Red Knot

A work-in-progress multifile module resolver for Ruff.
Semantic analysis for the red-knot project.

## Vendored types for the stdlib

This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_module_resolver/vendor/typeshed`. The file `crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_python_semantic/vendor/typeshed`. The file `crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.

The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
@@ -3,7 +3,7 @@
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_module_resolver/vendor/typeshed` change.
//! in `crates/red_knot_python_semantic/vendor/typeshed` change.

use std::fs::File;
use std::path::Path;
@@ -23,8 +23,21 @@ const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
    let mut zip = ZipWriter::new(writer);

    // Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
    // [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM) which complicates the build
    // by a lot. Deflated compression is slower but it shouldn't matter much for the WASM use case
    // (WASM itself is already slower than a native build for a specific platform).
    // We can't use `#[cfg(...)]` here because the target-arch in a build script is the
    // architecture of the system running the build script and not the architecture of the build-target.
    // That's why we use the `TARGET` environment variable here.
    let method = if std::env::var("TARGET").unwrap().contains("wasm32") {
        CompressionMethod::Deflated
    } else {
        CompressionMethod::Zstd
    };

    let options = FileOptions::default()
        .compression_method(CompressionMethod::Zstd)
        .compression_method(method)
        .unix_permissions(0o644);

    for entry in walkdir::WalkDir::new(directory_path) {
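The comment above flags a subtlety that applies to any build script: `cfg!(...)` inside `build.rs` describes the machine compiling the script, while the `TARGET` environment variable names the platform being built for. A minimal standalone sketch of the pattern (the `use_deflate` cfg name is illustrative):

```rust
// build.rs sketch: branch on the *build target*, not the host.
// Cargo sets `TARGET` for build scripts, whereas `cfg!(target_arch = "wasm32")`
// evaluated here would report the host architecture instead.
fn main() {
    let target = std::env::var("TARGET").expect("cargo always sets TARGET for build scripts");
    if target.contains("wasm32") {
        // e.g. enable a pure-Rust fallback that does not require clang
        println!("cargo:rustc-cfg=use_deflate");
    }
}
```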
@@ -27,13 +27,14 @@ pub struct AstNodeRef<T> {

#[allow(unsafe_code)]
impl<T> AstNodeRef<T> {
    /// Creates a new `AstNodeRef` that reference `node`. The `parsed` is the [`ParsedModule`] to which
    /// the `AstNodeRef` belongs.
    /// Creates a new `AstNodeRef` that reference `node`. The `parsed` is the [`ParsedModule`] to
    /// which the `AstNodeRef` belongs.
    ///
    /// ## Safety
    /// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the [`ParsedModule`] to
    /// which `node` belongs. It's the caller's responsibility to ensure that the invariant `node belongs to parsed` is upheld.
    ///
    /// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
    /// [`ParsedModule`] to which `node` belongs. It's the caller's responsibility to ensure that
    /// the invariant `node belongs to parsed` is upheld.
    pub(super) unsafe fn new(parsed: ParsedModule, node: &T) -> Self {
        Self {
            _parsed: parsed,
@@ -43,8 +44,8 @@ impl<T> AstNodeRef<T> {

    /// Returns a reference to the wrapped node.
    pub fn node(&self) -> &T {
        // SAFETY: Holding on to `parsed` ensures that the AST to which `node` belongs is still alive
        // and not moved.
        // SAFETY: Holding on to `parsed` ensures that the AST to which `node` belongs is still
        // alive and not moved.
        unsafe { self.node.as_ref() }
    }
}
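The safety contract above (the node pointer is only valid while its owning `ParsedModule` is held) is the usual owner-plus-raw-pointer pattern. A self-contained sketch of that pattern with a plain `String` owner, not the actual `AstNodeRef` internals:

```rust
use std::ptr::NonNull;
use std::sync::Arc;

// Owner-plus-pointer sketch: `text` stays valid because `_owner` keeps the
// backing allocation alive; dropping the owner first would dangle the pointer.
struct StrRef {
    _owner: Arc<String>,
    text: NonNull<str>,
}

impl StrRef {
    fn new(owner: Arc<String>) -> Self {
        let text = NonNull::from(owner.as_str());
        Self { _owner: owner, text }
    }

    fn get(&self) -> &str {
        // SAFETY: `_owner` is held by `self`, so the `String` backing `text`
        // has not been dropped, and its heap buffer has a stable address.
        unsafe { self.text.as_ref() }
    }
}
```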
crates/red_knot_python_semantic/src/builtins.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::Db;

/// Salsa query to get the builtins scope.
///
/// Can return None if a custom typeshed is used that is missing `builtins.pyi`.
#[salsa::tracked]
pub(crate) fn builtins_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
    let builtins_name =
        ModuleName::new_static("builtins").expect("Expected 'builtins' to be a valid module name");
    let builtins_file = resolve_module(db, builtins_name)?.file();
    Some(global_scope(db, builtins_file))
}
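A sketch of how a caller might consume `builtins_scope`, treating a missing `builtins.pyi` as an absent symbol rather than an error. The `symbol_ty_by_name` helper and `Type` are hypothetical stand-ins here, not APIs introduced by this diff:

```rust
// Hypothetical lookup layered on `builtins_scope`; `symbol_ty_by_name` and
// `Type` are illustrative only.
fn builtin_symbol_ty<'db>(db: &'db dyn Db, name: &str) -> Option<Type<'db>> {
    // `builtins_scope` is `None` when a custom typeshed lacks `builtins.pyi`,
    // so the lookup degrades to "not found" instead of panicking.
    let scope = builtins_scope(db)?;
    symbol_ty_by_name(db, scope, name)
}
```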
@@ -1,55 +1,30 @@
use salsa::DbWithJar;

use ruff_db::files::File;
use ruff_db::{Db as SourceDb, Upcast};

use red_knot_module_resolver::Db as ResolverDb;

use crate::semantic_index::symbol::{public_symbols_map, scopes_map, PublicSymbolId, ScopeId};
use crate::semantic_index::{root_scope, semantic_index, symbol_table};
use crate::types::{infer_types, public_symbol_ty};

#[salsa::jar(db=Db)]
pub struct Jar(
    ScopeId<'_>,
    PublicSymbolId<'_>,
    symbol_table,
    scopes_map,
    root_scope,
    semantic_index,
    infer_types,
    public_symbol_ty,
    public_symbols_map,
);

/// Database giving access to semantic information about a Python program.
pub trait Db:
    SourceDb + ResolverDb + DbWithJar<Jar> + Upcast<dyn SourceDb> + Upcast<dyn ResolverDb>
{
#[salsa::db]
pub trait Db: SourceDb + Upcast<dyn SourceDb> {
    fn is_file_open(&self, file: File) -> bool;
}

#[cfg(test)]
pub(crate) mod tests {
    use std::fmt::Formatter;
    use std::marker::PhantomData;
    use std::sync::Arc;

    use salsa::id::AsId;
    use salsa::ingredient::Ingredient;
    use salsa::storage::HasIngredientsFor;
    use salsa::DebugWithDb;
    use crate::module_resolver::vendored_typeshed_stubs;
    use ruff_db::files::{File, Files};
    use ruff_db::system::{DbWithTestSystem, System, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;
    use ruff_db::{Db as SourceDb, Upcast};

    use red_knot_module_resolver::{Db as ResolverDb, Jar as ResolverJar};
    use ruff_db::file_system::{FileSystem, MemoryFileSystem, OsFileSystem};
    use ruff_db::vfs::Vfs;
    use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
    use super::Db;

    use super::{Db, Jar};

    #[salsa::db(Jar, ResolverJar, SourceJar)]
    #[salsa::db]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        vfs: Vfs,
        file_system: TestFileSystem,
        files: Files,
        system: TestSystem,
        vendored: VendoredFileSystem,
        events: std::sync::Arc<std::sync::Mutex<Vec<salsa::Event>>>,
    }

@@ -57,29 +32,13 @@ pub(crate) mod tests {
    pub(crate) fn new() -> Self {
        Self {
            storage: salsa::Storage::default(),
            file_system: TestFileSystem::Memory(MemoryFileSystem::default()),
            system: TestSystem::default(),
            vendored: vendored_typeshed_stubs().clone(),
            events: std::sync::Arc::default(),
            vfs: Vfs::with_stubbed_vendored(),
            files: Files::default(),
        }
    }

    /// Returns the memory file system.
    ///
    /// ## Panics
    /// If this test db isn't using a memory file system.
    pub(crate) fn memory_file_system(&self) -> &MemoryFileSystem {
        if let TestFileSystem::Memory(fs) = &self.file_system {
            fs
        } else {
            panic!("The test db is not using a memory file system");
        }
    }

    #[allow(unused)]
    pub(crate) fn vfs_mut(&mut self) -> &mut Vfs {
        &mut self.vfs
    }

    /// Takes the salsa events.
    ///
    /// ## Panics
@@ -100,16 +59,28 @@ pub(crate) mod tests {
        }
    }

    impl SourceDb for TestDb {
        fn file_system(&self) -> &dyn FileSystem {
            match &self.file_system {
                TestFileSystem::Memory(fs) => fs,
                TestFileSystem::Os(fs) => fs,
            }
    impl DbWithTestSystem for TestDb {
        fn test_system(&self) -> &TestSystem {
            &self.system
        }

        fn vfs(&self) -> &Vfs {
            &self.vfs
        fn test_system_mut(&mut self) -> &mut TestSystem {
            &mut self.system
        }
    }

    #[salsa::db]
    impl SourceDb for TestDb {
        fn vendored(&self) -> &VendoredFileSystem {
            &self.vendored
        }

        fn system(&self) -> &dyn System {
            &self.system
        }

        fn files(&self) -> &Files {
            &self.files
        }
    }

@@ -117,144 +88,25 @@ pub(crate) mod tests {
        fn upcast(&self) -> &(dyn SourceDb + 'static) {
            self
        }
    }

    impl Upcast<dyn ResolverDb> for TestDb {
        fn upcast(&self) -> &(dyn ResolverDb + 'static) {
        fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
            self
        }
    }

    impl red_knot_module_resolver::Db for TestDb {}
    impl Db for TestDb {}
    #[salsa::db]
    impl Db for TestDb {
        fn is_file_open(&self, file: File) -> bool {
            !file.path(self).is_vendored_path()
        }
    }

    #[salsa::db]
    impl salsa::Database for TestDb {
        fn salsa_event(&self, event: salsa::Event) {
            tracing::trace!("event: {:?}", event.debug(self));
        fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
            let event = event();
            tracing::trace!("event: {event:?}");
            let mut events = self.events.lock().unwrap();
            events.push(event);
        }
    }

    impl salsa::ParallelDatabase for TestDb {
        fn snapshot(&self) -> salsa::Snapshot<Self> {
            salsa::Snapshot::new(Self {
                storage: self.storage.snapshot(),
                vfs: self.vfs.snapshot(),
                file_system: match &self.file_system {
                    TestFileSystem::Memory(memory) => TestFileSystem::Memory(memory.snapshot()),
                    TestFileSystem::Os(fs) => TestFileSystem::Os(fs.snapshot()),
                },
                events: self.events.clone(),
            })
        }
    }

    enum TestFileSystem {
        Memory(MemoryFileSystem),
        #[allow(dead_code)]
        Os(OsFileSystem),
    }

    pub(crate) fn assert_will_run_function_query<'db, C, Db, Jar>(
        db: &'db Db,
        to_function: impl FnOnce(&C) -> &salsa::function::FunctionIngredient<C>,
        input: &C::Input<'db>,
        events: &[salsa::Event],
    ) where
        C: salsa::function::Configuration<Jar = Jar>
            + salsa::storage::IngredientsFor<Jar = Jar, Ingredients = C>,
        Jar: HasIngredientsFor<C>,
        Db: salsa::DbWithJar<Jar>,
        C::Input<'db>: AsId,
    {
        will_run_function_query(db, to_function, input, events, true);
    }

    pub(crate) fn assert_will_not_run_function_query<'db, C, Db, Jar>(
        db: &'db Db,
        to_function: impl FnOnce(&C) -> &salsa::function::FunctionIngredient<C>,
        input: &C::Input<'db>,
        events: &[salsa::Event],
    ) where
        C: salsa::function::Configuration<Jar = Jar>
            + salsa::storage::IngredientsFor<Jar = Jar, Ingredients = C>,
        Jar: HasIngredientsFor<C>,
        Db: salsa::DbWithJar<Jar>,
        C::Input<'db>: AsId,
    {
        will_run_function_query(db, to_function, input, events, false);
    }

    fn will_run_function_query<'db, C, Db, Jar>(
        db: &'db Db,
        to_function: impl FnOnce(&C) -> &salsa::function::FunctionIngredient<C>,
        input: &C::Input<'db>,
        events: &[salsa::Event],
        should_run: bool,
    ) where
        C: salsa::function::Configuration<Jar = Jar>
            + salsa::storage::IngredientsFor<Jar = Jar, Ingredients = C>,
        Jar: HasIngredientsFor<C>,
        Db: salsa::DbWithJar<Jar>,
        C::Input<'db>: AsId,
    {
        let (jar, _) =
            <_ as salsa::storage::HasJar<<C as salsa::storage::IngredientsFor>::Jar>>::jar(db);
        let ingredient = jar.ingredient();

        let function_ingredient = to_function(ingredient);

        let ingredient_index =
            <salsa::function::FunctionIngredient<C> as Ingredient<Db>>::ingredient_index(
                function_ingredient,
            );

        let did_run = events.iter().any(|event| {
            if let salsa::EventKind::WillExecute { database_key } = event.kind {
                database_key.ingredient_index() == ingredient_index
                    && database_key.key_index() == input.as_id()
            } else {
                false
            }
        });

        if should_run && !did_run {
            panic!(
                "Expected query {:?} to run but it didn't",
                DebugIdx {
                    db: PhantomData::<Db>,
                    value_id: input.as_id(),
                    ingredient: function_ingredient,
                }
            );
        } else if !should_run && did_run {
            panic!(
                "Expected query {:?} not to run but it did",
                DebugIdx {
                    db: PhantomData::<Db>,
                    value_id: input.as_id(),
                    ingredient: function_ingredient,
                }
            );
        }
    }

    struct DebugIdx<'a, I, Db>
    where
        I: Ingredient<Db>,
    {
        value_id: salsa::Id,
        ingredient: &'a I,
        db: PhantomData<Db>,
    }

    impl<'a, I, Db> std::fmt::Debug for DebugIdx<'a, I, Db>
    where
        I: Ingredient<Db>,
    {
        fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
            self.ingredient.fmt_index(Some(self.value_id), f)
        }
    }
}
@@ -1,12 +1,26 @@
use std::hash::BuildHasherDefault;

use rustc_hash::FxHasher;

pub use db::Db;
pub use module_name::ModuleName;
pub use module_resolver::{resolve_module, system_module_search_paths, vendored_typeshed_stubs};
pub use program::{Program, ProgramSettings, SearchPathSettings, SitePackages};
pub use python_version::PythonVersion;
pub use semantic_model::{HasTy, SemanticModel};

pub mod ast_node_ref;
mod builtins;
mod db;
pub mod name;
mod module_name;
mod module_resolver;
mod node_key;
mod program;
mod python_version;
pub mod semantic_index;
mod semantic_model;
pub(crate) mod site_packages;
pub mod types;

type FxIndexSet<V> = indexmap::set::IndexSet<V, BuildHasherDefault<FxHasher>>;

pub use db::{Db, Jar};
use rustc_hash::FxHasher;
use std::hash::BuildHasherDefault;
type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;
type FxOrderMap<K, V> = ordermap::map::OrderMap<K, V, BuildHasherDefault<FxHasher>>;
@@ -1,10 +0,0 @@
use std::hash::BuildHasherDefault;

use rustc_hash::FxHasher;

pub mod ast_node_ref;
mod node_key;
pub mod semantic_index;
pub mod types;

pub(crate) type FxIndexSet<V> = indexmap::set::IndexSet<V, BuildHasherDefault<FxHasher>>;
crates/red_knot_python_semantic/src/module_name.rs (new file, 216 lines)
@@ -0,0 +1,216 @@
use std::fmt;
use std::ops::Deref;

use compact_str::{CompactString, ToCompactString};

use ruff_python_stdlib::identifiers::is_identifier;

/// A module name, e.g. `foo.bar`.
///
/// Always normalized to the absolute form (never a relative module name, i.e., never `.foo`).
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ModuleName(compact_str::CompactString);

impl ModuleName {
    /// Creates a new module name for `name`. Returns `Some` if `name` is a valid, absolute
    /// module name and `None` otherwise.
    ///
    /// The module name is invalid if:
    ///
    /// * The name is empty
    /// * The name is relative
    /// * The name ends with a `.`
    /// * The name contains a sequence of multiple dots
    /// * A component of a name (the part between two dots) isn't a valid python identifier.
    #[inline]
    #[must_use]
    pub fn new(name: &str) -> Option<Self> {
        Self::is_valid_name(name).then(|| Self(CompactString::from(name)))
    }

    /// Creates a new module name for `name` where `name` is a static string.
    /// Returns `Some` if `name` is a valid, absolute module name and `None` otherwise.
    ///
    /// The module name is invalid if:
    ///
    /// * The name is empty
    /// * The name is relative
    /// * The name ends with a `.`
    /// * The name contains a sequence of multiple dots
    /// * A component of a name (the part between two dots) isn't a valid python identifier.
    ///
    /// ## Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
    /// assert_eq!(ModuleName::new_static(""), None);
    /// assert_eq!(ModuleName::new_static("..foo"), None);
    /// assert_eq!(ModuleName::new_static(".foo"), None);
    /// assert_eq!(ModuleName::new_static("foo."), None);
    /// assert_eq!(ModuleName::new_static("foo..bar"), None);
    /// assert_eq!(ModuleName::new_static("2000"), None);
    /// ```
    #[inline]
    #[must_use]
    pub fn new_static(name: &'static str) -> Option<Self> {
        Self::is_valid_name(name).then(|| Self(CompactString::const_new(name)))
    }

    #[must_use]
    fn is_valid_name(name: &str) -> bool {
        !name.is_empty() && name.split('.').all(is_identifier)
    }

    /// An iterator over the components of the module name:
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
    /// ```
    #[must_use]
    pub fn components(&self) -> impl DoubleEndedIterator<Item = &str> {
        self.0.split('.')
    }

    /// The name of this module's immediate parent, if it has a parent.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
    /// assert_eq!(ModuleName::new_static("root").unwrap().parent(), None);
    /// ```
    #[must_use]
    pub fn parent(&self) -> Option<ModuleName> {
        let (parent, _) = self.0.rsplit_once('.')?;
        Some(Self(parent.to_compact_string()))
    }

    /// Returns `true` if the name starts with `other`.
    ///
    /// This is equivalent to checking if `self` is a sub-module of `other`.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    ///
    /// assert!(!ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("bar").unwrap()));
    /// assert!(!ModuleName::new_static("foo_bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    /// ```
    #[must_use]
    pub fn starts_with(&self, other: &ModuleName) -> bool {
        let mut self_components = self.components();
        let other_components = other.components();

        for other_component in other_components {
            if self_components.next() != Some(other_component) {
                return false;
            }
        }

        true
    }

    #[must_use]
    #[inline]
    pub fn as_str(&self) -> &str {
        &self.0
    }

    /// Construct a [`ModuleName`] from a sequence of parts.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(&*ModuleName::from_components(["a"]).unwrap(), "a");
    /// assert_eq!(&*ModuleName::from_components(["a", "b"]).unwrap(), "a.b");
    /// assert_eq!(&*ModuleName::from_components(["a", "b", "c"]).unwrap(), "a.b.c");
    ///
    /// assert_eq!(ModuleName::from_components(["a-b"]), None);
    /// assert_eq!(ModuleName::from_components(["a", "a-b"]), None);
    /// assert_eq!(ModuleName::from_components(["a", "b", "a-b-c"]), None);
    /// ```
    #[must_use]
    pub fn from_components<'a>(components: impl IntoIterator<Item = &'a str>) -> Option<Self> {
        let mut components = components.into_iter();
        let first_part = components.next()?;
        if !is_identifier(first_part) {
            return None;
        }
        let name = if let Some(second_part) = components.next() {
            if !is_identifier(second_part) {
                return None;
            }
            let mut name = format!("{first_part}.{second_part}");
            for part in components {
                if !is_identifier(part) {
                    return None;
                }
                name.push('.');
                name.push_str(part);
            }
            CompactString::from(&name)
        } else {
            CompactString::from(first_part)
        };
        Some(Self(name))
    }

    /// Extend `self` with the components of `other`
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// let mut module_name = ModuleName::new_static("foo").unwrap();
    /// module_name.extend(&ModuleName::new_static("bar").unwrap());
    /// assert_eq!(&module_name, "foo.bar");
    /// module_name.extend(&ModuleName::new_static("baz.eggs.ham").unwrap());
    /// assert_eq!(&module_name, "foo.bar.baz.eggs.ham");
    /// ```
    pub fn extend(&mut self, other: &ModuleName) {
        self.0.push('.');
        self.0.push_str(other);
    }
}

impl Deref for ModuleName {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

impl PartialEq<str> for ModuleName {
    fn eq(&self, other: &str) -> bool {
        self.as_str() == other
    }
}

impl PartialEq<ModuleName> for str {
    fn eq(&self, other: &ModuleName) -> bool {
        self == other.as_str()
    }
}

impl std::fmt::Display for ModuleName {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.0)
    }
}
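Taken together, the public API above composes as in this short usage sketch (behavior follows the doctests shown; nothing beyond them is assumed):

```rust
use red_knot_python_semantic::ModuleName;

fn main() {
    // Build a dotted name from validated components.
    let mut name = ModuleName::from_components(["foo", "bar"]).unwrap();
    assert_eq!(&*name, "foo.bar");

    // `extend` appends another absolute name's components.
    name.extend(&ModuleName::new_static("baz").unwrap());
    assert_eq!(&name, "foo.bar.baz");

    // Walk ancestors via `parent` until the root module is reached.
    let mut ancestors = Vec::new();
    let mut current = Some(name);
    while let Some(module) = current {
        current = module.parent();
        ancestors.push(module);
    }
    assert_eq!(ancestors.len(), 3); // foo.bar.baz, foo.bar, foo
}
```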
crates/red_knot_python_semantic/src/module_resolver/mod.rs (new file, 46 lines)
@@ -0,0 +1,46 @@
use std::iter::FusedIterator;

pub(crate) use module::Module;
pub use resolver::resolve_module;
pub(crate) use resolver::{file_to_module, SearchPaths};
use ruff_db::system::SystemPath;
pub use typeshed::vendored_typeshed_stubs;

use crate::module_resolver::resolver::search_paths;
use crate::Db;
use resolver::SearchPathIterator;

mod module;
mod path;
mod resolver;
mod typeshed;

#[cfg(test)]
mod testing;

/// Returns an iterator over all search paths pointing to a system path
pub fn system_module_search_paths(db: &dyn Db) -> SystemModuleSearchPathsIter {
    SystemModuleSearchPathsIter {
        inner: search_paths(db),
    }
}

pub struct SystemModuleSearchPathsIter<'db> {
    inner: SearchPathIterator<'db>,
}

impl<'db> Iterator for SystemModuleSearchPathsIter<'db> {
    type Item = &'db SystemPath;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let next = self.inner.next()?;

            if let Some(system_path) = next.as_system_path() {
                return Some(system_path);
            }
        }
    }
}

impl FusedIterator for SystemModuleSearchPathsIter<'_> {}
@@ -0,0 +1,85 @@
use std::fmt::Formatter;
use std::sync::Arc;

use ruff_db::files::File;

use super::path::SearchPath;
use crate::module_name::ModuleName;

/// Representation of a Python module.
#[derive(Clone, PartialEq, Eq)]
pub struct Module {
    inner: Arc<ModuleInner>,
}

impl Module {
    pub(crate) fn new(
        name: ModuleName,
        kind: ModuleKind,
        search_path: SearchPath,
        file: File,
    ) -> Self {
        Self {
            inner: Arc::new(ModuleInner {
                name,
                kind,
                search_path,
                file,
            }),
        }
    }

    /// The absolute name of the module (e.g. `foo.bar`)
    pub fn name(&self) -> &ModuleName {
        &self.inner.name
    }

    /// The file to the source code that defines this module
    pub fn file(&self) -> File {
        self.inner.file
    }

    /// The search path from which the module was resolved.
    pub(crate) fn search_path(&self) -> &SearchPath {
        &self.inner.search_path
    }

    /// Determine whether this module is a single-file module or a package
    pub fn kind(&self) -> ModuleKind {
        self.inner.kind
    }
}

impl std::fmt::Debug for Module {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("name", &self.name())
            .field("kind", &self.kind())
            .field("file", &self.file())
            .field("search_path", &self.search_path())
            .finish()
    }
}

#[derive(PartialEq, Eq)]
struct ModuleInner {
    name: ModuleName,
    kind: ModuleKind,
    search_path: SearchPath,
    file: File,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ModuleKind {
    /// A single-file module (e.g. `foo.py` or `foo.pyi`)
    Module,

    /// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
    Package,
}

impl ModuleKind {
    pub const fn is_package(self) -> bool {
        matches!(self, ModuleKind::Package)
    }
}
crates/red_knot_python_semantic/src/module_resolver/path.rs (new file, 1094 lines; diff suppressed because it is too large)
crates/red_knot_python_semantic/src/module_resolver/resolver.rs (new file, 1814 lines; diff suppressed because it is too large)
crates/red_knot_python_semantic/src/module_resolver/testing.rs (new file, 302 lines)
@@ -0,0 +1,302 @@
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredPathBuf;

use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::{ProgramSettings, SitePackages};

/// A test case for the module resolver.
///
/// You generally shouldn't construct instances of this struct directly;
/// instead, use the [`TestCaseBuilder`].
pub(crate) struct TestCase<T> {
    pub(crate) db: TestDb,
    pub(crate) src: SystemPathBuf,
    pub(crate) stdlib: T,
    // Most test cases only ever need a single `site-packages` directory,
    // so this is a single directory instead of a `Vec` of directories,
    // like it is in `ruff_db::Program`.
    pub(crate) site_packages: SystemPathBuf,
    pub(crate) target_version: PythonVersion,
}

/// A `(file_name, file_contents)` tuple
pub(crate) type FileSpec = (&'static str, &'static str);

/// Specification for a typeshed mock to be created as part of a test
#[derive(Debug, Clone, Copy, Default)]
pub(crate) struct MockedTypeshed {
    /// The stdlib files to be created in the typeshed mock
    pub(crate) stdlib_files: &'static [FileSpec],

    /// The contents of the `stdlib/VERSIONS` file
    /// to be created in the typeshed mock
    pub(crate) versions: &'static str,
}

#[derive(Debug)]
pub(crate) struct VendoredTypeshed;

#[derive(Debug)]
pub(crate) struct UnspecifiedTypeshed;

/// A builder for a module-resolver test case.
///
/// The builder takes care of creating a [`TestDb`]
/// instance, applying the module resolver settings,
/// and creating mock directories for the stdlib, `site-packages`,
/// first-party code, etc.
///
/// For simple tests that do not involve typeshed,
/// test cases can be created as follows:
///
/// ```rs
/// let test_case = TestCaseBuilder::new()
///     .with_src_files(...)
///     .build();
///
/// let test_case2 = TestCaseBuilder::new()
///     .with_site_packages_files(...)
///     .build();
/// ```
///
/// Any tests can specify the target Python version that should be used
/// in the module resolver settings:
///
/// ```rs
/// let test_case = TestCaseBuilder::new()
///     .with_src_files(...)
///     .with_target_version(...)
///     .build();
/// ```
///
/// For tests checking that standard-library module resolution is working
/// correctly, you should usually create a [`MockedTypeshed`] instance
/// and pass it to the [`TestCaseBuilder::with_custom_typeshed`] method.
/// If you need to check something that involves the vendored typeshed stubs
/// we include as part of the binary, you can instead use the
/// [`TestCaseBuilder::with_vendored_typeshed`] method.
/// For either of these, you should almost always try to be explicit
/// about the Python version you want to be specified in the module-resolver
/// settings for the test:
///
/// ```rs
/// const TYPESHED = MockedTypeshed { ... };
///
/// let test_case = resolver_test_case()
///     .with_custom_typeshed(TYPESHED)
///     .with_target_version(...)
///     .build();
///
/// let test_case2 = resolver_test_case()
///     .with_vendored_typeshed()
///     .with_target_version(...)
///     .build();
/// ```
///
/// If you have not called one of those options, the `stdlib` field
/// on the [`TestCase`] instance created from `.build()` will be set
/// to `()`.
pub(crate) struct TestCaseBuilder<T> {
    typeshed_option: T,
    target_version: PythonVersion,
    first_party_files: Vec<FileSpec>,
    site_packages_files: Vec<FileSpec>,
}

impl<T> TestCaseBuilder<T> {
    /// Specify files to be created in the `src` mock directory
    pub(crate) fn with_src_files(mut self, files: &[FileSpec]) -> Self {
        self.first_party_files.extend(files.iter().copied());
        self
    }

    /// Specify files to be created in the `site-packages` mock directory
    pub(crate) fn with_site_packages_files(mut self, files: &[FileSpec]) -> Self {
        self.site_packages_files.extend(files.iter().copied());
        self
    }

    /// Specify the target Python version the module resolver should assume
    pub(crate) fn with_target_version(mut self, target_version: PythonVersion) -> Self {
        self.target_version = target_version;
        self
    }

    fn write_mock_directory(
        db: &mut TestDb,
        location: impl AsRef<SystemPath>,
        files: impl IntoIterator<Item = FileSpec>,
    ) -> SystemPathBuf {
        let root = location.as_ref().to_path_buf();
        // Make sure to create the directory even if the list of files is empty:
        db.memory_file_system().create_directory_all(&root).unwrap();
        db.write_files(
            files
                .into_iter()
                .map(|(relative_path, contents)| (root.join(relative_path), contents)),
        )
        .unwrap();
        root
    }
}

impl TestCaseBuilder<UnspecifiedTypeshed> {
    pub(crate) fn new() -> TestCaseBuilder<UnspecifiedTypeshed> {
        Self {
            typeshed_option: UnspecifiedTypeshed,
            target_version: PythonVersion::default(),
            first_party_files: vec![],
            site_packages_files: vec![],
        }
    }

    /// Use the vendored stdlib stubs included in the Ruff binary for this test case
    pub(crate) fn with_vendored_typeshed(self) -> TestCaseBuilder<VendoredTypeshed> {
        let TestCaseBuilder {
            typeshed_option: _,
            target_version,
            first_party_files,
            site_packages_files,
        } = self;
        TestCaseBuilder {
            typeshed_option: VendoredTypeshed,
            target_version,
            first_party_files,
            site_packages_files,
        }
    }

    /// Use a mock typeshed directory for this test case
    pub(crate) fn with_custom_typeshed(
        self,
        typeshed: MockedTypeshed,
    ) -> TestCaseBuilder<MockedTypeshed> {
        let TestCaseBuilder {
            typeshed_option: _,
            target_version,
            first_party_files,
            site_packages_files,
        } = self;

        TestCaseBuilder {
            typeshed_option: typeshed,
            target_version,
            first_party_files,
            site_packages_files,
        }
    }

    pub(crate) fn build(self) -> TestCase<()> {
        let TestCase {
            db,
            src,
            stdlib: _,
            site_packages,
            target_version,
        } = self.with_custom_typeshed(MockedTypeshed::default()).build();

        TestCase {
            db,
            src,
            stdlib: (),
            site_packages,
            target_version,
        }
    }
}

impl TestCaseBuilder<MockedTypeshed> {
    pub(crate) fn build(self) -> TestCase<SystemPathBuf> {
        let TestCaseBuilder {
            typeshed_option,
            target_version,
            first_party_files,
            site_packages_files,
        } = self;

        let mut db = TestDb::new();

        let site_packages =
            Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
        let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);

        Program::from_settings(
            &db,
            &ProgramSettings {
                target_version,
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: src.clone(),
                    custom_typeshed: Some(typeshed.clone()),
                    site_packages: SitePackages::Known(vec![site_packages.clone()]),
                },
            },
        )
        .expect("Valid program settings");

        TestCase {
            db,
            src,
            stdlib: typeshed.join("stdlib"),
            site_packages,
            target_version,
        }
    }

    fn build_typeshed_mock(db: &mut TestDb, typeshed_to_build: &MockedTypeshed) -> SystemPathBuf {
        let typeshed = SystemPathBuf::from("/typeshed");
        let MockedTypeshed {
            stdlib_files,
            versions,
        } = typeshed_to_build;
        Self::write_mock_directory(
            db,
            typeshed.join("stdlib"),
            stdlib_files
                .iter()
                .copied()
                .chain(std::iter::once(("VERSIONS", *versions))),
        );
        typeshed
    }
}

impl TestCaseBuilder<VendoredTypeshed> {
    pub(crate) fn build(self) -> TestCase<VendoredPathBuf> {
        let TestCaseBuilder {
            typeshed_option: VendoredTypeshed,
            target_version,
            first_party_files,
            site_packages_files,
        } = self;

        let mut db = TestDb::new();

        let site_packages =
            Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);

        Program::from_settings(
            &db,
            &ProgramSettings {
                target_version,
                search_paths: SearchPathSettings {
                    site_packages: SitePackages::Known(vec![site_packages.clone()]),
                    ..SearchPathSettings::new(src.clone())
                },
            },
        )
        .expect("Valid search path settings");

        TestCase {
            db,
            src,
            stdlib: VendoredPathBuf::from("stdlib"),
            site_packages,
            target_version,
        }
    }
}
@@ -0,0 +1,8 @@
pub use self::vendored::vendored_typeshed_stubs;
pub(super) use self::versions::{
    typeshed_versions, vendored_typeshed_versions, TypeshedVersions, TypeshedVersionsParseError,
    TypeshedVersionsQueryResult,
};

mod vendored;
mod versions;
@@ -1,17 +1,25 @@
pub(crate) mod versions;
use once_cell::sync::Lazy;

use ruff_db::vendored::VendoredFileSystem;

// The file path here is hardcoded in this crate's `build.rs` script.
// Luckily this crate will fail to build if this file isn't available at build time.
static TYPESHED_ZIP_BYTES: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));

pub fn vendored_typeshed_stubs() -> &'static VendoredFileSystem {
    static VENDORED_TYPESHED_STUBS: Lazy<VendoredFileSystem> =
        Lazy::new(|| VendoredFileSystem::new_static(TYPESHED_ZIP_BYTES).unwrap());
    &VENDORED_TYPESHED_STUBS
}

#[cfg(test)]
mod tests {
    use std::io::{self, Read};
    use std::path::Path;

    use ruff_db::vendored::VendoredFileSystem;
    use ruff_db::vfs::VendoredPath;
    use ruff_db::vendored::VendoredPath;

    // The file path here is hardcoded in this crate's `build.rs` script.
    // Luckily this crate will fail to build if this file isn't available at build time.
    const TYPESHED_ZIP_BYTES: &[u8] =
        include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
    use super::*;

    #[test]
    fn typeshed_zip_created_at_build_time() {
@@ -34,7 +42,7 @@ mod tests {
    #[test]
    fn typeshed_vfs_consistent_with_vendored_stubs() {
        let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
        let vendored_typeshed_stubs = VendoredFileSystem::new(TYPESHED_ZIP_BYTES).unwrap();
        let vendored_typeshed_stubs = vendored_typeshed_stubs();

        let mut empty_iterator = true;
        for entry in walkdir::WalkDir::new(&vendored_typeshed_dir).min_depth(1) {
@@ -64,7 +72,7 @@ mod tests {

        let vendored_path_kind = vendored_typeshed_stubs
            .metadata(vendored_path)
            .unwrap_or_else(|| {
            .unwrap_or_else(|_| {
                panic!(
                    "Expected metadata for {vendored_path:?} to be retrievable from the `VendoredFileSystem`!"
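The `include_bytes!` plus `Lazy` combination above embeds the zip produced by `build.rs` into the binary and defers parsing it until first use. The same embed-then-parse-once pattern in a self-contained sketch with a trivial asset (assuming a `once_cell` dependency):

```rust
use once_cell::sync::Lazy;

// The bytes live in the binary; parsing happens once, on first access.
static RAW: &[u8] = b"alpha\nbeta\n";

static LINES: Lazy<Vec<String>> = Lazy::new(|| {
    String::from_utf8(RAW.to_vec())
        .expect("embedded asset is valid UTF-8")
        .lines()
        .map(str::to_owned)
        .collect()
});

fn main() {
    assert_eq!(LINES.len(), 2);
}
```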
@@ -4,13 +4,34 @@ use std::num::{NonZeroU16, NonZeroUsize};
|
||||
use std::ops::{RangeFrom, RangeInclusive};
|
||||
use std::str::FromStr;
|
||||
|
||||
use once_cell::sync::Lazy;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::module::ModuleName;
|
||||
use super::vendored::vendored_typeshed_stubs;
|
||||
use crate::db::Db;
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::{Program, PythonVersion};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct TypeshedVersionsParseError {
|
||||
line_number: NonZeroU16,
|
||||
static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
|
||||
TypeshedVersions::from_str(
|
||||
&vendored_typeshed_stubs()
|
||||
.read_to_string("stdlib/VERSIONS")
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap()
|
||||
});
|
||||
|
||||
pub(crate) fn vendored_typeshed_versions() -> &'static TypeshedVersions {
|
||||
&VENDORED_VERSIONS
|
||||
}
|
||||
|
||||
pub(crate) fn typeshed_versions(db: &dyn Db) -> &TypeshedVersions {
|
||||
Program::get(db).search_paths(db).typeshed_versions()
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||
pub(crate) struct TypeshedVersionsParseError {
|
||||
line_number: Option<NonZeroU16>,
|
||||
reason: TypeshedVersionsParseErrorKind,
|
||||
}
|
||||
|
||||
@@ -20,10 +41,14 @@ impl fmt::Display for TypeshedVersionsParseError {
            line_number,
            reason,
        } = self;
-       write!(
-           f,
-           "Error while parsing line {line_number} of typeshed's VERSIONS file: {reason}"
-       )
+       if let Some(line_number) = line_number {
+           write!(
+               f,
+               "Error while parsing line {line_number} of typeshed's VERSIONS file: {reason}"
+           )
+       } else {
+           write!(f, "Error while parsing typeshed's VERSIONS file: {reason}")
+       }
    }
}
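The practical effect of making `line_number` an `Option` is that only errors tied to a specific line now mention one. A minimal test-style sketch of the two `Display` arms (the exact `reason` text comes from `TypeshedVersionsParseErrorKind`'s own `Display` impl, which this diff does not show):

    #[test]
    fn parse_error_display_mentions_line_number_when_known() {
        // A malformed version range on line 1 takes the `Some` arm;
        // the TooManyLines error (line_number: None) would take the `else` arm.
        let err = TypeshedVersions::from_str("foo: not-a-version").unwrap_err();
        assert!(err
            .to_string()
            .starts_with("Error while parsing line 1 of typeshed's VERSIONS file:"));
    }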
@@ -37,8 +62,8 @@ impl std::error::Error for TypeshedVersionsParseError {
     }
 }

-#[derive(Debug, PartialEq, Eq)]
-pub enum TypeshedVersionsParseErrorKind {
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub(super) enum TypeshedVersionsParseErrorKind {
     TooManyLines(NonZeroUsize),
     UnexpectedNumberOfColons,
     InvalidModuleName(String),
@@ -81,38 +106,94 @@ impl fmt::Display for TypeshedVersionsParseErrorKind {
 }

 #[derive(Debug, PartialEq, Eq)]
-pub struct TypeshedVersions(FxHashMap<ModuleName, PyVersionRange>);
+pub(crate) struct TypeshedVersions(FxHashMap<ModuleName, PyVersionRange>);

 impl TypeshedVersions {
-    pub fn len(&self) -> usize {
-        self.0.len()
+    #[must_use]
+    fn exact(&self, module_name: &ModuleName) -> Option<&PyVersionRange> {
+        self.0.get(module_name)
     }

-    pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
-    }
-
-    pub fn contains_module(&self, module_name: &ModuleName) -> bool {
-        self.0.contains_key(module_name)
-    }
-
-    pub fn module_exists_on_version(
+    #[must_use]
+    pub(in crate::module_resolver) fn query_module(
         &self,
-        module: ModuleName,
-        version: impl Into<PyVersion>,
-    ) -> bool {
-        let version = version.into();
-        let mut module: Option<ModuleName> = Some(module);
-        while let Some(module_to_try) = module {
-            if let Some(range) = self.0.get(&module_to_try) {
-                return range.contains(version);
-            }
-            module = module_to_try.parent();
+        module: &ModuleName,
+        target_version: PythonVersion,
+    ) -> TypeshedVersionsQueryResult {
+        if let Some(range) = self.exact(module) {
+            if range.contains(target_version) {
+                TypeshedVersionsQueryResult::Exists
+            } else {
+                TypeshedVersionsQueryResult::DoesNotExist
+            }
+        } else {
+            let mut module = module.parent();
+            while let Some(module_to_try) = module {
+                if let Some(range) = self.exact(&module_to_try) {
+                    return {
+                        if range.contains(target_version) {
+                            TypeshedVersionsQueryResult::MaybeExists
+                        } else {
+                            TypeshedVersionsQueryResult::DoesNotExist
+                        }
+                    };
+                }
+                module = module_to_try.parent();
+            }
+            TypeshedVersionsQueryResult::DoesNotExist
         }
-        false
     }
 }

+/// Possible answers [`TypeshedVersions::query_module()`] could give to the question:
+/// "Does this module exist in the stdlib at runtime on a certain target version?"
+#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
+pub(crate) enum TypeshedVersionsQueryResult {
+    /// The module definitely exists in the stdlib at runtime on the user-specified target version.
+    ///
+    /// For example:
+    /// - The target version is Python 3.8
+    /// - We're querying whether the `asyncio.tasks` module exists in the stdlib
+    /// - The VERSIONS file contains the line `asyncio.tasks: 3.8-`
+    Exists,
+
+    /// The module definitely does not exist in the stdlib on the user-specified target version.
+    ///
+    /// For example:
+    /// - We're querying whether the `foo` module exists in the stdlib
+    /// - There is no top-level `foo` module in VERSIONS
+    ///
+    /// OR:
+    /// - The target version is Python 3.8
+    /// - We're querying whether the module `importlib.abc` exists in the stdlib
+    /// - The VERSIONS file contains the line `importlib.abc: 3.10-`,
+    ///   indicating that the module was added in 3.10
+    ///
+    /// OR:
+    /// - The target version is Python 3.8
+    /// - We're querying whether the module `collections.abc` exists in the stdlib
+    /// - The VERSIONS file does not contain any information about the `collections.abc` submodule,
+    ///   but *does* contain the line `collections: 3.10-`,
+    ///   indicating that the entire `collections` package was added in Python 3.10.
+    DoesNotExist,
+
+    /// The module potentially exists in the stdlib and, if it does,
+    /// it definitely exists on the user-specified target version.
+    ///
+    /// This variant is only relevant for submodules,
+    /// for which the typeshed VERSIONS file does not provide comprehensive information.
+    /// (The VERSIONS file is guaranteed to provide information about all top-level stdlib modules and packages,
+    /// but not necessarily about all submodules within each top-level package.)
+    ///
+    /// For example:
+    /// - The target version is Python 3.8
+    /// - We're querying whether the `asyncio.staggered` module exists in the stdlib
+    /// - The typeshed VERSIONS file contains the line `asyncio: 3.8-`,
+    ///   indicating that the `asyncio` package was added in Python 3.8,
+    ///   but does not contain any explicit information about the `asyncio.staggered` submodule.
+    MaybeExists,
+}

 impl FromStr for TypeshedVersions {
     type Err = TypeshedVersionsParseError;
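Taken together, `exact()` and the parent-walking loop give `query_module()` its three-way answer. A minimal sketch of the semantics, mirroring the tests added further down in this diff (module-internal, since `query_module` is `pub(in crate::module_resolver)`):

    #[test]
    fn query_module_three_way_sketch() {
        // "bar" is declared; "bar.eggs" is an undeclared submodule of a declared package.
        let versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
        let bar = ModuleName::new_static("bar").unwrap();
        let bar_eggs = ModuleName::new_static("bar.eggs").unwrap();

        // Exact hit inside the range: definitely exists.
        assert_eq!(
            versions.query_module(&bar, PythonVersion::PY38),
            TypeshedVersionsQueryResult::Exists
        );
        // No exact entry, but the parent package exists on 3.8: maybe exists.
        assert_eq!(
            versions.query_module(&bar_eggs, PythonVersion::PY38),
            TypeshedVersionsQueryResult::MaybeExists
        );
        // The parent package is gone by 3.11: definitely does not exist.
        assert_eq!(
            versions.query_module(&bar_eggs, PythonVersion::PY311),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }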
@@ -125,7 +206,7 @@ impl FromStr for TypeshedVersions {

         let Ok(line_number) = NonZeroU16::try_from(line_number) else {
             return Err(TypeshedVersionsParseError {
-                line_number: NonZeroU16::MAX,
+                line_number: None,
                 reason: TypeshedVersionsParseErrorKind::TooManyLines(line_number),
             });
         };
@@ -141,14 +222,14 @@ impl FromStr for TypeshedVersions {
         let (Some(module_name), Some(rest), None) = (parts.next(), parts.next(), parts.next())
         else {
             return Err(TypeshedVersionsParseError {
-                line_number,
+                line_number: Some(line_number),
                 reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons,
             });
         };

         let Some(module_name) = ModuleName::new(module_name) else {
             return Err(TypeshedVersionsParseError {
-                line_number,
+                line_number: Some(line_number),
                 reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
                     module_name.to_string(),
                 ),
@@ -159,7 +240,7 @@ impl FromStr for TypeshedVersions {
             Ok(version) => map.insert(module_name, version),
             Err(reason) => {
                 return Err(TypeshedVersionsParseError {
-                    line_number,
+                    line_number: Some(line_number),
                     reason,
                 })
             }
@@ -180,14 +261,15 @@ impl fmt::Display for TypeshedVersions {
     }
 }

-#[derive(Debug, Clone, Eq, PartialEq)]
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
 enum PyVersionRange {
-    AvailableFrom(RangeFrom<PyVersion>),
-    AvailableWithin(RangeInclusive<PyVersion>),
+    AvailableFrom(RangeFrom<PythonVersion>),
+    AvailableWithin(RangeInclusive<PythonVersion>),
 }

 impl PyVersionRange {
-    fn contains(&self, version: PyVersion) -> bool {
+    #[must_use]
+    fn contains(&self, version: PythonVersion) -> bool {
         match self {
             Self::AvailableFrom(inner) => inner.contains(&version),
             Self::AvailableWithin(inner) => inner.contains(&version),
@@ -201,9 +283,14 @@ impl FromStr for PyVersionRange {
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         let mut parts = s.split('-').map(str::trim);
         match (parts.next(), parts.next(), parts.next()) {
-            (Some(lower), Some(""), None) => Ok(Self::AvailableFrom((lower.parse()?)..)),
+            (Some(lower), Some(""), None) => {
+                let lower = PythonVersion::from_versions_file_string(lower)?;
+                Ok(Self::AvailableFrom(lower..))
+            }
             (Some(lower), Some(upper), None) => {
-                Ok(Self::AvailableWithin((lower.parse()?)..=(upper.parse()?)))
+                let lower = PythonVersion::from_versions_file_string(lower)?;
+                let upper = PythonVersion::from_versions_file_string(upper)?;
+                Ok(Self::AvailableWithin(lower..=upper))
             }
             _ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
         }
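The two match arms correspond to the two shapes a VERSIONS range can take: open-ended `3.8-` (the trailing hyphen makes the second split part an empty string) and bounded `2.7-3.10`. A small sketch of both parses under those rules:

    #[test]
    fn py_version_range_parse_sketch() {
        // "3.8-" splits into ("3.8", ""), matching the AvailableFrom arm;
        // "2.7-3.10" splits into ("2.7", "3.10"), matching AvailableWithin.
        assert_eq!(
            PyVersionRange::from_str("3.8-").unwrap(),
            PyVersionRange::AvailableFrom(PythonVersion::PY38..)
        );
        assert_eq!(
            PyVersionRange::from_str("2.7-3.10").unwrap(),
            PyVersionRange::AvailableWithin(
                PythonVersion { major: 2, minor: 7 }..=PythonVersion::PY310
            )
        );
    }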
@@ -221,87 +308,20 @@ impl fmt::Display for PyVersionRange {
     }
 }

-#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
-pub struct PyVersion {
-    major: u8,
-    minor: u8,
-}
-
-impl FromStr for PyVersion {
-    type Err = TypeshedVersionsParseErrorKind;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
+impl PythonVersion {
+    fn from_versions_file_string(s: &str) -> Result<Self, TypeshedVersionsParseErrorKind> {
         let mut parts = s.split('.').map(str::trim);
         let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
             return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
                 s.to_string(),
             ));
         };
-        let major = match u8::from_str(major) {
-            Ok(major) => major,
-            Err(err) => {
-                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
-                    version: s.to_string(),
-                    err,
-                })
-            }
-        };
-        let minor = match u8::from_str(minor) {
-            Ok(minor) => minor,
-            Err(err) => {
-                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
-                    version: s.to_string(),
-                    err,
-                })
-            }
-        };
-        Ok(Self { major, minor })
+        PythonVersion::try_from((major, minor)).map_err(|int_parse_error| {
+            TypeshedVersionsParseErrorKind::IntegerParsingFailure {
+                version: s.to_string(),
+                err: int_parse_error,
+            }
+        })
     }
 }
-
-impl fmt::Display for PyVersion {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let PyVersion { major, minor } = self;
-        write!(f, "{major}.{minor}")
-    }
-}
-
-// TODO: unify with the PythonVersion enum in the linter/formatter crates?
-#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default)]
-pub enum SupportedPyVersion {
-    Py37,
-    #[default]
-    Py38,
-    Py39,
-    Py310,
-    Py311,
-    Py312,
-    Py313,
-}
-
-impl From<SupportedPyVersion> for PyVersion {
-    fn from(value: SupportedPyVersion) -> Self {
-        match value {
-            SupportedPyVersion::Py37 => PyVersion { major: 3, minor: 7 },
-            SupportedPyVersion::Py38 => PyVersion { major: 3, minor: 8 },
-            SupportedPyVersion::Py39 => PyVersion { major: 3, minor: 9 },
-            SupportedPyVersion::Py310 => PyVersion {
-                major: 3,
-                minor: 10,
-            },
-            SupportedPyVersion::Py311 => PyVersion {
-                major: 3,
-                minor: 11,
-            },
-            SupportedPyVersion::Py312 => PyVersion {
-                major: 3,
-                minor: 12,
-            },
-            SupportedPyVersion::Py313 => PyVersion {
-                major: 3,
-                minor: 13,
-            },
-        }
-    }
-}
@@ -310,14 +330,26 @@ mod tests {
     use std::num::{IntErrorKind, NonZeroU16};
     use std::path::Path;

-    use super::*;
-
     use insta::assert_snapshot;

+    use super::*;
+
     const TYPESHED_STDLIB_DIR: &str = "stdlib";

     #[allow(unsafe_code)]
-    const ONE: NonZeroU16 = unsafe { NonZeroU16::new_unchecked(1) };
+    const ONE: Option<NonZeroU16> = Some(unsafe { NonZeroU16::new_unchecked(1) });
+
+    impl TypeshedVersions {
+        #[must_use]
+        fn contains_exact(&self, module: &ModuleName) -> bool {
+            self.exact(module).is_some()
+        }
+
+        #[must_use]
+        fn len(&self) -> usize {
+            self.0.len()
+        }
+    }

    #[test]
    fn can_parse_vendored_versions_file() {
@@ -334,23 +366,36 @@ mod tests {
         let asyncio_staggered = ModuleName::new_static("asyncio.staggered").unwrap();
         let audioop = ModuleName::new_static("audioop").unwrap();

-        assert!(versions.contains_module(&asyncio));
-        assert!(versions.module_exists_on_version(asyncio, SupportedPyVersion::Py310));
+        assert!(versions.contains_exact(&asyncio));
+        assert_eq!(
+            versions.query_module(&asyncio, PythonVersion::PY310),
+            TypeshedVersionsQueryResult::Exists
+        );

-        assert!(versions.contains_module(&asyncio_staggered));
-        assert!(
-            versions.module_exists_on_version(asyncio_staggered.clone(), SupportedPyVersion::Py38)
-        );
-        assert!(!versions.module_exists_on_version(asyncio_staggered, SupportedPyVersion::Py37));
+        assert!(versions.contains_exact(&asyncio_staggered));
+        assert_eq!(
+            versions.query_module(&asyncio_staggered, PythonVersion::PY38),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            versions.query_module(&asyncio_staggered, PythonVersion::PY37),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );

-        assert!(versions.contains_module(&audioop));
-        assert!(versions.module_exists_on_version(audioop.clone(), SupportedPyVersion::Py312));
-        assert!(!versions.module_exists_on_version(audioop, SupportedPyVersion::Py313));
+        assert!(versions.contains_exact(&audioop));
+        assert_eq!(
+            versions.query_module(&audioop, PythonVersion::PY312),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            versions.query_module(&audioop, PythonVersion::PY313),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
     }

     #[test]
     fn typeshed_versions_consistent_with_vendored_stubs() {
-        const VERSIONS_DATA: &str = include_str!("../../vendor/typeshed/stdlib/VERSIONS");
+        const VERSIONS_DATA: &str = include_str!("../../../vendor/typeshed/stdlib/VERSIONS");
         let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
         let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();
@@ -393,7 +438,7 @@ mod tests {
             let top_level_module = ModuleName::new(top_level_module)
                 .unwrap_or_else(|| panic!("{top_level_module:?} was not a valid module name!"));

-            assert!(vendored_typeshed_versions.contains_module(&top_level_module));
+            assert!(vendored_typeshed_versions.contains_exact(&top_level_module));
         }

         assert!(
@@ -426,30 +471,102 @@ foo: 3.8- # trailing comment
         foo: 3.8-
         "###
         );
     }

-        let foo = ModuleName::new_static("foo").unwrap();
-
-        assert!(parsed_versions.contains_module(&foo));
-        assert!(!parsed_versions.module_exists_on_version(foo.clone(), SupportedPyVersion::Py37));
-        assert!(parsed_versions.module_exists_on_version(foo.clone(), SupportedPyVersion::Py38));
-        assert!(parsed_versions.module_exists_on_version(foo, SupportedPyVersion::Py311));
-
-        assert!(parsed_versions.contains_module(&bar));
-        assert!(parsed_versions.module_exists_on_version(bar.clone(), SupportedPyVersion::Py37));
-        assert!(parsed_versions.module_exists_on_version(bar.clone(), SupportedPyVersion::Py310));
-        assert!(!parsed_versions.module_exists_on_version(bar, SupportedPyVersion::Py311));
-
-        assert!(parsed_versions.contains_module(&bar_baz));
-        assert!(parsed_versions.module_exists_on_version(bar_baz.clone(), SupportedPyVersion::Py37));
-        assert!(parsed_versions.module_exists_on_version(bar_baz.clone(), SupportedPyVersion::Py39));
-        assert!(!parsed_versions.module_exists_on_version(bar_baz, SupportedPyVersion::Py310));
-
-        assert!(!parsed_versions.contains_module(&spam));
-        assert!(!parsed_versions.module_exists_on_version(spam.clone(), SupportedPyVersion::Py37));
-        assert!(!parsed_versions.module_exists_on_version(spam, SupportedPyVersion::Py313));
+    #[test]
+    fn version_within_range_parsed_correctly() {
+        let parsed_versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
+        let bar = ModuleName::new_static("bar").unwrap();
+
+        assert!(parsed_versions.contains_exact(&bar));
+        assert_eq!(
+            parsed_versions.query_module(&bar, PythonVersion::PY37),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&bar, PythonVersion::PY310),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&bar, PythonVersion::PY311),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
+    }
+
+    #[test]
+    fn version_from_range_parsed_correctly() {
+        let parsed_versions = TypeshedVersions::from_str("foo: 3.8-").unwrap();
+        let foo = ModuleName::new_static("foo").unwrap();
+
+        assert!(parsed_versions.contains_exact(&foo));
+        assert_eq!(
+            parsed_versions.query_module(&foo, PythonVersion::PY37),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
+        assert_eq!(
+            parsed_versions.query_module(&foo, PythonVersion::PY38),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&foo, PythonVersion::PY311),
+            TypeshedVersionsQueryResult::Exists
+        );
+    }
+
+    #[test]
+    fn explicit_submodule_parsed_correctly() {
+        let parsed_versions = TypeshedVersions::from_str("bar.baz: 3.1-3.9").unwrap();
+        let bar_baz = ModuleName::new_static("bar.baz").unwrap();
+
+        assert!(parsed_versions.contains_exact(&bar_baz));
+        assert_eq!(
+            parsed_versions.query_module(&bar_baz, PythonVersion::PY37),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&bar_baz, PythonVersion::PY39),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&bar_baz, PythonVersion::PY310),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
+    }
+
+    #[test]
+    fn implicit_submodule_queried_correctly() {
+        let parsed_versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
+        let bar_eggs = ModuleName::new_static("bar.eggs").unwrap();
+
+        assert!(!parsed_versions.contains_exact(&bar_eggs));
+        assert_eq!(
+            parsed_versions.query_module(&bar_eggs, PythonVersion::PY37),
+            TypeshedVersionsQueryResult::MaybeExists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&bar_eggs, PythonVersion::PY310),
+            TypeshedVersionsQueryResult::MaybeExists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&bar_eggs, PythonVersion::PY311),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
+    }
+
+    #[test]
+    fn nonexistent_module_queried_correctly() {
+        let parsed_versions = TypeshedVersions::from_str("eggs: 3.8-").unwrap();
+        let spam = ModuleName::new_static("spam").unwrap();
+
+        assert!(!parsed_versions.contains_exact(&spam));
+        assert_eq!(
+            parsed_versions.query_module(&spam, PythonVersion::PY37),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
+        assert_eq!(
+            parsed_versions.query_module(&spam, PythonVersion::PY313),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
     }

     #[test]
@@ -465,7 +582,7 @@ foo: 3.8- # trailing comment
         assert_eq!(
             TypeshedVersions::from_str(&massive_versions_file),
             Err(TypeshedVersionsParseError {
-                line_number: NonZeroU16::MAX,
+                line_number: None,
                 reason: TypeshedVersionsParseErrorKind::TooManyLines(
                     NonZeroUsize::new(too_many + 1 - offset).unwrap()
                 )
@@ -1,56 +0,0 @@
use std::ops::Deref;

#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Name(smol_str::SmolStr);

impl Name {
    #[inline]
    pub fn new(name: &str) -> Self {
        Self(smol_str::SmolStr::new(name))
    }

    #[inline]
    pub fn new_static(name: &'static str) -> Self {
        Self(smol_str::SmolStr::new_static(name))
    }

    pub fn as_str(&self) -> &str {
        self.0.as_str()
    }
}

impl Deref for Name {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

impl<T> From<T> for Name
where
    T: Into<smol_str::SmolStr>,
{
    fn from(value: T) -> Self {
        Self(value.into())
    }
}

impl std::fmt::Display for Name {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}

impl PartialEq<str> for Name {
    fn eq(&self, other: &str) -> bool {
        self.as_str() == other
    }
}

impl PartialEq<Name> for str {
    fn eq(&self, other: &Name) -> bool {
        other == self
    }
}
crates/red_knot_python_semantic/src/program.rs (new file, 100 lines)
@@ -0,0 +1,100 @@
use crate::python_version::PythonVersion;
use anyhow::Context;
use salsa::Durability;
use salsa::Setter;

use ruff_db::system::{SystemPath, SystemPathBuf};

use crate::module_resolver::SearchPaths;
use crate::Db;

#[salsa::input(singleton)]
pub struct Program {
    pub target_version: PythonVersion,

    #[return_ref]
    pub(crate) search_paths: SearchPaths,
}

impl Program {
    pub fn from_settings(db: &dyn Db, settings: &ProgramSettings) -> anyhow::Result<Self> {
        let ProgramSettings {
            target_version,
            search_paths,
        } = settings;

        tracing::info!("Target version: Python {target_version}");

        let search_paths = SearchPaths::from_settings(db, search_paths)
            .with_context(|| "Invalid search path settings")?;

        Ok(Program::builder(settings.target_version, search_paths)
            .durability(Durability::HIGH)
            .new(db))
    }

    pub fn update_search_paths(
        self,
        db: &mut dyn Db,
        search_path_settings: &SearchPathSettings,
    ) -> anyhow::Result<()> {
        let search_paths = SearchPaths::from_settings(db, search_path_settings)?;

        if self.search_paths(db) != &search_paths {
            tracing::debug!("Update search paths");
            self.set_search_paths(db).to(search_paths);
        }

        Ok(())
    }

    pub fn custom_stdlib_search_path(self, db: &dyn Db) -> Option<&SystemPath> {
        self.search_paths(db).custom_stdlib()
    }
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ProgramSettings {
    pub target_version: PythonVersion,
    pub search_paths: SearchPathSettings,
}

/// Configures the search paths for module resolution.
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct SearchPathSettings {
    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's MYPYPATH environment variable,
    /// or pyright's stubPath configuration setting.
    pub extra_paths: Vec<SystemPathBuf>,

    /// The root of the workspace, used for finding first-party modules.
    pub src_root: SystemPathBuf,

    /// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.
    /// If this is not provided, we will fall back to our vendored typeshed stubs for the stdlib,
    /// bundled as a zip file in the binary.
    pub custom_typeshed: Option<SystemPathBuf>,

    /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
    pub site_packages: SitePackages,
}

impl SearchPathSettings {
    pub fn new(src_root: SystemPathBuf) -> Self {
        Self {
            src_root,
            extra_paths: vec![],
            custom_typeshed: None,
            site_packages: SitePackages::Known(vec![]),
        }
    }
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum SitePackages {
    Derived {
        venv_path: SystemPathBuf,
    },
    /// Resolved site packages paths
    Known(Vec<SystemPathBuf>),
}
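For orientation: the two settings types plug into `Program::from_settings` like so. A minimal sketch, assuming a `db` implementing this crate's `Db` trait (constructing one is outside this diff) and relying on the defaults that `SearchPathSettings::new` fills in (no extra paths, vendored typeshed, no resolved site-packages); the helper name and the workspace path are hypothetical:

    use ruff_db::system::SystemPathBuf;

    // Hypothetical helper: validate the search paths and create the singleton input.
    fn init_program(db: &dyn Db) -> anyhow::Result<Program> {
        let settings = ProgramSettings {
            target_version: PythonVersion::PY312,
            search_paths: SearchPathSettings::new(SystemPathBuf::from("/workspace/src")),
        };
        // custom_stdlib_search_path(db) should be None afterwards,
        // since no custom typeshed was configured.
        Program::from_settings(db, &settings)
    }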
crates/red_knot_python_semantic/src/python_version.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
use std::fmt;

/// Representation of a Python version.
///
/// Unlike the `TargetVersion` enums in the CLI crates,
/// this does not necessarily represent a Python version that we actually support.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct PythonVersion {
    pub major: u8,
    pub minor: u8,
}

impl PythonVersion {
    pub const PY37: PythonVersion = PythonVersion { major: 3, minor: 7 };
    pub const PY38: PythonVersion = PythonVersion { major: 3, minor: 8 };
    pub const PY39: PythonVersion = PythonVersion { major: 3, minor: 9 };
    pub const PY310: PythonVersion = PythonVersion {
        major: 3,
        minor: 10,
    };
    pub const PY311: PythonVersion = PythonVersion {
        major: 3,
        minor: 11,
    };
    pub const PY312: PythonVersion = PythonVersion {
        major: 3,
        minor: 12,
    };
    pub const PY313: PythonVersion = PythonVersion {
        major: 3,
        minor: 13,
    };

    pub fn free_threaded_build_available(self) -> bool {
        self >= PythonVersion::PY313
    }
}

impl Default for PythonVersion {
    fn default() -> Self {
        Self::PY38
    }
}

impl TryFrom<(&str, &str)> for PythonVersion {
    type Error = std::num::ParseIntError;

    fn try_from(value: (&str, &str)) -> Result<Self, Self::Error> {
        let (major, minor) = value;
        Ok(Self {
            major: major.parse()?,
            minor: minor.parse()?,
        })
    }
}

impl fmt::Display for PythonVersion {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let PythonVersion { major, minor } = self;
        write!(f, "{major}.{minor}")
    }
}
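Because `PythonVersion` derives `Ord` with `major` before `minor`, version gates like `free_threaded_build_available` reduce to plain comparisons. A small sketch of the API surface defined above:

    #[test]
    fn python_version_sketch() {
        // Field order (major, then minor) makes the derived Ord match version order.
        assert!(PythonVersion::PY38 < PythonVersion::PY310);
        assert_eq!(PythonVersion::default(), PythonVersion::PY38);
        // Free-threaded builds only exist from 3.13 onwards.
        assert!(!PythonVersion::PY312.free_threaded_build_available());
        assert!(PythonVersion::PY313.free_threaded_build_available());
        // TryFrom parses the two components; Display renders "major.minor".
        assert_eq!(
            PythonVersion::try_from(("3", "14")).unwrap().to_string(),
            "3.14"
        );
    }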
Some files were not shown because too many files have changed in this diff.