Compare commits
dhruv/form ... 0.5.7 (724 commits)
Commit list (724 commits; the view lists abbreviated SHAs from `221ea662e0` through `6ecb4776de`; the Author and Date columns are empty in this capture).
Cargo configuration:

```diff
@@ -1,3 +1,10 @@
 [alias]
 dev = "run --package ruff_dev --bin ruff_dev"
 benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
+
+# statically link the C runtime so the executable does not depend on
+# that shared/dynamic library.
+#
+# See: https://github.com/astral-sh/ruff/issues/11503
+[target.'cfg(all(target_env="msvc", target_os = "windows"))']
+rustflags = ["-C", "target-feature=+crt-static"]
```
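For context, the two `[alias]` entries are ordinary Cargo aliases: invoking them expands to the quoted command, with any extra arguments appended. A minimal usage sketch (the trailing flags are illustrative and not part of this diff):

```shell
# Expands to: cargo run --package ruff_dev --bin ruff_dev -- --help
cargo dev -- --help

# Expands to: cargo bench -p ruff_benchmark --bench linter --bench formatter --
cargo benchmark
```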
.gitattributes (4 additions):

```diff
@@ -8,6 +8,10 @@ crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf
 crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
 crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
+
+crates/ruff_python_parser/resources/invalid/re_lexing/line_continuation_windows_eol.py text eol=crlf
+crates/ruff_python_parser/resources/invalid/re_lex_logical_token_windows_eol.py text eol=crlf
+crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text eol=cr
 
 crates/ruff_python_parser/resources/inline linguist-generated=true
 
 ruff.schema.json linguist-generated=true text=auto eol=lf
```
.github/CODEOWNERS (4 additions):

```diff
@@ -15,3 +15,7 @@
 
 # Script for fuzzing the parser
 /scripts/fuzz-parser/ @AlexWaygood
+
+# red-knot
+/crates/red_knot* @carljm @MichaReiser @AlexWaygood
+/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood
```
.github/renovate.json5 (20 changes):

```diff
@@ -8,7 +8,7 @@
   semanticCommits: "disabled",
   separateMajorMinor: false,
   prHourlyLimit: 10,
-  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
+  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
   cargo: {
     // See https://docs.renovatebot.com/configuration-options/#rangestrategy
     rangeStrategy: "update-lockfile",
@@ -16,6 +16,9 @@
   pep621: {
     fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
   },
+  pip_requirements: {
+    fileMatch: ["^docs/requirements.*\\.txt$"],
+  },
   npm: {
     fileMatch: ["^playground/.*package\\.json$"],
   },
@@ -41,6 +44,21 @@
       description: "Disable PRs updating GitHub runners (e.g. 'runs-on: macos-14')",
       enabled: false,
     },
+    {
+      // Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
+      // See: https://github.com/astral-sh/uv/issues/3642
+      matchPackagePatterns: ["zip"],
+      matchManagers: ["cargo"],
+      enabled: false,
+    },
+    {
+      // `mkdocs-material` requires a manual update to keep the version in sync
+      // with `mkdocs-material-insider`.
+      // See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
+      matchManagers: ["pip_requirements"],
+      matchPackagePatterns: ["mkdocs-material"],
+      enabled: false,
+    },
     {
       groupName: "pre-commit dependencies",
       matchManagers: ["pre-commit"],
```
Release build workflow (renamed from "[ruff] Release" to "Build binaries"):

- The workflow is now assumed to run as a subworkflow of .github/workflows/release.yml, as a local artifacts job within `cargo-dist`, and gains a required `plan` workflow_call input alongside the existing `tag` and `sha` inputs; the pull_request paths filter now watches .github/workflows/build-binaries.yml instead of .github/workflows/release.yaml.
- A `MODULE_NAME: ruff` env var is added, and the sdist and wheel test steps invoke `${{ env.MODULE_NAME }} --help` and `python -m ${{ env.MODULE_NAME }} --help` instead of hard-coding `ruff`.
- Every "Archive binary" step now builds a `ruff-$TARGET` directory, copies the binary into it, and archives that directory, dropping the version tag from the archive filename (`ruff-$TARGET.tar.gz`, `ruff-<target>.zip`); the Linux variants also add `set -euo pipefail`. A consolidated sketch follows this list.
- Binary artifacts are uploaded under `artifacts-*` names instead of `binaries-*`.
- The linux-cross matrix gains an `arm-unknown-linux-musleabihf` target (arch `arm`), and its wheel-test step uses `armv6` and the `bullseye` distro for that arch instead of `ubuntu20.04`.
- The validate-tag, upload-release (PyPI), tag-release, publish-release (GitHub), docker-publish, and update-dependents jobs are removed from this workflow; they move to the dedicated workflows added below.
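For reference, the reworked "Archive binary" step runs the same commands for each target; a standalone sketch with one illustrative target triple filled in:

```shell
set -euo pipefail

TARGET=x86_64-unknown-linux-gnu   # illustrative; supplied by the job matrix in the workflow
ARCHIVE_NAME=ruff-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

# Package the binary inside a ruff-<target>/ directory rather than at the archive root.
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/ruff $ARCHIVE_NAME/ruff
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
```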
.github/workflows/build-docker.yml (new file, 68 lines)

A new workflow that builds and publishes the ghcr.io/astral-sh/ruff Docker image, assumed to run as a subworkflow of .github/workflows/release.yml (a local artifacts job within `cargo-dist`) and also on pull requests touching the workflow itself, with a required `plan` input. It checks out the repository with submodules, sets up Buildx, logs in to ghcr.io, extracts image metadata, verifies that the plan's announcement tag matches the version in pyproject.toml, and runs docker/build-push-action@v6 for linux/amd64 and linux/arm64 with GHA layer caching, tagging ghcr.io/astral-sh/ruff:latest and the announcement tag (or `dry-run`); the push only happens when the plan carries an explicit tag.
.github/workflows/ci.yaml (54 changes)

- The "Clippy (wasm)" step now also lints red_knot_wasm (`cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown ...`).
- The "Run tests" step gains `RUSTUP_WINDOWS_PATH_ADD_BIN: 1` as a workaround for nextest-rs/nextest#1493.
- The single "Run wasm-pack" step is split into "Test ruff_wasm" and "Test red_knot_wasm" steps, each running `wasm-pack test --node` in its crate.
- A new cargo-build-msrv job reads `workspace.package.rust-version` from Cargo.toml via SebRollen/toml-action, installs that toolchain plus mold, cargo-nextest, and cargo-insta, and runs `cargo +<msrv> insta test --all-features --unreferenced reject --test-runner nextest`.
- cargo-fuzz is now installed through cargo-binstall (quick install, since the GitHub releases only ship MUSL targets) instead of taiki-e/install-action.
- dawidd6/action-download-artifact is bumped from v3 to v6, and CodSpeedHQ/action from v2 to v3.
.github/workflows/docs.yaml (deleted, 55 lines)

The old mkdocs workflow is removed. It previously ran on release publication or workflow_dispatch (with an optional ref), installed the docs requirements (Insiders when the MKDOCS_INSIDERS_SSH_KEY secret was available), ran scripts/transform_readme.py and scripts/generate_mkdocs.py, built the site with `mkdocs build --strict`, and deployed it to Cloudflare Pages via cloudflare/wrangler-action. Its role is taken over by the new .github/workflows/publish-docs.yml below.
.github/workflows/notify-dependents.yml (new file, 29 lines)

```yaml
# Notify downstream repositories of a new release.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
# job within `cargo-dist`.
name: "[ruff] Notify dependents"

on:
  workflow_call:
    inputs:
      plan:
        required: true
        type: string

jobs:
  update-dependents:
    name: Notify dependents
    runs-on: ubuntu-latest
    steps:
      - name: "Update pre-commit mirror"
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
          script: |
            github.rest.actions.createWorkflowDispatch({
              owner: 'astral-sh',
              repo: 'ruff-pre-commit',
              workflow_id: 'main.yml',
              ref: 'main',
            })
```
.github/workflows/pr-comment.yaml (14 changes)

Both dawidd6/action-download-artifact steps are bumped from v3 to v6 and gain `allow_forks: true`. The "Generate comment content" step now guards against malicious ecosystem results by failing if `pr/ecosystem/ecosystem-result` is a symlink (which could point at a secret file on the runner) before writing the `<!-- generated-comment ecosystem -->` marker.
.github/workflows/publish-docs.yml (new file, 151 lines)

A new mkdocs workflow that publishes the Ruff documentation, assumed to run as a post-announce job within `cargo-dist` (and manually via workflow_dispatch with an optional ref). It derives the docs version from the plan's announcement_tag or the given ref and fails if neither is set, runs scripts/transform_readme.py and scripts/generate_mkdocs.py, and builds the site with `mkdocs build --strict` (Insiders requirements when the MKDOCS_INSIDERS_SSH_KEY secret is available, public otherwise). It then clones astral-sh/docs using the ASTRAL_DOCS_PAT secret, copies the built site into site/ruff on a timestamped `update-docs-<version>-<timestamp>` branch, commits as astral-docs-bot, closes any open PR with the same title, opens a PR titled "Update ruff documentation for <version>" with the documentation label, and auto-squash-merges it when the build was triggered by a real release tag rather than a manual run.
Playground release workflow:

```diff
@@ -1,9 +1,16 @@
+# Publish the Ruff playground.
+#
+# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
+# job within `cargo-dist`.
 name: "[Playground] Release"
 
 on:
   workflow_dispatch:
-  release:
-    types: [published]
+  workflow_call:
+    inputs:
+      plan:
+        required: true
+        type: string
 
 env:
   CARGO_INCREMENTAL: 0
@@ -40,7 +47,7 @@ jobs:
         working-directory: playground
       - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-       uses: cloudflare/wrangler-action@v3.5.0
+       uses: cloudflare/wrangler-action@v3.7.0
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
```
.github/workflows/publish-pypi.yml (new file, 34 lines)

```yaml
# Publish a release to PyPI.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job
# within `cargo-dist`.
name: "[ruff] Publish to PyPI"

on:
  workflow_call:
    inputs:
      plan:
        required: true
        type: string

jobs:
  pypi-publish:
    name: Upload to PyPI
    runs-on: ubuntu-latest
    environment:
      name: release
    permissions:
      # For PyPI's trusted publishing.
      id-token: write
    steps:
      - uses: actions/download-artifact@v4
        with:
          pattern: wheels-*
          path: wheels
          merge-multiple: true
      - name: Publish to PyPi
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          skip-existing: true
          packages-dir: wheels
          verbose: true
```
.github/workflows/publish-wasm.yml (new file, 55 lines)

A new "Build and publish wasm" workflow, assumed to run as a publish job within `cargo-dist` (also runnable via workflow_dispatch) with the usual `plan` input. Its ruff_wasm job runs a matrix over the web, bundler, and nodejs targets: it adds the wasm32-unknown-unknown Rust target, runs `wasm-pack build --target <target> crates/ruff_wasm`, rewrites the generated package name to `@astral-sh/ruff-wasm-<target>` with jq, copies the LICENSE into the pkg directory (wasm-pack does not), sets up Node 18 against registry.npmjs.org, and publishes with `npm publish --provenance --access public` using NPM_TOKEN; when the plan has no explicit announcement tag it runs `npm publish --dry-run` instead.
298  .github/workflows/release.yml  (vendored, Normal file)
@@ -0,0 +1,298 @@
|
||||
# Copyright 2022-2024, axodotdev
|
||||
# SPDX-License-Identifier: MIT or Apache-2.0
|
||||
#
|
||||
# CI that:
|
||||
#
|
||||
# * checks for a Git Tag that looks like a release
|
||||
# * builds artifacts with cargo-dist (archives, installers, hashes)
|
||||
# * uploads those artifacts to a temporary workflow zip
|
||||
# * on success, uploads the artifacts to a GitHub Release
|
||||
#
|
||||
# Note that the GitHub Release will be created with a generated
|
||||
# title/body based on your changelogs.
|
||||
|
||||
name: Release
|
||||
permissions:
|
||||
"contents": "write"
|
||||
|
||||
# This task will run whenever you workflow_dispatch with a tag that looks like a version
|
||||
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
|
||||
# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
|
||||
# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
|
||||
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
|
||||
#
|
||||
# If PACKAGE_NAME is specified, then the announcement will be for that
|
||||
# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
|
||||
#
|
||||
# If PACKAGE_NAME isn't specified, then the announcement will be for all
|
||||
# (cargo-dist-able) packages in the workspace with that version (this mode is
|
||||
# intended for workspaces with only one dist-able package, or with all dist-able
|
||||
# packages versioned/released in lockstep).
|
||||
#
|
||||
# If you push multiple tags at once, separate instances of this workflow will
|
||||
# spin up, creating an independent announcement for each one. However, GitHub
|
||||
# will hard limit this to 3 tags per commit, as it will assume more tags is a
|
||||
# mistake.
|
||||
#
|
||||
# If there's a prerelease-style suffix to the version, then the release(s)
|
||||
# will be marked as a prerelease.
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: Release Tag
|
||||
required: true
|
||||
default: dry-run
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
# Run 'cargo dist plan' (or host) to determine what tasks we need to do
|
||||
plan:
|
||||
runs-on: "ubuntu-20.04"
|
||||
outputs:
|
||||
val: ${{ steps.plan.outputs.manifest }}
|
||||
tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
|
||||
tag-flag: ${{ inputs.tag && inputs.tag != 'dry-run' && format('--tag={0}', inputs.tag) || '' }}
|
||||
publishing: ${{ inputs.tag && inputs.tag != 'dry-run' }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install cargo-dist
|
||||
# we specify bash to get pipefail; it guards against the `curl` command
|
||||
# failing. otherwise `sh` won't catch that `curl` returned non-0
|
||||
shell: bash
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.18.0/cargo-dist-installer.sh | sh"
|
||||
- name: Cache cargo-dist
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/cargo-dist
|
||||
# sure would be cool if github gave us proper conditionals...
|
||||
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
|
||||
# functionality based on whether this is a pull_request, and whether it's from a fork.
|
||||
# (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
|
||||
# but also really annoying to build CI around when it needs secrets to work right.)
|
||||
- id: plan
|
||||
run: |
|
||||
cargo dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
|
||||
echo "cargo dist ran successfully"
|
||||
cat plan-dist-manifest.json
|
||||
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-plan-dist-manifest
|
||||
path: plan-dist-manifest.json
|
||||
|
||||
custom-build-binaries:
|
||||
needs:
|
||||
- plan
|
||||
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
|
||||
uses: ./.github/workflows/build-binaries.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-build-docker:
|
||||
needs:
|
||||
- plan
|
||||
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
|
||||
uses: ./.github/workflows/build-docker.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
permissions:
|
||||
"contents": "read"
|
||||
"packages": "write"
|
||||
|
||||
# Build and package all the platform-agnostic(ish) things
|
||||
build-global-artifacts:
|
||||
needs:
|
||||
- plan
|
||||
- custom-build-binaries
|
||||
- custom-build-docker
|
||||
runs-on: "ubuntu-20.04"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install cached cargo-dist
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/cargo-dist
|
||||
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
|
||||
- name: Fetch local artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
- id: cargo-dist
|
||||
shell: bash
|
||||
run: |
|
||||
cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
|
||||
echo "cargo dist ran successfully"
|
||||
|
||||
# Parse out what we just built and upload it to scratch storage
|
||||
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
|
||||
jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
|
||||
echo "EOF" >> "$GITHUB_OUTPUT"
|
||||
|
||||
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
|
||||
- name: "Upload artifacts"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-build-global
|
||||
path: |
|
||||
${{ steps.cargo-dist.outputs.paths }}
|
||||
${{ env.BUILD_MANIFEST_NAME }}
|
||||
# Determines if we should publish/announce
|
||||
host:
|
||||
needs:
|
||||
- plan
|
||||
- custom-build-binaries
|
||||
- custom-build-docker
|
||||
- build-global-artifacts
|
||||
# Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
|
||||
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
runs-on: "ubuntu-20.04"
|
||||
outputs:
|
||||
val: ${{ steps.host.outputs.manifest }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install cached cargo-dist
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/cargo-dist
|
||||
# Fetch artifacts from scratch-storage
|
||||
- name: Fetch artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
# This is a harmless no-op for GitHub Releases, hosting for that happens in "announce"
|
||||
- id: host
|
||||
shell: bash
|
||||
run: |
|
||||
cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
|
||||
echo "artifacts uploaded and released successfully"
|
||||
cat dist-manifest.json
|
||||
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
# Overwrite the previous copy
|
||||
name: artifacts-dist-manifest
|
||||
path: dist-manifest.json
|
||||
|
||||
custom-publish-pypi:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
|
||||
uses: ./.github/workflows/publish-pypi.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
# publish jobs get escalated permissions
|
||||
permissions:
|
||||
"id-token": "write"
|
||||
"packages": "write"
|
||||
|
||||
custom-publish-wasm:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
|
||||
uses: ./.github/workflows/publish-wasm.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
# publish jobs get escalated permissions
|
||||
permissions:
|
||||
"contents": "read"
|
||||
"id-token": "write"
|
||||
"packages": "write"
|
||||
|
||||
# Create a GitHub Release while uploading all files to it
|
||||
announce:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
- custom-publish-pypi
|
||||
- custom-publish-wasm
|
||||
# use "always() && ..." to allow us to wait for all publish jobs while
|
||||
# still allowing individual publish jobs to skip themselves (for prereleases).
|
||||
# "host" however must run to completion, no skipping allowed!
|
||||
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
|
||||
runs-on: "ubuntu-20.04"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
# Create a GitHub Release while uploading all files to it
|
||||
- name: "Download GitHub Artifacts"
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: artifacts
|
||||
merge-multiple: true
|
||||
- name: Cleanup
|
||||
run: |
|
||||
# Remove the granular manifests
|
||||
rm -f artifacts/*-dist-manifest.json
|
||||
- name: Create GitHub Release
|
||||
env:
|
||||
PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"
|
||||
ANNOUNCEMENT_TITLE: "${{ fromJson(needs.host.outputs.val).announcement_title }}"
|
||||
ANNOUNCEMENT_BODY: "${{ fromJson(needs.host.outputs.val).announcement_github_body }}"
|
||||
RELEASE_COMMIT: "${{ github.sha }}"
|
||||
run: |
|
||||
# Write and read notes from a file to avoid quoting breaking things
|
||||
echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt
|
||||
|
||||
gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*
|
||||
|
||||
custom-notify-dependents:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/notify-dependents.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-publish-docs:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/publish-docs.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-publish-playground:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/publish-playground.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
80  .github/workflows/sync_typeshed.yaml  (vendored, Normal file)
@@ -0,0 +1,80 @@
|
||||
name: Sync typeshed
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
# Run on the 1st and the 15th of every month:
|
||||
- cron: "0 0 1,15 * *"
|
||||
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
|
||||
jobs:
|
||||
sync:
|
||||
name: Sync typeshed
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
# Don't run the cron job on forks:
|
||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
name: Checkout Ruff
|
||||
with:
|
||||
path: ruff
|
||||
- uses: actions/checkout@v4
|
||||
name: Checkout typeshed
|
||||
with:
|
||||
repository: python/typeshed
|
||||
path: typeshed
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config --global user.name typeshedbot
|
||||
git config --global user.email '<>'
|
||||
- name: Sync typeshed
|
||||
id: sync
|
||||
run: |
|
||||
rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed
|
||||
mkdir ruff/crates/red_knot_python_semantic/vendor/typeshed
|
||||
cp typeshed/README.md ruff/crates/red_knot_python_semantic/vendor/typeshed
|
||||
cp typeshed/LICENSE ruff/crates/red_knot_python_semantic/vendor/typeshed
|
||||
cp -r typeshed/stdlib ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib
|
||||
rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib/@tests
|
||||
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt
|
||||
- name: Commit the changes
|
||||
id: commit
|
||||
if: ${{ steps.sync.outcome == 'success' }}
|
||||
run: |
|
||||
cd ruff
|
||||
git checkout -b typeshedbot/sync-typeshed
|
||||
git add .
|
||||
git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
|
||||
- name: Create a PR
|
||||
if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
|
||||
run: |
|
||||
cd ruff
|
||||
git push --force origin typeshedbot/sync-typeshed
|
||||
gh pr list --repo $GITHUB_REPOSITORY --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is an existing PR
|
||||
gh pr create --title "Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "internal"
|
||||
|
||||
create-issue-on-failure:
|
||||
name: Create an issue if the typeshed sync failed
|
||||
runs-on: ubuntu-latest
|
||||
needs: [sync]
|
||||
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
await github.rest.issues.create({
|
||||
owner: "astral-sh",
|
||||
repo: "ruff",
|
||||
title: `Automated typeshed sync failed on ${new Date().toDateString()}`,
|
||||
body: "Runs are listed here: https://github.com/astral-sh/ruff/actions/workflows/sync_typeshed.yaml",
|
||||
})
|
||||
8  .gitignore  (vendored)
@@ -21,6 +21,14 @@ flamegraph.svg
|
||||
# `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
|
||||
/target*
|
||||
|
||||
# samply profiles
|
||||
profile.json
|
||||
|
||||
# tracing-flame traces
|
||||
tracing.folded
|
||||
tracing-flamechart.svg
|
||||
tracing-flamegraph.svg
|
||||
|
||||
###
|
||||
# Rust.gitignore
|
||||
###
|
||||
.pre-commit-config.yaml
@@ -2,7 +2,8 @@ fail_fast: true
|
||||
|
||||
exclude: |
|
||||
(?x)^(
|
||||
crates/red_knot/vendor/.*|
|
||||
crates/red_knot_python_semantic/vendor/.*|
|
||||
crates/red_knot_workspace/resources/.*|
|
||||
crates/ruff_linter/resources/.*|
|
||||
crates/ruff_linter/src/rules/.*/snapshots/.*|
|
||||
crates/ruff/resources/.*|
|
||||
@@ -14,7 +15,7 @@ exclude: |
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/abravalheri/validate-pyproject
|
||||
- rev: v0.17
+ rev: v0.18
|
||||
hooks:
|
||||
- id: validate-pyproject
|
||||
|
||||
@@ -32,7 +33,7 @@ repos:
|
||||
)$
|
||||
|
||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||
- rev: v0.40.0
+ rev: v0.41.0
|
||||
hooks:
|
||||
- id: markdownlint-fix
|
||||
exclude: |
|
||||
@@ -42,7 +43,7 @@ repos:
|
||||
)$
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
- rev: v1.21.0
+ rev: v1.23.6
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
@@ -56,18 +57,13 @@ repos:
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
- rev: v0.4.4
+ rev: v0.5.6
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
types_or: [python, pyi]
|
||||
require_serial: true
|
||||
exclude: |
|
||||
(?x)^(
|
||||
crates/ruff_linter/resources/.*|
|
||||
crates/ruff_python_formatter/resources/.*
|
||||
)$
|
||||
|
||||
# Prettier
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
|
||||
2  .prettierignore  (Normal file)
@@ -0,0 +1,2 @@
|
||||
# Auto-generated by `cargo-dist`.
|
||||
.github/workflows/release.yml
|
||||
5  .vscode/extensions.json  (vendored, Normal file)
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"rust-lang.rust-analyzer"
|
||||
]
|
||||
}
|
||||
6  .vscode/settings.json  (vendored, Normal file)
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"rust-analyzer.check.extraArgs": [
|
||||
"--all-features"
|
||||
],
|
||||
"rust-analyzer.check.command": "clippy",
|
||||
}
|
BREAKING_CHANGES.md
@@ -1,5 +1,12 @@
|
||||
# Breaking Changes
|
||||
|
||||
## 0.5.0
|
||||
|
||||
- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
|
||||
- Selecting `ALL` now excludes deprecated rules (see the configuration sketch after this list)
|
||||
- The released archives now include an extra level of nesting, which can be removed with `--strip-components=1` when untarring.
|
||||
- The release artifact's file name no longer includes the version tag. This enables users to install via `/latest` URLs on GitHub.
|
||||
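As an illustration of the `ALL` change above, a minimal configuration sketch; the settings are hypothetical project configuration, not taken from this repository:

```toml
[tool.ruff.lint]
# As of 0.5.0, selecting ALL no longer enables deprecated rules.
select = ["ALL"]
```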
|
||||
## 0.3.0
|
||||
|
||||
### Ruff 2024.2 style
|
||||
|
||||
617  CHANGELOG.md
@@ -1,5 +1,618 @@
|
||||
# Changelog
|
||||
|
||||
## 0.5.7
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-comprehensions`\] Account for list and set comprehensions in `unnecessary-literal-within-tuple-call` (`C409`) ([#12657](https://github.com/astral-sh/ruff/pull/12657))
|
||||
- \[`flake8-pyi`\] Add autofix for `future-annotations-in-stub` (`PYI044`) ([#12676](https://github.com/astral-sh/ruff/pull/12676))
|
||||
- \[`flake8-return`\] Avoid syntax error when auto-fixing `RET505` with mixed indentation (space and tabs) ([#12740](https://github.com/astral-sh/ruff/pull/12740))
|
||||
- \[`pydoclint`\] Add `docstring-missing-yields` (`DOC402`) and `docstring-extraneous-yields` (`DOC403`) ([#12538](https://github.com/astral-sh/ruff/pull/12538))
|
||||
- \[`pydoclint`\] Avoid `DOC201` if docstring begins with "Return", "Returns", "Yield", or "Yields" ([#12675](https://github.com/astral-sh/ruff/pull/12675))
|
||||
- \[`pydoclint`\] Deduplicate collected exceptions after traversing function bodies (`DOC501`) ([#12642](https://github.com/astral-sh/ruff/pull/12642))
|
||||
- \[`pydoclint`\] Ignore `DOC` errors for stub functions ([#12651](https://github.com/astral-sh/ruff/pull/12651))
|
||||
- \[`pydoclint`\] Teach rules to understand reraised exceptions as being explicitly raised (`DOC501`, `DOC502`) ([#12639](https://github.com/astral-sh/ruff/pull/12639))
|
||||
- \[`ruff`\] Implement `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#12480](https://github.com/astral-sh/ruff/pull/12480))
|
||||
- \[`ruff`\] Mark `RUF023` fix as unsafe if `__slots__` is not a set and the binding is used elsewhere ([#12692](https://github.com/astral-sh/ruff/pull/12692))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`refurb`\] Add autofix for `implicit-cwd` (`FURB177`) ([#12708](https://github.com/astral-sh/ruff/pull/12708))
|
||||
- \[`ruff`\] Add autofix for `zip-instead-of-pairwise` (`RUF007`) ([#12663](https://github.com/astral-sh/ruff/pull/12663))
|
||||
- \[`tryceratops`\] Add `BaseException` to `raise-vanilla-class` rule (`TRY002`) ([#12620](https://github.com/astral-sh/ruff/pull/12620))
|
||||
|
||||
### Server
|
||||
|
||||
- Ignore non-file workspace URL; Ruff will display a warning notification in this case ([#12725](https://github.com/astral-sh/ruff/pull/12725))
|
||||
|
||||
### CLI
|
||||
|
||||
- Fix cache invalidation for nested `pyproject.toml` files ([#12727](https://github.com/astral-sh/ruff/pull/12727))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-async`\] Fix false positives with multiple `async with` items (`ASYNC100`) ([#12643](https://github.com/astral-sh/ruff/pull/12643))
|
||||
- \[`flake8-bandit`\] Avoid false-positives for list concatenations in SQL construction (`S608`) ([#12720](https://github.com/astral-sh/ruff/pull/12720))
|
||||
- \[`flake8-bugbear`\] Treat `return` as equivalent to `break` (`B909`) ([#12646](https://github.com/astral-sh/ruff/pull/12646))
|
||||
- \[`flake8-comprehensions`\] Set comprehensions are no longer flagged as a violation for `sum` in `unnecessary-comprehension-in-call` (`C419`) ([#12691](https://github.com/astral-sh/ruff/pull/12691))
|
||||
- \[`flake8-simplify`\] Parenthesize conditions based on precedence when merging if arms (`SIM114`) ([#12737](https://github.com/astral-sh/ruff/pull/12737))
|
||||
- \[`pydoclint`\] Try both 'Raises' section styles when convention is unspecified (`DOC501`) ([#12649](https://github.com/astral-sh/ruff/pull/12649))
|
||||
|
||||
## 0.5.6
|
||||
|
||||
Ruff 0.5.6 automatically enables linting and formatting of notebooks in *preview mode*.
|
||||
You can opt-out of this behavior by adding `*.ipynb` to the `extend-exclude` setting.
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
extend-exclude = ["*.ipynb"]
|
||||
```
|
||||
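Conversely, if you are not running in preview mode, notebooks can still be opted in explicitly. A minimal sketch, assuming the standard `extend-include` setting (not part of the original release notes):

```toml
[tool.ruff]
# Opt notebooks in explicitly (the inverse of the exclusion above).
extend-include = ["*.ipynb"]
```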
|
||||
### Preview features
|
||||
|
||||
- Enable notebooks by default in preview mode ([#12621](https://github.com/astral-sh/ruff/pull/12621))
|
||||
- \[`flake8-builtins`\] Implement import, lambda, and module shadowing ([#12546](https://github.com/astral-sh/ruff/pull/12546))
|
||||
- \[`pydoclint`\] Add `docstring-missing-returns` (`DOC201`) and `docstring-extraneous-returns` (`DOC202`) ([#12485](https://github.com/astral-sh/ruff/pull/12485))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-return`\] Exempt cached properties and other property-like decorators from explicit return rule (`RET501`) ([#12563](https://github.com/astral-sh/ruff/pull/12563))
|
||||
|
||||
### Server
|
||||
|
||||
- Make server panic hook more error resilient ([#12610](https://github.com/astral-sh/ruff/pull/12610))
|
||||
- Use `$/logTrace` for server trace logs in Zed and VS Code ([#12564](https://github.com/astral-sh/ruff/pull/12564))
|
||||
- Keep track of deleted cells for reorder change request ([#12575](https://github.com/astral-sh/ruff/pull/12575))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`flake8-implicit-str-concat`\] Always allow explicit multi-line concatenations when implicit concatenations are banned ([#12532](https://github.com/astral-sh/ruff/pull/12532))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-async`\] Avoid flagging `asyncio.timeout`s as unused when the context manager includes `asyncio.TaskGroup` ([#12605](https://github.com/astral-sh/ruff/pull/12605))
|
||||
- \[`flake8-slots`\] Avoid recommending `__slots__` for classes that inherit from more than just `namedtuple` ([#12531](https://github.com/astral-sh/ruff/pull/12531))
|
||||
- \[`isort`\] Avoid marking required imports as unused ([#12537](https://github.com/astral-sh/ruff/pull/12537))
|
||||
- \[`isort`\] Preserve trailing inline comments on import-from statements ([#12498](https://github.com/astral-sh/ruff/pull/12498))
|
||||
- \[`pycodestyle`\] Add newlines before comments (`E305`) ([#12606](https://github.com/astral-sh/ruff/pull/12606))
|
||||
- \[`pycodestyle`\] Don't attach comments with mismatched indents ([#12604](https://github.com/astral-sh/ruff/pull/12604))
|
||||
- \[`pyflakes`\] Fix preview-mode bugs in `F401` when attempting to autofix unused first-party submodule imports in an `__init__.py` file ([#12569](https://github.com/astral-sh/ruff/pull/12569))
|
||||
- \[`pylint`\] Respect start index in `unnecessary-list-index-lookup` ([#12603](https://github.com/astral-sh/ruff/pull/12603))
|
||||
- \[`pyupgrade`\] Avoid recommending no-argument super in `slots=True` dataclasses ([#12530](https://github.com/astral-sh/ruff/pull/12530))
|
||||
- \[`pyupgrade`\] Use colon rather than dot formatting for integer-only types ([#12534](https://github.com/astral-sh/ruff/pull/12534))
|
||||
- Fix NFKC normalization bug when removing unused imports ([#12571](https://github.com/astral-sh/ruff/pull/12571))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Consider more stdlib decorators to be property-like ([#12583](https://github.com/astral-sh/ruff/pull/12583))
|
||||
- Improve handling of metaclasses in various linter rules ([#12579](https://github.com/astral-sh/ruff/pull/12579))
|
||||
- Improve consistency between linter rules in determining whether a function is a property ([#12581](https://github.com/astral-sh/ruff/pull/12581))
|
||||
|
||||
## 0.5.5
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`fastapi`\] Implement `fastapi-redundant-response-model` (`FAST001`) and `fastapi-non-annotated-dependency`(`FAST002`) ([#11579](https://github.com/astral-sh/ruff/pull/11579))
|
||||
- \[`pydoclint`\] Implement `docstring-missing-exception` (`DOC501`) and `docstring-extraneous-exception` (`DOC502`) ([#11471](https://github.com/astral-sh/ruff/pull/11471))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`numpy`\] Fix NumPy 2.0 rule for `np.alltrue` and `np.sometrue` ([#12473](https://github.com/astral-sh/ruff/pull/12473))
|
||||
- \[`numpy`\] Ignore `NPY201` inside `except` blocks for compatibility with older numpy versions ([#12490](https://github.com/astral-sh/ruff/pull/12490))
|
||||
- \[`pep8-naming`\] Avoid applying `ignore-names` to `self` and `cls` function names (`N804`, `N805`) ([#12497](https://github.com/astral-sh/ruff/pull/12497)) (see the sketch below)
|
||||
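For context, a minimal sketch of the `ignore-names` setting referenced above; the listed names are hypothetical:

```toml
[tool.ruff.lint.pep8-naming]
# As of 0.5.5, entries here are no longer applied to the `self`/`cls`
# checks (`N804`, `N805`).
ignore-names = ["setUp", "tearDown"]
```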
|
||||
### Formatter
|
||||
|
||||
- Fix incorrect placement of leading function comment with type params ([#12447](https://github.com/astral-sh/ruff/pull/12447))
|
||||
|
||||
### Server
|
||||
|
||||
- Do not bail code action resolution when a quick fix is requested ([#12462](https://github.com/astral-sh/ruff/pull/12462))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix `Ord` implementation of `cmp_fix` ([#12471](https://github.com/astral-sh/ruff/pull/12471))
|
||||
- Raise syntax error for unparenthesized generator expression in multi-argument call ([#12445](https://github.com/astral-sh/ruff/pull/12445))
|
||||
- \[`pydoclint`\] Fix panic in `DOC501` reported in [#12428](https://github.com/astral-sh/ruff/pull/12428) ([#12435](https://github.com/astral-sh/ruff/pull/12435))
|
||||
- \[`flake8-bugbear`\] Allow singleton tuples with starred expressions in `B013` ([#12484](https://github.com/astral-sh/ruff/pull/12484))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add Eglot setup guide for Emacs editor ([#12426](https://github.com/astral-sh/ruff/pull/12426))
|
||||
- Add note about the breaking change in `nvim-lspconfig` ([#12507](https://github.com/astral-sh/ruff/pull/12507))
|
||||
- Add note to include notebook files for native server ([#12449](https://github.com/astral-sh/ruff/pull/12449))
|
||||
- Add setup docs for Zed editor ([#12501](https://github.com/astral-sh/ruff/pull/12501))
|
||||
|
||||
## 0.5.4
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`ruff`\] Rename `RUF007` to `zip-instead-of-pairwise` ([#12399](https://github.com/astral-sh/ruff/pull/12399))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-builtins`\] Avoid shadowing diagnostics for `@override` methods ([#12415](https://github.com/astral-sh/ruff/pull/12415))
|
||||
- \[`flake8-comprehensions`\] Insert parentheses for multi-argument generators ([#12422](https://github.com/astral-sh/ruff/pull/12422))
|
||||
- \[`pydocstyle`\] Handle escaped docstrings within docstring (`D301`) ([#12192](https://github.com/astral-sh/ruff/pull/12192))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix GitHub link to Neovim setup ([#12410](https://github.com/astral-sh/ruff/pull/12410))
|
||||
- Fix `output-format` default in settings reference ([#12409](https://github.com/astral-sh/ruff/pull/12409))
|
||||
|
||||
## 0.5.3
|
||||
|
||||
**Ruff 0.5.3 marks the stable release of the Ruff language server and introduces revamped
|
||||
[documentation](https://docs.astral.sh/ruff/editors), including [setup guides for your editor of
|
||||
choice](https://docs.astral.sh/ruff/editors/setup) and [the language server
|
||||
itself](https://docs.astral.sh/ruff/editors/settings)**.
|
||||
|
||||
### Preview features
|
||||
|
||||
- Formatter: Insert empty line between suite and alternative branch after function/class definition ([#12294](https://github.com/astral-sh/ruff/pull/12294))
|
||||
- \[`pyupgrade`\] Implement `unnecessary-default-type-args` (`UP043`) ([#12371](https://github.com/astral-sh/ruff/pull/12371))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-bugbear`\] Detect enumerate iterations in `loop-iterator-mutation` (`B909`) ([#12366](https://github.com/astral-sh/ruff/pull/12366))
|
||||
- \[`flake8-bugbear`\] Remove `discard`, `remove`, and `pop` allowance for `loop-iterator-mutation` (`B909`) ([#12365](https://github.com/astral-sh/ruff/pull/12365))
|
||||
- \[`pylint`\] Allow `repeated-equality-comparison` for mixed operations (`PLR1714`) ([#12369](https://github.com/astral-sh/ruff/pull/12369))
|
||||
- \[`pylint`\] Ignore `self` and `cls` when counting arguments (`PLR0913`) ([#12367](https://github.com/astral-sh/ruff/pull/12367))
|
||||
- \[`pylint`\] Use UTF-8 as default encoding in `unspecified-encoding` fix (`PLW1514`) ([#12370](https://github.com/astral-sh/ruff/pull/12370))
|
||||
|
||||
### Server
|
||||
|
||||
- Build settings index in parallel for the native server ([#12299](https://github.com/astral-sh/ruff/pull/12299))
|
||||
- Use fallback settings when indexing the project ([#12362](https://github.com/astral-sh/ruff/pull/12362))
|
||||
- Consider `--preview` flag for `server` subcommand for the linter and formatter ([#12208](https://github.com/astral-sh/ruff/pull/12208))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-comprehensions`\] Allow additional arguments for `sum` and `max` comprehensions (`C419`) ([#12364](https://github.com/astral-sh/ruff/pull/12364))
|
||||
- \[`pylint`\] Avoid dropping extra boolean operations in `repeated-equality-comparison` (`PLR1714`) ([#12368](https://github.com/astral-sh/ruff/pull/12368))
|
||||
- \[`pylint`\] Consider expression before statement when determining binding kind (`PLR1704`) ([#12346](https://github.com/astral-sh/ruff/pull/12346))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add docs for Ruff language server ([#12344](https://github.com/astral-sh/ruff/pull/12344))
|
||||
- Migrate to standalone docs repo ([#12341](https://github.com/astral-sh/ruff/pull/12341))
|
||||
- Update versioning policy for editor integration ([#12375](https://github.com/astral-sh/ruff/pull/12375))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Publish Wasm API to npm ([#12317](https://github.com/astral-sh/ruff/pull/12317))
|
||||
|
||||
## 0.5.2
|
||||
|
||||
### Preview features
|
||||
|
||||
- Use `space` separator before parenthesized expressions in comprehensions with leading comments ([#12282](https://github.com/astral-sh/ruff/pull/12282))
|
||||
- \[`flake8-async`\] Update `ASYNC100` to include `anyio` and `asyncio` ([#12221](https://github.com/astral-sh/ruff/pull/12221))
|
||||
- \[`flake8-async`\] Update `ASYNC109` to include `anyio` and `asyncio` ([#12236](https://github.com/astral-sh/ruff/pull/12236))
|
||||
- \[`flake8-async`\] Update `ASYNC110` to include `anyio` and `asyncio` ([#12261](https://github.com/astral-sh/ruff/pull/12261))
|
||||
- \[`flake8-async`\] Update `ASYNC115` to include `anyio` and `asyncio` ([#12262](https://github.com/astral-sh/ruff/pull/12262))
|
||||
- \[`flake8-async`\] Update `ASYNC116` to include `anyio` and `asyncio` ([#12266](https://github.com/astral-sh/ruff/pull/12266))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-return`\] Exempt properties from explicit return rule (`RET501`) ([#12243](https://github.com/astral-sh/ruff/pull/12243))
|
||||
- \[`numpy`\] Add `np.NAN`-to-`np.nan` diagnostic ([#12292](https://github.com/astral-sh/ruff/pull/12292))
|
||||
- \[`refurb`\] Make `list-reverse-copy` an unsafe fix ([#12303](https://github.com/astral-sh/ruff/pull/12303))
|
||||
|
||||
### Server
|
||||
|
||||
- Consider `include` and `extend-include` settings in native server ([#12252](https://github.com/astral-sh/ruff/pull/12252))
|
||||
- Include nested configurations in settings reloading ([#12253](https://github.com/astral-sh/ruff/pull/12253))
|
||||
|
||||
### CLI
|
||||
|
||||
- Omit code frames for fixes with empty ranges ([#12304](https://github.com/astral-sh/ruff/pull/12304))
|
||||
- Warn about formatter incompatibility for `D203` ([#12238](https://github.com/astral-sh/ruff/pull/12238))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Make cache-write failures non-fatal on Windows ([#12302](https://github.com/astral-sh/ruff/pull/12302))
|
||||
- Treat `not` operations as boolean tests ([#12301](https://github.com/astral-sh/ruff/pull/12301))
|
||||
- \[`flake8-bandit`\] Avoid `S310` violations for HTTP-safe f-strings ([#12305](https://github.com/astral-sh/ruff/pull/12305))
|
||||
- \[`flake8-bandit`\] Support explicit string concatenations in S310 HTTP detection ([#12315](https://github.com/astral-sh/ruff/pull/12315))
|
||||
- \[`flake8-bandit`\] Fix `S113` false positive for `httpx` without a `timeout` argument ([#12213](https://github.com/astral-sh/ruff/pull/12213))
|
||||
- \[`pycodestyle`\] Remove "non-obvious" allowance for E721 ([#12300](https://github.com/astral-sh/ruff/pull/12300))
|
||||
- \[`pyflakes`\] Consider `with` blocks as single-item branches for redefinition analysis ([#12311](https://github.com/astral-sh/ruff/pull/12311))
|
||||
- \[`refurb`\] Restrict forwarding for `newline` argument in `open()` calls to Python versions >= 3.10 ([#12244](https://github.com/astral-sh/ruff/pull/12244))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Update help and documentation to reflect `--output-format full` default ([#12248](https://github.com/astral-sh/ruff/pull/12248))
|
||||
|
||||
### Performance
|
||||
|
||||
- Use more threads when discovering Python files ([#12258](https://github.com/astral-sh/ruff/pull/12258))
|
||||
|
||||
## 0.5.1
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bugbear`\] Implement mutable-contextvar-default (B039) ([#12113](https://github.com/astral-sh/ruff/pull/12113))
|
||||
- \[`pycodestyle`\] Whitespace after decorator (`E204`) ([#12140](https://github.com/astral-sh/ruff/pull/12140))
|
||||
- \[`pytest`\] Reverse `PT001` and `PT0023` defaults ([#12106](https://github.com/astral-sh/ruff/pull/12106))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Enable token-based rules on source with syntax errors ([#11950](https://github.com/astral-sh/ruff/pull/11950))
|
||||
- \[`flake8-bandit`\] Detect `httpx` for `S113` ([#12174](https://github.com/astral-sh/ruff/pull/12174))
|
||||
- \[`numpy`\] Update `NPY201` to include exception deprecations ([#12065](https://github.com/astral-sh/ruff/pull/12065))
|
||||
- \[`pylint`\] Generate autofix for `duplicate-bases` (`PLE0241`) ([#12105](https://github.com/astral-sh/ruff/pull/12105))
|
||||
|
||||
### Server
|
||||
|
||||
- Avoid syntax error notification for source code actions ([#12148](https://github.com/astral-sh/ruff/pull/12148))
|
||||
- Consider the content of the new cells during notebook sync ([#12203](https://github.com/astral-sh/ruff/pull/12203))
|
||||
- Fix replacement edit range computation ([#12171](https://github.com/astral-sh/ruff/pull/12171))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Disable auto-fix when source has syntax errors ([#12134](https://github.com/astral-sh/ruff/pull/12134))
|
||||
- Fix cache key collisions for paths with separators ([#12159](https://github.com/astral-sh/ruff/pull/12159))
|
||||
- Make `requires-python` inference robust to `==` ([#12091](https://github.com/astral-sh/ruff/pull/12091))
|
||||
- Use char-wise width instead of `str`-width ([#12135](https://github.com/astral-sh/ruff/pull/12135))
|
||||
- \[`pycodestyle`\] Avoid `E275` if keyword followed by comma ([#12136](https://github.com/astral-sh/ruff/pull/12136))
|
||||
- \[`pycodestyle`\] Avoid `E275` if keyword is followed by a semicolon ([#12095](https://github.com/astral-sh/ruff/pull/12095))
|
||||
- \[`pylint`\] Skip [dummy variables](https://docs.astral.sh/ruff/settings/#lint_dummy-variable-rgx) for `PLR1704` ([#12190](https://github.com/astral-sh/ruff/pull/12190))
|
||||
|
||||
### Performance
|
||||
|
||||
- Remove allocation in `parse_identifier` ([#12103](https://github.com/astral-sh/ruff/pull/12103))
|
||||
- Use `CompactString` for `Identifier` AST node ([#12101](https://github.com/astral-sh/ruff/pull/12101))
|
||||
|
||||
## 0.5.0
|
||||
|
||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.5.0) for a migration guide and overview of the changes!
|
||||
|
||||
### Breaking changes
|
||||
|
||||
See also the "Remapped rules" section, which may result in disabled rules.
|
||||
|
||||
- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
|
||||
- Selecting `ALL` now excludes deprecated rules
|
||||
- The released archives now include an extra level of nesting, which can be removed with `--strip-components=1` when untarring.
|
||||
- The release artifact's file name no longer includes the version tag. This enables users to install via `/latest` URLs on GitHub.
|
||||
- The diagnostic ranges for some `flake8-bandit` rules were modified ([#10667](https://github.com/astral-sh/ruff/pull/10667)).
|
||||
|
||||
### Deprecations
|
||||
|
||||
The following rules are now deprecated:
|
||||
|
||||
- [`syntax-error`](https://docs.astral.sh/ruff/rules/syntax-error/) (`E999`): Syntax errors are now always shown
|
||||
|
||||
### Remapped rules
|
||||
|
||||
The following rules have been remapped to new rule codes (a configuration sketch follows the list):
|
||||
|
||||
- [`blocking-http-call-in-async-function`](https://docs.astral.sh/ruff/rules/blocking-http-call-in-async-function/): `ASYNC100` to `ASYNC210`
|
||||
- [`open-sleep-or-subprocess-in-async-function`](https://docs.astral.sh/ruff/rules/open-sleep-or-subprocess-in-async-function/): `ASYNC101` split into `ASYNC220`, `ASYNC221`, `ASYNC230`, and `ASYNC251`
|
||||
- [`blocking-os-call-in-async-function`](https://docs.astral.sh/ruff/rules/blocking-os-call-in-async-function/): `ASYNC102` has been merged into `ASYNC220` and `ASYNC221`
|
||||
- [`trio-timeout-without-await`](https://docs.astral.sh/ruff/rules/trio-timeout-without-await/): `TRIO100` to `ASYNC100`
|
||||
- [`trio-sync-call`](https://docs.astral.sh/ruff/rules/trio-sync-call/): `TRIO105` to `ASYNC105`
|
||||
- [`trio-async-function-with-timeout`](https://docs.astral.sh/ruff/rules/trio-async-function-with-timeout/): `TRIO109` to `ASYNC109`
|
||||
- [`trio-unneeded-sleep`](https://docs.astral.sh/ruff/rules/trio-unneeded-sleep/): `TRIO110` to `ASYNC110`
|
||||
- [`trio-zero-sleep-call`](https://docs.astral.sh/ruff/rules/trio-zero-sleep-call/): `TRIO115` to `ASYNC115`
|
||||
- [`repeated-isinstance-calls`](https://docs.astral.sh/ruff/rules/repeated-isinstance-calls/): `PLR1701` to `SIM101`
|
||||
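For illustration, a hedged sketch of what the remapping can look like in a project configuration; the selected codes are hypothetical examples drawn from the list above:

```toml
[tool.ruff.lint]
# Before 0.5.0 this selection might have read:
#   select = ["TRIO100", "TRIO105", "PLR1701"]
# With the remapped codes it becomes:
select = ["ASYNC100", "ASYNC105", "SIM101"]
```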
|
||||
### Stabilization
|
||||
|
||||
The following rules have been stabilized and are no longer in preview:
|
||||
|
||||
- [`mutable-fromkeys-value`](https://docs.astral.sh/ruff/rules/mutable-fromkeys-value/) (`RUF024`)
|
||||
- [`default-factory-kwarg`](https://docs.astral.sh/ruff/rules/default-factory-kwarg/) (`RUF026`)
|
||||
- [`django-extra`](https://docs.astral.sh/ruff/rules/django-extra/) (`S610`)
|
||||
- [`manual-dict-comprehension`](https://docs.astral.sh/ruff/rules/manual-dict-comprehension/) (`PERF403`)
|
||||
- [`print-empty-string`](https://docs.astral.sh/ruff/rules/print-empty-string/) (`FURB105`)
|
||||
- [`readlines-in-for`](https://docs.astral.sh/ruff/rules/readlines-in-for/) (`FURB129`)
|
||||
- [`if-expr-min-max`](https://docs.astral.sh/ruff/rules/if-expr-min-max/) (`FURB136`)
|
||||
- [`bit-count`](https://docs.astral.sh/ruff/rules/bit-count/) (`FURB161`)
|
||||
- [`redundant-log-base`](https://docs.astral.sh/ruff/rules/redundant-log-base/) (`FURB163`)
|
||||
- [`regex-flag-alias`](https://docs.astral.sh/ruff/rules/regex-flag-alias/) (`FURB167`)
|
||||
- [`isinstance-type-none`](https://docs.astral.sh/ruff/rules/isinstance-type-none/) (`FURB168`)
|
||||
- [`type-none-comparison`](https://docs.astral.sh/ruff/rules/type-none-comparison/) (`FURB169`)
|
||||
- [`implicit-cwd`](https://docs.astral.sh/ruff/rules/implicit-cwd/) (`FURB177`)
|
||||
- [`hashlib-digest-hex`](https://docs.astral.sh/ruff/rules/hashlib-digest-hex/) (`FURB181`)
|
||||
- [`list-reverse-copy`](https://docs.astral.sh/ruff/rules/list-reverse-copy/) (`FURB187`)
|
||||
- [`bad-open-mode`](https://docs.astral.sh/ruff/rules/bad-open-mode/) (`PLW1501`)
|
||||
- [`empty-comment`](https://docs.astral.sh/ruff/rules/empty-comment/) (`PLR2044`)
|
||||
- [`global-at-module-level`](https://docs.astral.sh/ruff/rules/global-at-module-level/) (`PLW0604`)
|
||||
- [`misplaced-bare-raise`](https://docs.astral.sh/ruff/rules/misplaced-bare-raise/) (`PLE0744`)
|
||||
- [`non-ascii-import-name`](https://docs.astral.sh/ruff/rules/non-ascii-import-name/) (`PLC2403`)
|
||||
- [`non-ascii-name`](https://docs.astral.sh/ruff/rules/non-ascii-name/) (`PLC2401`)
|
||||
- [`nonlocal-and-global`](https://docs.astral.sh/ruff/rules/nonlocal-and-global/) (`PLE0115`)
|
||||
- [`potential-index-error`](https://docs.astral.sh/ruff/rules/potential-index-error/) (`PLE0643`)
|
||||
- [`redeclared-assigned-name`](https://docs.astral.sh/ruff/rules/redeclared-assigned-name/) (`PLW0128`)
|
||||
- [`redefined-argument-from-local`](https://docs.astral.sh/ruff/rules/redefined-argument-from-local/) (`PLR1704`)
|
||||
- [`repeated-keyword-argument`](https://docs.astral.sh/ruff/rules/repeated-keyword-argument/) (`PLE1132`)
|
||||
- [`super-without-brackets`](https://docs.astral.sh/ruff/rules/super-without-brackets/) (`PLW0245`)
|
||||
- [`unnecessary-list-index-lookup`](https://docs.astral.sh/ruff/rules/unnecessary-list-index-lookup/) (`PLR1736`)
|
||||
- [`useless-exception-statement`](https://docs.astral.sh/ruff/rules/useless-exception-statement/) (`PLW0133`)
|
||||
- [`useless-with-lock`](https://docs.astral.sh/ruff/rules/useless-with-lock/) (`PLW2101`)
|
||||
|
||||
The following behaviors have been stabilized:
|
||||
|
||||
- [`is-literal`](https://docs.astral.sh/ruff/rules/is-literal/) (`F632`) now warns for identity checks against list, set or dictionary literals
|
||||
- [`needless-bool`](https://docs.astral.sh/ruff/rules/needless-bool/) (`SIM103`) now detects `if` expressions with implicit `else` branches
|
||||
- [`module-import-not-at-top-of-file`](https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file/) (`E402`) now allows `os.environ` modifications between import statements
|
||||
- [`type-comparison`](https://docs.astral.sh/ruff/rules/type-comparison/) (`E721`) now allows idioms such as `type(x) is int`
|
||||
- [`yoda-condition`](https://docs.astral.sh/ruff/rules/yoda-conditions/) (`SIM300`) now flags a wider range of expressions
|
||||
|
||||
### Removals
|
||||
|
||||
The following deprecated settings have been removed:
|
||||
|
||||
- `output-format=text`; use `output-format=concise` or `output-format=full`
|
||||
- `tab-size`; use `indent-width`
|
||||
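A minimal migration sketch for the removed settings; the values are illustrative defaults, not project-specific recommendations:

```toml
[tool.ruff]
# Replaces the removed `output-format = "text"` setting:
output-format = "full"
# Replaces the removed `tab-size` setting:
indent-width = 4
```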
|
||||
The following deprecated CLI options have been removed:
|
||||
|
||||
- `--show-source`; use `--output-format=full`
|
||||
- `--no-show-source`; use `--output-format=concise`
|
||||
|
||||
The following deprecated CLI commands have been removed:
|
||||
|
||||
- `ruff <path>`; use `ruff check <path>`
|
||||
- `ruff --clean`; use `ruff clean`
|
||||
- `ruff --generate-shell-completion`; use `ruff generate-shell-completion`
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`ruff`\] Add `assert-with-print-message` rule ([#11981](https://github.com/astral-sh/ruff/pull/11981))
|
||||
|
||||
### CLI
|
||||
|
||||
- Use rule name rather than message in `--statistics` ([#11697](https://github.com/astral-sh/ruff/pull/11697))
|
||||
- Use the output format `full` by default ([#12010](https://github.com/astral-sh/ruff/pull/12010))
|
||||
- Don't log syntax errors to the console ([#11902](https://github.com/astral-sh/ruff/pull/11902))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`ruff`\] Fix false positives if `gettext` is imported using an alias (`RUF027`) ([#12025](https://github.com/astral-sh/ruff/pull/12025))
|
||||
- \[`numpy`\] Update `trapz` and `in1d` deprecation (`NPY201`) ([#11948](https://github.com/astral-sh/ruff/pull/11948))
|
||||
- \[`flake8-bandit`\] Modify diagnostic ranges for shell-related rules ([#10667](https://github.com/astral-sh/ruff/pull/10667))
|
||||
|
||||
### Server
|
||||
|
||||
- Closing an untitled, unsaved notebook document no longer throws an error ([#11942](https://github.com/astral-sh/ruff/pull/11942))
|
||||
- Support the usage of tildes and environment variables in `logFile` ([#11945](https://github.com/astral-sh/ruff/pull/11945))
|
||||
- Add option to configure whether to show syntax errors ([#12059](https://github.com/astral-sh/ruff/pull/12059))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`pycodestyle`\] Avoid `E203` for f-string debug expression ([#12024](https://github.com/astral-sh/ruff/pull/12024))
|
||||
- \[`pep8-naming`\] Match import-name ignores against both name and alias (`N812`, `N817`) ([#12033](https://github.com/astral-sh/ruff/pull/12033))
|
||||
- \[`pyflakes`\] Detect assignments that shadow definitions (`F811`) ([#11961](https://github.com/astral-sh/ruff/pull/11961))
|
||||
|
||||
### Parser
|
||||
|
||||
- Emit a syntax error for an empty type parameter list ([#12030](https://github.com/astral-sh/ruff/pull/12030))
|
||||
- Avoid consuming the newline for unterminated strings ([#12067](https://github.com/astral-sh/ruff/pull/12067))
|
||||
- Do not include the newline in the unterminated string range ([#12017](https://github.com/astral-sh/ruff/pull/12017))
|
||||
- Use the correct range to highlight line continuation errors ([#12016](https://github.com/astral-sh/ruff/pull/12016))
|
||||
- Consider 2-character EOL before line continuations ([#12035](https://github.com/astral-sh/ruff/pull/12035))
|
||||
- Consider line continuation character for re-lexing ([#12008](https://github.com/astral-sh/ruff/pull/12008))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Upgrade the Unicode table used for measuring the line-length ([#11194](https://github.com/astral-sh/ruff/pull/11194))
|
||||
- Remove the deprecation error message for the nursery selector ([#10172](https://github.com/astral-sh/ruff/pull/10172))
|
||||
|
||||
## 0.4.10
|
||||
|
||||
### Parser
|
||||
|
||||
- Implement re-lexing logic for better error recovery ([#11845](https://github.com/astral-sh/ruff/pull/11845))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-copyright`\] Update `CPY001` to check the first 4096 bytes instead of 1024 ([#11927](https://github.com/astral-sh/ruff/pull/11927))
|
||||
- \[`pycodestyle`\] Update `E999` to show all syntax errors instead of just the first one ([#11900](https://github.com/astral-sh/ruff/pull/11900))
|
||||
|
||||
### Server
|
||||
|
||||
- Add tracing setup guide to Helix documentation ([#11883](https://github.com/astral-sh/ruff/pull/11883))
|
||||
- Add tracing setup guide to Neovim documentation ([#11884](https://github.com/astral-sh/ruff/pull/11884))
|
||||
- Defer notebook cell deletion to avoid an error message ([#11864](https://github.com/astral-sh/ruff/pull/11864))
|
||||
|
||||
### Security
|
||||
|
||||
- Guard against malicious ecosystem comment artifacts ([#11879](https://github.com/astral-sh/ruff/pull/11879))
|
||||
|
||||
## 0.4.9
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pylint`\] Implement `consider-dict-items` (`C0206`) ([#11688](https://github.com/astral-sh/ruff/pull/11688))
|
||||
- \[`refurb`\] Implement `repeated-global` (`FURB154`) ([#11187](https://github.com/astral-sh/ruff/pull/11187))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`pycodestyle`\] Adapt fix for `E203` to work identical to `ruff format` ([#10999](https://github.com/astral-sh/ruff/pull/10999))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Fix formatter instability for lines only consisting of zero-width characters ([#11748](https://github.com/astral-sh/ruff/pull/11748))
|
||||
|
||||
### Server
|
||||
|
||||
- Add supported commands in server capabilities ([#11850](https://github.com/astral-sh/ruff/pull/11850))
|
||||
- Use real file path when available in `ruff server` ([#11800](https://github.com/astral-sh/ruff/pull/11800))
|
||||
- Improve error message when a command is run on an unavailable document ([#11823](https://github.com/astral-sh/ruff/pull/11823))
|
||||
- Introduce the `ruff.printDebugInformation` command ([#11831](https://github.com/astral-sh/ruff/pull/11831))
|
||||
- Tracing system now respects log level and trace level, with options to log to a file ([#11747](https://github.com/astral-sh/ruff/pull/11747))
|
||||
|
||||
### CLI
|
||||
|
||||
- Handle non-printable characters in diff view ([#11687](https://github.com/astral-sh/ruff/pull/11687))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`refurb`\] Avoid suggesting starmap when arguments are used outside call (`FURB140`) ([#11830](https://github.com/astral-sh/ruff/pull/11830))
|
||||
- \[`flake8-bugbear`\] Avoid panic in `B909` when checking large loop blocks ([#11772](https://github.com/astral-sh/ruff/pull/11772))
|
||||
- \[`refurb`\] Fix misbehavior of `operator.itemgetter` when getter param is a tuple (`FURB118`) ([#11774](https://github.com/astral-sh/ruff/pull/11774))
|
||||
|
||||
## 0.4.8
|
||||
|
||||
### Performance
|
||||
|
||||
- Linter performance has been improved by around 10% on some microbenchmarks by refactoring the lexer and parser to maintain synchronicity between them ([#11457](https://github.com/astral-sh/ruff/pull/11457))
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bugbear`\] Implement `return-in-generator` (`B901`) ([#11644](https://github.com/astral-sh/ruff/pull/11644))
|
||||
- \[`flake8-pyi`\] Implement `PYI063` ([#11699](https://github.com/astral-sh/ruff/pull/11699))
|
||||
- \[`pygrep_hooks`\] Check blanket ignores via file-level pragmas (`PGH004`) ([#11540](https://github.com/astral-sh/ruff/pull/11540))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`pyupgrade`\] Update `UP035` for Python 3.13 and the latest version of `typing_extensions` ([#11693](https://github.com/astral-sh/ruff/pull/11693))
|
||||
- \[`numpy`\] Update `NPY001` rule for NumPy 2.0 ([#11735](https://github.com/astral-sh/ruff/pull/11735))
|
||||
|
||||
### Server
|
||||
|
||||
- Formatting a document with syntax problems no longer spams a visible error popup ([#11745](https://github.com/astral-sh/ruff/pull/11745))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add RDJson support for `--output-format` flag ([#11682](https://github.com/astral-sh/ruff/pull/11682))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`pyupgrade`\] Write empty string in lieu of panic when fixing `UP032` ([#11696](https://github.com/astral-sh/ruff/pull/11696))
|
||||
- \[`flake8-simplify`\] Simplify double negatives in `SIM103` ([#11684](https://github.com/astral-sh/ruff/pull/11684))
|
||||
- Ensure the expression generator adds a newline before `type` statements ([#11720](https://github.com/astral-sh/ruff/pull/11720))
|
||||
- Respect per-file ignores for blanket and redirected noqa rules ([#11728](https://github.com/astral-sh/ruff/pull/11728))
|
||||
|
||||
## 0.4.7
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-pyi`\] Implement `PYI064` ([#11325](https://github.com/astral-sh/ruff/pull/11325))
|
||||
- \[`flake8-pyi`\] Implement `PYI066` ([#11541](https://github.com/astral-sh/ruff/pull/11541))
|
||||
- \[`flake8-pyi`\] Implement `PYI057` ([#11486](https://github.com/astral-sh/ruff/pull/11486))
|
||||
- \[`pyflakes`\] Enable `F822` in `__init__.py` files by default ([#11370](https://github.com/astral-sh/ruff/pull/11370))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Fix incorrect placement of trailing stub function comments ([#11632](https://github.com/astral-sh/ruff/pull/11632))
|
||||
|
||||
### Server
|
||||
|
||||
- Respect file exclusions in `ruff server` ([#11590](https://github.com/astral-sh/ruff/pull/11590))
|
||||
- Add support for documents that do not exist on disk ([#11588](https://github.com/astral-sh/ruff/pull/11588))
|
||||
- Add Vim and Kate setup guide for `ruff server` ([#11615](https://github.com/astral-sh/ruff/pull/11615))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid removing newlines between docstring headers and rST blocks ([#11609](https://github.com/astral-sh/ruff/pull/11609))
|
||||
- Infer indentation with imports when logical indent is absent ([#11608](https://github.com/astral-sh/ruff/pull/11608))
|
||||
- Use char index rather than position for indent slice ([#11645](https://github.com/astral-sh/ruff/pull/11645))
|
||||
- \[`flake8-comprehension`\] Strip parentheses around generators in `C400` ([#11607](https://github.com/astral-sh/ruff/pull/11607))
|
||||
- Mark `repeated-isinstance-calls` as unsafe on Python 3.10 and later ([#11622](https://github.com/astral-sh/ruff/pull/11622))
|
||||
|
||||
## 0.4.6

### Breaking changes

- Use project-relative paths when calculating GitLab fingerprints ([#11532](https://github.com/astral-sh/ruff/pull/11532))
- Bump minimum supported Windows version to Windows 10 ([#11613](https://github.com/astral-sh/ruff/pull/11613))

### Preview features

- \[`flake8-async`\] Sleep with >24 hour interval should usually sleep forever (`ASYNC116`) ([#11498](https://github.com/astral-sh/ruff/pull/11498))

### Rule changes

- \[`numpy`\] Add missing functions to NumPy 2.0 migration rule ([#11528](https://github.com/astral-sh/ruff/pull/11528))
- \[`mccabe`\] Consider irrefutable pattern similar to `if .. else` for `C901` ([#11565](https://github.com/astral-sh/ruff/pull/11565))
- Consider `match`-`case` statements for `C901`, `PLR0912`, and `PLR0915` ([#11521](https://github.com/astral-sh/ruff/pull/11521))
- Remove empty strings when converting to f-string (`UP032`) ([#11524](https://github.com/astral-sh/ruff/pull/11524))
- \[`flake8-bandit`\] `request-without-timeout` should warn for `requests.request` ([#11548](https://github.com/astral-sh/ruff/pull/11548))
- \[`flake8-self`\] Ignore sunder accesses in `flake8-self` rules ([#11546](https://github.com/astral-sh/ruff/pull/11546))
- \[`pyupgrade`\] Lint for `TypeAliasType` usages (`UP040`) ([#11530](https://github.com/astral-sh/ruff/pull/11530))

### Server

- Respect excludes in `ruff server` configuration discovery ([#11551](https://github.com/astral-sh/ruff/pull/11551))
- Use default settings if initialization options is empty or not provided ([#11566](https://github.com/astral-sh/ruff/pull/11566))
- `ruff server` correctly treats `.pyi` files as stub files ([#11535](https://github.com/astral-sh/ruff/pull/11535))
- `ruff server` searches for configuration in parent directories ([#11537](https://github.com/astral-sh/ruff/pull/11537))
- `ruff server`: An empty code action filter no longer returns notebook source actions ([#11526](https://github.com/astral-sh/ruff/pull/11526))

### Bug fixes

- \[`flake8-logging-format`\] Fix autofix title in `logging-warn` (`G010`) ([#11514](https://github.com/astral-sh/ruff/pull/11514))
- \[`refurb`\] Avoid recommending `operator.itemgetter` with dependence on lambda arguments ([#11574](https://github.com/astral-sh/ruff/pull/11574))
- \[`flake8-simplify`\] Avoid recommending context manager in `__enter__` implementations ([#11575](https://github.com/astral-sh/ruff/pull/11575))
- Create intermediary directories for `--output-file` ([#11550](https://github.com/astral-sh/ruff/pull/11550))
- Propagate reads on global variables ([#11584](https://github.com/astral-sh/ruff/pull/11584))
- Treat all `singledispatch` arguments as runtime-required ([#11523](https://github.com/astral-sh/ruff/pull/11523))

## 0.4.5

### Ruff's language server is now in Beta

`v0.4.5` marks the official Beta release of `ruff server`, an integrated language server built into Ruff.
`ruff server` supports the same feature set as `ruff-lsp`, powering linting, formatting, and
code fixes in Ruff's editor integrations -- but with superior performance and
no installation required. We'd love your feedback!

You can enable `ruff server` in the [VS Code extension](https://github.com/astral-sh/ruff-vscode?tab=readme-ov-file#enabling-the-rust-based-language-server) today.

To read more about this exciting milestone, check out our [blog post](https://astral.sh/blog/ruff-v0.4.5)!

### Rule changes

- \[`flake8-future-annotations`\] Reword `future-rewritable-type-annotation` (`FA100`) message ([#11381](https://github.com/astral-sh/ruff/pull/11381))
- \[`isort`\] Expanded the set of standard-library modules to include `_string`, etc. ([#11374](https://github.com/astral-sh/ruff/pull/11374))
- \[`pycodestyle`\] Consider soft keywords for `E27` rules ([#11446](https://github.com/astral-sh/ruff/pull/11446))
- \[`pyflakes`\] Recommend adding unused import bindings to `__all__` ([#11314](https://github.com/astral-sh/ruff/pull/11314))
- \[`pyflakes`\] Update documentation and deprecate `ignore_init_module_imports` ([#11436](https://github.com/astral-sh/ruff/pull/11436))
- \[`pyupgrade`\] Mark quotes as unnecessary for non-evaluated annotations ([#11485](https://github.com/astral-sh/ruff/pull/11485))

### Formatter

- Avoid multiline quotes warning with `quote-style = preserve` ([#11490](https://github.com/astral-sh/ruff/pull/11490))

### Server

- Support Jupyter Notebook files ([#11206](https://github.com/astral-sh/ruff/pull/11206))
- Support `noqa` comment code actions ([#11276](https://github.com/astral-sh/ruff/pull/11276))
- Fix automatic configuration reloading ([#11492](https://github.com/astral-sh/ruff/pull/11492))
- Fix several issues with configuration in Neovim and Helix ([#11497](https://github.com/astral-sh/ruff/pull/11497))

### CLI

- Add `--output-format` as a CLI option for `ruff config` ([#11438](https://github.com/astral-sh/ruff/pull/11438))

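A rough sketch of the new flag in use follows; the setting name and the `json` value are assumptions for illustration, so check `ruff config --help` for the formats that are actually accepted:

```shell
# Describe a setting, requesting machine-readable output
# (setting name and format value are illustrative assumptions).
ruff config line-length --output-format=json
```
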
### Bug fixes

- Avoid `PLE0237` for property with setter ([#11377](https://github.com/astral-sh/ruff/pull/11377))
- Avoid `TCH005` for `if` stmt with `elif`/`else` block ([#11376](https://github.com/astral-sh/ruff/pull/11376))
- Avoid flagging `__future__` annotations as required for non-evaluated type annotations ([#11414](https://github.com/astral-sh/ruff/pull/11414))
- Check for ruff executable in 'bin' directory as installed by 'pip install --target'. ([#11450](https://github.com/astral-sh/ruff/pull/11450))
- Sort edits prior to deduplicating in quotation fix ([#11452](https://github.com/astral-sh/ruff/pull/11452))
- Treat escaped newline as valid sequence ([#11465](https://github.com/astral-sh/ruff/pull/11465))
- \[`flake8-pie`\] Preserve parentheses in `unnecessary-dict-kwargs` ([#11372](https://github.com/astral-sh/ruff/pull/11372))
- \[`pylint`\] Ignore `__slots__` with dynamic values ([#11488](https://github.com/astral-sh/ruff/pull/11488))
- \[`pylint`\] Remove `try` body from branch counting ([#11487](https://github.com/astral-sh/ruff/pull/11487))
- \[`refurb`\] Respect operator precedence in `FURB110` ([#11464](https://github.com/astral-sh/ruff/pull/11464))

### Documentation

- Add `--preview` to the README ([#11395](https://github.com/astral-sh/ruff/pull/11395))
- Add Python 3.13 to list of allowed Python versions ([#11411](https://github.com/astral-sh/ruff/pull/11411))
- Simplify Neovim setup documentation ([#11489](https://github.com/astral-sh/ruff/pull/11489))
- Update CONTRIBUTING.md to reflect the new parser ([#11434](https://github.com/astral-sh/ruff/pull/11434))
- Update server documentation with new migration guide ([#11499](https://github.com/astral-sh/ruff/pull/11499))
- \[`pycodestyle`\] Clarify motivation for `E713` and `E714` ([#11483](https://github.com/astral-sh/ruff/pull/11483))
- \[`pyflakes`\] Update docs to describe WAI behavior (F541) ([#11362](https://github.com/astral-sh/ruff/pull/11362))
- \[`pylint`\] Clearly indicate what is counted as a branch ([#11423](https://github.com/astral-sh/ruff/pull/11423))

## 0.4.4

### Preview features

@@ -74,6 +687,10 @@

- Avoid allocations for isort module names ([#11251](https://github.com/astral-sh/ruff/pull/11251))
- Build a separate ARM wheel for macOS ([#11149](https://github.com/astral-sh/ruff/pull/11149))

### Windows

- Increase the minimum requirement to Windows 10.

## 0.4.2

### Rule changes

@@ -101,6 +101,8 @@ pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting,

These checks will run on GitHub Actions when you open your pull request, but running them locally
will save you time and expedite the merge process.

If you're using VS Code, you can also install the recommended [rust-analyzer](https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer) extension to get these checks while editing.

Note that many code changes also require updating the snapshot tests, which is done interactively
after running `cargo test` like so:

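A rough sketch of that interactive flow, assuming the `cargo-insta` CLI is installed (the repository's snapshot tests use the `insta` crate), looks like this:

```shell
# Run the tests; changed snapshots are recorded as pending...
cargo test
# ...then review and accept (or reject) the pending snapshots interactively.
cargo insta review
```
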
@@ -278,7 +280,7 @@ These represent, respectively: the schema used to parse the `pyproject.toml` fil

intermediate representation; and the final, internal representation used to power Ruff.

To add a new configuration option, you'll likely want to modify these latter few files (along with
`args.rs`, if appropriate). If you want to pattern-match against an existing example, grep for
`dummy_variable_rgx`, which defines a regular expression to match against acceptable unused
variables (e.g., `_`).

@@ -331,7 +333,7 @@ even patch releases may contain [non-backwards-compatible changes](https://semve

### Creating a new release

1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
1. Run `./scripts/release.sh`; this command will:
    - Generate a temporary virtual environment with `rooster`
    - Generate a changelog entry in `CHANGELOG.md`
    - Update versions in `pyproject.toml` and `Cargo.toml`

@@ -344,22 +346,21 @@ even patch releases may contain [non-backwards-compatible changes](https://semve

1. Run `cargo check`. This should update the lock file with new versions.
1. Create a pull request with the changelog and version updates
1. Merge the PR
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:
    - The new version number (without starting `v`)
    - The commit hash of the merged release pull request on `main`
1. The release workflow will do the following:
    1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
       uploaded anything, you can restart after pushing a fix. If you just need to rerun the build,
       make sure you're [re-running all the failed
       jobs](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-failed-jobs-in-a-workflow) and not just a single failed job.
    1. Upload to PyPI.
    1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
       after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
    1. Attach artifacts to draft GitHub release
    1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
       downstream jobs manually if needed.
1. Publish the GitHub release
    1. Open the draft release in the GitHub release section
    1. Copy the changelog for the release into the GitHub release
        - See previous releases for formatting of section headers
1. Verify the GitHub release:
    1. The Changelog should match the content of `CHANGELOG.md`
    1. Append the contributors from the `bump.sh` script
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
    1. One can determine if an update is needed when

@@ -637,11 +638,11 @@ Otherwise, follow the instructions from the linux section.

`cargo dev` is a shortcut for `cargo run --package ruff_dev --bin ruff_dev`. You can run some useful
utils with it:

- `cargo dev print-ast <file>`: Print the AST of a python file using Ruff's
  [Python parser](https://github.com/astral-sh/ruff/tree/main/crates/ruff_python_parser).
  For `if True: pass # comment`, you can see the syntax tree, the byte offsets for start and
  stop of each node and also how the `:` token, the comment and whitespace are not represented
  anymore:

```text
[
@@ -904,7 +905,7 @@ There are three ways in which an import can be categorized as "first-party":

   package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
   automatically. This check is as simple as comparing the first segment of the current file's
   module path to the first segment of the import.
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
   sets the directories to scan when identifying first-party imports. The algorithm is
   straightforward: given an import, like `import foo`, iterate over the directories enumerated in
   the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a

Cargo.lock (generated; full diff not shown)

Cargo.toml

@@ -4,7 +4,7 @@ resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
edition = "2021"
|
||||
rust-version = "1.71"
|
||||
rust-version = "1.76"
|
||||
homepage = "https://docs.astral.sh/ruff"
|
||||
documentation = "https://docs.astral.sh/ruff"
|
||||
repository = "https://github.com/astral-sh/ruff"
|
||||
@@ -12,6 +12,33 @@ authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
||||
license = "MIT"
|
||||
|
||||
[workspace.dependencies]
|
||||
ruff = { path = "crates/ruff" }
|
||||
ruff_cache = { path = "crates/ruff_cache" }
|
||||
ruff_db = { path = "crates/ruff_db" }
|
||||
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
|
||||
ruff_formatter = { path = "crates/ruff_formatter" }
|
||||
ruff_index = { path = "crates/ruff_index" }
|
||||
ruff_linter = { path = "crates/ruff_linter" }
|
||||
ruff_macros = { path = "crates/ruff_macros" }
|
||||
ruff_notebook = { path = "crates/ruff_notebook" }
|
||||
ruff_python_ast = { path = "crates/ruff_python_ast" }
|
||||
ruff_python_codegen = { path = "crates/ruff_python_codegen" }
|
||||
ruff_python_formatter = { path = "crates/ruff_python_formatter" }
|
||||
ruff_python_index = { path = "crates/ruff_python_index" }
|
||||
ruff_python_literal = { path = "crates/ruff_python_literal" }
|
||||
ruff_python_parser = { path = "crates/ruff_python_parser" }
|
||||
ruff_python_semantic = { path = "crates/ruff_python_semantic" }
|
||||
ruff_python_stdlib = { path = "crates/ruff_python_stdlib" }
|
||||
ruff_python_trivia = { path = "crates/ruff_python_trivia" }
|
||||
ruff_server = { path = "crates/ruff_server" }
|
||||
ruff_source_file = { path = "crates/ruff_source_file" }
|
||||
ruff_text_size = { path = "crates/ruff_text_size" }
|
||||
ruff_workspace = { path = "crates/ruff_workspace" }
|
||||
|
||||
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
|
||||
red_knot_server = { path = "crates/red_knot_server" }
|
||||
red_knot_workspace = { path = "crates/red_knot_workspace" }
|
||||
|
||||
aho-corasick = { version = "1.1.3" }
|
||||
annotate-snippets = { version = "0.9.2", features = ["color"] }
|
||||
anyhow = { version = "1.0.80" }
|
||||
@@ -20,56 +47,57 @@ bincode = { version = "1.3.3" }
|
||||
bitflags = { version = "2.5.0" }
|
||||
bstr = { version = "1.9.1" }
|
||||
cachedir = { version = "0.3.1" }
|
||||
camino = { version = "1.1.7" }
|
||||
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
|
||||
clap = { version = "4.5.3", features = ["derive"] }
|
||||
clap_complete_command = { version = "0.5.1" }
|
||||
clap_complete_command = { version = "0.6.0" }
|
||||
clearscreen = { version = "3.0.0" }
|
||||
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
|
||||
colored = { version = "2.1.0" }
|
||||
console_error_panic_hook = { version = "0.1.7" }
|
||||
console_log = { version = "1.0.0" }
|
||||
countme = { version = "3.0.1" }
|
||||
compact_str = "0.8.0"
|
||||
criterion = { version = "0.5.1", default-features = false }
|
||||
crossbeam = { version = "0.8.4" }
|
||||
dashmap = { version = "5.5.3" }
|
||||
dirs = { version = "5.0.0" }
|
||||
dashmap = { version = "6.0.1" }
|
||||
drop_bomb = { version = "0.1.5" }
|
||||
env_logger = { version = "0.11.0" }
|
||||
etcetera = { version = "0.8.0" }
|
||||
fern = { version = "0.6.1" }
|
||||
filetime = { version = "0.2.23" }
|
||||
glob = { version = "0.3.1" }
|
||||
globset = { version = "0.4.14" }
|
||||
hashbrown = "0.14.3"
|
||||
hexf-parse = { version = "0.2.1" }
|
||||
ignore = { version = "0.4.22" }
|
||||
imara-diff = { version = "0.1.5" }
|
||||
imperative = { version = "1.0.4" }
|
||||
indexmap = { version = "2.2.6" }
|
||||
indicatif = { version = "0.17.8" }
|
||||
indoc = { version = "2.0.4" }
|
||||
insta = { version = "1.35.1", feature = ["filters", "glob"] }
|
||||
insta = { version = "1.35.1" }
|
||||
insta-cmd = { version = "0.6.0" }
|
||||
is-macro = { version = "0.3.5" }
|
||||
is-wsl = { version = "0.4.0" }
|
||||
itertools = { version = "0.12.1" }
|
||||
itertools = { version = "0.13.0" }
|
||||
js-sys = { version = "0.3.69" }
|
||||
jod-thread = { version = "0.1.2" }
|
||||
lexical-parse-float = { version = "0.8.0", features = ["format"] }
|
||||
libc = { version = "0.2.153" }
|
||||
libcst = { version = "1.1.0", default-features = false }
|
||||
log = { version = "0.4.17" }
|
||||
lsp-server = { version = "0.7.6" }
|
||||
lsp-types = { version = "0.95.0", features = ["proposed"] }
|
||||
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
|
||||
"proposed",
|
||||
] }
|
||||
matchit = { version = "0.8.1" }
|
||||
memchr = { version = "2.7.1" }
|
||||
mimalloc = { version = "0.1.39" }
|
||||
natord = { version = "1.0.9" }
|
||||
notify = { version = "6.1.1" }
|
||||
once_cell = { version = "1.19.0" }
|
||||
ordermap = { version = "0.5.0" }
|
||||
path-absolutize = { version = "3.1.1" }
|
||||
path-slash = { version = "0.2.1" }
|
||||
pathdiff = { version = "0.2.1" }
|
||||
parking_lot = "0.12.1"
|
||||
pep440_rs = { version = "0.6.0", features = ["serde"] }
|
||||
pretty_assertions = "1.3.0"
|
||||
proc-macro2 = { version = "1.0.79" }
|
||||
@@ -79,15 +107,17 @@ quote = { version = "1.0.23" }
|
||||
rand = { version = "0.8.5" }
|
||||
rayon = { version = "1.10.0" }
|
||||
regex = { version = "1.10.2" }
|
||||
result-like = { version = "0.5.0" }
|
||||
rustc-hash = { version = "1.1.0" }
|
||||
rustc-hash = { version = "2.0.0" }
|
||||
salsa = { git = "https://github.com/MichaReiser/salsa.git", tag = "red-knot-0.0.1" }
|
||||
schemars = { version = "0.8.16" }
|
||||
seahash = { version = "4.1.0" }
|
||||
serde = { version = "1.0.197", features = ["derive"] }
|
||||
serde-wasm-bindgen = { version = "0.6.4" }
|
||||
serde_json = { version = "1.0.113" }
|
||||
serde_test = { version = "1.0.152" }
|
||||
serde_with = { version = "3.6.0", default-features = false, features = ["macros"] }
|
||||
serde_with = { version = "3.6.0", default-features = false, features = [
|
||||
"macros",
|
||||
] }
|
||||
shellexpand = { version = "3.0.0" }
|
||||
similar = { version = "2.4.0", features = ["inline"] }
|
||||
smallvec = { version = "1.13.2" }
|
||||
@@ -98,12 +128,13 @@ syn = { version = "2.0.55" }
|
||||
tempfile = { version = "3.9.0" }
|
||||
test-case = { version = "3.3.1" }
|
||||
thiserror = { version = "1.0.58" }
|
||||
tikv-jemallocator = { version = "0.5.0" }
|
||||
tikv-jemallocator = { version = "0.6.0" }
|
||||
toml = { version = "0.8.11" }
|
||||
tracing = { version = "0.1.40" }
|
||||
tracing-flame = { version = "0.2.0" }
|
||||
tracing-indicatif = { version = "0.3.6" }
|
||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
||||
tracing-tree = { version = "0.3.0" }
|
||||
tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt"] }
|
||||
tracing-tree = { version = "0.4.0" }
|
||||
typed-arena = { version = "2.0.2" }
|
||||
unic-ucd-category = { version = "0.9" }
|
||||
unicode-ident = { version = "1.0.12" }
|
||||
@@ -112,15 +143,22 @@ unicode_names2 = { version = "1.2.2" }
|
||||
unicode-normalization = { version = "0.1.23" }
|
||||
ureq = { version = "2.9.6" }
|
||||
url = { version = "2.5.0" }
|
||||
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
|
||||
uuid = { version = "1.6.1", features = [
|
||||
"v4",
|
||||
"fast-rng",
|
||||
"macro-diagnostics",
|
||||
"js",
|
||||
] }
|
||||
walkdir = { version = "2.3.2" }
|
||||
wasm-bindgen = { version = "0.2.92" }
|
||||
wasm-bindgen-test = { version = "0.3.42" }
|
||||
wild = { version = "2" }
|
||||
zip = { version = "0.6.6", default-features = false }
|
||||
|
||||
[workspace.lints.rust]
|
||||
unsafe_code = "warn"
|
||||
unreachable_pub = "warn"
|
||||
unexpected_cfgs = { level = "warn", check-cfg = ["cfg(fuzzing)", "cfg(codspeed)"] }
|
||||
|
||||
[workspace.lints.clippy]
|
||||
pedantic = { level = "warn", priority = -2 }
|
||||
@@ -184,3 +222,62 @@ opt-level = 1
|
||||
[profile.profiling]
|
||||
inherits = "release"
|
||||
debug = 1
|
||||
|
||||
# The profile that 'cargo dist' will build with.
|
||||
[profile.dist]
|
||||
inherits = "release"
|
||||
|
||||
# Config for 'cargo dist'
|
||||
[workspace.metadata.dist]
|
||||
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
|
||||
cargo-dist-version = "0.18.0"
|
||||
# CI backends to support
|
||||
ci = ["github"]
|
||||
# The installers to generate for each app
|
||||
installers = ["shell", "powershell"]
|
||||
# The archive format to use for windows builds (defaults .zip)
|
||||
windows-archive = ".zip"
|
||||
# The archive format to use for non-windows builds (defaults .tar.xz)
|
||||
unix-archive = ".tar.gz"
|
||||
# Target platforms to build apps for (Rust target-triple syntax)
|
||||
targets = [
|
||||
"aarch64-apple-darwin",
|
||||
"aarch64-pc-windows-msvc",
|
||||
"aarch64-unknown-linux-gnu",
|
||||
"aarch64-unknown-linux-musl",
|
||||
"arm-unknown-linux-musleabihf",
|
||||
"armv7-unknown-linux-gnueabihf",
|
||||
"armv7-unknown-linux-musleabihf",
|
||||
"i686-pc-windows-msvc",
|
||||
"i686-unknown-linux-gnu",
|
||||
"i686-unknown-linux-musl",
|
||||
"powerpc64-unknown-linux-gnu",
|
||||
"powerpc64le-unknown-linux-gnu",
|
||||
"s390x-unknown-linux-gnu",
|
||||
"x86_64-apple-darwin",
|
||||
"x86_64-pc-windows-msvc",
|
||||
"x86_64-unknown-linux-gnu",
|
||||
"x86_64-unknown-linux-musl",
|
||||
]
|
||||
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
|
||||
auto-includes = false
|
||||
# Whether cargo-dist should create a GitHub Release or use an existing draft
|
||||
create-release = true
|
||||
# Publish jobs to run in CI
|
||||
pr-run-mode = "skip"
|
||||
# Whether CI should trigger releases with dispatches instead of tag pushes
|
||||
dispatch-releases = true
|
||||
# The stage during which the GitHub Release should be created
|
||||
github-release = "announce"
|
||||
# Whether CI should include auto-generated code to build local artifacts
|
||||
build-local-artifacts = false
|
||||
# Local artifacts jobs to run in CI
|
||||
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
|
||||
# Publish jobs to run in CI
|
||||
publish-jobs = ["./publish-pypi", "./publish-wasm"]
|
||||
# Announcement jobs to run in CI
|
||||
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
|
||||
# Custom permissions for GitHub Jobs
|
||||
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
|
||||
# Whether to install an updater program
|
||||
install-updater = false
|
||||
|
||||
LICENSE

@@ -1371,3 +1371,28 @@ are:
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
"""
|
||||
|
||||
- pydoclint, licensed as follows:
|
||||
"""
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2023 jsh9
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
"""
|
||||
|
||||
README.md

@@ -28,15 +28,15 @@ An extremely fast Python linter and code formatter, written in Rust.

- ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.13 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
  of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)

Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.

@@ -119,7 +119,25 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).

Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:

```shell
# With pip.
pip install ruff

# With pipx.
pipx install ruff
```

Starting with version `0.5.0`, Ruff can be installed with our standalone installers:

```shell
# On macOS and Linux.
curl -LsSf https://astral.sh/ruff/install.sh | sh

# On Windows.
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.5.7/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.5.7/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),

@@ -152,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff

```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.5.7
  hooks:
    # Run the linter.
    - id: ruff
@@ -161,8 +179,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
    - id: ruff-format
```

Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).

Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/chartboost/ruff-action):

@@ -266,6 +283,11 @@ The remaining configuration options can be provided through a catch-all `--confi

ruff check --config "lint.per-file-ignores = {'some_file.py' = ['F841']}"
```

To opt in to the latest lint rules, formatter style changes, interface updates, and more, enable
[preview mode](https://docs.astral.sh/ruff/rules/) by setting `preview = true` in your configuration
file or passing `--preview` on the command line. Preview mode enables a collection of unstable
features that may change prior to stabilization.

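For example, preview mode can be enabled for a single run straight from the command line (the project path here is illustrative):

```shell
# Run the linter with preview-only rules and behaviors enabled for this invocation.
ruff check --preview .
```
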
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.

@@ -329,7 +351,6 @@ quality tools, including:

- [flake8-super](https://pypi.org/project/flake8-super/)
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
- [flake8-todos](https://pypi.org/project/flake8-todos/)
- [flake8-trio](https://pypi.org/project/flake8-trio/)
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/astral-sh/ruff/issues/2102))

@@ -402,7 +423,9 @@ Ruff is used by a number of major open-source projects and companies, including:

- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
- [Dify](https://github.com/langgenius/dify)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Godot](https://github.com/godotengine/godot)
- [Gradio](https://github.com/gradio-app/gradio)
- [Great Expectations](https://github.com/great-expectations/great_expectations)
- [HTTPX](https://github.com/encode/httpx)

@@ -411,6 +434,7 @@ Ruff is used by a number of major open-source projects and companies, including:

- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
  [Datasets](https://github.com/huggingface/datasets),
  [Diffusers](https://github.com/huggingface/diffusers))
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)

@@ -428,6 +452,7 @@ Ruff is used by a number of major open-source projects and companies, including:

- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python))
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
- [Mypy](https://github.com/python/mypy)
- [Nautobot](https://github.com/nautobot/nautobot)
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
- [Neon](https://github.com/neondatabase/neon)
- [Nokia](https://nokia.com/)

@@ -435,6 +460,7 @@ Ruff is used by a number of major open-source projects and companies, including:

- [NumPyro](https://github.com/pyro-ppl/numpyro)
- [ONNX](https://github.com/onnx/onnx)
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
- [Open Wine Components](https://github.com/Open-Wine-Components/umu-launcher)
- [PDM](https://github.com/pdm-project/pdm)
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
- [Pandas](https://github.com/pandas-dev/pandas)

@@ -462,6 +488,7 @@ Ruff is used by a number of major open-source projects and companies, including:

- [Sphinx](https://github.com/sphinx-doc/sphinx)
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
- [Starlette](https://github.com/encode/starlette)
- [Streamlit](https://github.com/streamlit/streamlit)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Vega-Altair](https://github.com/altair-viz/altair)
- WordPress ([Openverse](https://github.com/WordPress/openverse))

@@ -1,6 +1,6 @@

[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["crates/red_knot_python_semantic/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`

@@ -16,5 +16,6 @@ jod = "jod" # e.g., `jod-thread`

[default]
extend-ignore-re = [
    # Line ignore with trailing "spellchecker:disable-line"
    "(?Rm)^.*#\\s*spellchecker:disable-line$",
    "LICENSEs",
]

@@ -10,4 +10,12 @@ doc-valid-idents = [

    "SCREAMING_SNAKE_CASE",
    "SQLAlchemy",
    "StackOverflow",
    "PyCharm",
]

ignore-interior-mutability = [
    # Interned is read-only. The wrapped `Rc` never gets updated.
    "ruff_formatter::format_element::Interned",
    # The expression is read-only.
    "ruff_python_ast::hashable::HashableExpr",
]

@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "red_knot"
|
||||
version = "0.1.0"
|
||||
version = "0.0.0"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
homepage.workspace = true
|
||||
@@ -12,30 +12,28 @@ license.workspace = true
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
ruff_python_parser = { path = "../ruff_python_parser" }
|
||||
ruff_python_ast = { path = "../ruff_python_ast" }
|
||||
ruff_text_size = { path = "../ruff_text_size" }
|
||||
ruff_index = { path = "../ruff_index" }
|
||||
ruff_notebook = { path = "../ruff_notebook" }
|
||||
red_knot_python_semantic = { workspace = true }
|
||||
red_knot_workspace = { workspace = true }
|
||||
red_knot_server = { workspace = true }
|
||||
|
||||
ruff_db = { workspace = true, features = ["os", "cache"] }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
bitflags = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
clap = { workspace = true, features = ["wrap_help"] }
|
||||
colored = { workspace = true }
|
||||
countme = { workspace = true, features = ["enable"] }
|
||||
crossbeam = { workspace = true }
|
||||
ctrlc = { version = "3.4.4" }
|
||||
dashmap = { workspace = true }
|
||||
hashbrown = { workspace = true }
|
||||
indexmap = { workspace = true }
|
||||
notify = { workspace = true }
|
||||
parking_lot = { workspace = true }
|
||||
rayon = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
smol_str = { version = "0.2.1" }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
tracing = { workspace = true, features = ["release_max_level_debug"] }
|
||||
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
|
||||
tracing-flame = { workspace = true }
|
||||
tracing-tree = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
textwrap = { version = "0.16.1" }
|
||||
filetime = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
|
||||
[lints]
|
||||
|
||||
@@ -1,18 +0,0 @@

# Red Knot

The Red Knot crate contains code working towards multifile analysis, type inference and, ultimately, type-checking. It's very much a work in progress for now.

## Vendored types for the stdlib

Red Knot vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot/vendor/typeshed`. The file `crates/red_knot/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.

Updating the vendored stubs is currently done manually. On a Unix machine, follow these steps (assuming you have a typeshed clone in a `typeshed` directory and a Ruff clone in a `ruff` directory):

```shell
rm -rf ruff/crates/red_knot/vendor/typeshed
mkdir ruff/crates/red_knot/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot/vendor/typeshed/stdlib
git -C typeshed rev-parse HEAD > ruff/crates/red_knot/vendor/typeshed/source_commit.txt
```

crates/red_knot/docs/tracing-flamegraph.png (new binary file, 40 KiB; not shown)

crates/red_knot/docs/tracing.md (new file)

@@ -0,0 +1,103 @@

# Tracing

Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
Note that tracing messages with severity `debug` or greater are user-facing; they should be phrased accordingly.
Tracing spans are only shown when using `-vvv`.

## Verbosity levels

The CLI supports different verbosity levels; a sample invocation follows the list.

- default: Only show errors and warnings.
- `-v` activates `info!`: Show generally useful information such as paths of configuration files, the detected platform, etc. It isn't a lot of messages; it's the level you'd typically enable in CI by default (much like `cargo build` showing which packages are fresh).
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, you're logging everything. Most of it is wasted, it's really slow, and we dump e.g. the entire resolution graph. It is only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.

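For instance, a run at the `debug` level might look like the following sketch; the CLI shape mirrors the profiling example further below, and the target path is illustrative:

```shell
# Run red_knot with debug-level messages and timestamps enabled.
red_knot --current-directory=../test -vv
```
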
## `RED_KNOT_LOG`

By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

### Examples

#### Show all debug messages

Shows debug messages from all crates.

```bash
RED_KNOT_LOG=debug
```

#### Show salsa query execution messages

Show the salsa `execute: my_query` messages in addition to all red knot messages.

```bash
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
```

#### Show typing traces

Only show traces for the `red_knot_python_semantic::types` module.

```bash
RED_KNOT_LOG="red_knot_python_semantic::types"
```

Note: Ensure that you use `-vvv` to see tracing spans.

#### Show messages for a single file

Shows all messages that are inside of a span for a specific file.

```bash
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
```

**Note**: Tracing still shows all spans because tracing can't know at the time of entering the span
whether one of its children has the file `x.py`.

**Note**: Salsa currently logs entire memoized values. In our case, that includes the source text and the parsed AST.
This very quickly leads to extremely long outputs.

## Tracing and Salsa

Be mindful about using `tracing` in Salsa queries, especially when using `warn` or `error`, because it isn't guaranteed
that the query will execute after restoring from a persistent cache, in which case the user won't see the message.

For example, don't use `tracing` to show the user a message when generating a lint violation failed,
because the message would only be shown when linting the file the first time, but not on subsequent analysis
runs or when restoring from a persistent cache. This can be confusing for users because they
don't understand why a specific lint violation isn't raised. Instead, change your
query to return the failure as part of the query's result, or use a Salsa accumulator.

## Release builds

`trace!` events are removed in release builds.

## Profiling

Red Knot generates a folded stack trace to the current directory named `tracing.folded` when setting the environment variable `RED_KNOT_LOG_PROFILE` to `1` or `true`.

```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
```

You can convert the textual representation into a visual one using `inferno`.

```shell
cargo install inferno
```

```shell
# flamegraph
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg

# flamechart
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
```



See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.

@@ -1,415 +0,0 @@
|
||||
use std::any::type_name;
|
||||
use std::fmt::{Debug, Formatter};
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::marker::PhantomData;
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_index::{Idx, IndexVec};
|
||||
use ruff_python_ast::visitor::preorder;
|
||||
use ruff_python_ast::visitor::preorder::{PreorderVisitor, TraversalSignal};
|
||||
use ruff_python_ast::{
|
||||
AnyNodeRef, AstNode, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule,
|
||||
NodeKind, Parameter, Stmt, StmtAnnAssign, StmtAssign, StmtAugAssign, StmtClassDef,
|
||||
StmtFunctionDef, StmtGlobal, StmtImport, StmtImportFrom, StmtNonlocal, StmtTypeAlias,
|
||||
TypeParam, TypeParamParamSpec, TypeParamTypeVar, TypeParamTypeVarTuple, WithItem,
|
||||
};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
/// A type agnostic ID that uniquely identifies an AST node in a file.
|
||||
#[ruff_index::newtype_index]
|
||||
pub struct AstId;
|
||||
|
||||
/// A typed ID that uniquely identifies an AST node in a file.
|
||||
///
|
||||
/// This is different from [`AstId`] in that it is a combination of ID and the type of the node the ID identifies.
|
||||
/// Typing the ID prevents mixing IDs of different node types and allows to restrict the API to only accept
|
||||
/// nodes for which an ID has been created (not all AST nodes get an ID).
|
||||
pub struct TypedAstId<N: HasAstId> {
|
||||
erased: AstId,
|
||||
_marker: PhantomData<fn() -> N>,
|
||||
}
|
||||
|
||||
impl<N: HasAstId> TypedAstId<N> {
|
||||
/// Upcasts this ID from a more specific node type to a more general node type.
|
||||
pub fn upcast<M: HasAstId>(self) -> TypedAstId<M>
|
||||
where
|
||||
N: Into<M>,
|
||||
{
|
||||
TypedAstId {
|
||||
erased: self.erased,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Copy for TypedAstId<N> {}
|
||||
impl<N: HasAstId> Clone for TypedAstId<N> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> PartialEq for TypedAstId<N> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.erased == other.erased
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Eq for TypedAstId<N> {}
|
||||
impl<N: HasAstId> Hash for TypedAstId<N> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.erased.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Debug for TypedAstId<N> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_tuple("TypedAstId")
|
||||
.field(&self.erased)
|
||||
.field(&type_name::<N>())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AstIds {
|
||||
ids: IndexVec<AstId, NodeKey>,
|
||||
reverse: FxHashMap<NodeKey, AstId>,
|
||||
}
|
||||
|
||||
impl AstIds {
|
||||
// TODO rust analyzer doesn't allocate an ID for every node. It only allocates ids for
|
||||
// nodes with a corresponding HIR element, that is nodes that are definitions.
|
||||
pub fn from_module(module: &ModModule) -> Self {
|
||||
let mut visitor = AstIdsVisitor::default();
|
||||
|
||||
// TODO: visit_module?
|
||||
// Make sure we visit the root
|
||||
visitor.create_id(module);
|
||||
visitor.visit_body(&module.body);
|
||||
|
||||
while let Some(deferred) = visitor.deferred.pop() {
|
||||
match deferred {
|
||||
DeferredNode::FunctionDefinition(def) => {
|
||||
def.visit_preorder(&mut visitor);
|
||||
}
|
||||
DeferredNode::ClassDefinition(def) => def.visit_preorder(&mut visitor),
|
||||
}
|
||||
}
|
||||
|
||||
AstIds {
|
||||
ids: visitor.ids,
|
||||
reverse: visitor.reverse,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the ID to the root node.
|
||||
pub fn root(&self) -> NodeKey {
|
||||
self.ids[AstId::new(0)]
|
||||
}
|
||||
|
||||
/// Returns the [`TypedAstId`] for a node.
|
||||
pub fn ast_id<N: HasAstId>(&self, node: &N) -> TypedAstId<N> {
|
||||
let key = node.syntax_node_key();
|
||||
TypedAstId {
|
||||
erased: self.reverse.get(&key).copied().unwrap(),
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the [`TypedAstId`] for the node identified with the given [`TypedNodeKey`].
|
||||
pub fn ast_id_for_key<N: HasAstId>(&self, node: &TypedNodeKey<N>) -> TypedAstId<N> {
|
||||
let ast_id = self.ast_id_for_node_key(node.inner);
|
||||
|
||||
TypedAstId {
|
||||
erased: ast_id,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the untyped [`AstId`] for the node identified by the given `node` key.
|
||||
pub fn ast_id_for_node_key(&self, node: NodeKey) -> AstId {
|
||||
self.reverse
|
||||
.get(&node)
|
||||
.copied()
|
||||
.expect("Can't find node in AstIds map.")
|
||||
}
|
||||
|
||||
/// Returns the [`TypedNodeKey`] for the node identified by the given [`TypedAstId`].
|
||||
pub fn key<N: HasAstId>(&self, id: TypedAstId<N>) -> TypedNodeKey<N> {
|
||||
let syntax_key = self.ids[id.erased];
|
||||
|
||||
TypedNodeKey::new(syntax_key).unwrap()
|
||||
}
|
||||
|
||||
pub fn node_key<H: HasAstId>(&self, id: TypedAstId<H>) -> NodeKey {
|
||||
self.ids[id.erased]
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for AstIds {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let mut map = f.debug_map();
|
||||
for (key, value) in self.ids.iter_enumerated() {
|
||||
map.entry(&key, &value);
|
||||
}
|
||||
|
||||
map.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for AstIds {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.ids == other.ids
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for AstIds {}
|
||||
|
||||
#[derive(Default)]
|
||||
struct AstIdsVisitor<'a> {
|
||||
ids: IndexVec<AstId, NodeKey>,
|
||||
reverse: FxHashMap<NodeKey, AstId>,
|
||||
deferred: Vec<DeferredNode<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> AstIdsVisitor<'a> {
|
||||
fn create_id<A: HasAstId>(&mut self, node: &A) {
|
||||
let node_key = node.syntax_node_key();
|
||||
|
||||
let id = self.ids.push(node_key);
|
||||
self.reverse.insert(node_key, id);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> PreorderVisitor<'a> for AstIdsVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
match stmt {
|
||||
Stmt::FunctionDef(def) => {
|
||||
self.create_id(def);
|
||||
self.deferred.push(DeferredNode::FunctionDefinition(def));
|
||||
return;
|
||||
}
|
||||
// TODO defer visiting the assignment body, type alias parameters etc?
|
||||
Stmt::ClassDef(def) => {
|
||||
self.create_id(def);
|
||||
self.deferred.push(DeferredNode::ClassDefinition(def));
|
||||
return;
|
||||
}
|
||||
Stmt::Expr(_) => {
|
||||
// Skip
|
||||
return;
|
||||
}
|
||||
Stmt::Return(_) => {}
|
||||
Stmt::Delete(_) => {}
|
||||
Stmt::Assign(assignment) => self.create_id(assignment),
|
||||
Stmt::AugAssign(assignment) => {
|
||||
self.create_id(assignment);
|
||||
}
|
||||
Stmt::AnnAssign(assignment) => self.create_id(assignment),
|
||||
Stmt::TypeAlias(assignment) => self.create_id(assignment),
|
||||
Stmt::For(_) => {}
|
||||
Stmt::While(_) => {}
|
||||
Stmt::If(_) => {}
|
||||
Stmt::With(_) => {}
|
||||
Stmt::Match(_) => {}
|
||||
Stmt::Raise(_) => {}
|
||||
Stmt::Try(_) => {}
|
||||
Stmt::Assert(_) => {}
|
||||
Stmt::Import(import) => self.create_id(import),
|
||||
Stmt::ImportFrom(import_from) => self.create_id(import_from),
|
||||
Stmt::Global(global) => self.create_id(global),
|
||||
Stmt::Nonlocal(non_local) => self.create_id(non_local),
|
||||
Stmt::Pass(_) => {}
|
||||
Stmt::Break(_) => {}
|
||||
Stmt::Continue(_) => {}
|
||||
Stmt::IpyEscapeCommand(_) => {}
|
||||
}
|
||||
|
||||
preorder::walk_stmt(self, stmt);
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, _expr: &'a Expr) {}
|
||||
|
||||
fn visit_parameter(&mut self, parameter: &'a Parameter) {
|
||||
self.create_id(parameter);
|
||||
preorder::walk_parameter(self, parameter);
|
||||
}
|
||||
|
||||
fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler) {
|
||||
match except_handler {
|
||||
ExceptHandler::ExceptHandler(except_handler) => {
|
||||
self.create_id(except_handler);
|
||||
}
|
||||
}
|
||||
|
||||
preorder::walk_except_handler(self, except_handler);
|
||||
}
|
||||
|
||||
fn visit_with_item(&mut self, with_item: &'a WithItem) {
|
||||
self.create_id(with_item);
|
||||
preorder::walk_with_item(self, with_item);
|
||||
}
|
||||
|
||||
fn visit_match_case(&mut self, match_case: &'a MatchCase) {
|
||||
self.create_id(match_case);
|
||||
preorder::walk_match_case(self, match_case);
|
||||
}
|
||||
|
||||
fn visit_type_param(&mut self, type_param: &'a TypeParam) {
|
||||
self.create_id(type_param);
|
||||
}
|
||||
}
|
||||
|
||||
enum DeferredNode<'a> {
|
||||
FunctionDefinition(&'a StmtFunctionDef),
|
||||
ClassDefinition(&'a StmtClassDef),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub struct TypedNodeKey<N: AstNode> {
|
||||
/// The type erased node key.
|
||||
inner: NodeKey,
|
||||
_marker: PhantomData<fn() -> N>,
|
||||
}
|
||||
|
||||
impl<N: AstNode> TypedNodeKey<N> {
|
||||
pub fn from_node(node: &N) -> Self {
|
||||
let inner = NodeKey {
|
||||
kind: node.as_any_node_ref().kind(),
|
||||
range: node.range(),
|
||||
};
|
||||
Self {
|
||||
inner,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new(node_key: NodeKey) -> Option<Self> {
|
||||
N::can_cast(node_key.kind).then_some(TypedNodeKey {
|
||||
inner: node_key,
|
||||
_marker: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<N::Ref<'a>> {
|
||||
let node_ref = self.inner.resolve(root)?;
|
||||
|
||||
Some(N::cast_ref(node_ref).unwrap())
|
||||
}
|
||||
|
||||
pub fn resolve_unwrap<'a>(&self, root: AnyNodeRef<'a>) -> N::Ref<'a> {
|
||||
self.resolve(root).expect("node should resolve")
|
||||
}
|
||||
|
||||
pub fn erased(&self) -> &NodeKey {
|
||||
&self.inner
|
||||
}
|
||||
}
|
||||
|
||||
struct FindNodeKeyVisitor<'a> {
|
||||
key: NodeKey,
|
||||
result: Option<AnyNodeRef<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> PreorderVisitor<'a> for FindNodeKeyVisitor<'a> {
|
||||
fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal {
|
||||
if self.result.is_some() {
|
||||
return TraversalSignal::Skip;
|
||||
}
|
||||
|
||||
if node.range() == self.key.range && node.kind() == self.key.kind {
|
||||
self.result = Some(node);
|
||||
TraversalSignal::Skip
|
||||
} else if node.range().contains_range(self.key.range) {
|
||||
TraversalSignal::Traverse
|
||||
} else {
|
||||
TraversalSignal::Skip
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_body(&mut self, body: &'a [Stmt]) {
|
||||
// TODO it would be more efficient to use binary search instead of linear
|
||||
for stmt in body {
|
||||
if stmt.range().start() > self.key.range.end() {
|
||||
break;
|
||||
}
|
||||
|
||||
self.visit_stmt(stmt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO an alternative to this is to have a `NodeId` on each node (in increasing order depending on the position).
|
||||
// This would allow to reduce the size of this to a u32.
|
||||
// What would be nice if we could use an `Arc::weak_ref` here but that only works if we use
|
||||
// `Arc` internally
|
||||
// TODO: Implement the logic to resolve a node, given a db (and the correct file).
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub struct NodeKey {
|
||||
kind: NodeKind,
|
||||
range: TextRange,
|
||||
}
|
||||
|
||||
impl NodeKey {
|
||||
pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<AnyNodeRef<'a>> {
|
||||
// We need to do a binary search here. Only traverse into a node if the range is withint the node
|
||||
let mut visitor = FindNodeKeyVisitor {
|
||||
key: *self,
|
||||
result: None,
|
||||
};
|
||||
|
||||
if visitor.enter_node(root) == TraversalSignal::Traverse {
|
||||
root.visit_preorder(&mut visitor);
|
||||
}
|
||||
|
||||
visitor.result
|
||||
}
|
||||
}
|
||||
|
||||
/// Marker trait implemented by AST nodes for which we extract the `AstId`.
|
||||
pub trait HasAstId: AstNode {
|
||||
fn node_key(&self) -> TypedNodeKey<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
TypedNodeKey {
|
||||
inner: self.syntax_node_key(),
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
fn syntax_node_key(&self) -> NodeKey {
|
||||
NodeKey {
|
||||
kind: self.as_any_node_ref().kind(),
|
||||
range: self.range(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasAstId for StmtFunctionDef {}
|
||||
impl HasAstId for StmtClassDef {}
|
||||
impl HasAstId for StmtAnnAssign {}
|
||||
impl HasAstId for StmtAugAssign {}
|
||||
impl HasAstId for StmtAssign {}
|
||||
impl HasAstId for StmtTypeAlias {}
|
||||
|
||||
impl HasAstId for ModModule {}
|
||||
|
||||
impl HasAstId for StmtImport {}
|
||||
|
||||
impl HasAstId for StmtImportFrom {}
|
||||
|
||||
impl HasAstId for Parameter {}
|
||||
|
||||
impl HasAstId for TypeParam {}
|
||||
impl HasAstId for Stmt {}
|
||||
impl HasAstId for TypeParamTypeVar {}
|
||||
impl HasAstId for TypeParamTypeVarTuple {}
|
||||
impl HasAstId for TypeParamParamSpec {}
|
||||
impl HasAstId for StmtGlobal {}
|
||||
impl HasAstId for StmtNonlocal {}
|
||||
|
||||
impl HasAstId for ExceptHandlerExceptHandler {}
|
||||
impl HasAstId for WithItem {}
|
||||
impl HasAstId for MatchCase {}
|
||||
@@ -1,165 +0,0 @@
|
||||
use std::fmt::Formatter;
|
||||
use std::hash::Hash;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
|
||||
use crate::db::QueryResult;
|
||||
use dashmap::mapref::entry::Entry;
|
||||
|
||||
use crate::FxDashMap;
|
||||
|
||||
/// Simple key value cache that locks on a per-key level.
|
||||
pub struct KeyValueCache<K, V> {
|
||||
map: FxDashMap<K, V>,
|
||||
statistics: CacheStatistics,
|
||||
}
|
||||
|
||||
impl<K, V> KeyValueCache<K, V>
|
||||
where
|
||||
K: Eq + Hash + Clone,
|
||||
V: Clone,
|
||||
{
|
||||
pub fn try_get(&self, key: &K) -> Option<V> {
|
||||
if let Some(existing) = self.map.get(key) {
|
||||
self.statistics.hit();
|
||||
Some(existing.clone())
|
||||
} else {
|
||||
self.statistics.miss();
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get<F>(&self, key: &K, compute: F) -> QueryResult<V>
|
||||
where
|
||||
F: FnOnce(&K) -> QueryResult<V>,
|
||||
{
|
||||
Ok(match self.map.entry(key.clone()) {
|
||||
Entry::Occupied(cached) => {
|
||||
self.statistics.hit();
|
||||
|
||||
cached.get().clone()
|
||||
}
|
||||
Entry::Vacant(vacant) => {
|
||||
self.statistics.miss();
|
||||
|
||||
let value = compute(key)?;
|
||||
vacant.insert(value.clone());
|
||||
value
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn set(&mut self, key: K, value: V) {
|
||||
self.map.insert(key, value);
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, key: &K) -> Option<V> {
|
||||
self.map.remove(key).map(|(_, value)| value)
|
||||
}
|
||||
|
||||
pub fn clear(&mut self) {
|
||||
self.map.clear();
|
||||
self.map.shrink_to_fit();
|
||||
}
|
||||
|
||||
pub fn statistics(&self) -> Option<Statistics> {
|
||||
self.statistics.to_statistics()
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> Default for KeyValueCache<K, V>
|
||||
where
|
||||
K: Eq + Hash,
|
||||
V: Clone,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
map: FxDashMap::default(),
|
||||
statistics: CacheStatistics::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> std::fmt::Debug for KeyValueCache<K, V>
|
||||
where
|
||||
K: std::fmt::Debug + Eq + Hash,
|
||||
V: std::fmt::Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let mut debug = f.debug_map();
|
||||
|
||||
for entry in &self.map {
|
||||
debug.entry(&entry.key(), &entry.value());
|
||||
}
|
||||
|
||||
debug.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct Statistics {
|
||||
pub hits: usize,
|
||||
pub misses: usize,
|
||||
}
|
||||
|
||||
impl Statistics {
|
||||
#[allow(clippy::cast_precision_loss)]
|
||||
pub fn hit_rate(&self) -> Option<f64> {
|
||||
if self.hits + self.misses == 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some((self.hits as f64) / (self.hits + self.misses) as f64)
|
||||
}
|
||||
}
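// Illustrative sketch: the hit rate is hits / (hits + misses); with no recorded accesses it is
// undefined and `None` is returned.
fn hit_rate_example() {
    assert_eq!(Statistics { hits: 3, misses: 1 }.hit_rate(), Some(0.75));
    assert_eq!(Statistics { hits: 0, misses: 0 }.hit_rate(), None);
}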
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
pub type CacheStatistics = DebugStatistics;
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
pub type CacheStatistics = ReleaseStatistics;
|
||||
|
||||
pub trait StatisticsRecorder {
|
||||
fn hit(&self);
|
||||
fn miss(&self);
|
||||
fn to_statistics(&self) -> Option<Statistics>;
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct DebugStatistics {
|
||||
hits: AtomicUsize,
|
||||
misses: AtomicUsize,
|
||||
}
|
||||
|
||||
impl StatisticsRecorder for DebugStatistics {
|
||||
// TODO figure out appropriate Ordering
|
||||
fn hit(&self) {
|
||||
self.hits.fetch_add(1, Ordering::SeqCst);
|
||||
}
|
||||
|
||||
fn miss(&self) {
|
||||
self.misses.fetch_add(1, Ordering::SeqCst);
|
||||
}
|
||||
|
||||
fn to_statistics(&self) -> Option<Statistics> {
|
||||
let hits = self.hits.load(Ordering::SeqCst);
|
||||
let misses = self.misses.load(Ordering::SeqCst);
|
||||
|
||||
Some(Statistics { hits, misses })
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct ReleaseStatistics;
|
||||
|
||||
impl StatisticsRecorder for ReleaseStatistics {
|
||||
#[inline]
|
||||
fn hit(&self) {}
|
||||
|
||||
#[inline]
|
||||
fn miss(&self) {}
|
||||
|
||||
#[inline]
|
||||
fn to_statistics(&self) -> Option<Statistics> {
|
||||
None
|
||||
}
|
||||
}
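// Illustrative usage sketch: `get` either returns the cached value or computes and caches it,
// updating the hit/miss statistics along the way (statistics are only tracked in debug builds).
// The key and value types here are arbitrary examples.
fn cached_lookup(cache: &KeyValueCache<u32, String>, key: u32) -> QueryResult<String> {
    cache.get(&key, |key| {
        // The compute closure returns a `QueryResult` itself, so cancellation propagates
        // out of the cache lookup via `?` at the call site.
        Ok(format!("value for {key}"))
    })
}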
|
||||
@@ -1,42 +0,0 @@
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct CancellationTokenSource {
|
||||
signal: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl CancellationTokenSource {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
signal: Arc::new(AtomicBool::new(false)),
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "trace", skip_all)]
|
||||
pub fn cancel(&self) {
|
||||
self.signal.store(true, std::sync::atomic::Ordering::SeqCst);
|
||||
}
|
||||
|
||||
pub fn is_cancelled(&self) -> bool {
|
||||
self.signal.load(std::sync::atomic::Ordering::SeqCst)
|
||||
}
|
||||
|
||||
pub fn token(&self) -> CancellationToken {
|
||||
CancellationToken {
|
||||
signal: self.signal.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct CancellationToken {
|
||||
signal: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl CancellationToken {
|
||||
/// Returns `true` if cancellation has been requested.
|
||||
pub fn is_cancelled(&self) -> bool {
|
||||
self.signal.load(std::sync::atomic::Ordering::SeqCst)
|
||||
}
|
||||
}
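// Illustrative sketch: the source side requests cancellation; worker threads poll their token
// and exit at the next opportunity.
fn cancellation_example() {
    let source = CancellationTokenSource::new();
    let token = source.token();

    let worker = std::thread::spawn(move || {
        while !token.is_cancelled() {
            // ... perform one unit of work, then check the token again ...
            std::thread::yield_now();
        }
    });

    // Signals all tokens handed out by this source.
    source.cancel();
    worker.join().unwrap();
}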
|
||||
@@ -1,248 +0,0 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use jars::{HasJar, HasJars};
|
||||
pub use query::{QueryError, QueryResult};
|
||||
pub use runtime::DbRuntime;
|
||||
pub use storage::JarsStorage;
|
||||
|
||||
use crate::files::FileId;
|
||||
use crate::lint::{LintSemanticStorage, LintSyntaxStorage};
|
||||
use crate::module::ModuleResolver;
|
||||
use crate::parse::ParsedStorage;
|
||||
use crate::source::SourceStorage;
|
||||
use crate::symbols::SymbolTablesStorage;
|
||||
use crate::types::TypeStore;
|
||||
|
||||
mod jars;
|
||||
mod query;
|
||||
mod runtime;
|
||||
mod storage;
|
||||
|
||||
pub trait Database {
|
||||
/// Returns a reference to the runtime of the current worker.
|
||||
fn runtime(&self) -> &DbRuntime;
|
||||
|
||||
/// Returns a mutable reference to the runtime. Only one worker can hold a mutable reference to the runtime.
|
||||
fn runtime_mut(&mut self) -> &mut DbRuntime;
|
||||
|
||||
/// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
|
||||
fn cancelled(&self) -> QueryResult<()> {
|
||||
self.runtime().cancelled()
|
||||
}
|
||||
|
||||
/// Returns `true` if the queries have been cancelled.
|
||||
fn is_cancelled(&self) -> bool {
|
||||
self.runtime().is_cancelled()
|
||||
}
|
||||
}
|
||||
|
||||
/// Database that supports running queries from multiple threads.
|
||||
pub trait ParallelDatabase: Database + Send {
|
||||
/// Creates a snapshot of the database state that can be used to query the database in another thread.
|
||||
///
|
||||
/// The snapshot is a read-only view of the database but query results are shared between threads.
|
||||
/// All queries will be automatically cancelled when applying any mutations (calling [`HasJars::jars_mut`])
|
||||
/// to the database (not the snapshot, because they're readonly).
|
||||
///
|
||||
/// ## Creating a snapshot
|
||||
///
|
||||
/// Creating a snapshot of the database's jars is cheap but creating a snapshot of
|
||||
/// other state stored on the database might require deep-cloning data. That's why you should
|
||||
/// avoid creating snapshots in a hot function (e.g. don't create a snapshot for each file, instead
|
||||
/// create a snapshot when scheduling the check of an entire program).
|
||||
///
|
||||
/// ## Salsa compatibility
|
||||
/// Salsa prohibits creating a snapshot while running a local query (it's fine if other workers run a query) [[source](https://github.com/salsa-rs/salsa/issues/80)].
|
||||
/// We should avoid creating snapshots while running a query because we might want to adopt Salsa in the future (if we can figure out persistent caching).
|
||||
/// Unfortunately, the infrastructure doesn't provide an automated way of knowing when a query is run, that's
|
||||
/// why we have to "enforce" this constraint manually.
|
||||
#[must_use]
|
||||
fn snapshot(&self) -> Snapshot<Self>;
|
||||
}
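// Illustrative sketch of the guidance above: create one snapshot per worker (not per file) when
// fanning checks out to threads. `check_file` is a placeholder for whatever per-file queries the
// scheduler actually runs.
fn check_files_in_parallel<Db: ParallelDatabase>(db: &Db, files: &[FileId]) {
    std::thread::scope(|scope| {
        for chunk in files.chunks(64) {
            let snapshot = db.snapshot();
            scope.spawn(move || {
                for file in chunk {
                    // Stop early if the main database was mutated in the meantime.
                    if snapshot.is_cancelled() {
                        return;
                    }
                    // check_file(&*snapshot, *file);
                    let _ = file;
                }
            });
        }
    });
}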
|
||||
|
||||
pub trait DbWithJar<Jar>: Database + HasJar<Jar> {}
|
||||
|
||||
/// Readonly snapshot of a database.
|
||||
///
|
||||
/// ## Deadlocks
|
||||
/// A snapshot should always be dropped as soon as it is no longer necessary to run queries.
|
||||
/// Storing the snapshot without running a query or periodically checking if cancellation was requested
|
||||
/// can lead to deadlocks because mutating the [`Database`] requires cancelling all pending queries
|
||||
/// and waiting for all [`Snapshot`]s to be dropped.
|
||||
#[derive(Debug)]
|
||||
pub struct Snapshot<DB: ?Sized>
|
||||
where
|
||||
DB: ParallelDatabase,
|
||||
{
|
||||
db: DB,
|
||||
}
|
||||
|
||||
impl<DB> Snapshot<DB>
|
||||
where
|
||||
DB: ParallelDatabase,
|
||||
{
|
||||
pub fn new(db: DB) -> Self {
|
||||
Snapshot { db }
|
||||
}
|
||||
}
|
||||
|
||||
impl<DB> std::ops::Deref for Snapshot<DB>
|
||||
where
|
||||
DB: ParallelDatabase,
|
||||
{
|
||||
type Target = DB;
|
||||
|
||||
fn deref(&self) -> &DB {
|
||||
&self.db
|
||||
}
|
||||
}
|
||||
|
||||
pub trait Upcast<T: ?Sized> {
|
||||
fn upcast(&self) -> &T;
|
||||
}
|
||||
|
||||
// Red knot specific databases code.
|
||||
|
||||
pub trait SourceDb: DbWithJar<SourceJar> {
|
||||
// queries
|
||||
fn file_id(&self, path: &std::path::Path) -> FileId;
|
||||
|
||||
fn file_path(&self, file_id: FileId) -> Arc<std::path::Path>;
|
||||
}
|
||||
|
||||
pub trait SemanticDb: SourceDb + DbWithJar<SemanticJar> + Upcast<dyn SourceDb> {}
|
||||
|
||||
pub trait LintDb: SemanticDb + DbWithJar<LintJar> + Upcast<dyn SemanticDb> {}
|
||||
|
||||
pub trait Db: LintDb + Upcast<dyn LintDb> {}
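// Illustrative sketch: `Upcast` lets a query written against a higher-level database trait call
// lower-level queries without knowing the concrete database type.
fn file_id_via_upcast(db: &dyn SemanticDb, path: &std::path::Path) -> FileId {
    let source_db: &dyn SourceDb = db.upcast();
    source_db.file_id(path)
}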
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SourceJar {
|
||||
pub sources: SourceStorage,
|
||||
pub parsed: ParsedStorage,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SemanticJar {
|
||||
pub module_resolver: ModuleResolver,
|
||||
pub symbol_tables: SymbolTablesStorage,
|
||||
pub type_store: TypeStore,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct LintJar {
|
||||
pub lint_syntax: LintSyntaxStorage,
|
||||
pub lint_semantic: LintSemanticStorage,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::db::{
|
||||
Database, DbRuntime, DbWithJar, HasJar, HasJars, JarsStorage, LintDb, LintJar, QueryResult,
|
||||
SourceDb, SourceJar, Upcast,
|
||||
};
|
||||
use crate::files::{FileId, Files};
|
||||
|
||||
use super::{SemanticDb, SemanticJar};
|
||||
|
||||
// This can be a partial database used in a single crate for testing.
|
||||
// It would hold less data than the full database.
|
||||
#[derive(Debug, Default)]
|
||||
pub(crate) struct TestDb {
|
||||
files: Files,
|
||||
jars: JarsStorage<Self>,
|
||||
}
|
||||
|
||||
impl HasJar<SourceJar> for TestDb {
|
||||
fn jar(&self) -> QueryResult<&SourceJar> {
|
||||
Ok(&self.jars()?.0)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut SourceJar {
|
||||
&mut self.jars_mut().0
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<SemanticJar> for TestDb {
|
||||
fn jar(&self) -> QueryResult<&SemanticJar> {
|
||||
Ok(&self.jars()?.1)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut SemanticJar {
|
||||
&mut self.jars_mut().1
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<LintJar> for TestDb {
|
||||
fn jar(&self) -> QueryResult<&LintJar> {
|
||||
Ok(&self.jars()?.2)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut LintJar {
|
||||
&mut self.jars_mut().2
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceDb for TestDb {
|
||||
fn file_id(&self, path: &Path) -> FileId {
|
||||
self.files.intern(path)
|
||||
}
|
||||
|
||||
fn file_path(&self, file_id: FileId) -> Arc<Path> {
|
||||
self.files.path(file_id)
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithJar<SourceJar> for TestDb {}
|
||||
|
||||
impl Upcast<dyn SourceDb> for TestDb {
|
||||
fn upcast(&self) -> &(dyn SourceDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl SemanticDb for TestDb {}
|
||||
|
||||
impl DbWithJar<SemanticJar> for TestDb {}
|
||||
|
||||
impl Upcast<dyn SemanticDb> for TestDb {
|
||||
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl LintDb for TestDb {}
|
||||
|
||||
impl Upcast<dyn LintDb> for TestDb {
|
||||
fn upcast(&self) -> &(dyn LintDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithJar<LintJar> for TestDb {}
|
||||
|
||||
impl HasJars for TestDb {
|
||||
type Jars = (SourceJar, SemanticJar, LintJar);
|
||||
|
||||
fn jars(&self) -> QueryResult<&Self::Jars> {
|
||||
self.jars.jars()
|
||||
}
|
||||
|
||||
fn jars_mut(&mut self) -> &mut Self::Jars {
|
||||
self.jars.jars_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl Database for TestDb {
|
||||
fn runtime(&self) -> &DbRuntime {
|
||||
self.jars.runtime()
|
||||
}
|
||||
|
||||
fn runtime_mut(&mut self) -> &mut DbRuntime {
|
||||
self.jars.runtime_mut()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,37 +0,0 @@
|
||||
use crate::db::query::QueryResult;
|
||||
|
||||
/// Gives access to a specific jar in the database.
|
||||
///
|
||||
/// Nope, the terminology isn't borrowed from Java but from Salsa <https://salsa-rs.github.io/salsa/>,
|
||||
/// which is an analogy to storing the salsa in different jars.
|
||||
///
|
||||
/// The basic idea is that each crate can define its own jar and the jars can be combined into a single
|
||||
/// database in the top level crate. Each crate also defines its own `Database` trait. The combination of
|
||||
/// `Database` trait and the jar allows writing queries in isolation without having to know how they get composed at the upper levels.
|
||||
///
|
||||
/// Salsa further defines a `HasIngredient` trait which slices the jar to a specific storage (e.g. a specific cache).
|
||||
/// We don't need this just yet because we write our queries by hand. We may want a similar trait if we decide
|
||||
/// to use a macro to generate the queries.
|
||||
pub trait HasJar<T> {
|
||||
/// Gives a read-only reference to the jar.
|
||||
fn jar(&self) -> QueryResult<&T>;
|
||||
|
||||
/// Gives a mutable reference to the jar.
|
||||
fn jar_mut(&mut self) -> &mut T;
|
||||
}
|
||||
|
||||
/// Gives access to the jars in a database.
|
||||
pub trait HasJars {
|
||||
/// A type storing the jars.
|
||||
///
|
||||
/// Most commonly, this is a tuple where each jar is a tuple element.
|
||||
type Jars: Default;
|
||||
|
||||
/// Gives access to the underlying jars but tests if the queries have been cancelled.
|
||||
///
|
||||
/// Returns `Err(QueryError::Cancelled)` if the queries have been cancelled.
|
||||
fn jars(&self) -> QueryResult<&Self::Jars>;
|
||||
|
||||
/// Gives mutable access to the underlying jars.
|
||||
fn jars_mut(&mut self) -> &mut Self::Jars;
|
||||
}
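// Illustrative sketch: a crate-local query only needs a `HasJar<SourceJar>` bound to reach its own
// storage; how that jar is composed into the final database stays opaque.
// (`SourceJar` and its fields are defined in the parent `db` module.)
fn read_sources_jar<Db>(db: &Db) -> QueryResult<()>
where
    Db: HasJar<crate::db::SourceJar>,
{
    // Fails with `QueryError::Cancelled` if the pending queries were cancelled.
    let source_jar = db.jar()?;
    let _sources = &source_jar.sources;
    Ok(())
}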
|
||||
@@ -1,20 +0,0 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
/// Reason why a db query operation failed.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum QueryError {
|
||||
/// The query was cancelled, either because the DB was mutated or because the host requested cancellation (e.g. on a file change or when pressing CTRL+C).
|
||||
Cancelled,
|
||||
}
|
||||
|
||||
impl Display for QueryError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
QueryError::Cancelled => f.write_str("query was cancelled"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for QueryError {}
|
||||
|
||||
pub type QueryResult<T> = Result<T, QueryError>;
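// Illustrative sketch: long-running queries return `QueryResult` and periodically check for
// cancellation, so `QueryError::Cancelled` can be propagated with `?`.
// (`Database` lives in the parent `db` module.)
fn sum_up_to(db: &dyn crate::db::Database, n: usize) -> QueryResult<usize> {
    let mut total = 0;
    for i in 0..n {
        // Bails out as soon as the host mutates the database or requests cancellation.
        db.cancelled()?;
        total += i;
    }
    Ok(total)
}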
|
||||
@@ -1,41 +0,0 @@
|
||||
use crate::cancellation::CancellationTokenSource;
|
||||
use crate::db::{QueryError, QueryResult};
|
||||
|
||||
/// Holds the jar agnostic state of the database.
|
||||
#[derive(Debug, Default)]
|
||||
pub struct DbRuntime {
|
||||
/// The cancellation token source used to signal other workers that the queries should be aborted and
|
||||
/// exit at the next possible point.
|
||||
cancellation_token: CancellationTokenSource,
|
||||
}
|
||||
|
||||
impl DbRuntime {
|
||||
pub(super) fn snapshot(&self) -> Self {
|
||||
Self {
|
||||
cancellation_token: self.cancellation_token.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Cancels the pending queries of other workers. The current worker cannot have any pending
|
||||
/// queries because we're holding a mutable reference to the runtime.
|
||||
pub(super) fn cancel_other_workers(&mut self) {
|
||||
self.cancellation_token.cancel();
|
||||
// Set a new cancellation token so that we're in a non-cancelled state again when running the next
|
||||
// query.
|
||||
self.cancellation_token = CancellationTokenSource::default();
|
||||
}
|
||||
|
||||
/// Returns `Ok` if the queries have not been cancelled and `Err(QueryError::Cancelled)` otherwise.
|
||||
pub(super) fn cancelled(&self) -> QueryResult<()> {
|
||||
if self.cancellation_token.is_cancelled() {
|
||||
Err(QueryError::Cancelled)
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the queries have been cancelled.
|
||||
pub(super) fn is_cancelled(&self) -> bool {
|
||||
self.cancellation_token.is_cancelled()
|
||||
}
|
||||
}
|
||||
@@ -1,117 +0,0 @@
|
||||
use std::fmt::Formatter;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crossbeam::sync::WaitGroup;
|
||||
|
||||
use crate::db::query::QueryResult;
|
||||
use crate::db::runtime::DbRuntime;
|
||||
use crate::db::{HasJars, ParallelDatabase};
|
||||
|
||||
/// Stores the jars of a database and the state for each worker.
|
||||
///
|
||||
/// Today, all state is shared across all workers, but it may be desired to store data per worker in the future.
|
||||
pub struct JarsStorage<T>
|
||||
where
|
||||
T: HasJars + Sized,
|
||||
{
|
||||
// It's important that `jars_wait_group` is declared after `jars` to ensure that `jars` is dropped first.
|
||||
// See https://doc.rust-lang.org/reference/destructors.html
|
||||
/// Stores the jars of the database.
|
||||
jars: Arc<T::Jars>,
|
||||
|
||||
/// Used to count the references to `jars`. Allows implementing `jars_mut` without having to clone `jars`.
|
||||
jars_wait_group: WaitGroup,
|
||||
|
||||
/// The jar-agnostic state.
|
||||
runtime: DbRuntime,
|
||||
}
|
||||
|
||||
impl<Db> JarsStorage<Db>
|
||||
where
|
||||
Db: HasJars,
|
||||
{
|
||||
pub(super) fn new() -> Self {
|
||||
Self {
|
||||
jars: Arc::new(Db::Jars::default()),
|
||||
jars_wait_group: WaitGroup::default(),
|
||||
runtime: DbRuntime::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a snapshot of the jars.
|
||||
///
|
||||
/// Creating the snapshot is cheap because it doesn't clone the jars, it only increments a ref counter.
|
||||
#[must_use]
|
||||
pub fn snapshot(&self) -> JarsStorage<Db>
|
||||
where
|
||||
Db: ParallelDatabase,
|
||||
{
|
||||
Self {
|
||||
jars: self.jars.clone(),
|
||||
jars_wait_group: self.jars_wait_group.clone(),
|
||||
runtime: self.runtime.snapshot(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn jars(&self) -> QueryResult<&Db::Jars> {
|
||||
self.runtime.cancelled()?;
|
||||
Ok(&self.jars)
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the jars without cloning their content.
|
||||
///
|
||||
/// The method cancels any pending queries of other workers and waits for them to complete so that
|
||||
/// this instance is the only instance holding a reference to the jars.
|
||||
pub(crate) fn jars_mut(&mut self) -> &mut Db::Jars {
|
||||
// We have a mutable ref here, so no more workers can be spawned between calling this function and taking the mut ref below.
|
||||
self.cancel_other_workers();
|
||||
|
||||
// Now all other references to `self.jars` should have been released. We can now safely return a mutable reference
|
||||
// to the Arc's content.
|
||||
let jars =
|
||||
Arc::get_mut(&mut self.jars).expect("All references to jars should have been released");
|
||||
|
||||
jars
|
||||
}
|
||||
|
||||
pub(crate) fn runtime(&self) -> &DbRuntime {
|
||||
&self.runtime
|
||||
}
|
||||
|
||||
pub(crate) fn runtime_mut(&mut self) -> &mut DbRuntime {
|
||||
// Note: This method may need to use a similar trick to `jars_mut` if `DbRuntime` is ever to store data that is shared between workers.
|
||||
&mut self.runtime
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "trace", skip(self))]
|
||||
fn cancel_other_workers(&mut self) {
|
||||
self.runtime.cancel_other_workers();
|
||||
|
||||
// Wait for all other workers to complete.
|
||||
let existing_wait = std::mem::take(&mut self.jars_wait_group);
|
||||
existing_wait.wait();
|
||||
}
|
||||
}
|
||||
|
||||
impl<Db> Default for JarsStorage<Db>
|
||||
where
|
||||
Db: HasJars,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> std::fmt::Debug for JarsStorage<T>
|
||||
where
|
||||
T: HasJars,
|
||||
<T as HasJars>::Jars: std::fmt::Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("SharedStorage")
|
||||
.field("jars", &self.jars)
|
||||
.field("jars_wait_group", &self.jars_wait_group)
|
||||
.field("runtime", &self.runtime)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
@@ -1,180 +0,0 @@
|
||||
use std::fmt::{Debug, Formatter};
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use hashbrown::hash_map::RawEntryMut;
|
||||
use parking_lot::RwLock;
|
||||
use rustc_hash::FxHasher;
|
||||
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
|
||||
type Map<K, V> = hashbrown::HashMap<K, V, ()>;
|
||||
|
||||
#[newtype_index]
|
||||
pub struct FileId;
|
||||
|
||||
// TODO we'll need a higher level virtual file system abstraction that allows testing if a file exists
|
||||
// or retrieving its content (ideally lazily and in a way that the memory can be retained later)
|
||||
// I suspect that we'll end up with a FileSystem trait and our own Path abstraction.
|
||||
#[derive(Default)]
|
||||
pub struct Files {
|
||||
inner: Arc<RwLock<FilesInner>>,
|
||||
}
|
||||
|
||||
impl Files {
|
||||
#[tracing::instrument(level = "debug", skip(self))]
|
||||
pub fn intern(&self, path: &Path) -> FileId {
|
||||
self.inner.write().intern(path)
|
||||
}
|
||||
|
||||
pub fn try_get(&self, path: &Path) -> Option<FileId> {
|
||||
self.inner.read().try_get(path)
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(self))]
|
||||
pub fn path(&self, id: FileId) -> Arc<Path> {
|
||||
self.inner.read().path(id)
|
||||
}
|
||||
|
||||
/// Snapshots files for a new database snapshot.
|
||||
///
|
||||
/// This method should not be used outside a database snapshot.
|
||||
#[must_use]
|
||||
pub fn snapshot(&self) -> Files {
|
||||
Files {
|
||||
inner: self.inner.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Files {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let files = self.inner.read();
|
||||
let mut debug = f.debug_map();
|
||||
for item in files.iter() {
|
||||
debug.entry(&item.0, &item.1);
|
||||
}
|
||||
|
||||
debug.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Files {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.inner.read().eq(&other.inner.read())
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Files {}
|
||||
|
||||
#[derive(Default)]
|
||||
struct FilesInner {
|
||||
by_path: Map<FileId, ()>,
|
||||
// TODO should we use a map here to reclaim the space for removed files?
|
||||
// TODO I think we should use our own path abstraction here to avoid having to normalize paths
|
||||
// and dealing with non-UTF-8 paths everywhere.
|
||||
by_id: IndexVec<FileId, Arc<Path>>,
|
||||
}
|
||||
|
||||
impl FilesInner {
|
||||
/// Inserts the path and returns a new id for it, or returns the existing id if the path was interned before.
|
||||
// TODO should this accept Path or PathBuf?
|
||||
pub(crate) fn intern(&mut self, path: &Path) -> FileId {
|
||||
let hash = FilesInner::hash_path(path);
|
||||
|
||||
let entry = self
|
||||
.by_path
|
||||
.raw_entry_mut()
|
||||
.from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path);
|
||||
|
||||
match entry {
|
||||
RawEntryMut::Occupied(entry) => *entry.key(),
|
||||
RawEntryMut::Vacant(entry) => {
|
||||
let id = self.by_id.push(Arc::from(path));
|
||||
entry.insert_with_hasher(hash, id, (), |file| {
|
||||
FilesInner::hash_path(&self.by_id[*file])
|
||||
});
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn hash_path(path: &Path) -> u64 {
|
||||
let mut hasher = FxHasher::default();
|
||||
path.hash(&mut hasher);
|
||||
hasher.finish()
|
||||
}
|
||||
|
||||
pub(crate) fn try_get(&self, path: &Path) -> Option<FileId> {
|
||||
let mut hasher = FxHasher::default();
|
||||
path.hash(&mut hasher);
|
||||
let hash = hasher.finish();
|
||||
|
||||
Some(
|
||||
*self
|
||||
.by_path
|
||||
.raw_entry()
|
||||
.from_hash(hash, |existing_file| &*self.by_id[*existing_file] == path)?
|
||||
.0,
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns the path for the file with the given id.
|
||||
pub(crate) fn path(&self, id: FileId) -> Arc<Path> {
|
||||
self.by_id[id].clone()
|
||||
}
|
||||
|
||||
pub(crate) fn iter(&self) -> impl Iterator<Item = (FileId, Arc<Path>)> + '_ {
|
||||
self.by_path.keys().map(|id| (*id, self.by_id[*id].clone()))
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for FilesInner {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.by_id == other.by_id
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for FilesInner {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[test]
|
||||
fn insert_path_twice_same_id() {
|
||||
let files = Files::default();
|
||||
let path = PathBuf::from("foo/bar");
|
||||
let id1 = files.intern(&path);
|
||||
let id2 = files.intern(&path);
|
||||
assert_eq!(id1, id2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_different_paths_different_ids() {
|
||||
let files = Files::default();
|
||||
let path1 = PathBuf::from("foo/bar");
|
||||
let path2 = PathBuf::from("foo/bar/baz");
|
||||
let id1 = files.intern(&path1);
|
||||
let id2 = files.intern(&path2);
|
||||
assert_ne!(id1, id2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn four_files() {
|
||||
let files = Files::default();
|
||||
let foo_path = PathBuf::from("foo");
|
||||
let foo_id = files.intern(&foo_path);
|
||||
let bar_path = PathBuf::from("bar");
|
||||
files.intern(&bar_path);
|
||||
let baz_path = PathBuf::from("baz");
|
||||
files.intern(&baz_path);
|
||||
let qux_path = PathBuf::from("qux");
|
||||
files.intern(&qux_path);
|
||||
|
||||
let foo_id_2 = files.try_get(&foo_path).expect("foo_path to be found");
|
||||
assert_eq!(foo_id_2, foo_id);
|
||||
}
|
||||
}
|
||||
@@ -1,67 +0,0 @@
|
||||
//! Key observations
|
||||
//!
|
||||
//! The HIR (High-Level Intermediate Representation) avoids allocations to a large extent by:
|
||||
//! * Using an arena per node type
|
||||
//! * Using ids and id ranges to reference items.
|
||||
//!
|
||||
//! Using a separate arena per node type has the advantage that the IDs are relatively stable, because
|
||||
//! they only change when a node of the same kind has been added or removed. (What's unclear is if that matters or if
|
||||
//! it still triggers a re-compute because the AST-id in the node has changed).
|
||||
//!
|
||||
//! The HIR does not store all details. It mainly stores the *public* interface. There's a reference
|
||||
//! back to the AST node to get more details.
|
||||
//!
|
||||
//!
|
||||
|
||||
use crate::ast_ids::{HasAstId, TypedAstId};
|
||||
use crate::files::FileId;
|
||||
use std::fmt::Formatter;
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
pub struct HirAstId<N: HasAstId> {
|
||||
file_id: FileId,
|
||||
node_id: TypedAstId<N>,
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Copy for HirAstId<N> {}
|
||||
impl<N: HasAstId> Clone for HirAstId<N> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> PartialEq for HirAstId<N> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.file_id == other.file_id && self.node_id == other.node_id
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Eq for HirAstId<N> {}
|
||||
|
||||
impl<N: HasAstId> std::fmt::Debug for HirAstId<N> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("HirAstId")
|
||||
.field("file_id", &self.file_id)
|
||||
.field("node_id", &self.node_id)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> Hash for HirAstId<N> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.file_id.hash(state);
|
||||
self.node_id.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: HasAstId> HirAstId<N> {
|
||||
pub fn upcast<M: HasAstId>(self) -> HirAstId<M>
|
||||
where
|
||||
N: Into<M>,
|
||||
{
|
||||
HirAstId {
|
||||
file_id: self.file_id,
|
||||
node_id: self.node_id.upcast(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,556 +0,0 @@
|
||||
use std::ops::{Index, Range};
|
||||
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
use ruff_python_ast::visitor::preorder;
|
||||
use ruff_python_ast::visitor::preorder::PreorderVisitor;
|
||||
use ruff_python_ast::{
|
||||
Decorator, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule, Stmt,
|
||||
StmtAnnAssign, StmtAssign, StmtClassDef, StmtFunctionDef, StmtGlobal, StmtImport,
|
||||
StmtImportFrom, StmtNonlocal, StmtTypeAlias, TypeParam, TypeParamParamSpec, TypeParamTypeVar,
|
||||
TypeParamTypeVarTuple, WithItem,
|
||||
};
|
||||
|
||||
use crate::ast_ids::{AstIds, HasAstId};
|
||||
use crate::files::FileId;
|
||||
use crate::hir::HirAstId;
|
||||
use crate::Name;
|
||||
|
||||
#[newtype_index]
|
||||
pub struct FunctionId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Function {
|
||||
ast_id: HirAstId<StmtFunctionDef>,
|
||||
name: Name,
|
||||
parameters: Range<ParameterId>,
|
||||
type_parameters: Range<TypeParameterId>, // TODO: type_parameters, return expression, decorators
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct ParameterId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Parameter {
|
||||
kind: ParameterKind,
|
||||
name: Name,
|
||||
default: Option<()>, // TODO use expression HIR
|
||||
ast_id: HirAstId<ruff_python_ast::Parameter>,
|
||||
}
|
||||
|
||||
// TODO or should `Parameter` be an enum?
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub enum ParameterKind {
|
||||
PositionalOnly,
|
||||
Arguments,
|
||||
Vararg,
|
||||
KeywordOnly,
|
||||
Kwarg,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct ClassId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Class {
|
||||
name: Name,
|
||||
ast_id: HirAstId<StmtClassDef>,
|
||||
// TODO type parameters, inheritance, decorators, members
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct AssignmentId;
|
||||
|
||||
// This can have more than one name...
|
||||
// but that means we can't implement `name()` on `ModuleItem`.
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Assignment {
|
||||
// TODO: Handle multiple names / targets
|
||||
name: Name,
|
||||
ast_id: HirAstId<StmtAssign>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct AnnotatedAssignment {
|
||||
name: Name,
|
||||
ast_id: HirAstId<StmtAnnAssign>,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct AnnotatedAssignmentId;
|
||||
|
||||
#[newtype_index]
|
||||
pub struct TypeAliasId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TypeAlias {
|
||||
name: Name,
|
||||
ast_id: HirAstId<StmtTypeAlias>,
|
||||
parameters: Range<TypeParameterId>,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct TypeParameterId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub enum TypeParameter {
|
||||
TypeVar(TypeParameterTypeVar),
|
||||
ParamSpec(TypeParameterParamSpec),
|
||||
TypeVarTuple(TypeParameterTypeVarTuple),
|
||||
}
|
||||
|
||||
impl TypeParameter {
|
||||
pub fn ast_id(&self) -> HirAstId<TypeParam> {
|
||||
match self {
|
||||
TypeParameter::TypeVar(type_var) => type_var.ast_id.upcast(),
|
||||
TypeParameter::ParamSpec(param_spec) => param_spec.ast_id.upcast(),
|
||||
TypeParameter::TypeVarTuple(type_var_tuple) => type_var_tuple.ast_id.upcast(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TypeParameterTypeVar {
|
||||
name: Name,
|
||||
ast_id: HirAstId<TypeParamTypeVar>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TypeParameterParamSpec {
|
||||
name: Name,
|
||||
ast_id: HirAstId<TypeParamParamSpec>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TypeParameterTypeVarTuple {
|
||||
name: Name,
|
||||
ast_id: HirAstId<TypeParamTypeVarTuple>,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct GlobalId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Global {
|
||||
// TODO track names
|
||||
ast_id: HirAstId<StmtGlobal>,
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub struct NonLocalId;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct NonLocal {
|
||||
// TODO track names
|
||||
ast_id: HirAstId<StmtNonlocal>,
|
||||
}
|
||||
|
||||
pub enum DefinitionId {
|
||||
Function(FunctionId),
|
||||
Parameter(ParameterId),
|
||||
Class(ClassId),
|
||||
Assignment(AssignmentId),
|
||||
AnnotatedAssignment(AnnotatedAssignmentId),
|
||||
Global(GlobalId),
|
||||
NonLocal(NonLocalId),
|
||||
TypeParameter(TypeParameterId),
|
||||
TypeAlias(TypeAlias),
|
||||
}
|
||||
|
||||
pub enum DefinitionItem {
|
||||
Function(Function),
|
||||
Parameter(Parameter),
|
||||
Class(Class),
|
||||
Assignment(Assignment),
|
||||
AnnotatedAssignment(AnnotatedAssignment),
|
||||
Global(Global),
|
||||
NonLocal(NonLocal),
|
||||
TypeParameter(TypeParameter),
|
||||
TypeAlias(TypeAlias),
|
||||
}
|
||||
|
||||
// The closest analog is rust-analyzer's item tree. It only represents "items", which make up the public interface of a module
|
||||
// (it excludes any other statements or expressions). rust-analyzer uses it as the main input to the name resolution
|
||||
// algorithm
|
||||
// > It is the input to the name resolution algorithm, as well as to the queries defined in `adt.rs`,
|
||||
// > `data.rs`, and most things in `attr.rs`.
|
||||
//
|
||||
// > One important purpose of this layer is to provide an "invalidation barrier" for incremental
|
||||
// > computations: when typing inside an item body, the `ItemTree` of the modified file is typically
|
||||
// > unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`).
|
||||
//
|
||||
// I haven't fully figured this out but I think that this composes the "public" interface of a module?
|
||||
// But maybe that's too optimistic.
|
||||
//
|
||||
//
|
||||
#[derive(Debug, Clone, Default, Eq, PartialEq)]
|
||||
pub struct Definitions {
|
||||
functions: IndexVec<FunctionId, Function>,
|
||||
parameters: IndexVec<ParameterId, Parameter>,
|
||||
classes: IndexVec<ClassId, Class>,
|
||||
assignments: IndexVec<AssignmentId, Assignment>,
|
||||
annotated_assignments: IndexVec<AnnotatedAssignmentId, AnnotatedAssignment>,
|
||||
type_aliases: IndexVec<TypeAliasId, TypeAlias>,
|
||||
type_parameters: IndexVec<TypeParameterId, TypeParameter>,
|
||||
globals: IndexVec<GlobalId, Global>,
|
||||
non_locals: IndexVec<NonLocalId, NonLocal>,
|
||||
}
|
||||
|
||||
impl Definitions {
|
||||
pub fn from_module(module: &ModModule, ast_ids: &AstIds, file_id: FileId) -> Self {
|
||||
let mut visitor = DefinitionsVisitor {
|
||||
definitions: Definitions::default(),
|
||||
ast_ids,
|
||||
file_id,
|
||||
};
|
||||
|
||||
visitor.visit_body(&module.body);
|
||||
|
||||
visitor.definitions
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<FunctionId> for Definitions {
|
||||
type Output = Function;
|
||||
|
||||
fn index(&self, index: FunctionId) -> &Self::Output {
|
||||
&self.functions[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<ParameterId> for Definitions {
|
||||
type Output = Parameter;
|
||||
|
||||
fn index(&self, index: ParameterId) -> &Self::Output {
|
||||
&self.parameters[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<ClassId> for Definitions {
|
||||
type Output = Class;
|
||||
|
||||
fn index(&self, index: ClassId) -> &Self::Output {
|
||||
&self.classes[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<AssignmentId> for Definitions {
|
||||
type Output = Assignment;
|
||||
|
||||
fn index(&self, index: AssignmentId) -> &Self::Output {
|
||||
&self.assignments[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<AnnotatedAssignmentId> for Definitions {
|
||||
type Output = AnnotatedAssignment;
|
||||
|
||||
fn index(&self, index: AnnotatedAssignmentId) -> &Self::Output {
|
||||
&self.annotated_assignments[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<TypeAliasId> for Definitions {
|
||||
type Output = TypeAlias;
|
||||
|
||||
fn index(&self, index: TypeAliasId) -> &Self::Output {
|
||||
&self.type_aliases[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<GlobalId> for Definitions {
|
||||
type Output = Global;
|
||||
|
||||
fn index(&self, index: GlobalId) -> &Self::Output {
|
||||
&self.globals[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<NonLocalId> for Definitions {
|
||||
type Output = NonLocal;
|
||||
|
||||
fn index(&self, index: NonLocalId) -> &Self::Output {
|
||||
&self.non_locals[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<TypeParameterId> for Definitions {
|
||||
type Output = TypeParameter;
|
||||
|
||||
fn index(&self, index: TypeParameterId) -> &Self::Output {
|
||||
&self.type_parameters[index]
|
||||
}
|
||||
}
|
||||
|
||||
struct DefinitionsVisitor<'a> {
|
||||
definitions: Definitions,
|
||||
ast_ids: &'a AstIds,
|
||||
file_id: FileId,
|
||||
}
|
||||
|
||||
impl DefinitionsVisitor<'_> {
|
||||
fn ast_id<N: HasAstId>(&self, node: &N) -> HirAstId<N> {
|
||||
HirAstId {
|
||||
file_id: self.file_id,
|
||||
node_id: self.ast_ids.ast_id(node),
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_function_def(&mut self, function: &StmtFunctionDef) -> FunctionId {
|
||||
let name = Name::new(&function.name);
|
||||
|
||||
let first_type_parameter_id = self.definitions.type_parameters.next_index();
|
||||
let mut last_type_parameter_id = first_type_parameter_id;
|
||||
|
||||
if let Some(type_params) = &function.type_params {
|
||||
for parameter in &type_params.type_params {
|
||||
let id = self.lower_type_parameter(parameter);
|
||||
last_type_parameter_id = id;
|
||||
}
|
||||
}
|
||||
|
||||
let parameters = self.lower_parameters(&function.parameters);
|
||||
|
||||
self.definitions.functions.push(Function {
|
||||
name,
|
||||
ast_id: self.ast_id(function),
|
||||
parameters,
|
||||
type_parameters: first_type_parameter_id..last_type_parameter_id,
|
||||
})
|
||||
}
|
||||
|
||||
fn lower_parameters(&mut self, parameters: &ruff_python_ast::Parameters) -> Range<ParameterId> {
|
||||
let first_parameter_id = self.definitions.parameters.next_index();
|
||||
let mut last_parameter_id = first_parameter_id;
|
||||
|
||||
for parameter in &parameters.posonlyargs {
|
||||
last_parameter_id = self.definitions.parameters.push(Parameter {
|
||||
kind: ParameterKind::PositionalOnly,
|
||||
name: Name::new(&parameter.parameter.name),
|
||||
default: None,
|
||||
ast_id: self.ast_id(&parameter.parameter),
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(vararg) = &parameters.vararg {
|
||||
last_parameter_id = self.definitions.parameters.push(Parameter {
|
||||
kind: ParameterKind::Vararg,
|
||||
name: Name::new(&vararg.name),
|
||||
default: None,
|
||||
ast_id: self.ast_id(vararg),
|
||||
});
|
||||
}
|
||||
|
||||
for parameter in &parameters.kwonlyargs {
|
||||
last_parameter_id = self.definitions.parameters.push(Parameter {
|
||||
kind: ParameterKind::KeywordOnly,
|
||||
name: Name::new(&parameter.parameter.name),
|
||||
default: None,
|
||||
ast_id: self.ast_id(&parameter.parameter),
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(kwarg) = &parameters.kwarg {
|
||||
last_parameter_id = self.definitions.parameters.push(Parameter {
|
||||
kind: ParameterKind::Kwarg,
|
||||
name: Name::new(&kwarg.name),
|
||||
default: None,
|
||||
ast_id: self.ast_id(kwarg),
|
||||
});
|
||||
}
|
||||
|
||||
first_parameter_id..last_parameter_id
|
||||
}
|
||||
|
||||
fn lower_class_def(&mut self, class: &StmtClassDef) -> ClassId {
|
||||
let name = Name::new(&class.name);
|
||||
|
||||
self.definitions.classes.push(Class {
|
||||
name,
|
||||
ast_id: self.ast_id(class),
|
||||
})
|
||||
}
|
||||
|
||||
fn lower_assignment(&mut self, assignment: &StmtAssign) {
|
||||
// FIXME handle multiple names
|
||||
if let Some(Expr::Name(name)) = assignment.targets.first() {
|
||||
self.definitions.assignments.push(Assignment {
|
||||
name: Name::new(&name.id),
|
||||
ast_id: self.ast_id(assignment),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_annotated_assignment(&mut self, annotated_assignment: &StmtAnnAssign) {
|
||||
if let Expr::Name(name) = &*annotated_assignment.target {
|
||||
self.definitions
|
||||
.annotated_assignments
|
||||
.push(AnnotatedAssignment {
|
||||
name: Name::new(&name.id),
|
||||
ast_id: self.ast_id(annotated_assignment),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_type_alias(&mut self, type_alias: &StmtTypeAlias) {
|
||||
if let Expr::Name(name) = &*type_alias.name {
|
||||
let name = Name::new(&name.id);
|
||||
|
||||
let lower_parameters_id = self.definitions.type_parameters.next_index();
|
||||
let mut last_parameter_id = lower_parameters_id;
|
||||
|
||||
if let Some(type_params) = &type_alias.type_params {
|
||||
for type_parameter in &type_params.type_params {
|
||||
let id = self.lower_type_parameter(type_parameter);
|
||||
last_parameter_id = id;
|
||||
}
|
||||
}
|
||||
|
||||
self.definitions.type_aliases.push(TypeAlias {
|
||||
name,
|
||||
ast_id: self.ast_id(type_alias),
|
||||
parameters: lower_parameters_id..last_parameter_id,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_type_parameter(&mut self, type_parameter: &TypeParam) -> TypeParameterId {
|
||||
match type_parameter {
|
||||
TypeParam::TypeVar(type_var) => {
|
||||
self.definitions
|
||||
.type_parameters
|
||||
.push(TypeParameter::TypeVar(TypeParameterTypeVar {
|
||||
name: Name::new(&type_var.name),
|
||||
ast_id: self.ast_id(type_var),
|
||||
}))
|
||||
}
|
||||
TypeParam::ParamSpec(param_spec) => {
|
||||
self.definitions
|
||||
.type_parameters
|
||||
.push(TypeParameter::ParamSpec(TypeParameterParamSpec {
|
||||
name: Name::new(&param_spec.name),
|
||||
ast_id: self.ast_id(param_spec),
|
||||
}))
|
||||
}
|
||||
TypeParam::TypeVarTuple(type_var_tuple) => {
|
||||
self.definitions
|
||||
.type_parameters
|
||||
.push(TypeParameter::TypeVarTuple(TypeParameterTypeVarTuple {
|
||||
name: Name::new(&type_var_tuple.name),
|
||||
ast_id: self.ast_id(type_var_tuple),
|
||||
}))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_import(&mut self, _import: &StmtImport) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fn lower_import_from(&mut self, _import_from: &StmtImportFrom) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fn lower_global(&mut self, global: &StmtGlobal) -> GlobalId {
|
||||
self.definitions.globals.push(Global {
|
||||
ast_id: self.ast_id(global),
|
||||
})
|
||||
}
|
||||
|
||||
fn lower_non_local(&mut self, non_local: &StmtNonlocal) -> NonLocalId {
|
||||
self.definitions.non_locals.push(NonLocal {
|
||||
ast_id: self.ast_id(non_local),
|
||||
})
|
||||
}
|
||||
|
||||
fn lower_except_handler(&mut self, _except_handler: &ExceptHandlerExceptHandler) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fn lower_with_item(&mut self, _with_item: &WithItem) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fn lower_match_case(&mut self, _match_case: &MatchCase) {
|
||||
// TODO
|
||||
}
|
||||
}
|
||||
|
||||
impl PreorderVisitor<'_> for DefinitionsVisitor<'_> {
|
||||
fn visit_stmt(&mut self, stmt: &Stmt) {
|
||||
match stmt {
|
||||
// Definition statements
|
||||
Stmt::FunctionDef(definition) => {
|
||||
self.lower_function_def(definition);
|
||||
self.visit_body(&definition.body);
|
||||
}
|
||||
Stmt::ClassDef(definition) => {
|
||||
self.lower_class_def(definition);
|
||||
self.visit_body(&definition.body);
|
||||
}
|
||||
Stmt::Assign(assignment) => {
|
||||
self.lower_assignment(assignment);
|
||||
}
|
||||
Stmt::AnnAssign(annotated_assignment) => {
|
||||
self.lower_annotated_assignment(annotated_assignment);
|
||||
}
|
||||
Stmt::TypeAlias(type_alias) => {
|
||||
self.lower_type_alias(type_alias);
|
||||
}
|
||||
|
||||
Stmt::Import(import) => self.lower_import(import),
|
||||
Stmt::ImportFrom(import_from) => self.lower_import_from(import_from),
|
||||
Stmt::Global(global) => {
|
||||
self.lower_global(global);
|
||||
}
|
||||
Stmt::Nonlocal(non_local) => {
|
||||
self.lower_non_local(non_local);
|
||||
}
|
||||
|
||||
// Visit the compound statement bodies because they can contain other definitions.
|
||||
Stmt::For(_)
|
||||
| Stmt::While(_)
|
||||
| Stmt::If(_)
|
||||
| Stmt::With(_)
|
||||
| Stmt::Match(_)
|
||||
| Stmt::Try(_) => {
|
||||
preorder::walk_stmt(self, stmt);
|
||||
}
|
||||
|
||||
// Skip over simple statements because they can't contain any other definitions.
|
||||
Stmt::Return(_)
|
||||
| Stmt::Delete(_)
|
||||
| Stmt::AugAssign(_)
|
||||
| Stmt::Raise(_)
|
||||
| Stmt::Assert(_)
|
||||
| Stmt::Expr(_)
|
||||
| Stmt::Pass(_)
|
||||
| Stmt::Break(_)
|
||||
| Stmt::Continue(_)
|
||||
| Stmt::IpyEscapeCommand(_) => {
|
||||
// No op
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, _: &'_ Expr) {}
|
||||
|
||||
fn visit_decorator(&mut self, _decorator: &'_ Decorator) {}
|
||||
|
||||
fn visit_except_handler(&mut self, except_handler: &'_ ExceptHandler) {
|
||||
match except_handler {
|
||||
ExceptHandler::ExceptHandler(except_handler) => {
|
||||
self.lower_except_handler(except_handler);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_with_item(&mut self, with_item: &'_ WithItem) {
|
||||
self.lower_with_item(with_item);
|
||||
}
|
||||
|
||||
fn visit_match_case(&mut self, match_case: &'_ MatchCase) {
|
||||
self.lower_match_case(match_case);
|
||||
self.visit_body(&match_case.body);
|
||||
}
|
||||
}
|
||||
@@ -1,109 +0,0 @@
|
||||
use std::fmt::Formatter;
|
||||
use std::hash::BuildHasherDefault;
|
||||
use std::ops::Deref;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use rustc_hash::{FxHashSet, FxHasher};
|
||||
|
||||
use crate::files::FileId;
|
||||
|
||||
pub mod ast_ids;
|
||||
pub mod cache;
|
||||
pub mod cancellation;
|
||||
pub mod db;
|
||||
pub mod files;
|
||||
pub mod hir;
|
||||
pub mod lint;
|
||||
pub mod module;
|
||||
mod parse;
|
||||
pub mod program;
|
||||
pub mod source;
|
||||
mod symbols;
|
||||
mod types;
|
||||
pub mod watch;
|
||||
|
||||
pub(crate) type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
|
||||
#[allow(unused)]
|
||||
pub(crate) type FxDashSet<V> = dashmap::DashSet<V, BuildHasherDefault<FxHasher>>;
|
||||
pub(crate) type FxIndexSet<V> = indexmap::set::IndexSet<V, BuildHasherDefault<FxHasher>>;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Workspace {
|
||||
/// TODO this should be a resolved path. We should probably use a newtype wrapper that guarantees that
|
||||
/// the path is UTF-8 and normalized.
|
||||
root: PathBuf,
|
||||
/// The files that are open in the workspace.
|
||||
///
|
||||
/// * Editor: The files that are actively being edited in the editor (the user has a tab open with the file).
|
||||
/// * CLI: The resolved files passed as arguments to the CLI.
|
||||
open_files: FxHashSet<FileId>,
|
||||
}
|
||||
|
||||
impl Workspace {
|
||||
pub fn new(root: PathBuf) -> Self {
|
||||
Self {
|
||||
root,
|
||||
open_files: FxHashSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn root(&self) -> &Path {
|
||||
self.root.as_path()
|
||||
}
|
||||
|
||||
// TODO having the content in workspace feels wrong.
|
||||
pub fn open_file(&mut self, file_id: FileId) {
|
||||
self.open_files.insert(file_id);
|
||||
}
|
||||
|
||||
pub fn close_file(&mut self, file_id: FileId) {
|
||||
self.open_files.remove(&file_id);
|
||||
}
|
||||
|
||||
// TODO introduce an `OpenFile` type instead of using an anonymous tuple.
|
||||
pub fn open_files(&self) -> impl Iterator<Item = FileId> + '_ {
|
||||
self.open_files.iter().copied()
|
||||
}
|
||||
|
||||
pub fn is_file_open(&self, file_id: FileId) -> bool {
|
||||
self.open_files.contains(&file_id)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
|
||||
pub struct Name(smol_str::SmolStr);
|
||||
|
||||
impl Name {
|
||||
#[inline]
|
||||
pub fn new(name: &str) -> Self {
|
||||
Self(smol_str::SmolStr::new(name))
|
||||
}
|
||||
|
||||
pub fn as_str(&self) -> &str {
|
||||
self.0.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Name {
|
||||
type Target = str;
|
||||
|
||||
#[inline]
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> From<T> for Name
|
||||
where
|
||||
T: Into<smol_str::SmolStr>,
|
||||
{
|
||||
fn from(value: T) -> Self {
|
||||
Self(value.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Name {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
@@ -1,321 +0,0 @@
|
||||
use std::cell::RefCell;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use ruff_python_ast::visitor::Visitor;
|
||||
use ruff_python_ast::{ModModule, StringLiteral};
|
||||
|
||||
use crate::cache::KeyValueCache;
|
||||
use crate::db::{LintDb, LintJar, QueryResult};
|
||||
use crate::files::FileId;
|
||||
use crate::module::ModuleName;
|
||||
use crate::parse::{parse, Parsed};
|
||||
use crate::source::{source_text, Source};
|
||||
use crate::symbols::{
|
||||
resolve_global_symbol, symbol_table, Definition, GlobalSymbolId, SymbolId, SymbolTable,
|
||||
};
|
||||
use crate::types::{infer_definition_type, infer_symbol_type, Type};
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn lint_syntax(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
|
||||
let lint_jar: &LintJar = db.jar()?;
|
||||
let storage = &lint_jar.lint_syntax;
|
||||
|
||||
#[allow(clippy::print_stdout)]
|
||||
if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
|
||||
for i in 0..10 {
|
||||
db.cancelled()?;
|
||||
println!("RED_KNOT_SLOW_LINT is set, sleeping for {i}/10 seconds");
|
||||
std::thread::sleep(Duration::from_secs(1));
|
||||
}
|
||||
}
|
||||
|
||||
storage.get(&file_id, |file_id| {
|
||||
let mut diagnostics = Vec::new();
|
||||
|
||||
let source = source_text(db.upcast(), *file_id)?;
|
||||
lint_lines(source.text(), &mut diagnostics);
|
||||
|
||||
let parsed = parse(db.upcast(), *file_id)?;
|
||||
|
||||
if parsed.errors().is_empty() {
|
||||
let ast = parsed.ast();
|
||||
|
||||
let mut visitor = SyntaxLintVisitor {
|
||||
diagnostics,
|
||||
source: source.text(),
|
||||
};
|
||||
visitor.visit_body(&ast.body);
|
||||
diagnostics = visitor.diagnostics;
|
||||
} else {
|
||||
diagnostics.extend(parsed.errors().iter().map(std::string::ToString::to_string));
|
||||
}
|
||||
|
||||
Ok(Diagnostics::from(diagnostics))
|
||||
})
|
||||
}
|
||||
|
||||
fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
|
||||
for (line_number, line) in source.lines().enumerate() {
|
||||
if line.len() < 88 {
|
||||
continue;
|
||||
}
|
||||
|
||||
let char_count = line.chars().count();
|
||||
if char_count > 88 {
|
||||
diagnostics.push(format!(
|
||||
"Line {} is too long ({} characters)",
|
||||
line_number + 1,
|
||||
char_count
|
||||
));
|
||||
}
|
||||
}
|
||||
}
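// Illustrative sketch of the rule above: any line longer than 88 characters produces one
// diagnostic (the byte-length check is just a cheap pre-filter before counting chars).
#[cfg(test)]
mod line_length_tests {
    use super::lint_lines;

    #[test]
    fn long_line_is_reported() {
        let mut diagnostics = Vec::new();
        lint_lines(&"x".repeat(100), &mut diagnostics);
        assert_eq!(diagnostics.len(), 1);
    }

    #[test]
    fn short_line_is_not_reported() {
        let mut diagnostics = Vec::new();
        lint_lines("short line", &mut diagnostics);
        assert!(diagnostics.is_empty());
    }
}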
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn lint_semantic(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
|
||||
let lint_jar: &LintJar = db.jar()?;
|
||||
let storage = &lint_jar.lint_semantic;
|
||||
|
||||
storage.get(&file_id, |file_id| {
|
||||
let source = source_text(db.upcast(), *file_id)?;
|
||||
let parsed = parse(db.upcast(), *file_id)?;
|
||||
let symbols = symbol_table(db.upcast(), *file_id)?;
|
||||
|
||||
let context = SemanticLintContext {
|
||||
file_id: *file_id,
|
||||
source,
|
||||
parsed,
|
||||
symbols,
|
||||
db,
|
||||
diagnostics: RefCell::new(Vec::new()),
|
||||
};
|
||||
|
||||
lint_unresolved_imports(&context)?;
|
||||
lint_bad_overrides(&context)?;
|
||||
|
||||
Ok(Diagnostics::from(context.diagnostics.take()))
|
||||
})
|
||||
}
|
||||
|
||||
fn lint_unresolved_imports(context: &SemanticLintContext) -> QueryResult<()> {
|
||||
// TODO: Consider iterating over the dependencies (imports) only instead of all definitions.
|
||||
for (symbol, definition) in context.symbols().all_definitions() {
|
||||
match definition {
|
||||
Definition::Import(import) => {
|
||||
let ty = context.infer_symbol_type(symbol)?;
|
||||
|
||||
if ty.is_unknown() {
|
||||
context.push_diagnostic(format!("Unresolved module {}", import.module));
|
||||
}
|
||||
}
|
||||
Definition::ImportFrom(import) => {
|
||||
let ty = context.infer_symbol_type(symbol)?;
|
||||
|
||||
if ty.is_unknown() {
|
||||
let module_name = import.module().map(Deref::deref).unwrap_or_default();
|
||||
let message = if import.level() > 0 {
|
||||
format!(
|
||||
"Unresolved relative import '{}' from {}{}",
|
||||
import.name(),
|
||||
".".repeat(import.level() as usize),
|
||||
module_name
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"Unresolved import '{}' from '{}'",
|
||||
import.name(),
|
||||
module_name
|
||||
)
|
||||
};
|
||||
|
||||
context.push_diagnostic(message);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn lint_bad_overrides(context: &SemanticLintContext) -> QueryResult<()> {
|
||||
// TODO we should have a special marker on the real typing module (from typeshed) so if you
|
||||
// have your own "typing" module in your project, we don't consider it THE typing module (and
|
||||
// same for other stdlib modules that our lint rules care about)
|
||||
let Some(typing_override) =
|
||||
resolve_global_symbol(context.db.upcast(), ModuleName::new("typing"), "override")?
|
||||
else {
|
||||
// TODO once we bundle typeshed, this should be unreachable!()
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
// TODO we should maybe index definitions by type instead of iterating all, or else iterate all
|
||||
// just once, match, and branch to all lint rules that care about a type of definition
|
||||
for (symbol, definition) in context.symbols().all_definitions() {
|
||||
if !matches!(definition, Definition::FunctionDef(_)) {
|
||||
continue;
|
||||
}
|
||||
let ty = infer_definition_type(
|
||||
context.db.upcast(),
|
||||
GlobalSymbolId {
|
||||
file_id: context.file_id,
|
||||
symbol_id: symbol,
|
||||
},
|
||||
definition.clone(),
|
||||
)?;
|
||||
let Type::Function(func) = ty else {
|
||||
unreachable!("type of a FunctionDef should always be a Function");
|
||||
};
|
||||
let Some(class) = func.get_containing_class(context.db.upcast())? else {
|
||||
// not a method of a class
|
||||
continue;
|
||||
};
|
||||
if func.has_decorator(context.db.upcast(), typing_override)? {
|
||||
let method_name = func.name(context.db.upcast())?;
|
||||
if class
|
||||
.get_super_class_member(context.db.upcast(), &method_name)?
|
||||
.is_none()
|
||||
{
|
||||
// TODO should have a qualname() method to support nested classes
|
||||
context.push_diagnostic(
|
||||
format!(
|
||||
"Method {}.{} is decorated with `typing.override` but does not override any base class method",
|
||||
class.name(context.db.upcast())?,
|
||||
method_name,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub struct SemanticLintContext<'a> {
|
||||
file_id: FileId,
|
||||
source: Source,
|
||||
parsed: Parsed,
|
||||
symbols: Arc<SymbolTable>,
|
||||
db: &'a dyn LintDb,
|
||||
diagnostics: RefCell<Vec<String>>,
|
||||
}
|
||||
|
||||
impl<'a> SemanticLintContext<'a> {
|
||||
pub fn source_text(&self) -> &str {
|
||||
self.source.text()
|
||||
}
|
||||
|
||||
pub fn file_id(&self) -> FileId {
|
||||
self.file_id
|
||||
}
|
||||
|
||||
pub fn ast(&self) -> &ModModule {
|
||||
self.parsed.ast()
|
||||
}
|
||||
|
||||
pub fn symbols(&self) -> &SymbolTable {
|
||||
&self.symbols
|
||||
}
|
||||
|
||||
pub fn infer_symbol_type(&self, symbol_id: SymbolId) -> QueryResult<Type> {
|
||||
infer_symbol_type(
|
||||
self.db.upcast(),
|
||||
GlobalSymbolId {
|
||||
file_id: self.file_id,
|
||||
symbol_id,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
pub fn push_diagnostic(&self, diagnostic: String) {
|
||||
self.diagnostics.borrow_mut().push(diagnostic);
|
||||
}
|
||||
|
||||
pub fn extend_diagnostics(&mut self, diagnostics: impl IntoIterator<Item = String>) {
|
||||
self.diagnostics.get_mut().extend(diagnostics);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct SyntaxLintVisitor<'a> {
|
||||
diagnostics: Vec<String>,
|
||||
source: &'a str,
|
||||
}
|
||||
|
||||
impl Visitor<'_> for SyntaxLintVisitor<'_> {
|
||||
fn visit_string_literal(&mut self, string_literal: &'_ StringLiteral) {
|
||||
// A very naive implementation of a "use double quotes" rule.
|
||||
let text = &self.source[string_literal.range];
|
||||
|
||||
if text.starts_with('\'') {
|
||||
self.diagnostics
|
||||
.push("Use double quotes for strings".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
pub enum Diagnostics {
    Empty,
    List(Arc<Vec<String>>),
}

impl Diagnostics {
    pub fn as_slice(&self) -> &[String] {
        match self {
            Diagnostics::Empty => &[],
            Diagnostics::List(list) => list.as_slice(),
        }
    }
}

impl Deref for Diagnostics {
    type Target = [String];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl From<Vec<String>> for Diagnostics {
    fn from(value: Vec<String>) -> Self {
        if value.is_empty() {
            Diagnostics::Empty
        } else {
            Diagnostics::List(Arc::new(value))
        }
    }
}
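// A minimal sketch of how the `Diagnostics` type above is meant to be consumed,
// assuming it is in scope: an empty `Vec` collapses into `Diagnostics::Empty`,
// and `Deref<Target = [String]>` lets callers treat either variant as a slice.
#[cfg(test)]
mod diagnostics_sketch {
    use super::Diagnostics;

    #[test]
    fn empty_and_non_empty() {
        let empty = Diagnostics::from(Vec::<String>::new());
        assert!(empty.is_empty()); // slice method, available via `Deref`

        let some = Diagnostics::from(vec!["Use double quotes for strings".to_string()]);
        assert_eq!(some.len(), 1);
        assert_eq!(some.as_slice()[0], "Use double quotes for strings");
    }
}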
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct LintSyntaxStorage(KeyValueCache<FileId, Diagnostics>);
|
||||
|
||||
impl Deref for LintSyntaxStorage {
|
||||
type Target = KeyValueCache<FileId, Diagnostics>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for LintSyntaxStorage {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct LintSemanticStorage(KeyValueCache<FileId, Diagnostics>);
|
||||
|
||||
impl Deref for LintSemanticStorage {
|
||||
type Target = KeyValueCache<FileId, Diagnostics>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for LintSemanticStorage {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
254 crates/red_knot/src/logging.rs Normal file
@@ -0,0 +1,254 @@
|
||||
//! Sets up logging for Red Knot
|
||||
|
||||
use anyhow::Context;
|
||||
use colored::Colorize;
|
||||
use std::fmt;
|
||||
use std::fs::File;
|
||||
use std::io::BufWriter;
|
||||
use tracing::log::LevelFilter;
|
||||
use tracing::{Event, Subscriber};
|
||||
use tracing_subscriber::fmt::format::Writer;
|
||||
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
|
||||
use tracing_subscriber::registry::LookupSpan;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
/// Logging flags to `#[command(flatten)]` into your CLI
|
||||
#[derive(clap::Args, Debug, Clone, Default)]
|
||||
#[command(about = None, long_about = None)]
|
||||
pub(crate) struct Verbosity {
|
||||
#[arg(
|
||||
long,
|
||||
short = 'v',
|
||||
help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
|
||||
action = clap::ArgAction::Count,
|
||||
global = true,
|
||||
)]
|
||||
verbose: u8,
|
||||
}
|
||||
|
||||
impl Verbosity {
|
||||
/// Returns the verbosity level based on the number of `-v` flags.
|
||||
///
|
||||
/// Returns [`VerbosityLevel::Default`] if the user did not specify any verbosity flags.
|
||||
pub(crate) fn level(&self) -> VerbosityLevel {
|
||||
match self.verbose {
|
||||
0 => VerbosityLevel::Default,
|
||||
1 => VerbosityLevel::Verbose,
|
||||
2 => VerbosityLevel::ExtraVerbose,
|
||||
_ => VerbosityLevel::Trace,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
||||
pub(crate) enum VerbosityLevel {
|
||||
/// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN) level.
|
||||
Default,
|
||||
|
||||
/// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO) level.
|
||||
/// Corresponds to `-v`.
|
||||
Verbose,
|
||||
|
||||
/// Enables a more verbose tracing format and emits Ruff and Red Knot events up to [`DEBUG`](tracing::Level::DEBUG).
|
||||
/// Corresponds to `-vv`.
|
||||
ExtraVerbose,
|
||||
|
||||
/// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
|
||||
Trace,
|
||||
}
|
||||
|
||||
impl VerbosityLevel {
|
||||
const fn level_filter(self) -> LevelFilter {
|
||||
match self {
|
||||
VerbosityLevel::Default => LevelFilter::Warn,
|
||||
VerbosityLevel::Verbose => LevelFilter::Info,
|
||||
VerbosityLevel::ExtraVerbose => LevelFilter::Debug,
|
||||
VerbosityLevel::Trace => LevelFilter::Trace,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) const fn is_trace(self) -> bool {
|
||||
matches!(self, VerbosityLevel::Trace)
|
||||
}
|
||||
|
||||
pub(crate) const fn is_extra_verbose(self) -> bool {
|
||||
matches!(self, VerbosityLevel::ExtraVerbose)
|
||||
}
|
||||
}
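// In-file sketch of the verbosity mapping above: each extra `-v` widens the
// level filter that `setup_tracing` installs. Assumes the same `tracing::log`
// re-export this file already imports.
#[cfg(test)]
mod verbosity_sketch {
    use super::VerbosityLevel;
    use tracing::log::LevelFilter;

    #[test]
    fn more_flags_mean_more_output() {
        assert_eq!(VerbosityLevel::Default.level_filter(), LevelFilter::Warn);
        assert_eq!(VerbosityLevel::Verbose.level_filter(), LevelFilter::Info);
        assert_eq!(VerbosityLevel::ExtraVerbose.level_filter(), LevelFilter::Debug);
        assert_eq!(VerbosityLevel::Trace.level_filter(), LevelFilter::Trace);
    }
}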
|
||||
|
||||
pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
|
||||
use tracing_subscriber::prelude::*;
|
||||
|
||||
// The `RED_KNOT_LOG` environment variable overrides the default log level; see the directive sketch after this function.
|
||||
let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
|
||||
EnvFilter::builder()
|
||||
.parse(log_env_variable)
|
||||
.context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
|
||||
} else {
|
||||
match level {
|
||||
VerbosityLevel::Default => {
|
||||
// Show warning traces
|
||||
EnvFilter::default().add_directive(tracing::level_filters::LevelFilter::WARN.into())
|
||||
}
|
||||
level => {
|
||||
let level_filter = level.level_filter();
|
||||
|
||||
// Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
|
||||
let filter = EnvFilter::default().add_directive(
|
||||
format!("red_knot={level_filter}")
|
||||
.parse()
|
||||
.expect("Hardcoded directive to be valid"),
|
||||
);
|
||||
|
||||
filter.add_directive(
|
||||
format!("ruff={level_filter}")
|
||||
.parse()
|
||||
.expect("Hardcoded directive to be valid"),
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let (profiling_layer, guard) = setup_profile();
|
||||
|
||||
let registry = tracing_subscriber::registry()
|
||||
.with(filter)
|
||||
.with(profiling_layer);
|
||||
|
||||
if level.is_trace() {
|
||||
let subscriber = registry.with(
|
||||
tracing_tree::HierarchicalLayer::default()
|
||||
.with_indent_lines(true)
|
||||
.with_indent_amount(2)
|
||||
.with_bracketed_fields(true)
|
||||
.with_thread_ids(true)
|
||||
.with_targets(true)
|
||||
.with_writer(std::io::stderr)
|
||||
.with_timer(tracing_tree::time::Uptime::default()),
|
||||
);
|
||||
|
||||
subscriber.init();
|
||||
} else {
|
||||
let subscriber = registry.with(
|
||||
tracing_subscriber::fmt::layer()
|
||||
.event_format(RedKnotFormat {
|
||||
display_level: true,
|
||||
display_timestamp: level.is_extra_verbose(),
|
||||
show_spans: false,
|
||||
})
|
||||
.with_writer(std::io::stderr),
|
||||
);
|
||||
|
||||
subscriber.init();
|
||||
}
|
||||
|
||||
Ok(TracingGuard {
|
||||
_flame_guard: guard,
|
||||
})
|
||||
}
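// Sketch of the directive syntax accepted through `RED_KNOT_LOG`, assuming the
// same `tracing_subscriber::EnvFilter` used above: the variable takes standard
// `EnvFilter` directives, either a bare level or per-target levels.
#[cfg(test)]
mod red_knot_log_sketch {
    use tracing_subscriber::EnvFilter;

    #[test]
    fn parses_typical_directives() {
        // A single global level...
        assert!(EnvFilter::builder().parse("debug").is_ok());
        // ...or per-target levels, like the ones `setup_tracing` builds itself.
        assert!(EnvFilter::builder().parse("red_knot=trace,ruff=info").is_ok());
    }
}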
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn setup_profile<S>() -> (
|
||||
Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
|
||||
Option<tracing_flame::FlushGuard<BufWriter<File>>>,
|
||||
)
|
||||
where
|
||||
S: Subscriber + for<'span> LookupSpan<'span>,
|
||||
{
|
||||
if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
|
||||
let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
|
||||
.expect("Flame layer to be created");
|
||||
(Some(layer), Some(guard))
|
||||
} else {
|
||||
(None, None)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct TracingGuard {
|
||||
_flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
|
||||
}
|
||||
|
||||
struct RedKnotFormat {
|
||||
display_timestamp: bool,
|
||||
display_level: bool,
|
||||
show_spans: bool,
|
||||
}
|
||||
|
||||
/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
|
||||
impl<S, N> FormatEvent<S, N> for RedKnotFormat
|
||||
where
|
||||
S: Subscriber + for<'a> LookupSpan<'a>,
|
||||
N: for<'a> FormatFields<'a> + 'static,
|
||||
{
|
||||
fn format_event(
|
||||
&self,
|
||||
ctx: &FmtContext<'_, S, N>,
|
||||
mut writer: Writer<'_>,
|
||||
event: &Event<'_>,
|
||||
) -> fmt::Result {
|
||||
let meta = event.metadata();
|
||||
let ansi = writer.has_ansi_escapes();
|
||||
|
||||
if self.display_timestamp {
|
||||
let timestamp = chrono::Local::now()
|
||||
.format("%Y-%m-%d %H:%M:%S.%f")
|
||||
.to_string();
|
||||
if ansi {
|
||||
write!(writer, "{} ", timestamp.dimmed())?;
|
||||
} else {
|
||||
write!(
|
||||
writer,
|
||||
"{} ",
|
||||
chrono::Local::now().format("%Y-%m-%d %H:%M:%S.%f")
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
if self.display_level {
|
||||
let level = meta.level();
|
||||
// Same colors as tracing
|
||||
if ansi {
|
||||
let formatted_level = level.to_string();
|
||||
match *level {
|
||||
tracing::Level::TRACE => {
|
||||
write!(writer, "{} ", formatted_level.purple().bold())?;
|
||||
}
|
||||
tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
|
||||
tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
|
||||
tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
|
||||
tracing::Level::ERROR => write!(writer, "{} ", level.to_string().red().bold())?,
|
||||
}
|
||||
} else {
|
||||
write!(writer, "{level} ")?;
|
||||
}
|
||||
}
|
||||
|
||||
if self.show_spans {
|
||||
let span = event.parent();
|
||||
let mut seen = false;
|
||||
|
||||
let span = span
|
||||
.and_then(|id| ctx.span(id))
|
||||
.or_else(|| ctx.lookup_current());
|
||||
|
||||
let scope = span.into_iter().flat_map(|span| span.scope().from_root());
|
||||
|
||||
for span in scope {
|
||||
seen = true;
|
||||
if ansi {
|
||||
write!(writer, "{}:", span.metadata().name().bold())?;
|
||||
} else {
|
||||
write!(writer, "{}:", span.metadata().name())?;
|
||||
}
|
||||
}
|
||||
|
||||
if seen {
|
||||
writer.write_char(' ')?;
|
||||
}
|
||||
}
|
||||
|
||||
ctx.field_format().format_fields(writer.by_ref(), event)?;
|
||||
|
||||
writeln!(writer)
|
||||
}
|
||||
}
|
||||
@@ -1,58 +1,197 @@
|
||||
#![allow(clippy::dbg_macro)]
|
||||
|
||||
use std::path::Path;
|
||||
use std::process::{ExitCode, Termination};
|
||||
use std::sync::Mutex;
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
use clap::Parser;
|
||||
use colored::Colorize;
|
||||
use crossbeam::channel as crossbeam_channel;
|
||||
use tracing::subscriber::Interest;
|
||||
use tracing::{Level, Metadata};
|
||||
use tracing_subscriber::filter::LevelFilter;
|
||||
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
|
||||
use tracing_subscriber::{Layer, Registry};
|
||||
use tracing_tree::time::Uptime;
|
||||
use salsa::plumbing::ZalsaDatabase;
|
||||
|
||||
use red_knot::db::{HasJar, ParallelDatabase, QueryError, SourceDb, SourceJar};
|
||||
use red_knot::module::{set_module_search_paths, ModuleSearchPath, ModuleSearchPathKind};
|
||||
use red_knot::program::check::ExecutionMode;
|
||||
use red_knot::program::{FileWatcherChange, Program};
|
||||
use red_knot::watch::FileWatcher;
|
||||
use red_knot::Workspace;
|
||||
use red_knot_server::run_server;
|
||||
use red_knot_workspace::db::RootDatabase;
|
||||
use red_knot_workspace::site_packages::site_packages_dirs_of_venv;
|
||||
use red_knot_workspace::watch;
|
||||
use red_knot_workspace::watch::WorkspaceWatcher;
|
||||
use red_knot_workspace::workspace::WorkspaceMetadata;
|
||||
use ruff_db::program::{ProgramSettings, SearchPathSettings};
|
||||
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
|
||||
use target_version::TargetVersion;
|
||||
|
||||
use crate::logging::{setup_tracing, Verbosity};
|
||||
|
||||
mod logging;
|
||||
mod target_version;
|
||||
mod verbosity;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[command(
|
||||
author,
|
||||
name = "red-knot",
|
||||
about = "An extremely fast Python type checker."
|
||||
)]
|
||||
#[command(version)]
|
||||
struct Args {
|
||||
#[command(subcommand)]
|
||||
pub(crate) command: Option<Command>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Changes the current working directory.",
|
||||
long_help = "Changes the current working directory before any specified operations. This affects the workspace and configuration discovery.",
|
||||
value_name = "PATH"
|
||||
)]
|
||||
current_directory: Option<SystemPathBuf>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Path to the virtual environment the project uses",
|
||||
long_help = "\
|
||||
Path to the virtual environment the project uses. \
|
||||
If provided, red-knot will use the `site-packages` directory of this virtual environment \
|
||||
to resolve type information for the project's third-party dependencies.",
|
||||
value_name = "PATH"
|
||||
)]
|
||||
venv_path: Option<SystemPathBuf>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "DIRECTORY",
|
||||
help = "Custom directory to use for stdlib typeshed stubs"
|
||||
)]
|
||||
custom_typeshed_dir: Option<SystemPathBuf>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "PATH",
|
||||
help = "Additional path to use as a module-resolution source (can be passed multiple times)"
|
||||
)]
|
||||
extra_search_path: Vec<SystemPathBuf>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Python version to assume when resolving types",
|
||||
default_value_t = TargetVersion::default(),
|
||||
value_name="VERSION")
|
||||
]
|
||||
target_version: TargetVersion,
|
||||
|
||||
#[clap(flatten)]
|
||||
verbosity: Verbosity,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Run in watch mode by re-running whenever files change",
|
||||
short = 'W'
|
||||
)]
|
||||
watch: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
pub enum Command {
|
||||
/// Start the language server
|
||||
Server,
|
||||
}
|
||||
|
||||
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
|
||||
fn main() -> anyhow::Result<()> {
|
||||
setup_tracing();
|
||||
pub fn main() -> ExitStatus {
|
||||
run().unwrap_or_else(|error| {
|
||||
use std::io::Write;
|
||||
|
||||
let arguments: Vec<_> = std::env::args().collect();
|
||||
// Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
|
||||
let mut stderr = std::io::stderr().lock();
|
||||
|
||||
if arguments.len() < 2 {
|
||||
eprintln!("Usage: red_knot <path>");
|
||||
return Err(anyhow::anyhow!("Invalid arguments"));
|
||||
// This communicates that this isn't a linter error but Red Knot itself hard-errored for
|
||||
// some reason (e.g. failed to resolve the configuration)
|
||||
writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
|
||||
// Currently we generally only see one error, but e.g. with io errors when resolving
|
||||
// the configuration it is helpful to chain errors ("resolving configuration failed" ->
|
||||
// "failed to read file: subdir/pyproject.toml")
|
||||
for cause in error.chain() {
|
||||
writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
|
||||
}
|
||||
|
||||
ExitStatus::Error
|
||||
})
|
||||
}
|
||||
|
||||
fn run() -> anyhow::Result<ExitStatus> {
|
||||
let Args {
|
||||
command,
|
||||
current_directory,
|
||||
custom_typeshed_dir,
|
||||
extra_search_path: extra_paths,
|
||||
venv_path,
|
||||
target_version,
|
||||
verbosity,
|
||||
watch,
|
||||
} = Args::parse_from(std::env::args().collect::<Vec<_>>());
|
||||
|
||||
if matches!(command, Some(Command::Server)) {
|
||||
return run_server().map(|()| ExitStatus::Success);
|
||||
}
|
||||
|
||||
let entry_point = Path::new(&arguments[1]);
|
||||
let verbosity = verbosity.level();
|
||||
countme::enable(verbosity.is_trace());
|
||||
let _guard = setup_tracing(verbosity)?;
|
||||
|
||||
if !entry_point.exists() {
|
||||
eprintln!("The entry point does not exist.");
|
||||
return Err(anyhow::anyhow!("Invalid arguments"));
|
||||
}
|
||||
// The base path to which all CLI arguments are relative to.
|
||||
let cli_base_path = {
|
||||
let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
|
||||
SystemPathBuf::from_path_buf(cwd)
|
||||
.map_err(|path| {
|
||||
anyhow!(
|
||||
"The current working directory '{}' contains non-unicode characters. Red Knot only supports unicode paths.",
|
||||
path.display()
|
||||
)
|
||||
})?
|
||||
};
|
||||
|
||||
if !entry_point.is_file() {
|
||||
eprintln!("The entry point is not a file.");
|
||||
return Err(anyhow::anyhow!("Invalid arguments"));
|
||||
}
|
||||
let cwd = current_directory
|
||||
.map(|cwd| {
|
||||
if cwd.as_std_path().is_dir() {
|
||||
Ok(SystemPath::absolute(&cwd, &cli_base_path))
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"Provided current-directory path '{cwd}' is not a directory."
|
||||
))
|
||||
}
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_else(|| cli_base_path.clone());
|
||||
|
||||
let workspace_folder = entry_point.parent().unwrap();
|
||||
let workspace = Workspace::new(workspace_folder.to_path_buf());
|
||||
let system = OsSystem::new(cwd.clone());
|
||||
let workspace_metadata = WorkspaceMetadata::from_path(system.current_directory(), &system)?;
|
||||
|
||||
let workspace_search_path = ModuleSearchPath::new(
|
||||
workspace.root().to_path_buf(),
|
||||
ModuleSearchPathKind::FirstParty,
|
||||
);
|
||||
let mut program = Program::new(workspace);
|
||||
set_module_search_paths(&mut program, vec![workspace_search_path]);
|
||||
// TODO: Verify the remaining search path settings eagerly.
|
||||
let site_packages = venv_path
|
||||
.map(|venv_path| {
|
||||
let venv_path = SystemPath::absolute(venv_path, &cli_base_path);
|
||||
|
||||
let entry_id = program.file_id(entry_point);
|
||||
program.workspace_mut().open_file(entry_id);
|
||||
if system.is_directory(&venv_path) {
|
||||
Ok(site_packages_dirs_of_venv(&venv_path, &system)?)
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"Provided venv-path {venv_path} is not a directory!"
|
||||
))
|
||||
}
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_default();
|
||||
|
||||
// TODO: Respect the settings from the workspace metadata when resolving the program settings.
|
||||
let program_settings = ProgramSettings {
|
||||
target_version: target_version.into(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths,
|
||||
src_root: workspace_metadata.root().to_path_buf(),
|
||||
custom_typeshed: custom_typeshed_dir,
|
||||
site_packages,
|
||||
},
|
||||
};
|
||||
|
||||
// TODO: Use the `program_settings` to compute the key for the database's persistent
|
||||
// cache and load the cache if it exists.
|
||||
let mut db = RootDatabase::new(workspace_metadata, program_settings, system);
|
||||
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new();
|
||||
|
||||
@@ -66,122 +205,158 @@ fn main() -> anyhow::Result<()> {
|
||||
}
|
||||
})?;
|
||||
|
||||
let file_changes_notifier = main_loop.file_changes_notifier();
|
||||
let exit_status = if watch {
|
||||
main_loop.watch(&mut db)?
|
||||
} else {
|
||||
main_loop.run(&mut db)
|
||||
};
|
||||
|
||||
// Watch for file changes and re-trigger the analysis.
|
||||
let mut file_watcher = FileWatcher::new(move |changes| {
|
||||
file_changes_notifier.notify(changes);
|
||||
})?;
|
||||
tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
|
||||
|
||||
file_watcher.watch_folder(workspace_folder)?;
|
||||
std::mem::forget(db);
|
||||
|
||||
main_loop.run(&mut program);
|
||||
Ok(exit_status)
|
||||
}
|
||||
|
||||
let source_jar: &SourceJar = program.jar().unwrap();
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum ExitStatus {
|
||||
/// Checking was successful and there were no errors.
|
||||
Success = 0,
|
||||
|
||||
dbg!(source_jar.parsed.statistics());
|
||||
dbg!(source_jar.sources.statistics());
|
||||
/// Checking was successful but there were errors.
|
||||
Failure = 1,
|
||||
|
||||
Ok(())
|
||||
/// Checking failed.
|
||||
Error = 2,
|
||||
}
|
||||
|
||||
impl Termination for ExitStatus {
|
||||
fn report(self) -> ExitCode {
|
||||
ExitCode::from(self as u8)
|
||||
}
|
||||
}
|
||||
|
||||
struct MainLoop {
|
||||
orchestrator_sender: crossbeam_channel::Sender<OrchestratorMessage>,
|
||||
main_loop_receiver: crossbeam_channel::Receiver<MainLoopMessage>,
|
||||
/// Sender that can be used to send messages to the main loop.
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
|
||||
/// Receiver for the messages sent **to** the main loop.
|
||||
receiver: crossbeam_channel::Receiver<MainLoopMessage>,
|
||||
|
||||
/// The file system watcher, if running in watch mode.
|
||||
watcher: Option<WorkspaceWatcher>,
|
||||
}
|
||||
|
||||
impl MainLoop {
|
||||
fn new() -> (Self, MainLoopCancellationToken) {
|
||||
let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
|
||||
let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);
|
||||
|
||||
let mut orchestrator = Orchestrator {
|
||||
receiver: orchestrator_receiver,
|
||||
sender: main_loop_sender.clone(),
|
||||
revision: 0,
|
||||
};
|
||||
|
||||
std::thread::spawn(move || {
|
||||
orchestrator.run();
|
||||
});
|
||||
let (sender, receiver) = crossbeam_channel::bounded(10);
|
||||
|
||||
(
|
||||
Self {
|
||||
orchestrator_sender,
|
||||
main_loop_receiver,
|
||||
},
|
||||
MainLoopCancellationToken {
|
||||
sender: main_loop_sender,
|
||||
sender: sender.clone(),
|
||||
receiver,
|
||||
watcher: None,
|
||||
},
|
||||
MainLoopCancellationToken { sender },
|
||||
)
|
||||
}
|
||||
|
||||
fn file_changes_notifier(&self) -> FileChangesNotifier {
|
||||
FileChangesNotifier {
|
||||
sender: self.orchestrator_sender.clone(),
|
||||
}
|
||||
fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> {
|
||||
tracing::debug!("Starting watch mode");
|
||||
let sender = self.sender.clone();
|
||||
let watcher = watch::directory_watcher(move |event| {
|
||||
sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
|
||||
})?;
|
||||
|
||||
self.watcher = Some(WorkspaceWatcher::new(watcher, db));
|
||||
|
||||
self.run(db);
|
||||
|
||||
Ok(ExitStatus::Success)
|
||||
}
|
||||
|
||||
fn run(self, program: &mut Program) {
|
||||
self.orchestrator_sender
|
||||
.send(OrchestratorMessage::Run)
|
||||
.unwrap();
|
||||
fn run(mut self, db: &mut RootDatabase) -> ExitStatus {
|
||||
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
||||
|
||||
for message in &self.main_loop_receiver {
|
||||
tracing::trace!("Main Loop: Tick");
|
||||
let result = self.main_loop(db);
|
||||
|
||||
tracing::debug!("Exiting main loop");
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus {
|
||||
// Schedule the first check.
|
||||
tracing::debug!("Starting main loop");
|
||||
|
||||
let mut revision = 0u64;
|
||||
|
||||
while let Ok(message) = self.receiver.recv() {
|
||||
match message {
|
||||
MainLoopMessage::CheckProgram { revision } => {
|
||||
let program = program.snapshot();
|
||||
let sender = self.orchestrator_sender.clone();
|
||||
MainLoopMessage::CheckWorkspace => {
|
||||
let db = db.snapshot();
|
||||
let sender = self.sender.clone();
|
||||
|
||||
// Spawn a new task that checks the program. This needs to be done in a separate thread
|
||||
// Spawn a new task that checks the workspace. This needs to be done in a separate thread
|
||||
// to prevent blocking the main loop here.
|
||||
rayon::spawn(move || match program.check(ExecutionMode::ThreadPool) {
|
||||
Ok(result) => {
|
||||
rayon::spawn(move || {
|
||||
if let Ok(result) = db.check() {
|
||||
// Send the result back to the main loop for printing.
|
||||
sender
|
||||
.send(OrchestratorMessage::CheckProgramCompleted {
|
||||
diagnostics: result,
|
||||
revision,
|
||||
})
|
||||
.send(MainLoopMessage::CheckCompleted { result, revision })
|
||||
.unwrap();
|
||||
}
|
||||
Err(QueryError::Cancelled) => {}
|
||||
});
|
||||
}
|
||||
MainLoopMessage::ApplyChanges(changes) => {
|
||||
// Automatically cancels any pending queries and waits for them to complete.
|
||||
program.apply_changes(changes);
|
||||
|
||||
MainLoopMessage::CheckCompleted {
|
||||
result,
|
||||
revision: check_revision,
|
||||
} => {
|
||||
let has_diagnostics = !result.is_empty();
|
||||
if check_revision == revision {
|
||||
for diagnostic in result {
|
||||
tracing::error!("{}", diagnostic);
|
||||
}
|
||||
} else {
|
||||
tracing::debug!(
|
||||
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
|
||||
);
|
||||
}
|
||||
|
||||
if self.watcher.is_none() {
|
||||
return if has_diagnostics {
|
||||
ExitStatus::Failure
|
||||
} else {
|
||||
ExitStatus::Success
|
||||
};
|
||||
}
|
||||
|
||||
tracing::trace!("Counts after last check:\n{}", countme::get_all());
|
||||
}
|
||||
MainLoopMessage::CheckCompleted(diagnostics) => {
|
||||
dbg!(diagnostics);
|
||||
|
||||
MainLoopMessage::ApplyChanges(changes) => {
|
||||
revision += 1;
|
||||
// Automatically cancels any pending queries and waits for them to complete.
|
||||
db.apply_changes(changes);
|
||||
if let Some(watcher) = self.watcher.as_mut() {
|
||||
watcher.update(db);
|
||||
}
|
||||
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
||||
}
|
||||
MainLoopMessage::Exit => {
|
||||
return;
|
||||
// Cancel any pending queries and wait for them to complete.
|
||||
// TODO: Don't use Salsa internal APIs
|
||||
// [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
|
||||
let _ = db.zalsa_mut();
|
||||
return ExitStatus::Success;
|
||||
}
|
||||
}
|
||||
|
||||
tracing::debug!("Waiting for next main loop message.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for MainLoop {
|
||||
fn drop(&mut self) {
|
||||
self.orchestrator_sender
|
||||
.send(OrchestratorMessage::Shutdown)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct FileChangesNotifier {
|
||||
sender: crossbeam_channel::Sender<OrchestratorMessage>,
|
||||
}
|
||||
|
||||
impl FileChangesNotifier {
|
||||
fn notify(&self, changes: Vec<FileWatcherChange>) {
|
||||
self.sender
|
||||
.send(OrchestratorMessage::FileChanges(changes))
|
||||
.unwrap();
|
||||
ExitStatus::Success
|
||||
}
|
||||
}
|
||||
|
||||
@@ -196,164 +371,11 @@ impl MainLoopCancellationToken {
|
||||
}
|
||||
}
|
||||
|
||||
struct Orchestrator {
|
||||
/// Sends messages to the main loop.
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
/// Receives messages from the main loop.
|
||||
receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
|
||||
revision: usize,
|
||||
}
|
||||
|
||||
impl Orchestrator {
|
||||
fn run(&mut self) {
|
||||
while let Ok(message) = self.receiver.recv() {
|
||||
match message {
|
||||
OrchestratorMessage::Run => {
|
||||
self.sender
|
||||
.send(MainLoopMessage::CheckProgram {
|
||||
revision: self.revision,
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
OrchestratorMessage::CheckProgramCompleted {
|
||||
diagnostics,
|
||||
revision,
|
||||
} => {
|
||||
// Only take the diagnostics if they are for the latest revision.
|
||||
if self.revision == revision {
|
||||
self.sender
|
||||
.send(MainLoopMessage::CheckCompleted(diagnostics))
|
||||
.unwrap();
|
||||
} else {
|
||||
tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
|
||||
}
|
||||
}
|
||||
|
||||
OrchestratorMessage::FileChanges(changes) => {
|
||||
// Request cancellation, but wait until all analysis tasks have completed to
|
||||
// avoid stale messages in the next main loop.
|
||||
|
||||
self.revision += 1;
|
||||
self.debounce_changes(changes);
|
||||
}
|
||||
OrchestratorMessage::Shutdown => {
|
||||
return self.shutdown();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
|
||||
loop {
|
||||
// Consume possibly incoming file change messages before running a new analysis, but don't wait for more than 10 ms between messages.
|
||||
crossbeam_channel::select! {
|
||||
recv(self.receiver) -> message => {
|
||||
match message {
|
||||
Ok(OrchestratorMessage::Shutdown) => {
|
||||
return self.shutdown();
|
||||
}
|
||||
Ok(OrchestratorMessage::FileChanges(file_changes)) => {
|
||||
changes.extend(file_changes);
|
||||
}
|
||||
|
||||
Ok(OrchestratorMessage::CheckProgramCompleted { .. })=> {
|
||||
// disregard any outdated completion message.
|
||||
}
|
||||
Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),
|
||||
|
||||
Err(_) => {
|
||||
// There are no more senders, no point in waiting for more messages
|
||||
return;
|
||||
}
|
||||
}
|
||||
},
|
||||
default(std::time::Duration::from_millis(10)) => {
|
||||
// No more file changes after 10 ms, send the changes and schedule a new analysis
|
||||
self.sender.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
|
||||
self.sender.send(MainLoopMessage::CheckProgram { revision: self.revision}).unwrap();
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::unused_self)]
|
||||
fn shutdown(&self) {
|
||||
tracing::trace!("Shutting down orchestrator.");
|
||||
}
|
||||
}
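// Standalone sketch of the debounce pattern used in `debounce_changes` above,
// assuming the same `use crossbeam::channel as crossbeam_channel;` alias as
// this file: keep draining the channel until it has been quiet for the given
// timeout, then hand the collected batch to the caller.
fn drain_until_quiet<T>(
    receiver: crossbeam_channel::Receiver<T>,
    quiet_period: std::time::Duration,
) -> Vec<T> {
    let mut batch = Vec::new();
    loop {
        crossbeam_channel::select! {
            recv(receiver) -> message => {
                match message {
                    Ok(item) => batch.push(item),
                    // All senders are gone; nothing more can arrive.
                    Err(_) => return batch,
                }
            },
            default(quiet_period) => {
                // The channel has been quiet for the whole period: flush.
                return batch;
            }
        }
    }
}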
|
||||
|
||||
/// Message sent from the orchestrator to the main loop.
|
||||
#[derive(Debug)]
|
||||
enum MainLoopMessage {
|
||||
CheckProgram { revision: usize },
|
||||
CheckCompleted(Vec<String>),
|
||||
ApplyChanges(Vec<FileWatcherChange>),
|
||||
CheckWorkspace,
|
||||
CheckCompleted { result: Vec<String>, revision: u64 },
|
||||
ApplyChanges(Vec<watch::ChangeEvent>),
|
||||
Exit,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum OrchestratorMessage {
|
||||
Run,
|
||||
Shutdown,
|
||||
|
||||
CheckProgramCompleted {
|
||||
diagnostics: Vec<String>,
|
||||
revision: usize,
|
||||
},
|
||||
|
||||
FileChanges(Vec<FileWatcherChange>),
|
||||
}
|
||||
|
||||
fn setup_tracing() {
|
||||
let subscriber = Registry::default().with(
|
||||
tracing_tree::HierarchicalLayer::default()
|
||||
.with_indent_lines(true)
|
||||
.with_indent_amount(2)
|
||||
.with_bracketed_fields(true)
|
||||
.with_thread_ids(true)
|
||||
.with_targets(true)
|
||||
.with_writer(|| Box::new(std::io::stderr()))
|
||||
.with_timer(Uptime::default())
|
||||
.with_filter(LoggingFilter {
|
||||
trace_level: Level::TRACE,
|
||||
}),
|
||||
);
|
||||
|
||||
tracing::subscriber::set_global_default(subscriber).unwrap();
|
||||
}
|
||||
|
||||
struct LoggingFilter {
|
||||
trace_level: Level,
|
||||
}
|
||||
|
||||
impl LoggingFilter {
|
||||
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
|
||||
let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
|
||||
self.trace_level
|
||||
} else {
|
||||
Level::INFO
|
||||
};
|
||||
|
||||
meta.level() <= &filter
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> Filter<S> for LoggingFilter {
|
||||
fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
|
||||
self.is_enabled(meta)
|
||||
}
|
||||
|
||||
fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
|
||||
if self.is_enabled(meta) {
|
||||
Interest::always()
|
||||
} else {
|
||||
Interest::never()
|
||||
}
|
||||
}
|
||||
|
||||
fn max_level_hint(&self) -> Option<LevelFilter> {
|
||||
Some(LevelFilter::from_level(self.trace_level))
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,93 +0,0 @@
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::Arc;
|
||||
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_parser::{Mode, ParseError};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use crate::cache::KeyValueCache;
|
||||
use crate::db::{QueryResult, SourceDb};
|
||||
use crate::files::FileId;
|
||||
use crate::source::source_text;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Parsed {
|
||||
inner: Arc<ParsedInner>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
struct ParsedInner {
|
||||
ast: ast::ModModule,
|
||||
errors: Vec<ParseError>,
|
||||
}
|
||||
|
||||
impl Parsed {
|
||||
fn new(ast: ast::ModModule, errors: Vec<ParseError>) -> Self {
|
||||
Self {
|
||||
inner: Arc::new(ParsedInner { ast, errors }),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn from_text(text: &str) -> Self {
|
||||
let result = ruff_python_parser::parse(text, Mode::Module);
|
||||
|
||||
let (module, errors) = match result {
|
||||
Ok(ast::Mod::Module(module)) => (module, vec![]),
|
||||
Ok(ast::Mod::Expression(expression)) => (
|
||||
ast::ModModule {
|
||||
range: expression.range(),
|
||||
body: vec![ast::Stmt::Expr(ast::StmtExpr {
|
||||
range: expression.range(),
|
||||
value: expression.body,
|
||||
})],
|
||||
},
|
||||
vec![],
|
||||
),
|
||||
Err(errors) => (
|
||||
ast::ModModule {
|
||||
range: TextRange::default(),
|
||||
body: Vec::new(),
|
||||
},
|
||||
vec![errors],
|
||||
),
|
||||
};
|
||||
|
||||
Parsed::new(module, errors)
|
||||
}
|
||||
|
||||
pub fn ast(&self) -> &ast::ModModule {
|
||||
&self.inner.ast
|
||||
}
|
||||
|
||||
pub fn errors(&self) -> &[ParseError] {
|
||||
&self.inner.errors
|
||||
}
|
||||
}
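// Module-internal sketch of the fallback behaviour above, assuming the
// `ruff_python_parser` version this file builds against: on a syntax error,
// `from_text` still yields a (possibly empty) module and records the error
// instead of panicking.
#[cfg(test)]
mod parsed_sketch {
    use super::Parsed;

    #[test]
    fn syntax_errors_do_not_panic() {
        let ok = Parsed::from_text("x = 1\n");
        assert!(ok.errors().is_empty());

        let broken = Parsed::from_text("def f(:\n");
        assert!(!broken.errors().is_empty());
    }
}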
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(db))]
|
||||
pub(crate) fn parse(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Parsed> {
|
||||
let jar = db.jar()?;
|
||||
|
||||
jar.parsed.get(&file_id, |file_id| {
|
||||
let source = source_text(db, *file_id)?;
|
||||
|
||||
Ok(Parsed::from_text(source.text()))
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct ParsedStorage(KeyValueCache<FileId, Parsed>);
|
||||
|
||||
impl Deref for ParsedStorage {
|
||||
type Target = KeyValueCache<FileId, Parsed>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for ParsedStorage {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
@@ -1,413 +0,0 @@
|
||||
use rayon::{current_num_threads, yield_local};
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use crate::db::{Database, QueryError, QueryResult};
|
||||
use crate::files::FileId;
|
||||
use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
|
||||
use crate::module::{file_to_module, resolve_module};
|
||||
use crate::program::Program;
|
||||
use crate::symbols::{symbol_table, Dependency};
|
||||
|
||||
impl Program {
|
||||
/// Checks all open files in the workspace and their dependencies.
|
||||
#[tracing::instrument(level = "debug", skip_all)]
|
||||
pub fn check(&self, mode: ExecutionMode) -> QueryResult<Vec<String>> {
|
||||
self.cancelled()?;
|
||||
|
||||
let mut context = CheckContext::new(self);
|
||||
|
||||
match mode {
|
||||
ExecutionMode::SingleThreaded => SingleThreadedExecutor.run(&mut context)?,
|
||||
ExecutionMode::ThreadPool => ThreadPoolExecutor.run(&mut context)?,
|
||||
};
|
||||
|
||||
Ok(context.finish())
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(self, context))]
|
||||
fn check_file(&self, file: FileId, context: &CheckFileContext) -> QueryResult<Diagnostics> {
|
||||
self.cancelled()?;
|
||||
|
||||
let symbol_table = symbol_table(self, file)?;
|
||||
let dependencies = symbol_table.dependencies();
|
||||
|
||||
if !dependencies.is_empty() {
|
||||
let module = file_to_module(self, file)?;
|
||||
|
||||
// TODO scheduling all dependencies here is wasteful if we don't infer any types on them
|
||||
// but I think that's unlikely, so it is okay?
|
||||
// Anyway, we need to figure out a way to retrieve the dependencies of a module
|
||||
// from the persistent cache. So maybe it should be a separate query after all.
|
||||
for dependency in dependencies {
|
||||
let dependency_name = match dependency {
|
||||
Dependency::Module(name) => Some(name.clone()),
|
||||
Dependency::Relative { .. } => match &module {
|
||||
Some(module) => module.resolve_dependency(self, dependency)?,
|
||||
None => None,
|
||||
},
|
||||
};
|
||||
|
||||
if let Some(dependency_name) = dependency_name {
|
||||
// TODO We may want to have different check functions for non-first-party
|
||||
// files because we only need to index them and not check them.
|
||||
// Supporting non-first-party code also requires supporting typing stubs.
|
||||
if let Some(dependency) = resolve_module(self, dependency_name)? {
|
||||
if dependency.path(self)?.root().kind().is_first_party() {
|
||||
context.schedule_dependency(dependency.path(self)?.file());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut diagnostics = Vec::new();
|
||||
|
||||
if self.workspace().is_file_open(file) {
|
||||
diagnostics.extend_from_slice(&lint_syntax(self, file)?);
|
||||
diagnostics.extend_from_slice(&lint_semantic(self, file)?);
|
||||
}
|
||||
|
||||
Ok(Diagnostics::from(diagnostics))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum ExecutionMode {
|
||||
SingleThreaded,
|
||||
ThreadPool,
|
||||
}
|
||||
|
||||
/// Context that stores state information about the entire check operation.
|
||||
struct CheckContext<'a> {
|
||||
/// IDs of the files that have been queued for checking.
|
||||
///
|
||||
/// Used to avoid queuing the same file twice.
|
||||
scheduled_files: FxHashSet<FileId>,
|
||||
|
||||
/// Reference to the program that is checked.
|
||||
program: &'a Program,
|
||||
|
||||
/// The aggregated diagnostics
|
||||
diagnostics: Vec<String>,
|
||||
}
|
||||
|
||||
impl<'a> CheckContext<'a> {
|
||||
fn new(program: &'a Program) -> Self {
|
||||
Self {
|
||||
scheduled_files: FxHashSet::default(),
|
||||
program,
|
||||
diagnostics: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the tasks to check all open files in the workspace.
|
||||
fn check_open_files(&mut self) -> Vec<CheckOpenFileTask> {
|
||||
self.scheduled_files
|
||||
.extend(self.program.workspace().open_files());
|
||||
|
||||
self.program
|
||||
.workspace()
|
||||
.open_files()
|
||||
.map(|file_id| CheckOpenFileTask { file_id })
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Returns the task to check a dependency.
|
||||
fn check_dependency(&mut self, file_id: FileId) -> Option<CheckDependencyTask> {
|
||||
if self.scheduled_files.insert(file_id) {
|
||||
Some(CheckDependencyTask { file_id })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Pushes the result for a single file check operation
|
||||
fn push_diagnostics(&mut self, diagnostics: &Diagnostics) {
|
||||
self.diagnostics.extend_from_slice(diagnostics);
|
||||
}
|
||||
|
||||
/// Returns a reference to the program that is being checked.
|
||||
fn program(&self) -> &'a Program {
|
||||
self.program
|
||||
}
|
||||
|
||||
/// Creates a task context that is used to check a single file.
|
||||
fn task_context<'b, S>(&self, dependency_scheduler: &'b S) -> CheckTaskContext<'a, 'b, S>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
CheckTaskContext {
|
||||
program: self.program,
|
||||
dependency_scheduler,
|
||||
}
|
||||
}
|
||||
|
||||
fn finish(self) -> Vec<String> {
|
||||
self.diagnostics
|
||||
}
|
||||
}
|
||||
|
||||
/// Trait that abstracts away how a dependency of a file gets scheduled for checking.
|
||||
trait ScheduleDependency {
|
||||
/// Schedules the file with the given ID for checking.
|
||||
fn schedule(&self, file_id: FileId);
|
||||
}
|
||||
|
||||
impl<T> ScheduleDependency for T
|
||||
where
|
||||
T: Fn(FileId),
|
||||
{
|
||||
fn schedule(&self, file_id: FileId) {
|
||||
let f = self;
|
||||
f(file_id);
|
||||
}
|
||||
}
|
||||
|
||||
/// Context that is used to run a single file check task.
|
||||
///
|
||||
/// The task is generic over `S` because it is passed across thread boundaries and
|
||||
/// we don't want to add the requirement that [`ScheduleDependency`] must be [`Send`].
|
||||
struct CheckTaskContext<'a, 'scheduler, S>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
dependency_scheduler: &'scheduler S,
|
||||
program: &'a Program,
|
||||
}
|
||||
|
||||
impl<'a, 'scheduler, S> CheckTaskContext<'a, 'scheduler, S>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
fn as_file_context(&self) -> CheckFileContext<'scheduler> {
|
||||
CheckFileContext {
|
||||
dependency_scheduler: self.dependency_scheduler,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Context passed when checking a single file.
|
||||
///
|
||||
/// This is a trimmed down version of [`CheckTaskContext`] with the type parameter `S` erased
|
||||
/// to avoid monomorphization of [`Program:check_file`].
|
||||
struct CheckFileContext<'a> {
|
||||
dependency_scheduler: &'a dyn ScheduleDependency,
|
||||
}
|
||||
|
||||
impl<'a> CheckFileContext<'a> {
|
||||
fn schedule_dependency(&self, file_id: FileId) {
|
||||
self.dependency_scheduler.schedule(file_id);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum CheckFileTask {
|
||||
OpenFile(CheckOpenFileTask),
|
||||
Dependency(CheckDependencyTask),
|
||||
}
|
||||
|
||||
impl CheckFileTask {
|
||||
/// Runs the task and returns the results for checking this file.
|
||||
fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
match self {
|
||||
Self::OpenFile(task) => task.run(context),
|
||||
Self::Dependency(task) => task.run(context),
|
||||
}
|
||||
}
|
||||
|
||||
fn file_id(&self) -> FileId {
|
||||
match self {
|
||||
CheckFileTask::OpenFile(task) => task.file_id,
|
||||
CheckFileTask::Dependency(task) => task.file_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Task to check an open file.
|
||||
|
||||
#[derive(Debug)]
|
||||
struct CheckOpenFileTask {
|
||||
file_id: FileId,
|
||||
}
|
||||
|
||||
impl CheckOpenFileTask {
|
||||
fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
context
|
||||
.program
|
||||
.check_file(self.file_id, &context.as_file_context())
|
||||
}
|
||||
}
|
||||
|
||||
/// Task to check a dependency file.
|
||||
#[derive(Debug)]
|
||||
struct CheckDependencyTask {
|
||||
file_id: FileId,
|
||||
}
|
||||
|
||||
impl CheckDependencyTask {
|
||||
fn run<S>(&self, context: &CheckTaskContext<S>) -> QueryResult<Diagnostics>
|
||||
where
|
||||
S: ScheduleDependency,
|
||||
{
|
||||
context
|
||||
.program
|
||||
.check_file(self.file_id, &context.as_file_context())
|
||||
}
|
||||
}
|
||||
|
||||
/// Executor that schedules the checking of individual program files.
|
||||
trait CheckExecutor {
|
||||
fn run(self, context: &mut CheckContext) -> QueryResult<()>;
|
||||
}
|
||||
|
||||
/// Executor that runs all check operations on the current thread.
|
||||
///
|
||||
/// The executor does not schedule dependencies for checking.
|
||||
/// The main motivation for scheduling dependencies
|
||||
/// in a multithreaded environment is to parse and index the dependencies concurrently.
|
||||
/// However, that doesn't make sense in a single threaded environment, because the dependencies then compete
|
||||
/// with checking the open files. Checking dependencies in a single threaded environment is more likely
|
||||
/// to hurt performance because we end up analyzing files in their entirety, even if we only need to type check parts of them.
|
||||
#[derive(Debug, Default)]
|
||||
struct SingleThreadedExecutor;
|
||||
|
||||
impl CheckExecutor for SingleThreadedExecutor {
|
||||
fn run(self, context: &mut CheckContext) -> QueryResult<()> {
|
||||
let mut queue = context.check_open_files();
|
||||
|
||||
let noop_schedule_dependency = |_| {};
|
||||
|
||||
while let Some(file) = queue.pop() {
|
||||
context.program().cancelled()?;
|
||||
|
||||
let task_context = context.task_context(&noop_schedule_dependency);
|
||||
context.push_diagnostics(&file.run(&task_context)?);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Executor that runs the check operations on a thread pool.
|
||||
///
|
||||
/// The executor runs each check operation as its own task using a thread pool.
|
||||
///
|
||||
/// Unlike [`SingleThreadedExecutor`], this executor schedules dependencies for checking. It
|
||||
/// even schedules dependencies for checking when the thread pool size is 1 for a better debugging experience.
|
||||
#[derive(Debug, Default)]
|
||||
struct ThreadPoolExecutor;
|
||||
|
||||
impl CheckExecutor for ThreadPoolExecutor {
|
||||
fn run(self, context: &mut CheckContext) -> QueryResult<()> {
|
||||
let num_threads = current_num_threads();
|
||||
let single_threaded = num_threads == 1;
|
||||
let span = tracing::trace_span!("ThreadPoolExecutor::run", num_threads);
|
||||
let _ = span.enter();
|
||||
|
||||
let mut queue: Vec<_> = context
|
||||
.check_open_files()
|
||||
.into_iter()
|
||||
.map(CheckFileTask::OpenFile)
|
||||
.collect();
|
||||
|
||||
let (sender, receiver) = if single_threaded {
|
||||
// Use an unbounded queue for single threaded execution to prevent deadlocks
|
||||
// when a single file schedules multiple dependencies.
|
||||
crossbeam::channel::unbounded()
|
||||
} else {
|
||||
// Use a bounded queue to apply backpressure when the orchestration thread isn't able to keep
|
||||
// up processing messages from the worker threads (see the standalone backpressure sketch after `ThreadPoolMessage` below).
|
||||
crossbeam::channel::bounded(num_threads)
|
||||
};
|
||||
|
||||
let schedule_sender = sender.clone();
|
||||
let schedule_dependency = move |file_id| {
|
||||
schedule_sender
|
||||
.send(ThreadPoolMessage::ScheduleDependency(file_id))
|
||||
.unwrap();
|
||||
};
|
||||
|
||||
let result = rayon::in_place_scope(|scope| {
|
||||
let mut pending = 0usize;
|
||||
|
||||
loop {
|
||||
context.program().cancelled()?;
|
||||
|
||||
// 1. Try to get a queued message to ensure that we always have remaining space in the channel and don't block the worker threads.
|
||||
// 2. Try to process a queued file
|
||||
// 3. If there's no queued file wait for the next incoming message.
|
||||
// 4. Exit if there are no more messages and no senders.
|
||||
let message = if let Ok(message) = receiver.try_recv() {
|
||||
message
|
||||
} else if let Some(task) = queue.pop() {
|
||||
pending += 1;
|
||||
|
||||
let task_context = context.task_context(&schedule_dependency);
|
||||
let sender = sender.clone();
|
||||
let task_span = tracing::trace_span!(
|
||||
parent: &span,
|
||||
"CheckFileTask::run",
|
||||
file_id = task.file_id().as_u32(),
|
||||
);
|
||||
|
||||
scope.spawn(move |_| {
|
||||
task_span.in_scope(|| match task.run(&task_context) {
|
||||
Ok(result) => {
|
||||
sender.send(ThreadPoolMessage::Completed(result)).unwrap();
|
||||
}
|
||||
Err(err) => sender.send(ThreadPoolMessage::Errored(err)).unwrap(),
|
||||
});
|
||||
});
|
||||
|
||||
// If this is a single threaded rayon thread pool, yield the current thread
|
||||
// or we never start processing the work items.
|
||||
if single_threaded {
|
||||
yield_local();
|
||||
}
|
||||
|
||||
continue;
|
||||
} else if let Ok(message) = receiver.recv() {
|
||||
message
|
||||
} else {
|
||||
break;
|
||||
};
|
||||
|
||||
match message {
|
||||
ThreadPoolMessage::ScheduleDependency(dependency) => {
|
||||
if let Some(task) = context.check_dependency(dependency) {
|
||||
queue.push(CheckFileTask::Dependency(task));
|
||||
}
|
||||
}
|
||||
ThreadPoolMessage::Completed(diagnostics) => {
|
||||
context.push_diagnostics(&diagnostics);
|
||||
pending -= 1;
|
||||
|
||||
if pending == 0 && queue.is_empty() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
ThreadPoolMessage::Errored(err) => {
|
||||
return Err(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ThreadPoolMessage {
|
||||
ScheduleDependency(FileId),
|
||||
Completed(Diagnostics),
|
||||
Errored(QueryError),
|
||||
}
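// Standalone sketch of the queue choice documented in `ThreadPoolExecutor::run`
// above, assuming only the crossbeam crate this file already uses: a bounded
// channel applies backpressure (sends block once it is full), while an
// unbounded channel never blocks the sending worker.
#[cfg(test)]
mod backpressure_sketch {
    #[test]
    fn bounded_channels_apply_backpressure() {
        let (bounded_sender, _bounded_receiver) = crossbeam::channel::bounded::<u32>(1);
        bounded_sender.send(1).unwrap();
        // A second `send` would block until the receiver drains the queue;
        // `try_send` reports the full channel instead.
        assert!(bounded_sender.try_send(2).is_err());

        let (unbounded_sender, _unbounded_receiver) = crossbeam::channel::unbounded::<u32>();
        for i in 0..1_000 {
            // Never blocks, no matter how far behind the receiver is.
            unbounded_sender.send(i).unwrap();
        }
    }
}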
|
||||
@@ -1,275 +0,0 @@
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::db::{
|
||||
Database, Db, DbRuntime, DbWithJar, HasJar, HasJars, JarsStorage, LintDb, LintJar,
|
||||
ParallelDatabase, QueryResult, SemanticDb, SemanticJar, Snapshot, SourceDb, SourceJar, Upcast,
|
||||
};
|
||||
use crate::files::{FileId, Files};
|
||||
use crate::Workspace;
|
||||
|
||||
pub mod check;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Program {
|
||||
jars: JarsStorage<Program>,
|
||||
files: Files,
|
||||
workspace: Workspace,
|
||||
}
|
||||
|
||||
impl Program {
|
||||
pub fn new(workspace: Workspace) -> Self {
|
||||
Self {
|
||||
jars: JarsStorage::default(),
|
||||
files: Files::default(),
|
||||
workspace,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn apply_changes<I>(&mut self, changes: I)
|
||||
where
|
||||
I: IntoIterator<Item = FileWatcherChange>,
|
||||
{
|
||||
let mut aggregated_changes = AggregatedChanges::default();
|
||||
|
||||
aggregated_changes.extend(changes.into_iter().map(|change| FileChange {
|
||||
id: self.files.intern(&change.path),
|
||||
kind: change.kind,
|
||||
}));
|
||||
|
||||
let (source, semantic, lint) = self.jars_mut();
|
||||
for change in aggregated_changes.iter() {
|
||||
semantic.module_resolver.remove_module(change.id);
|
||||
semantic.symbol_tables.remove(&change.id);
|
||||
source.sources.remove(&change.id);
|
||||
source.parsed.remove(&change.id);
|
||||
// TODO: remove all dependent modules as well
|
||||
semantic.type_store.remove_module(change.id);
|
||||
lint.lint_syntax.remove(&change.id);
|
||||
lint.lint_semantic.remove(&change.id);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn files(&self) -> &Files {
|
||||
&self.files
|
||||
}
|
||||
|
||||
pub fn workspace(&self) -> &Workspace {
|
||||
&self.workspace
|
||||
}
|
||||
|
||||
pub fn workspace_mut(&mut self) -> &mut Workspace {
|
||||
&mut self.workspace
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceDb for Program {
|
||||
fn file_id(&self, path: &Path) -> FileId {
|
||||
self.files.intern(path)
|
||||
}
|
||||
|
||||
fn file_path(&self, file_id: FileId) -> Arc<Path> {
|
||||
self.files.path(file_id)
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithJar<SourceJar> for Program {}
|
||||
|
||||
impl SemanticDb for Program {}
|
||||
|
||||
impl DbWithJar<SemanticJar> for Program {}
|
||||
|
||||
impl LintDb for Program {}
|
||||
|
||||
impl DbWithJar<LintJar> for Program {}
|
||||
|
||||
impl Upcast<dyn SemanticDb> for Program {
|
||||
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn SourceDb> for Program {
|
||||
fn upcast(&self) -> &(dyn SourceDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn LintDb> for Program {
|
||||
fn upcast(&self) -> &(dyn LintDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Db for Program {}
|
||||
|
||||
impl Database for Program {
|
||||
fn runtime(&self) -> &DbRuntime {
|
||||
self.jars.runtime()
|
||||
}
|
||||
|
||||
fn runtime_mut(&mut self) -> &mut DbRuntime {
|
||||
self.jars.runtime_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl ParallelDatabase for Program {
|
||||
fn snapshot(&self) -> Snapshot<Self> {
|
||||
Snapshot::new(Self {
|
||||
jars: self.jars.snapshot(),
|
||||
files: self.files.snapshot(),
|
||||
workspace: self.workspace.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJars for Program {
|
||||
type Jars = (SourceJar, SemanticJar, LintJar);
|
||||
|
||||
fn jars(&self) -> QueryResult<&Self::Jars> {
|
||||
self.jars.jars()
|
||||
}
|
||||
|
||||
fn jars_mut(&mut self) -> &mut Self::Jars {
|
||||
self.jars.jars_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<SourceJar> for Program {
|
||||
fn jar(&self) -> QueryResult<&SourceJar> {
|
||||
Ok(&self.jars()?.0)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut SourceJar {
|
||||
&mut self.jars_mut().0
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<SemanticJar> for Program {
|
||||
fn jar(&self) -> QueryResult<&SemanticJar> {
|
||||
Ok(&self.jars()?.1)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut SemanticJar {
|
||||
&mut self.jars_mut().1
|
||||
}
|
||||
}
|
||||
|
||||
impl HasJar<LintJar> for Program {
|
||||
fn jar(&self) -> QueryResult<&LintJar> {
|
||||
Ok(&self.jars()?.2)
|
||||
}
|
||||
|
||||
fn jar_mut(&mut self) -> &mut LintJar {
|
||||
&mut self.jars_mut().2
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FileWatcherChange {
|
||||
path: PathBuf,
|
||||
kind: FileChangeKind,
|
||||
}
|
||||
|
||||
impl FileWatcherChange {
|
||||
pub fn new(path: PathBuf, kind: FileChangeKind) -> Self {
|
||||
Self { path, kind }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
struct FileChange {
|
||||
id: FileId,
|
||||
kind: FileChangeKind,
|
||||
}
|
||||
|
||||
impl FileChange {
|
||||
fn file_id(self) -> FileId {
|
||||
self.id
|
||||
}
|
||||
|
||||
fn kind(self) -> FileChangeKind {
|
||||
self.kind
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
||||
pub enum FileChangeKind {
|
||||
Created,
|
||||
Modified,
|
||||
Deleted,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
struct AggregatedChanges {
|
||||
changes: FxHashMap<FileId, FileChangeKind>,
|
||||
}
|
||||
|
||||
impl AggregatedChanges {
|
||||
fn add(&mut self, change: FileChange) {
|
||||
match self.changes.entry(change.file_id()) {
|
||||
Entry::Occupied(mut entry) => {
|
||||
let merged = entry.get_mut();
|
||||
|
||||
match (merged, change.kind()) {
|
||||
(FileChangeKind::Created, FileChangeKind::Deleted) => {
|
||||
// Deletion after creation means that ruff never saw the file.
|
||||
entry.remove();
|
||||
}
|
||||
(FileChangeKind::Created, FileChangeKind::Modified) => {
|
||||
// No-op: for ruff, modifying a file that it doesn't yet know exists is still considered a creation.
|
||||
}
|
||||
|
||||
(FileChangeKind::Modified, FileChangeKind::Created) => {
|
||||
// Uhh, that should probably not happen. Continue considering it a modification.
|
||||
}
|
||||
|
||||
(FileChangeKind::Modified, FileChangeKind::Deleted) => {
|
||||
*entry.get_mut() = FileChangeKind::Deleted;
|
||||
}
|
||||
|
||||
(FileChangeKind::Deleted, FileChangeKind::Created) => {
|
||||
*entry.get_mut() = FileChangeKind::Modified;
|
||||
}
|
||||
|
||||
(FileChangeKind::Deleted, FileChangeKind::Modified) => {
|
||||
// That's weird, but let's consider it a modification.
|
||||
*entry.get_mut() = FileChangeKind::Modified;
|
||||
}
|
||||
|
||||
(FileChangeKind::Created, FileChangeKind::Created)
|
||||
| (FileChangeKind::Modified, FileChangeKind::Modified)
|
||||
| (FileChangeKind::Deleted, FileChangeKind::Deleted) => {
|
||||
// No-op transitions. Some of them should be impossible but we handle them anyway.
|
||||
}
|
||||
}
|
||||
}
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(change.kind());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn extend<I>(&mut self, changes: I)
|
||||
where
|
||||
I: IntoIterator<Item = FileChange>,
|
||||
{
|
||||
let iter = changes.into_iter();
|
||||
let (lower, _) = iter.size_hint();
|
||||
self.changes.reserve(lower);
|
||||
|
||||
for change in iter {
|
||||
self.add(change);
|
||||
}
|
||||
}
|
||||
|
||||
fn iter(&self) -> impl Iterator<Item = FileChange> + '_ {
|
||||
self.changes.iter().map(|(id, kind)| FileChange {
|
||||
id: *id,
|
||||
kind: *kind,
|
||||
})
|
||||
}
|
||||
}
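// Standalone sketch of the merge table implemented above (local types only; no
// red_knot APIs assumed): successive watcher events for one file collapse into
// the single change kind a consumer still needs to handle, or into nothing at
// all when a file is created and deleted before anyone observed it.
#[cfg(test)]
mod change_merge_sketch {
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    enum Kind {
        Created,
        Modified,
        Deleted,
    }

    /// `None` means the file can be forgotten entirely.
    fn merge(previous: Kind, new: Kind) -> Option<Kind> {
        match (previous, new) {
            (Kind::Created, Kind::Deleted) => None,
            (Kind::Created, Kind::Modified) => Some(Kind::Created),
            (Kind::Modified, Kind::Created) => Some(Kind::Modified),
            (Kind::Modified, Kind::Deleted) => Some(Kind::Deleted),
            (Kind::Deleted, Kind::Created) => Some(Kind::Modified),
            (Kind::Deleted, Kind::Modified) => Some(Kind::Modified),
            (unchanged, _) => Some(unchanged),
        }
    }

    #[test]
    fn transitions() {
        assert_eq!(merge(Kind::Created, Kind::Deleted), None);
        assert_eq!(merge(Kind::Deleted, Kind::Created), Some(Kind::Modified));
        assert_eq!(merge(Kind::Modified, Kind::Created), Some(Kind::Modified));
    }
}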
|
||||
@@ -1,95 +0,0 @@
use std::ops::{Deref, DerefMut};
use std::sync::Arc;

use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;

use crate::cache::KeyValueCache;
use crate::db::{QueryResult, SourceDb};
use crate::files::FileId;

#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn source_text(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Source> {
    let jar = db.jar()?;
    let sources = &jar.sources;

    sources.get(&file_id, |file_id| {
        let path = db.file_path(*file_id);

        let source_text = std::fs::read_to_string(&path).unwrap_or_else(|err| {
            tracing::error!("Failed to read file '{path:?}': {err}. Falling back to empty text");
            String::new()
        });

        let python_ty = PySourceType::from(&path);

        let kind = match python_ty {
            PySourceType::Python => SourceKind::Python(Arc::from(source_text)),
            PySourceType::Stub => SourceKind::Stub(Arc::from(source_text)),
            PySourceType::Ipynb => {
                let notebook = Notebook::from_source_code(&source_text).unwrap_or_else(|err| {
                    // TODO should this be changed to never fail?
                    // or should we instead add a diagnostic somewhere? But what would we return in this case?
                    tracing::error!(
                        "Failed to parse notebook '{path:?}': {err}. Falling back to an empty notebook"
                    );
                    Notebook::from_source_code("").unwrap()
                });

                SourceKind::IpyNotebook(Arc::new(notebook))
            }
        };

        Ok(Source { kind })
    })
}

#[derive(Debug, Clone, PartialEq)]
pub enum SourceKind {
    Python(Arc<str>),
    Stub(Arc<str>),
    IpyNotebook(Arc<Notebook>),
}

#[derive(Debug, Clone, PartialEq)]
pub struct Source {
    kind: SourceKind,
}

impl Source {
    pub fn python<T: Into<Arc<str>>>(source: T) -> Self {
        Self {
            kind: SourceKind::Python(source.into()),
        }
    }

    pub fn kind(&self) -> &SourceKind {
        &self.kind
    }

    pub fn text(&self) -> &str {
        match &self.kind {
            SourceKind::Python(text) => text,
            SourceKind::Stub(text) => text,
            SourceKind::IpyNotebook(notebook) => notebook.source_code(),
        }
    }
}

#[derive(Debug, Default)]
pub struct SourceStorage(pub(crate) KeyValueCache<FileId, Source>);

impl Deref for SourceStorage {
    type Target = KeyValueCache<FileId, Source>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for SourceStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
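As a quick illustration of the Source API above: a Source constructed from Python text simply hands that text back. This is a minimal sketch, not part of the diff:

    let source = Source::python("x = 1");
    assert!(matches!(source.kind(), SourceKind::Python(_)));
    assert_eq!(source.text(), "x = 1");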
File diff suppressed because it is too large
34
crates/red_knot/src/target_version.rs
Normal file
@@ -0,0 +1,34 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl std::fmt::Display for TargetVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        ruff_db::program::TargetVersion::from(*self).fmt(f)
    }
}

impl From<TargetVersion> for ruff_db::program::TargetVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => Self::Py37,
            TargetVersion::Py38 => Self::Py38,
            TargetVersion::Py39 => Self::Py39,
            TargetVersion::Py310 => Self::Py310,
            TargetVersion::Py311 => Self::Py311,
            TargetVersion::Py312 => Self::Py312,
            TargetVersion::Py313 => Self::Py313,
        }
    }
}
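The CLI-facing enum above only forwards to the ruff_db enum for conversion and display. A minimal usage sketch, assuming both types are in scope:

    let cli_version = TargetVersion::Py312;
    let db_version = ruff_db::program::TargetVersion::from(cli_version);
    // Display is delegated, so both values render identically.
    assert_eq!(cli_version.to_string(), db_version.to_string());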
@@ -1,745 +0,0 @@
#![allow(dead_code)]
use crate::ast_ids::NodeKey;
use crate::db::{QueryResult, SemanticDb, SemanticJar};
use crate::files::FileId;
use crate::symbols::{symbol_table, GlobalSymbolId, ScopeId, ScopeKind, SymbolId};
use crate::{FxDashMap, FxIndexSet, Name};
use ruff_index::{newtype_index, IndexVec};
use rustc_hash::FxHashMap;

pub(crate) mod infer;

pub(crate) use infer::{infer_definition_type, infer_symbol_type};

/// unique ID for a type
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum Type {
    /// the dynamic or gradual type: a statically-unknown set of values
    Any,
    /// the empty set of values
    Never,
    /// unknown type (no annotation)
    /// equivalent to Any, or to object in strict mode
    Unknown,
    /// name is not bound to any value
    Unbound,
    /// a specific function object
    Function(FunctionTypeId),
    /// a specific class object
    Class(ClassTypeId),
    /// the set of Python objects with the given class in their __class__'s method resolution order
    Instance(ClassTypeId),
    Union(UnionTypeId),
    Intersection(IntersectionTypeId),
    // TODO protocols, callable types, overloads, generics, type vars
}

impl Type {
    fn display<'a>(&'a self, store: &'a TypeStore) -> DisplayType<'a> {
        DisplayType { ty: self, store }
    }

    pub const fn is_unbound(&self) -> bool {
        matches!(self, Type::Unbound)
    }

    pub const fn is_unknown(&self) -> bool {
        matches!(self, Type::Unknown)
    }
}

impl From<FunctionTypeId> for Type {
    fn from(id: FunctionTypeId) -> Self {
        Type::Function(id)
    }
}

impl From<UnionTypeId> for Type {
    fn from(id: UnionTypeId) -> Self {
        Type::Union(id)
    }
}

impl From<IntersectionTypeId> for Type {
    fn from(id: IntersectionTypeId) -> Self {
        Type::Intersection(id)
    }
}

// TODO: currently calling `get_function` et al and holding on to the `FunctionTypeRef` will lock a
// shard of this dashmap, for as long as you hold the reference. This may be a problem. We could
// switch to having all the arenas hold Arc, or we could see if we can split up ModuleTypeStore,
// and/or give it inner mutability and finer-grained internal locking.
#[derive(Debug, Default)]
pub struct TypeStore {
    modules: FxDashMap<FileId, ModuleTypeStore>,
}
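The TODO above describes a real property of dashmap: a Ref returned by get() keeps its shard read-locked until it is dropped, so re-entering the same map while still holding one can deadlock. A generic illustration of the safer pattern (copy the data out, let the guard drop, then mutate); this is not code from this crate:

    use dashmap::DashMap;

    fn copy_then_mutate(map: &DashMap<u32, String>) {
        // Cloning the value ends the borrow, releasing the shard's read lock...
        let current = map.get(&1).map(|entry| entry.value().clone());

        // ...so taking a write-locking entry on the same map afterwards is safe.
        if let Some(value) = current {
            map.entry(2).or_insert(value);
        }
    }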
impl TypeStore {
    pub fn remove_module(&mut self, file_id: FileId) {
        self.modules.remove(&file_id);
    }

    pub fn cache_symbol_type(&self, symbol: GlobalSymbolId, ty: Type) {
        self.add_or_get_module(symbol.file_id)
            .symbol_types
            .insert(symbol.symbol_id, ty);
    }

    pub fn cache_node_type(&self, file_id: FileId, node_key: NodeKey, ty: Type) {
        self.add_or_get_module(file_id)
            .node_types
            .insert(node_key, ty);
    }

    pub fn get_cached_symbol_type(&self, symbol: GlobalSymbolId) -> Option<Type> {
        self.try_get_module(symbol.file_id)?
            .symbol_types
            .get(&symbol.symbol_id)
            .copied()
    }

    pub fn get_cached_node_type(&self, file_id: FileId, node_key: &NodeKey) -> Option<Type> {
        self.try_get_module(file_id)?
            .node_types
            .get(node_key)
            .copied()
    }

    fn add_or_get_module(&self, file_id: FileId) -> ModuleStoreRefMut {
        self.modules
            .entry(file_id)
            .or_insert_with(|| ModuleTypeStore::new(file_id))
    }

    fn get_module(&self, file_id: FileId) -> ModuleStoreRef {
        self.try_get_module(file_id).expect("module should exist")
    }

    fn try_get_module(&self, file_id: FileId) -> Option<ModuleStoreRef> {
        self.modules.get(&file_id)
    }

    fn add_function(
        &self,
        file_id: FileId,
        name: &str,
        symbol_id: SymbolId,
        scope_id: ScopeId,
        decorators: Vec<Type>,
    ) -> FunctionTypeId {
        self.add_or_get_module(file_id)
            .add_function(name, symbol_id, scope_id, decorators)
    }

    fn add_class(
        &self,
        file_id: FileId,
        name: &str,
        scope_id: ScopeId,
        bases: Vec<Type>,
    ) -> ClassTypeId {
        self.add_or_get_module(file_id)
            .add_class(name, scope_id, bases)
    }

    fn add_union(&mut self, file_id: FileId, elems: &[Type]) -> UnionTypeId {
        self.add_or_get_module(file_id).add_union(elems)
    }

    fn add_intersection(
        &mut self,
        file_id: FileId,
        positive: &[Type],
        negative: &[Type],
    ) -> IntersectionTypeId {
        self.add_or_get_module(file_id)
            .add_intersection(positive, negative)
    }

    fn get_function(&self, id: FunctionTypeId) -> FunctionTypeRef {
        FunctionTypeRef {
            module_store: self.get_module(id.file_id),
            function_id: id.func_id,
        }
    }

    fn get_class(&self, id: ClassTypeId) -> ClassTypeRef {
        ClassTypeRef {
            module_store: self.get_module(id.file_id),
            class_id: id.class_id,
        }
    }

    fn get_union(&self, id: UnionTypeId) -> UnionTypeRef {
        UnionTypeRef {
            module_store: self.get_module(id.file_id),
            union_id: id.union_id,
        }
    }

    fn get_intersection(&self, id: IntersectionTypeId) -> IntersectionTypeRef {
        IntersectionTypeRef {
            module_store: self.get_module(id.file_id),
            intersection_id: id.intersection_id,
        }
    }
}

type ModuleStoreRef<'a> = dashmap::mapref::one::Ref<
    'a,
    FileId,
    ModuleTypeStore,
    std::hash::BuildHasherDefault<rustc_hash::FxHasher>,
>;

type ModuleStoreRefMut<'a> = dashmap::mapref::one::RefMut<
    'a,
    FileId,
    ModuleTypeStore,
    std::hash::BuildHasherDefault<rustc_hash::FxHasher>,
>;
#[derive(Debug)]
pub(crate) struct FunctionTypeRef<'a> {
    module_store: ModuleStoreRef<'a>,
    function_id: ModuleFunctionTypeId,
}

impl<'a> std::ops::Deref for FunctionTypeRef<'a> {
    type Target = FunctionType;

    fn deref(&self) -> &Self::Target {
        self.module_store.get_function(self.function_id)
    }
}

#[derive(Debug)]
pub(crate) struct ClassTypeRef<'a> {
    module_store: ModuleStoreRef<'a>,
    class_id: ModuleClassTypeId,
}

impl<'a> std::ops::Deref for ClassTypeRef<'a> {
    type Target = ClassType;

    fn deref(&self) -> &Self::Target {
        self.module_store.get_class(self.class_id)
    }
}

#[derive(Debug)]
pub(crate) struct UnionTypeRef<'a> {
    module_store: ModuleStoreRef<'a>,
    union_id: ModuleUnionTypeId,
}

impl<'a> std::ops::Deref for UnionTypeRef<'a> {
    type Target = UnionType;

    fn deref(&self) -> &Self::Target {
        self.module_store.get_union(self.union_id)
    }
}

#[derive(Debug)]
pub(crate) struct IntersectionTypeRef<'a> {
    module_store: ModuleStoreRef<'a>,
    intersection_id: ModuleIntersectionTypeId,
}

impl<'a> std::ops::Deref for IntersectionTypeRef<'a> {
    type Target = IntersectionType;

    fn deref(&self) -> &Self::Target {
        self.module_store.get_intersection(self.intersection_id)
    }
}

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct FunctionTypeId {
    file_id: FileId,
    func_id: ModuleFunctionTypeId,
}

impl FunctionTypeId {
    fn function(self, db: &dyn SemanticDb) -> QueryResult<FunctionTypeRef> {
        let jar: &SemanticJar = db.jar()?;
        Ok(jar.type_store.get_function(self))
    }

    pub(crate) fn name(self, db: &dyn SemanticDb) -> QueryResult<Name> {
        Ok(self.function(db)?.name().into())
    }

    pub(crate) fn global_symbol(self, db: &dyn SemanticDb) -> QueryResult<GlobalSymbolId> {
        Ok(GlobalSymbolId {
            file_id: self.file(),
            symbol_id: self.symbol(db)?,
        })
    }

    pub(crate) fn file(self) -> FileId {
        self.file_id
    }

    pub(crate) fn symbol(self, db: &dyn SemanticDb) -> QueryResult<SymbolId> {
        let FunctionType { symbol_id, .. } = *self.function(db)?;
        Ok(symbol_id)
    }

    pub(crate) fn get_containing_class(
        self,
        db: &dyn SemanticDb,
    ) -> QueryResult<Option<ClassTypeId>> {
        let table = symbol_table(db, self.file_id)?;
        let FunctionType { symbol_id, .. } = *self.function(db)?;
        let scope_id = symbol_id.symbol(&table).scope_id();
        let scope = scope_id.scope(&table);
        if !matches!(scope.kind(), ScopeKind::Class) {
            return Ok(None);
        };
        let Some(def) = scope.definition() else {
            return Ok(None);
        };
        let Some(symbol_id) = scope.defining_symbol() else {
            return Ok(None);
        };
        let Type::Class(class) = infer_definition_type(
            db,
            GlobalSymbolId {
                file_id: self.file_id,
                symbol_id,
            },
            def,
        )?
        else {
            return Ok(None);
        };
        Ok(Some(class))
    }

    pub(crate) fn has_decorator(
        self,
        db: &dyn SemanticDb,
        decorator_symbol: GlobalSymbolId,
    ) -> QueryResult<bool> {
        for deco_ty in self.function(db)?.decorators() {
            let Type::Function(deco_func) = deco_ty else {
                continue;
            };
            if deco_func.global_symbol(db)? == decorator_symbol {
                return Ok(true);
            }
        }
        Ok(false)
    }
}

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct ClassTypeId {
    file_id: FileId,
    class_id: ModuleClassTypeId,
}

impl ClassTypeId {
    fn class(self, db: &dyn SemanticDb) -> QueryResult<ClassTypeRef> {
        let jar: &SemanticJar = db.jar()?;
        Ok(jar.type_store.get_class(self))
    }

    pub(crate) fn name(self, db: &dyn SemanticDb) -> QueryResult<Name> {
        Ok(self.class(db)?.name().into())
    }

    pub(crate) fn get_super_class_member(
        self,
        db: &dyn SemanticDb,
        name: &Name,
    ) -> QueryResult<Option<Type>> {
        // TODO we should linearize the MRO instead of doing this recursively
        let class = self.class(db)?;
        for base in class.bases() {
            if let Type::Class(base) = base {
                if let Some(own_member) = base.get_own_class_member(db, name)? {
                    return Ok(Some(own_member));
                }
                if let Some(base_member) = base.get_super_class_member(db, name)? {
                    return Ok(Some(base_member));
                }
            }
        }
        Ok(None)
    }

    fn get_own_class_member(self, db: &dyn SemanticDb, name: &Name) -> QueryResult<Option<Type>> {
        // TODO: this should distinguish instance-only members (e.g. `x: int`) and not return them
        let ClassType { scope_id, .. } = *self.class(db)?;
        let table = symbol_table(db, self.file_id)?;
        if let Some(symbol_id) = table.symbol_id_by_name(scope_id, name) {
            Ok(Some(infer_symbol_type(
                db,
                GlobalSymbolId {
                    file_id: self.file_id,
                    symbol_id,
                },
            )?))
        } else {
            Ok(None)
        }
    }

    // TODO: get_own_instance_member, get_class_member, get_instance_member
}

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct UnionTypeId {
    file_id: FileId,
    union_id: ModuleUnionTypeId,
}

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct IntersectionTypeId {
    file_id: FileId,
    intersection_id: ModuleIntersectionTypeId,
}

#[newtype_index]
struct ModuleFunctionTypeId;

#[newtype_index]
struct ModuleClassTypeId;

#[newtype_index]
struct ModuleUnionTypeId;

#[newtype_index]
struct ModuleIntersectionTypeId;
#[derive(Debug)]
struct ModuleTypeStore {
    file_id: FileId,
    /// arena of all function types defined in this module
    functions: IndexVec<ModuleFunctionTypeId, FunctionType>,
    /// arena of all class types defined in this module
    classes: IndexVec<ModuleClassTypeId, ClassType>,
    /// arena of all union types created in this module
    unions: IndexVec<ModuleUnionTypeId, UnionType>,
    /// arena of all intersection types created in this module
    intersections: IndexVec<ModuleIntersectionTypeId, IntersectionType>,
    /// cached types of symbols in this module
    symbol_types: FxHashMap<SymbolId, Type>,
    /// cached types of AST nodes in this module
    node_types: FxHashMap<NodeKey, Type>,
}

impl ModuleTypeStore {
    fn new(file_id: FileId) -> Self {
        Self {
            file_id,
            functions: IndexVec::default(),
            classes: IndexVec::default(),
            unions: IndexVec::default(),
            intersections: IndexVec::default(),
            symbol_types: FxHashMap::default(),
            node_types: FxHashMap::default(),
        }
    }

    fn add_function(
        &mut self,
        name: &str,
        symbol_id: SymbolId,
        scope_id: ScopeId,
        decorators: Vec<Type>,
    ) -> FunctionTypeId {
        let func_id = self.functions.push(FunctionType {
            name: Name::new(name),
            symbol_id,
            scope_id,
            decorators,
        });
        FunctionTypeId {
            file_id: self.file_id,
            func_id,
        }
    }

    fn add_class(&mut self, name: &str, scope_id: ScopeId, bases: Vec<Type>) -> ClassTypeId {
        let class_id = self.classes.push(ClassType {
            name: Name::new(name),
            scope_id,
            // TODO: if no bases are given, that should imply [object]
            bases,
        });
        ClassTypeId {
            file_id: self.file_id,
            class_id,
        }
    }

    fn add_union(&mut self, elems: &[Type]) -> UnionTypeId {
        let union_id = self.unions.push(UnionType {
            elements: elems.iter().copied().collect(),
        });
        UnionTypeId {
            file_id: self.file_id,
            union_id,
        }
    }

    fn add_intersection(&mut self, positive: &[Type], negative: &[Type]) -> IntersectionTypeId {
        let intersection_id = self.intersections.push(IntersectionType {
            positive: positive.iter().copied().collect(),
            negative: negative.iter().copied().collect(),
        });
        IntersectionTypeId {
            file_id: self.file_id,
            intersection_id,
        }
    }

    fn get_function(&self, func_id: ModuleFunctionTypeId) -> &FunctionType {
        &self.functions[func_id]
    }

    fn get_class(&self, class_id: ModuleClassTypeId) -> &ClassType {
        &self.classes[class_id]
    }

    fn get_union(&self, union_id: ModuleUnionTypeId) -> &UnionType {
        &self.unions[union_id]
    }

    fn get_intersection(&self, intersection_id: ModuleIntersectionTypeId) -> &IntersectionType {
        &self.intersections[intersection_id]
    }
}
#[derive(Copy, Clone, Debug)]
struct DisplayType<'a> {
    ty: &'a Type,
    store: &'a TypeStore,
}

impl std::fmt::Display for DisplayType<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.ty {
            Type::Any => f.write_str("Any"),
            Type::Never => f.write_str("Never"),
            Type::Unknown => f.write_str("Unknown"),
            Type::Unbound => f.write_str("Unbound"),
            // TODO functions and classes should display using a fully qualified name
            Type::Class(class_id) => {
                f.write_str("Literal[")?;
                f.write_str(self.store.get_class(*class_id).name())?;
                f.write_str("]")
            }
            Type::Instance(class_id) => f.write_str(self.store.get_class(*class_id).name()),
            Type::Function(func_id) => f.write_str(self.store.get_function(*func_id).name()),
            Type::Union(union_id) => self
                .store
                .get_module(union_id.file_id)
                .get_union(union_id.union_id)
                .display(f, self.store),
            Type::Intersection(int_id) => self
                .store
                .get_module(int_id.file_id)
                .get_intersection(int_id.intersection_id)
                .display(f, self.store),
        }
    }
}

#[derive(Debug)]
pub(crate) struct ClassType {
    /// Name of the class at definition
    name: Name,
    /// `ScopeId` of the class body
    scope_id: ScopeId,
    /// Types of all class bases
    bases: Vec<Type>,
}

impl ClassType {
    fn name(&self) -> &str {
        self.name.as_str()
    }

    fn bases(&self) -> &[Type] {
        self.bases.as_slice()
    }
}

#[derive(Debug)]
pub(crate) struct FunctionType {
    /// name of the function at definition
    name: Name,
    /// symbol which this function is a definition of
    symbol_id: SymbolId,
    /// scope of this function's body
    scope_id: ScopeId,
    /// types of all decorators on this function
    decorators: Vec<Type>,
}

impl FunctionType {
    fn name(&self) -> &str {
        self.name.as_str()
    }

    fn scope_id(&self) -> ScopeId {
        self.scope_id
    }

    pub(crate) fn decorators(&self) -> &[Type] {
        self.decorators.as_slice()
    }
}

#[derive(Debug)]
pub(crate) struct UnionType {
    // the union type includes values in any of these types
    elements: FxIndexSet<Type>,
}

impl UnionType {
    fn display(&self, f: &mut std::fmt::Formatter<'_>, store: &TypeStore) -> std::fmt::Result {
        f.write_str("(")?;
        let mut first = true;
        for ty in &self.elements {
            if !first {
                f.write_str(" | ")?;
            };
            first = false;
            write!(f, "{}", ty.display(store))?;
        }
        f.write_str(")")
    }
}

// Negation types aren't expressible in annotations, and are most likely to arise from type
// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
// directly in intersections rather than as a separate type. This sacrifices some efficiency in the
// case where a Not appears outside an intersection (unclear when that could even happen, but we'd
// have to represent it as a single-element intersection if it did) in exchange for better
// efficiency in the within-intersection case.
#[derive(Debug)]
pub(crate) struct IntersectionType {
    // the intersection type includes only values in all of these types
    positive: FxIndexSet<Type>,
    // the intersection type does not include any value in any of these types
    negative: FxIndexSet<Type>,
}

impl IntersectionType {
    fn display(&self, f: &mut std::fmt::Formatter<'_>, store: &TypeStore) -> std::fmt::Result {
        f.write_str("(")?;
        let mut first = true;
        for (neg, ty) in self
            .positive
            .iter()
            .map(|ty| (false, ty))
            .chain(self.negative.iter().map(|ty| (true, ty)))
        {
            if !first {
                f.write_str(" & ")?;
            };
            first = false;
            if neg {
                f.write_str("~")?;
            };
            write!(f, "{}", ty.display(store))?;
        }
        f.write_str(")")
    }
}
#[cfg(test)]
mod tests {
    use std::path::Path;

    use crate::files::Files;
    use crate::symbols::{SymbolFlags, SymbolTable};
    use crate::types::{Type, TypeStore};
    use crate::FxIndexSet;

    #[test]
    fn add_class() {
        let store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let id = store.add_class(file_id, "C", SymbolTable::root_scope_id(), Vec::new());
        assert_eq!(store.get_class(id).name(), "C");
        let inst = Type::Instance(id);
        assert_eq!(format!("{}", inst.display(&store)), "C");
    }

    #[test]
    fn add_function() {
        let store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let mut table = SymbolTable::new();
        let func_symbol = table.add_or_update_symbol(
            SymbolTable::root_scope_id(),
            "func",
            SymbolFlags::IS_DEFINED,
        );

        let id = store.add_function(
            file_id,
            "func",
            func_symbol,
            SymbolTable::root_scope_id(),
            vec![Type::Unknown],
        );
        assert_eq!(store.get_function(id).name(), "func");
        assert_eq!(store.get_function(id).decorators(), vec![Type::Unknown]);
        let func = Type::Function(id);
        assert_eq!(format!("{}", func.display(&store)), "func");
    }

    #[test]
    fn add_union() {
        let mut store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
        let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
        let elems = vec![Type::Instance(c1), Type::Instance(c2)];
        let id = store.add_union(file_id, &elems);
        assert_eq!(
            store.get_union(id).elements,
            elems.into_iter().collect::<FxIndexSet<_>>()
        );
        let union = Type::Union(id);
        assert_eq!(format!("{}", union.display(&store)), "(C1 | C2)");
    }

    #[test]
    fn add_intersection() {
        let mut store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
        let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
        let c3 = store.add_class(file_id, "C3", SymbolTable::root_scope_id(), Vec::new());
        let pos = vec![Type::Instance(c1), Type::Instance(c2)];
        let neg = vec![Type::Instance(c3)];
        let id = store.add_intersection(file_id, &pos, &neg);
        assert_eq!(
            store.get_intersection(id).positive,
            pos.into_iter().collect::<FxIndexSet<_>>()
        );
        assert_eq!(
            store.get_intersection(id).negative,
            neg.into_iter().collect::<FxIndexSet<_>>()
        );
        let intersection = Type::Intersection(id);
        assert_eq!(
            format!("{}", intersection.display(&store)),
            "(C1 & C2 & ~C3)"
        );
    }
}
@@ -1,292 +0,0 @@
#![allow(dead_code)]

use ruff_python_ast as ast;
use ruff_python_ast::AstNode;

use crate::db::{QueryResult, SemanticDb, SemanticJar};

use crate::module::ModuleName;
use crate::parse::parse;
use crate::symbols::{
    resolve_global_symbol, symbol_table, Definition, GlobalSymbolId, ImportFromDefinition,
};
use crate::types::Type;
use crate::FileId;

// FIXME: Figure out proper dead-lock free synchronisation now that this takes `&db` instead of `&mut db`.
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_symbol_type(db: &dyn SemanticDb, symbol: GlobalSymbolId) -> QueryResult<Type> {
    let symbols = symbol_table(db, symbol.file_id)?;
    let defs = symbols.definitions(symbol.symbol_id);
    let jar: &SemanticJar = db.jar()?;

    if let Some(ty) = jar.type_store.get_cached_symbol_type(symbol) {
        return Ok(ty);
    }

    // TODO handle multiple defs, conditional defs...
    assert_eq!(defs.len(), 1);

    let ty = infer_definition_type(db, symbol, defs[0].clone())?;

    jar.type_store.cache_symbol_type(symbol, ty);

    // TODO record dependencies
    Ok(ty)
}

#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_definition_type(
    db: &dyn SemanticDb,
    symbol: GlobalSymbolId,
    definition: Definition,
) -> QueryResult<Type> {
    let jar: &SemanticJar = db.jar()?;
    let type_store = &jar.type_store;
    let file_id = symbol.file_id;

    match definition {
        Definition::ImportFrom(ImportFromDefinition {
            module,
            name,
            level,
        }) => {
            // TODO relative imports
            assert!(matches!(level, 0));
            let module_name = ModuleName::new(module.as_ref().expect("TODO relative imports"));
            if let Some(remote_symbol) = resolve_global_symbol(db, module_name, &name)? {
                infer_symbol_type(db, remote_symbol)
            } else {
                Ok(Type::Unknown)
            }
        }
        Definition::ClassDef(node_key) => {
            if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
                Ok(ty)
            } else {
                let parsed = parse(db.upcast(), file_id)?;
                let ast = parsed.ast();
                let table = symbol_table(db, file_id)?;
                let node = node_key.resolve_unwrap(ast.as_any_node_ref());

                let mut bases = Vec::with_capacity(node.bases().len());

                for base in node.bases() {
                    bases.push(infer_expr_type(db, file_id, base)?);
                }
                let scope_id = table.scope_id_for_node(node_key.erased());
                let ty = Type::Class(type_store.add_class(file_id, &node.name.id, scope_id, bases));
                type_store.cache_node_type(file_id, *node_key.erased(), ty);
                Ok(ty)
            }
        }
        Definition::FunctionDef(node_key) => {
            if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
                Ok(ty)
            } else {
                let parsed = parse(db.upcast(), file_id)?;
                let ast = parsed.ast();
                let table = symbol_table(db, file_id)?;
                let node = node_key
                    .resolve(ast.as_any_node_ref())
                    .expect("node key should resolve");

                let decorator_tys = node
                    .decorator_list
                    .iter()
                    .map(|decorator| infer_expr_type(db, file_id, &decorator.expression))
                    .collect::<QueryResult<_>>()?;
                let scope_id = table.scope_id_for_node(node_key.erased());
                let ty = type_store
                    .add_function(
                        file_id,
                        &node.name.id,
                        symbol.symbol_id,
                        scope_id,
                        decorator_tys,
                    )
                    .into();
                type_store.cache_node_type(file_id, *node_key.erased(), ty);
                Ok(ty)
            }
        }
        Definition::Assignment(node_key) => {
            let parsed = parse(db.upcast(), file_id)?;
            let ast = parsed.ast();
            let node = node_key.resolve_unwrap(ast.as_any_node_ref());
            // TODO handle unpacking assignment correctly
            infer_expr_type(db, file_id, &node.value)
        }
        _ => todo!("other kinds of definitions"),
    }
}

fn infer_expr_type(db: &dyn SemanticDb, file_id: FileId, expr: &ast::Expr) -> QueryResult<Type> {
    // TODO cache the resolution of the type on the node
    let symbols = symbol_table(db, file_id)?;
    match expr {
        ast::Expr::Name(name) => {
            // TODO look up in the correct scope, don't assume global
            if let Some(symbol_id) = symbols.root_symbol_id_by_name(&name.id) {
                infer_symbol_type(db, GlobalSymbolId { file_id, symbol_id })
            } else {
                Ok(Type::Unknown)
            }
        }
        _ => todo!("full expression type resolution"),
    }
}
#[cfg(test)]
mod tests {
    use crate::db::tests::TestDb;
    use crate::db::{HasJar, SemanticJar};
    use crate::module::{
        resolve_module, set_module_search_paths, ModuleName, ModuleSearchPath, ModuleSearchPathKind,
    };
    use crate::symbols::{symbol_table, GlobalSymbolId};
    use crate::types::{infer_symbol_type, Type};
    use crate::Name;

    // TODO with virtual filesystem we shouldn't have to write files to disk for these
    // tests

    struct TestCase {
        temp_dir: tempfile::TempDir,
        db: TestDb,

        src: ModuleSearchPath,
    }

    fn create_test() -> std::io::Result<TestCase> {
        let temp_dir = tempfile::tempdir()?;

        let src = temp_dir.path().join("src");
        std::fs::create_dir(&src)?;
        let src = ModuleSearchPath::new(src.canonicalize()?, ModuleSearchPathKind::FirstParty);

        let roots = vec![src.clone()];

        let mut db = TestDb::default();
        set_module_search_paths(&mut db, roots);

        Ok(TestCase { temp_dir, db, src })
    }

    #[test]
    fn follow_import_to_class() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let a_path = case.src.path().join("a.py");
        let b_path = case.src.path().join("b.py");
        std::fs::write(a_path, "from b import C as D; E = D")?;
        std::fs::write(b_path, "class C: pass")?;
        let a_file = resolve_module(db, ModuleName::new("a"))?
            .expect("module should be found")
            .path(db)?
            .file();
        let a_syms = symbol_table(db, a_file)?;
        let e_sym = a_syms
            .root_symbol_id_by_name("E")
            .expect("E symbol should be found");

        let ty = infer_symbol_type(
            db,
            GlobalSymbolId {
                file_id: a_file,
                symbol_id: e_sym,
            },
        )?;

        let jar = HasJar::<SemanticJar>::jar(db)?;
        assert!(matches!(ty, Type::Class(_)));
        assert_eq!(format!("{}", ty.display(&jar.type_store)), "Literal[C]");

        Ok(())
    }

    #[test]
    fn resolve_base_class_by_name() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let path = case.src.path().join("mod.py");
        std::fs::write(path, "class Base: pass\nclass Sub(Base): pass")?;
        let file = resolve_module(db, ModuleName::new("mod"))?
            .expect("module should be found")
            .path(db)?
            .file();
        let syms = symbol_table(db, file)?;
        let sym = syms
            .root_symbol_id_by_name("Sub")
            .expect("Sub symbol should be found");

        let ty = infer_symbol_type(
            db,
            GlobalSymbolId {
                file_id: file,
                symbol_id: sym,
            },
        )?;

        let Type::Class(class_id) = ty else {
            panic!("Sub is not a Class")
        };
        let jar = HasJar::<SemanticJar>::jar(db)?;
        let base_names: Vec<_> = jar
            .type_store
            .get_class(class_id)
            .bases()
            .iter()
            .map(|base_ty| format!("{}", base_ty.display(&jar.type_store)))
            .collect();

        assert_eq!(base_names, vec!["Literal[Base]"]);

        Ok(())
    }

    #[test]
    fn resolve_method() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let path = case.src.path().join("mod.py");
        std::fs::write(path, "class C:\n def f(self): pass")?;
        let file = resolve_module(db, ModuleName::new("mod"))?
            .expect("module should be found")
            .path(db)?
            .file();
        let syms = symbol_table(db, file)?;
        let sym = syms
            .root_symbol_id_by_name("C")
            .expect("C symbol should be found");

        let ty = infer_symbol_type(
            db,
            GlobalSymbolId {
                file_id: file,
                symbol_id: sym,
            },
        )?;

        let Type::Class(class_id) = ty else {
            panic!("C is not a Class");
        };

        let member_ty = class_id
            .get_own_class_member(db, &Name::new("f"))
            .expect("C.f to resolve");

        let Some(Type::Function(func_id)) = member_ty else {
            panic!("C.f is not a Function");
        };

        let jar = HasJar::<SemanticJar>::jar(db)?;
        let function = jar.type_store.get_function(func_id);
        assert_eq!(function.name(), "f");

        Ok(())
    }
}
1
crates/red_knot/src/verbosity.rs
Normal file
@@ -0,0 +1 @@
@@ -1,77 +0,0 @@
use std::path::Path;

use anyhow::Context;
use notify::event::{CreateKind, RemoveKind};
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};

use crate::program::{FileChangeKind, FileWatcherChange};

pub struct FileWatcher {
    watcher: RecommendedWatcher,
}

pub trait EventHandler: Send + 'static {
    fn handle(&self, changes: Vec<FileWatcherChange>);
}

impl<F> EventHandler for F
where
    F: Fn(Vec<FileWatcherChange>) + Send + 'static,
{
    fn handle(&self, changes: Vec<FileWatcherChange>) {
        let f = self;
        f(changes);
    }
}

impl FileWatcher {
    pub fn new<E>(handler: E) -> anyhow::Result<Self>
    where
        E: EventHandler,
    {
        Self::from_handler(Box::new(handler))
    }

    fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
        let watcher = recommended_watcher(move |changes: notify::Result<Event>| {
            match changes {
                Ok(event) => {
                    // TODO verify that this handles all events correctly
                    let change_kind = match event.kind {
                        EventKind::Create(CreateKind::File) => FileChangeKind::Created,
                        EventKind::Modify(_) => FileChangeKind::Modified,
                        EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
                        _ => {
                            return;
                        }
                    };

                    let mut changes = Vec::new();

                    for path in event.paths {
                        if path.is_file() {
                            changes.push(FileWatcherChange::new(path, change_kind));
                        }
                    }

                    if !changes.is_empty() {
                        handler.handle(changes);
                    }
                }
                // TODO proper error handling
                Err(err) => {
                    panic!("Error: {err}");
                }
            }
        })
        .context("Failed to create file watcher.")?;

        Ok(Self { watcher })
    }

    pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
        self.watcher.watch(path, RecursiveMode::Recursive)?;

        Ok(())
    }
}
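Because EventHandler is blanket-implemented for closures, the watcher can be driven with a plain function plus a channel. A minimal wiring sketch; the channel and the watched path are illustrative, not taken from this diff:

    use std::path::Path;
    use std::sync::mpsc;

    fn watch_example() -> anyhow::Result<()> {
        let (sender, receiver) = mpsc::channel();

        // The closure satisfies the blanket EventHandler impl above.
        let mut watcher = FileWatcher::new(move |changes| {
            sender.send(changes).ok();
        })?;
        watcher.watch_folder(Path::new("src"))?;

        for changes in receiver {
            println!("{} file(s) changed", changes.len());
        }
        Ok(())
    }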
1234
crates/red_knot/tests/file_watching.rs
Normal file
File diff suppressed because it is too large
@@ -1 +0,0 @@
2d33fe212221a05661c0db5215a91cf3d7b7f072
591
crates/red_knot/vendor/typeshed/stdlib/_ast.pyi
vendored
@@ -1,591 +0,0 @@
|
||||
import sys
|
||||
import typing_extensions
|
||||
from typing import Any, ClassVar, Literal
|
||||
|
||||
PyCF_ONLY_AST: Literal[1024]
|
||||
PyCF_TYPE_COMMENTS: Literal[4096]
|
||||
PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192]
|
||||
|
||||
# Alias used for fields that must always be valid identifiers
|
||||
# A string `x` counts as a valid identifier if both the following are True
|
||||
# (1) `x.isidentifier()` evaluates to `True`
|
||||
# (2) `keyword.iskeyword(x)` evaluates to `False`
|
||||
_Identifier: typing_extensions.TypeAlias = str
|
||||
|
||||
class AST:
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ()
|
||||
_attributes: ClassVar[tuple[str, ...]]
|
||||
_fields: ClassVar[tuple[str, ...]]
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
|
||||
# TODO: Not all nodes have all of the following attributes
|
||||
lineno: int
|
||||
col_offset: int
|
||||
end_lineno: int | None
|
||||
end_col_offset: int | None
|
||||
type_comment: str | None
|
||||
|
||||
class mod(AST): ...
|
||||
class type_ignore(AST): ...
|
||||
|
||||
class TypeIgnore(type_ignore):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("lineno", "tag")
|
||||
tag: str
|
||||
|
||||
class FunctionType(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("argtypes", "returns")
|
||||
argtypes: list[expr]
|
||||
returns: expr
|
||||
|
||||
class Module(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body", "type_ignores")
|
||||
body: list[stmt]
|
||||
type_ignores: list[TypeIgnore]
|
||||
|
||||
class Interactive(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body",)
|
||||
body: list[stmt]
|
||||
|
||||
class Expression(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body",)
|
||||
body: expr
|
||||
|
||||
class stmt(AST): ...
|
||||
|
||||
class FunctionDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
|
||||
elif sys.version_info >= (3, 10):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment")
|
||||
name: _Identifier
|
||||
args: arguments
|
||||
body: list[stmt]
|
||||
decorator_list: list[expr]
|
||||
returns: expr | None
|
||||
if sys.version_info >= (3, 12):
|
||||
type_params: list[type_param]
|
||||
|
||||
class AsyncFunctionDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
|
||||
elif sys.version_info >= (3, 10):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment")
|
||||
name: _Identifier
|
||||
args: arguments
|
||||
body: list[stmt]
|
||||
decorator_list: list[expr]
|
||||
returns: expr | None
|
||||
if sys.version_info >= (3, 12):
|
||||
type_params: list[type_param]
|
||||
|
||||
class ClassDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params")
|
||||
elif sys.version_info >= (3, 10):
|
||||
__match_args__ = ("name", "bases", "keywords", "body", "decorator_list")
|
||||
name: _Identifier
|
||||
bases: list[expr]
|
||||
keywords: list[keyword]
|
||||
body: list[stmt]
|
||||
decorator_list: list[expr]
|
||||
if sys.version_info >= (3, 12):
|
||||
type_params: list[type_param]
|
||||
|
||||
class Return(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr | None
|
||||
|
||||
class Delete(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("targets",)
|
||||
targets: list[expr]
|
||||
|
||||
class Assign(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("targets", "value", "type_comment")
|
||||
targets: list[expr]
|
||||
value: expr
|
||||
|
||||
class AugAssign(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "op", "value")
|
||||
target: Name | Attribute | Subscript
|
||||
op: operator
|
||||
value: expr
|
||||
|
||||
class AnnAssign(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "annotation", "value", "simple")
|
||||
target: Name | Attribute | Subscript
|
||||
annotation: expr
|
||||
value: expr | None
|
||||
simple: int
|
||||
|
||||
class For(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
|
||||
target: expr
|
||||
iter: expr
|
||||
body: list[stmt]
|
||||
orelse: list[stmt]
|
||||
|
||||
class AsyncFor(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
|
||||
target: expr
|
||||
iter: expr
|
||||
body: list[stmt]
|
||||
orelse: list[stmt]
|
||||
|
||||
class While(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "body", "orelse")
|
||||
test: expr
|
||||
body: list[stmt]
|
||||
orelse: list[stmt]
|
||||
|
||||
class If(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "body", "orelse")
|
||||
test: expr
|
||||
body: list[stmt]
|
||||
orelse: list[stmt]
|
||||
|
||||
class With(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("items", "body", "type_comment")
|
||||
items: list[withitem]
|
||||
body: list[stmt]
|
||||
|
||||
class AsyncWith(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("items", "body", "type_comment")
|
||||
items: list[withitem]
|
||||
body: list[stmt]
|
||||
|
||||
class Raise(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("exc", "cause")
|
||||
exc: expr | None
|
||||
cause: expr | None
|
||||
|
||||
class Try(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body", "handlers", "orelse", "finalbody")
|
||||
body: list[stmt]
|
||||
handlers: list[ExceptHandler]
|
||||
orelse: list[stmt]
|
||||
finalbody: list[stmt]
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
class TryStar(stmt):
|
||||
__match_args__ = ("body", "handlers", "orelse", "finalbody")
|
||||
body: list[stmt]
|
||||
handlers: list[ExceptHandler]
|
||||
orelse: list[stmt]
|
||||
finalbody: list[stmt]
|
||||
|
||||
class Assert(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "msg")
|
||||
test: expr
|
||||
msg: expr | None
|
||||
|
||||
class Import(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("names",)
|
||||
names: list[alias]
|
||||
|
||||
class ImportFrom(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("module", "names", "level")
|
||||
module: str | None
|
||||
names: list[alias]
|
||||
level: int
|
||||
|
||||
class Global(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("names",)
|
||||
names: list[_Identifier]
|
||||
|
||||
class Nonlocal(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("names",)
|
||||
names: list[_Identifier]
|
||||
|
||||
class Expr(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
|
||||
class Pass(stmt): ...
|
||||
class Break(stmt): ...
|
||||
class Continue(stmt): ...
|
||||
class expr(AST): ...
|
||||
|
||||
class BoolOp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("op", "values")
|
||||
op: boolop
|
||||
values: list[expr]
|
||||
|
||||
class BinOp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("left", "op", "right")
|
||||
left: expr
|
||||
op: operator
|
||||
right: expr
|
||||
|
||||
class UnaryOp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("op", "operand")
|
||||
op: unaryop
|
||||
operand: expr
|
||||
|
||||
class Lambda(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("args", "body")
|
||||
args: arguments
|
||||
body: expr
|
||||
|
||||
class IfExp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "body", "orelse")
|
||||
test: expr
|
||||
body: expr
|
||||
orelse: expr
|
||||
|
||||
class Dict(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("keys", "values")
|
||||
keys: list[expr | None]
|
||||
values: list[expr]
|
||||
|
||||
class Set(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elts",)
|
||||
elts: list[expr]
|
||||
|
||||
class ListComp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elt", "generators")
|
||||
elt: expr
|
||||
generators: list[comprehension]
|
||||
|
||||
class SetComp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elt", "generators")
|
||||
elt: expr
|
||||
generators: list[comprehension]
|
||||
|
||||
class DictComp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("key", "value", "generators")
|
||||
key: expr
|
||||
value: expr
|
||||
generators: list[comprehension]
|
||||
|
||||
class GeneratorExp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elt", "generators")
|
||||
elt: expr
|
||||
generators: list[comprehension]
|
||||
|
||||
class Await(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
|
||||
class Yield(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr | None
|
||||
|
||||
class YieldFrom(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
|
||||
class Compare(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("left", "ops", "comparators")
|
||||
left: expr
|
||||
ops: list[cmpop]
|
||||
comparators: list[expr]
|
||||
|
||||
class Call(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("func", "args", "keywords")
|
||||
func: expr
|
||||
args: list[expr]
|
||||
keywords: list[keyword]
|
||||
|
||||
class FormattedValue(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "conversion", "format_spec")
|
||||
value: expr
|
||||
conversion: int
|
||||
format_spec: expr | None
|
||||
|
||||
class JoinedStr(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("values",)
|
||||
values: list[expr]
|
||||
|
||||
class Constant(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "kind")
|
||||
value: Any # None, str, bytes, bool, int, float, complex, Ellipsis
|
||||
kind: str | None
|
||||
# Aliases for value, for backwards compatibility
|
||||
s: Any
|
||||
n: int | float | complex
|
||||
|
||||
class NamedExpr(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "value")
|
||||
target: Name
|
||||
value: expr
|
||||
|
||||
class Attribute(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "attr", "ctx")
|
||||
value: expr
|
||||
attr: _Identifier
|
||||
ctx: expr_context
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
_Slice: typing_extensions.TypeAlias = expr
|
||||
else:
|
||||
class slice(AST): ...
|
||||
_Slice: typing_extensions.TypeAlias = slice
|
||||
|
||||
class Slice(_Slice):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("lower", "upper", "step")
|
||||
lower: expr | None
|
||||
upper: expr | None
|
||||
step: expr | None
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
class ExtSlice(slice):
|
||||
dims: list[slice]
|
||||
|
||||
class Index(slice):
|
||||
value: expr
|
||||
|
||||
class Subscript(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "slice", "ctx")
|
||||
value: expr
|
||||
slice: _Slice
|
||||
ctx: expr_context
|
||||
|
||||
class Starred(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "ctx")
|
||||
value: expr
|
||||
ctx: expr_context
|
||||
|
||||
class Name(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("id", "ctx")
|
||||
id: _Identifier
|
||||
ctx: expr_context
|
||||
|
||||
class List(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elts", "ctx")
|
||||
elts: list[expr]
|
||||
ctx: expr_context
|
||||
|
||||
class Tuple(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elts", "ctx")
|
||||
elts: list[expr]
|
||||
ctx: expr_context
|
||||
if sys.version_info >= (3, 9):
|
||||
dims: list[expr]
|
||||
|
||||
class expr_context(AST): ...
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
class AugLoad(expr_context): ...
|
||||
class AugStore(expr_context): ...
|
||||
class Param(expr_context): ...
|
||||
|
||||
class Suite(mod):
|
||||
body: list[stmt]
|
||||
|
||||
class Del(expr_context): ...
|
||||
class Load(expr_context): ...
|
||||
class Store(expr_context): ...
|
||||
class boolop(AST): ...
|
||||
class And(boolop): ...
|
||||
class Or(boolop): ...
|
||||
class operator(AST): ...
|
||||
class Add(operator): ...
|
||||
class BitAnd(operator): ...
|
||||
class BitOr(operator): ...
|
||||
class BitXor(operator): ...
|
||||
class Div(operator): ...
|
||||
class FloorDiv(operator): ...
|
||||
class LShift(operator): ...
|
||||
class Mod(operator): ...
|
||||
class Mult(operator): ...
|
||||
class MatMult(operator): ...
|
||||
class Pow(operator): ...
|
||||
class RShift(operator): ...
|
||||
class Sub(operator): ...
|
||||
class unaryop(AST): ...
|
||||
class Invert(unaryop): ...
|
||||
class Not(unaryop): ...
|
||||
class UAdd(unaryop): ...
|
||||
class USub(unaryop): ...
|
||||
class cmpop(AST): ...
|
||||
class Eq(cmpop): ...
|
||||
class Gt(cmpop): ...
|
||||
class GtE(cmpop): ...
|
||||
class In(cmpop): ...
|
||||
class Is(cmpop): ...
|
||||
class IsNot(cmpop): ...
|
||||
class Lt(cmpop): ...
|
||||
class LtE(cmpop): ...
|
||||
class NotEq(cmpop): ...
|
||||
class NotIn(cmpop): ...
|
||||
|
||||
class comprehension(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "iter", "ifs", "is_async")
|
||||
target: expr
|
||||
iter: expr
|
||||
ifs: list[expr]
|
||||
is_async: int
|
||||
|
||||
class excepthandler(AST): ...
|
||||
|
||||
class ExceptHandler(excepthandler):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("type", "name", "body")
|
||||
type: expr | None
|
||||
name: _Identifier | None
|
||||
body: list[stmt]
|
||||
|
||||
class arguments(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults")
|
||||
posonlyargs: list[arg]
|
||||
args: list[arg]
|
||||
vararg: arg | None
|
||||
kwonlyargs: list[arg]
|
||||
kw_defaults: list[expr | None]
|
||||
kwarg: arg | None
|
||||
defaults: list[expr]
|
||||
|
||||
class arg(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("arg", "annotation", "type_comment")
|
||||
arg: _Identifier
|
||||
annotation: expr | None
|
||||
|
||||
class keyword(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("arg", "value")
|
||||
arg: _Identifier | None
|
||||
value: expr
|
||||
|
||||
class alias(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("name", "asname")
|
||||
name: str
|
||||
asname: _Identifier | None
|
||||
|
||||
class withitem(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("context_expr", "optional_vars")
|
||||
context_expr: expr
|
||||
optional_vars: expr | None
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
class Match(stmt):
|
||||
__match_args__ = ("subject", "cases")
|
||||
subject: expr
|
||||
cases: list[match_case]
|
||||
|
||||
class pattern(AST): ...
|
||||
# Without the alias, Pyright complains variables named pattern are recursively defined
|
||||
_Pattern: typing_extensions.TypeAlias = pattern
|
||||
|
||||
class match_case(AST):
|
||||
__match_args__ = ("pattern", "guard", "body")
|
||||
pattern: _Pattern
|
||||
guard: expr | None
|
||||
body: list[stmt]
|
||||
|
||||
class MatchValue(pattern):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
|
||||
class MatchSingleton(pattern):
|
||||
__match_args__ = ("value",)
|
||||
value: Literal[True, False] | None
|
||||
|
||||
class MatchSequence(pattern):
|
||||
__match_args__ = ("patterns",)
|
||||
patterns: list[pattern]
|
||||
|
||||
class MatchStar(pattern):
|
||||
__match_args__ = ("name",)
|
||||
name: _Identifier | None
|
||||
|
||||
class MatchMapping(pattern):
|
||||
__match_args__ = ("keys", "patterns", "rest")
|
||||
keys: list[expr]
|
||||
patterns: list[pattern]
|
||||
rest: _Identifier | None
|
||||
|
||||
class MatchClass(pattern):
|
||||
__match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns")
|
||||
cls: expr
|
||||
patterns: list[pattern]
|
||||
kwd_attrs: list[_Identifier]
|
||||
kwd_patterns: list[pattern]
|
||||
|
||||
class MatchAs(pattern):
|
||||
__match_args__ = ("pattern", "name")
|
||||
pattern: _Pattern | None
|
||||
name: _Identifier | None
|
||||
|
||||
class MatchOr(pattern):
|
||||
__match_args__ = ("patterns",)
|
||||
patterns: list[pattern]
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
class type_param(AST):
|
||||
end_lineno: int
|
||||
end_col_offset: int
|
||||
|
||||
class TypeVar(type_param):
|
||||
__match_args__ = ("name", "bound")
|
||||
name: _Identifier
|
||||
bound: expr | None
|
||||
|
||||
class ParamSpec(type_param):
|
||||
__match_args__ = ("name",)
|
||||
name: _Identifier
|
||||
|
||||
class TypeVarTuple(type_param):
|
||||
__match_args__ = ("name",)
|
||||
name: _Identifier
|
||||
|
||||
class TypeAlias(stmt):
|
||||
__match_args__ = ("name", "type_params", "value")
|
||||
name: Name
|
||||
type_params: list[type_param]
|
||||
value: expr
|
||||
103
crates/red_knot/vendor/typeshed/stdlib/_stat.pyi
vendored
@@ -1,103 +0,0 @@
|
||||
import sys
|
||||
from typing import Literal
|
||||
|
||||
SF_APPEND: Literal[0x00040000]
|
||||
SF_ARCHIVED: Literal[0x00010000]
|
||||
SF_IMMUTABLE: Literal[0x00020000]
|
||||
SF_NOUNLINK: Literal[0x00100000]
|
||||
SF_SNAPSHOT: Literal[0x00200000]
|
||||
|
||||
ST_MODE: Literal[0]
|
||||
ST_INO: Literal[1]
|
||||
ST_DEV: Literal[2]
|
||||
ST_NLINK: Literal[3]
|
||||
ST_UID: Literal[4]
|
||||
ST_GID: Literal[5]
|
||||
ST_SIZE: Literal[6]
|
||||
ST_ATIME: Literal[7]
|
||||
ST_MTIME: Literal[8]
|
||||
ST_CTIME: Literal[9]
|
||||
|
||||
S_IFIFO: Literal[0o010000]
|
||||
S_IFLNK: Literal[0o120000]
|
||||
S_IFREG: Literal[0o100000]
|
||||
S_IFSOCK: Literal[0o140000]
|
||||
S_IFBLK: Literal[0o060000]
|
||||
S_IFCHR: Literal[0o020000]
|
||||
S_IFDIR: Literal[0o040000]
|
||||
|
||||
# These are 0 on systems that don't support the specific kind of file.
|
||||
# Example: Linux doesn't support door files, so S_IFDOOR is 0 on linux.
|
||||
S_IFDOOR: int
|
||||
S_IFPORT: int
|
||||
S_IFWHT: int
|
||||
|
||||
S_ISUID: Literal[0o4000]
|
||||
S_ISGID: Literal[0o2000]
|
||||
S_ISVTX: Literal[0o1000]
|
||||
|
||||
S_IRWXU: Literal[0o0700]
|
||||
S_IRUSR: Literal[0o0400]
|
||||
S_IWUSR: Literal[0o0200]
|
||||
S_IXUSR: Literal[0o0100]
|
||||
|
||||
S_IRWXG: Literal[0o0070]
|
||||
S_IRGRP: Literal[0o0040]
|
||||
S_IWGRP: Literal[0o0020]
|
||||
S_IXGRP: Literal[0o0010]
|
||||
|
||||
S_IRWXO: Literal[0o0007]
|
||||
S_IROTH: Literal[0o0004]
|
||||
S_IWOTH: Literal[0o0002]
|
||||
S_IXOTH: Literal[0o0001]
|
||||
|
||||
S_ENFMT: Literal[0o2000]
|
||||
S_IREAD: Literal[0o0400]
|
||||
S_IWRITE: Literal[0o0200]
|
||||
S_IEXEC: Literal[0o0100]
|
||||
|
||||
UF_APPEND: Literal[0x00000004]
|
||||
UF_COMPRESSED: Literal[0x00000020] # OS X 10.6+ only
|
||||
UF_HIDDEN: Literal[0x00008000] # OX X 10.5+ only
|
||||
UF_IMMUTABLE: Literal[0x00000002]
|
||||
UF_NODUMP: Literal[0x00000001]
|
||||
UF_NOUNLINK: Literal[0x00000010]
|
||||
UF_OPAQUE: Literal[0x00000008]
|
||||
|
||||
def S_IMODE(mode: int) -> int: ...
|
||||
def S_IFMT(mode: int) -> int: ...
|
||||
def S_ISBLK(mode: int) -> bool: ...
|
||||
def S_ISCHR(mode: int) -> bool: ...
|
||||
def S_ISDIR(mode: int) -> bool: ...
|
||||
def S_ISDOOR(mode: int) -> bool: ...
|
||||
def S_ISFIFO(mode: int) -> bool: ...
|
||||
def S_ISLNK(mode: int) -> bool: ...
|
||||
def S_ISPORT(mode: int) -> bool: ...
|
||||
def S_ISREG(mode: int) -> bool: ...
|
||||
def S_ISSOCK(mode: int) -> bool: ...
|
||||
def S_ISWHT(mode: int) -> bool: ...
|
||||
def filemode(mode: int) -> str: ...
|
||||
|
||||
if sys.platform == "win32":
|
||||
IO_REPARSE_TAG_SYMLINK: int
|
||||
IO_REPARSE_TAG_MOUNT_POINT: int
|
||||
IO_REPARSE_TAG_APPEXECLINK: int
|
||||
|
||||
if sys.platform == "win32":
|
||||
FILE_ATTRIBUTE_ARCHIVE: Literal[32]
|
||||
FILE_ATTRIBUTE_COMPRESSED: Literal[2048]
|
||||
FILE_ATTRIBUTE_DEVICE: Literal[64]
|
||||
FILE_ATTRIBUTE_DIRECTORY: Literal[16]
|
||||
FILE_ATTRIBUTE_ENCRYPTED: Literal[16384]
|
||||
FILE_ATTRIBUTE_HIDDEN: Literal[2]
|
||||
FILE_ATTRIBUTE_INTEGRITY_STREAM: Literal[32768]
|
||||
FILE_ATTRIBUTE_NORMAL: Literal[128]
|
||||
FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Literal[8192]
|
||||
FILE_ATTRIBUTE_NO_SCRUB_DATA: Literal[131072]
|
||||
FILE_ATTRIBUTE_OFFLINE: Literal[4096]
|
||||
FILE_ATTRIBUTE_READONLY: Literal[1]
|
||||
FILE_ATTRIBUTE_REPARSE_POINT: Literal[1024]
|
||||
FILE_ATTRIBUTE_SPARSE_FILE: Literal[512]
|
||||
FILE_ATTRIBUTE_SYSTEM: Literal[4]
|
||||
FILE_ATTRIBUTE_TEMPORARY: Literal[256]
|
||||
FILE_ATTRIBUTE_VIRTUAL: Literal[65536]
|
||||
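For context on the removed `_stat` stubs above (this note and sketch are not part of the diff), a minimal usage sketch of the public `stat` wrapper; the `/tmp` path is illustrative:

```python
import os
import stat

# Interpret a file's mode with the constants and helpers declared in the stub.
mode = os.stat("/tmp").st_mode        # illustrative, non-Windows path
print(stat.S_ISDIR(mode))             # True for a directory
print(oct(stat.S_IMODE(mode)))        # permission bits only, e.g. 0o1777
print(stat.filemode(mode))            # human-readable form, e.g. 'drwxrwxrwt'

# On platforms without door files (e.g. Linux), S_IFDOOR is 0, so this is always False.
print(stat.S_ISDOOR(mode))
```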
@@ -1,20 +0,0 @@
|
||||
import enum
|
||||
import sys
|
||||
from typing import Literal
|
||||
|
||||
LOG_THRESHOLD_FOR_CONNLOST_WRITES: Literal[5]
|
||||
ACCEPT_RETRY_DELAY: Literal[1]
|
||||
DEBUG_STACK_DEPTH: Literal[10]
|
||||
SSL_HANDSHAKE_TIMEOUT: float
|
||||
SENDFILE_FALLBACK_READBUFFER_SIZE: Literal[262144]
|
||||
if sys.version_info >= (3, 11):
|
||||
SSL_SHUTDOWN_TIMEOUT: float
|
||||
FLOW_CONTROL_HIGH_WATER_SSL_READ: Literal[256]
|
||||
FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Literal[512]
|
||||
if sys.version_info >= (3, 12):
|
||||
THREAD_JOIN_TIMEOUT: Literal[300]
|
||||
|
||||
class _SendfileMode(enum.Enum):
|
||||
UNSUPPORTED = 1
|
||||
TRY_NATIVE = 2
|
||||
FALLBACK = 3
|
||||
@@ -1,20 +0,0 @@
|
||||
import functools
|
||||
import traceback
|
||||
from collections.abc import Iterable
|
||||
from types import FrameType, FunctionType
|
||||
from typing import Any, overload
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
class _HasWrapper:
|
||||
__wrapper__: _HasWrapper | FunctionType
|
||||
|
||||
_FuncType: TypeAlias = FunctionType | _HasWrapper | functools.partial[Any] | functools.partialmethod[Any]
|
||||
|
||||
@overload
|
||||
def _get_function_source(func: _FuncType) -> tuple[str, int]: ...
|
||||
@overload
|
||||
def _get_function_source(func: object) -> tuple[str, int] | None: ...
|
||||
def _format_callback_source(func: object, args: Iterable[Any]) -> str: ...
|
||||
def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ...
|
||||
def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ...
|
||||
def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ...
|
||||
@@ -1,196 +0,0 @@
|
||||
import sys
|
||||
import types
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from collections.abc import Callable
|
||||
from typing import Literal
|
||||
from typing_extensions import Self, TypeVarTuple, Unpack, deprecated
|
||||
|
||||
from .events import AbstractEventLoop, BaseDefaultEventLoopPolicy
|
||||
from .selector_events import BaseSelectorEventLoop
|
||||
|
||||
_Ts = TypeVarTuple("_Ts")
|
||||
|
||||
# This is also technically not available on Win,
|
||||
# but other parts of typeshed need this definition.
|
||||
# So, it is special cased.
|
||||
if sys.version_info >= (3, 12):
|
||||
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
|
||||
class AbstractChildWatcher:
|
||||
@abstractmethod
|
||||
def add_child_handler(
|
||||
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
|
||||
) -> None: ...
|
||||
@abstractmethod
|
||||
def remove_child_handler(self, pid: int) -> bool: ...
|
||||
@abstractmethod
|
||||
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
|
||||
@abstractmethod
|
||||
def close(self) -> None: ...
|
||||
@abstractmethod
|
||||
def __enter__(self) -> Self: ...
|
||||
@abstractmethod
|
||||
def __exit__(
|
||||
self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None
|
||||
) -> None: ...
|
||||
@abstractmethod
|
||||
def is_active(self) -> bool: ...
|
||||
|
||||
else:
|
||||
class AbstractChildWatcher:
|
||||
@abstractmethod
|
||||
def add_child_handler(
|
||||
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
|
||||
) -> None: ...
|
||||
@abstractmethod
|
||||
def remove_child_handler(self, pid: int) -> bool: ...
|
||||
@abstractmethod
|
||||
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
|
||||
@abstractmethod
|
||||
def close(self) -> None: ...
|
||||
@abstractmethod
|
||||
def __enter__(self) -> Self: ...
|
||||
@abstractmethod
|
||||
def __exit__(
|
||||
self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None
|
||||
) -> None: ...
|
||||
@abstractmethod
|
||||
def is_active(self) -> bool: ...
|
||||
|
||||
if sys.platform != "win32":
|
||||
if sys.version_info >= (3, 9):
|
||||
__all__ = (
|
||||
"SelectorEventLoop",
|
||||
"AbstractChildWatcher",
|
||||
"SafeChildWatcher",
|
||||
"FastChildWatcher",
|
||||
"PidfdChildWatcher",
|
||||
"MultiLoopChildWatcher",
|
||||
"ThreadedChildWatcher",
|
||||
"DefaultEventLoopPolicy",
|
||||
)
|
||||
else:
|
||||
__all__ = (
|
||||
"SelectorEventLoop",
|
||||
"AbstractChildWatcher",
|
||||
"SafeChildWatcher",
|
||||
"FastChildWatcher",
|
||||
"MultiLoopChildWatcher",
|
||||
"ThreadedChildWatcher",
|
||||
"DefaultEventLoopPolicy",
|
||||
)
|
||||
|
||||
# Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub.
|
||||
# See discussion in #7412
|
||||
class BaseChildWatcher(AbstractChildWatcher, metaclass=ABCMeta):
|
||||
def close(self) -> None: ...
|
||||
def is_active(self) -> bool: ...
|
||||
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
|
||||
class SafeChildWatcher(BaseChildWatcher):
|
||||
def __enter__(self) -> Self: ...
|
||||
def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
|
||||
def add_child_handler(
|
||||
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
|
||||
) -> None: ...
|
||||
def remove_child_handler(self, pid: int) -> bool: ...
|
||||
|
||||
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
|
||||
class FastChildWatcher(BaseChildWatcher):
|
||||
def __enter__(self) -> Self: ...
|
||||
def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
|
||||
def add_child_handler(
|
||||
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
|
||||
) -> None: ...
|
||||
def remove_child_handler(self, pid: int) -> bool: ...
|
||||
|
||||
else:
|
||||
class SafeChildWatcher(BaseChildWatcher):
|
||||
def __enter__(self) -> Self: ...
|
||||
def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
|
||||
def add_child_handler(
|
||||
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
|
||||
) -> None: ...
|
||||
def remove_child_handler(self, pid: int) -> bool: ...
|
||||
|
||||
class FastChildWatcher(BaseChildWatcher):
|
||||
def __enter__(self) -> Self: ...
|
||||
def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
|
||||
def add_child_handler(
|
||||
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
|
||||
) -> None: ...
|
||||
def remove_child_handler(self, pid: int) -> bool: ...
|
||||
|
||||
class _UnixSelectorEventLoop(BaseSelectorEventLoop): ...
|
||||
|
||||
class _UnixDefaultEventLoopPolicy(BaseDefaultEventLoopPolicy):
|
||||
if sys.version_info >= (3, 12):
|
||||
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
|
||||
def get_child_watcher(self) -> AbstractChildWatcher: ...
|
||||
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
|
||||
def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ...
|
||||
else:
|
||||
def get_child_watcher(self) -> AbstractChildWatcher: ...
|
||||
def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ...
|
||||
|
||||
SelectorEventLoop = _UnixSelectorEventLoop
|
||||
|
||||
DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
|
||||
class MultiLoopChildWatcher(AbstractChildWatcher):
|
||||
def is_active(self) -> bool: ...
|
||||
def close(self) -> None: ...
|
||||
def __enter__(self) -> Self: ...
|
||||
def __exit__(
|
||||
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
|
||||
) -> None: ...
|
||||
def add_child_handler(
|
||||
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
|
||||
) -> None: ...
|
||||
def remove_child_handler(self, pid: int) -> bool: ...
|
||||
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
|
||||
|
||||
else:
|
||||
class MultiLoopChildWatcher(AbstractChildWatcher):
|
||||
def is_active(self) -> bool: ...
|
||||
def close(self) -> None: ...
|
||||
def __enter__(self) -> Self: ...
|
||||
def __exit__(
|
||||
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
|
||||
) -> None: ...
|
||||
def add_child_handler(
|
||||
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
|
||||
) -> None: ...
|
||||
def remove_child_handler(self, pid: int) -> bool: ...
|
||||
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
|
||||
|
||||
class ThreadedChildWatcher(AbstractChildWatcher):
|
||||
def is_active(self) -> Literal[True]: ...
|
||||
def close(self) -> None: ...
|
||||
def __enter__(self) -> Self: ...
|
||||
def __exit__(
|
||||
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
|
||||
) -> None: ...
|
||||
def __del__(self) -> None: ...
|
||||
def add_child_handler(
|
||||
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
|
||||
) -> None: ...
|
||||
def remove_child_handler(self, pid: int) -> bool: ...
|
||||
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
class PidfdChildWatcher(AbstractChildWatcher):
|
||||
def __enter__(self) -> Self: ...
|
||||
def __exit__(
|
||||
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
|
||||
) -> None: ...
|
||||
def is_active(self) -> bool: ...
|
||||
def close(self) -> None: ...
|
||||
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
|
||||
def add_child_handler(
|
||||
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
|
||||
) -> None: ...
|
||||
def remove_child_handler(self, pid: int) -> bool: ...
|
||||
@@ -1,32 +0,0 @@
|
||||
from ._base import (
|
||||
ALL_COMPLETED as ALL_COMPLETED,
|
||||
FIRST_COMPLETED as FIRST_COMPLETED,
|
||||
FIRST_EXCEPTION as FIRST_EXCEPTION,
|
||||
BrokenExecutor as BrokenExecutor,
|
||||
CancelledError as CancelledError,
|
||||
Executor as Executor,
|
||||
Future as Future,
|
||||
InvalidStateError as InvalidStateError,
|
||||
TimeoutError as TimeoutError,
|
||||
as_completed as as_completed,
|
||||
wait as wait,
|
||||
)
|
||||
from .process import ProcessPoolExecutor as ProcessPoolExecutor
|
||||
from .thread import ThreadPoolExecutor as ThreadPoolExecutor
|
||||
|
||||
__all__ = (
|
||||
"FIRST_COMPLETED",
|
||||
"FIRST_EXCEPTION",
|
||||
"ALL_COMPLETED",
|
||||
"CancelledError",
|
||||
"TimeoutError",
|
||||
"BrokenExecutor",
|
||||
"Future",
|
||||
"Executor",
|
||||
"wait",
|
||||
"as_completed",
|
||||
"ProcessPoolExecutor",
|
||||
"ThreadPoolExecutor",
|
||||
)
|
||||
|
||||
def __dir__() -> tuple[str, ...]: ...
|
||||
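A brief usage sketch of the re-exports above (illustrative, not part of the diff):

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

def square(n: int) -> int:
    return n * n

# Submit work and consume results as they finish, using only names
# re-exported from concurrent.futures.__init__.
with ThreadPoolExecutor(max_workers=4) as pool:
    futures = [pool.submit(square, n) for n in range(8)]
    for fut in as_completed(futures):
        print(fut.result())
```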
crates/red_knot/vendor/typeshed/stdlib/copy.pyi (vendored, 16 lines)
@@ -1,16 +0,0 @@
|
||||
from typing import Any, TypeVar
|
||||
|
||||
__all__ = ["Error", "copy", "deepcopy"]
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
# None in CPython but non-None in Jython
|
||||
PyStringMap: Any
|
||||
|
||||
# Note: memo and _nil are internal kwargs.
|
||||
def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = []) -> _T: ...
|
||||
def copy(x: _T) -> _T: ...
|
||||
|
||||
class Error(Exception): ...
|
||||
|
||||
error = Error
|
||||
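A short sketch of the `copy` vs. `deepcopy` distinction stubbed above (illustrative; `memo` and `_nil` remain internal, as the note says):

```python
import copy

nested = [[1, 2], [3, 4]]
shallow = copy.copy(nested)     # new outer list, inner lists shared
deep = copy.deepcopy(nested)    # inner lists copied recursively

nested[0].append(99)
print(shallow[0])  # [1, 2, 99] -- shares the mutated inner list
print(deep[0])     # [1, 2]     -- unaffected
```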
@@ -1,20 +0,0 @@
|
||||
def make_archive(
|
||||
base_name: str,
|
||||
format: str,
|
||||
root_dir: str | None = None,
|
||||
base_dir: str | None = None,
|
||||
verbose: int = 0,
|
||||
dry_run: int = 0,
|
||||
owner: str | None = None,
|
||||
group: str | None = None,
|
||||
) -> str: ...
|
||||
def make_tarball(
|
||||
base_name: str,
|
||||
base_dir: str,
|
||||
compress: str | None = "gzip",
|
||||
verbose: int = 0,
|
||||
dry_run: int = 0,
|
||||
owner: str | None = None,
|
||||
group: str | None = None,
|
||||
) -> str: ...
|
||||
def make_zipfile(base_name: str, base_dir: str, verbose: int = 0, dry_run: int = 0) -> str: ...
|
||||
@@ -1,66 +0,0 @@
|
||||
from _typeshed import Incomplete
|
||||
from abc import abstractmethod
|
||||
from collections.abc import Callable, Iterable
|
||||
from distutils.dist import Distribution
|
||||
from typing import Any
|
||||
|
||||
class Command:
|
||||
distribution: Distribution
|
||||
sub_commands: list[tuple[str, Callable[[Command], bool] | None]]
|
||||
def __init__(self, dist: Distribution) -> None: ...
|
||||
@abstractmethod
|
||||
def initialize_options(self) -> None: ...
|
||||
@abstractmethod
|
||||
def finalize_options(self) -> None: ...
|
||||
@abstractmethod
|
||||
def run(self) -> None: ...
|
||||
def announce(self, msg: str, level: int = 1) -> None: ...
|
||||
def debug_print(self, msg: str) -> None: ...
|
||||
def ensure_string(self, option: str, default: str | None = None) -> None: ...
|
||||
def ensure_string_list(self, option: str | list[str]) -> None: ...
|
||||
def ensure_filename(self, option: str) -> None: ...
|
||||
def ensure_dirname(self, option: str) -> None: ...
|
||||
def get_command_name(self) -> str: ...
|
||||
def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ...
|
||||
def get_finalized_command(self, command: str, create: int = 1) -> Command: ...
|
||||
def reinitialize_command(self, command: Command | str, reinit_subcommands: int = 0) -> Command: ...
|
||||
def run_command(self, command: str) -> None: ...
|
||||
def get_sub_commands(self) -> list[str]: ...
|
||||
def warn(self, msg: str) -> None: ...
|
||||
def execute(self, func: Callable[..., object], args: Iterable[Any], msg: str | None = None, level: int = 1) -> None: ...
|
||||
def mkpath(self, name: str, mode: int = 0o777) -> None: ...
|
||||
def copy_file(
|
||||
self, infile: str, outfile: str, preserve_mode: int = 1, preserve_times: int = 1, link: str | None = None, level: Any = 1
|
||||
) -> tuple[str, bool]: ... # level is not used
|
||||
def copy_tree(
|
||||
self,
|
||||
infile: str,
|
||||
outfile: str,
|
||||
preserve_mode: int = 1,
|
||||
preserve_times: int = 1,
|
||||
preserve_symlinks: int = 0,
|
||||
level: Any = 1,
|
||||
) -> list[str]: ... # level is not used
|
||||
def move_file(self, src: str, dst: str, level: Any = 1) -> str: ... # level is not used
|
||||
def spawn(self, cmd: Iterable[str], search_path: int = 1, level: Any = 1) -> None: ... # level is not used
|
||||
def make_archive(
|
||||
self,
|
||||
base_name: str,
|
||||
format: str,
|
||||
root_dir: str | None = None,
|
||||
base_dir: str | None = None,
|
||||
owner: str | None = None,
|
||||
group: str | None = None,
|
||||
) -> str: ...
|
||||
def make_file(
|
||||
self,
|
||||
infiles: str | list[str] | tuple[str, ...],
|
||||
outfile: str,
|
||||
func: Callable[..., object],
|
||||
args: list[Any],
|
||||
exec_msg: str | None = None,
|
||||
skip_msg: str | None = None,
|
||||
level: Any = 1,
|
||||
) -> None: ... # level is not used
|
||||
def ensure_finalized(self) -> None: ...
|
||||
def dump_options(self, header: Incomplete | None = None, indent: str = "") -> None: ...
|
||||
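The `Command` stub above is consumed by subclassing; a minimal sketch of that pattern follows (not part of the diff). `CleanDocs` and its `build_dir` option are hypothetical, and `distutils` itself is only available before Python 3.12 (or via setuptools' vendored copy):

```python
from distutils.cmd import Command
from distutils.dist import Distribution

class CleanDocs(Command):
    """Toy command showing the abstract hooks from the stub."""
    description = "remove generated documentation"
    user_options = []  # (long-name, short-name, help) tuples

    def initialize_options(self) -> None:
        self.build_dir = None  # hypothetical option

    def finalize_options(self) -> None:
        # Inherit the value from the standard 'build' command if unset.
        self.set_undefined_options("build", ("build_base", "build_dir"))

    def run(self) -> None:
        self.announce(f"would remove docs under {self.build_dir}", level=2)

# Commands are registered on a Distribution rather than instantiated directly.
dist = Distribution({"cmdclass": {"clean_docs": CleanDocs}})
```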
@@ -1,25 +0,0 @@
|
||||
from typing import Any
|
||||
|
||||
from ..cmd import Command
|
||||
|
||||
def show_formats() -> None: ...
|
||||
|
||||
class bdist(Command):
|
||||
description: str
|
||||
user_options: Any
|
||||
boolean_options: Any
|
||||
help_options: Any
|
||||
no_format_option: Any
|
||||
default_format: Any
|
||||
format_commands: Any
|
||||
format_command: Any
|
||||
bdist_base: Any
|
||||
plat_name: Any
|
||||
formats: Any
|
||||
dist_dir: Any
|
||||
skip_build: int
|
||||
group: Any
|
||||
owner: Any
|
||||
def initialize_options(self) -> None: ...
|
||||
def finalize_options(self) -> None: ...
|
||||
def run(self) -> None: ...
|
||||
@@ -1,3 +0,0 @@
|
||||
def newer(source: str, target: str) -> bool: ...
|
||||
def newer_pairwise(sources: list[str], targets: list[str]) -> list[tuple[str, str]]: ...
|
||||
def newer_group(sources: list[str], target: str, missing: str = "error") -> bool: ...
|
||||
@@ -1,13 +0,0 @@
|
||||
def mkpath(name: str, mode: int = 0o777, verbose: int = 1, dry_run: int = 0) -> list[str]: ...
|
||||
def create_tree(base_dir: str, files: list[str], mode: int = 0o777, verbose: int = 1, dry_run: int = 0) -> None: ...
|
||||
def copy_tree(
|
||||
src: str,
|
||||
dst: str,
|
||||
preserve_mode: int = 1,
|
||||
preserve_times: int = 1,
|
||||
preserve_symlinks: int = 0,
|
||||
update: int = 0,
|
||||
verbose: int = 1,
|
||||
dry_run: int = 0,
|
||||
) -> list[str]: ...
|
||||
def remove_tree(directory: str, verbose: int = 1, dry_run: int = 0) -> None: ...
|
||||
@@ -1,14 +0,0 @@
|
||||
from collections.abc import Sequence
|
||||
|
||||
def copy_file(
|
||||
src: str,
|
||||
dst: str,
|
||||
preserve_mode: bool = ...,
|
||||
preserve_times: bool = ...,
|
||||
update: bool = ...,
|
||||
link: str | None = None,
|
||||
verbose: bool = ...,
|
||||
dry_run: bool = ...,
|
||||
) -> tuple[str, str]: ...
|
||||
def move_file(src: str, dst: str, verbose: bool = ..., dry_run: bool = ...) -> str: ...
|
||||
def write_file(filename: str, contents: Sequence[str]) -> None: ...
|
||||
@@ -1,2 +0,0 @@
|
||||
def spawn(cmd: list[str], search_path: bool = ..., verbose: bool = ..., dry_run: bool = ...) -> None: ...
|
||||
def find_executable(executable: str, path: str | None = None) -> str | None: ...
|
||||
@@ -1,67 +0,0 @@
|
||||
ENDMARKER: int
|
||||
NAME: int
|
||||
NUMBER: int
|
||||
STRING: int
|
||||
NEWLINE: int
|
||||
INDENT: int
|
||||
DEDENT: int
|
||||
LPAR: int
|
||||
RPAR: int
|
||||
LSQB: int
|
||||
RSQB: int
|
||||
COLON: int
|
||||
COMMA: int
|
||||
SEMI: int
|
||||
PLUS: int
|
||||
MINUS: int
|
||||
STAR: int
|
||||
SLASH: int
|
||||
VBAR: int
|
||||
AMPER: int
|
||||
LESS: int
|
||||
GREATER: int
|
||||
EQUAL: int
|
||||
DOT: int
|
||||
PERCENT: int
|
||||
BACKQUOTE: int
|
||||
LBRACE: int
|
||||
RBRACE: int
|
||||
EQEQUAL: int
|
||||
NOTEQUAL: int
|
||||
LESSEQUAL: int
|
||||
GREATEREQUAL: int
|
||||
TILDE: int
|
||||
CIRCUMFLEX: int
|
||||
LEFTSHIFT: int
|
||||
RIGHTSHIFT: int
|
||||
DOUBLESTAR: int
|
||||
PLUSEQUAL: int
|
||||
MINEQUAL: int
|
||||
STAREQUAL: int
|
||||
SLASHEQUAL: int
|
||||
PERCENTEQUAL: int
|
||||
AMPEREQUAL: int
|
||||
VBAREQUAL: int
|
||||
CIRCUMFLEXEQUAL: int
|
||||
LEFTSHIFTEQUAL: int
|
||||
RIGHTSHIFTEQUAL: int
|
||||
DOUBLESTAREQUAL: int
|
||||
DOUBLESLASH: int
|
||||
DOUBLESLASHEQUAL: int
|
||||
OP: int
|
||||
COMMENT: int
|
||||
NL: int
|
||||
RARROW: int
|
||||
AT: int
|
||||
ATEQUAL: int
|
||||
AWAIT: int
|
||||
ASYNC: int
|
||||
ERRORTOKEN: int
|
||||
COLONEQUAL: int
|
||||
N_TOKENS: int
|
||||
NT_OFFSET: int
|
||||
tok_name: dict[int, str]
|
||||
|
||||
def ISTERMINAL(x: int) -> bool: ...
|
||||
def ISNONTERMINAL(x: int) -> bool: ...
|
||||
def ISEOF(x: int) -> bool: ...
|
||||
@@ -1,33 +0,0 @@
|
||||
import builtins
|
||||
import types
|
||||
from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite
|
||||
from typing import Any
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
version: int
|
||||
|
||||
_Marshallable: TypeAlias = (
|
||||
# handled in w_object() in marshal.c
|
||||
None
|
||||
| type[StopIteration]
|
||||
| builtins.ellipsis
|
||||
| bool
|
||||
# handled in w_complex_object() in marshal.c
|
||||
| int
|
||||
| float
|
||||
| complex
|
||||
| bytes
|
||||
| str
|
||||
| tuple[_Marshallable, ...]
|
||||
| list[Any]
|
||||
| dict[Any, Any]
|
||||
| set[Any]
|
||||
| frozenset[_Marshallable]
|
||||
| types.CodeType
|
||||
| ReadableBuffer
|
||||
)
|
||||
|
||||
def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /) -> None: ...
|
||||
def load(file: SupportsRead[bytes], /) -> Any: ...
|
||||
def dumps(value: _Marshallable, version: int = 4, /) -> bytes: ...
|
||||
def loads(bytes: ReadableBuffer, /) -> Any: ...
|
||||
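A minimal round-trip sketch for the `_Marshallable` alias above (illustrative, not part of the diff):

```python
import marshal

# Only the types listed in _Marshallable round-trip; marshal is not a
# general-purpose serializer and its format is CPython-version specific.
payload = {"answer": 42, "names": ("a", "b"), "flags": frozenset({1, 2})}
blob = marshal.dumps(payload)
print(marshal.loads(blob) == payload)  # True
```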
@@ -1 +0,0 @@
|
||||
from _stat import *
|
||||
@@ -1,46 +0,0 @@
|
||||
import sys
|
||||
from typing import Literal, overload
|
||||
|
||||
if sys.platform != "win32":
|
||||
LOG_ALERT: Literal[1]
|
||||
LOG_AUTH: Literal[32]
|
||||
LOG_AUTHPRIV: Literal[80]
|
||||
LOG_CONS: Literal[2]
|
||||
LOG_CRIT: Literal[2]
|
||||
LOG_CRON: Literal[72]
|
||||
LOG_DAEMON: Literal[24]
|
||||
LOG_DEBUG: Literal[7]
|
||||
LOG_EMERG: Literal[0]
|
||||
LOG_ERR: Literal[3]
|
||||
LOG_INFO: Literal[6]
|
||||
LOG_KERN: Literal[0]
|
||||
LOG_LOCAL0: Literal[128]
|
||||
LOG_LOCAL1: Literal[136]
|
||||
LOG_LOCAL2: Literal[144]
|
||||
LOG_LOCAL3: Literal[152]
|
||||
LOG_LOCAL4: Literal[160]
|
||||
LOG_LOCAL5: Literal[168]
|
||||
LOG_LOCAL6: Literal[176]
|
||||
LOG_LOCAL7: Literal[184]
|
||||
LOG_LPR: Literal[48]
|
||||
LOG_MAIL: Literal[16]
|
||||
LOG_NDELAY: Literal[8]
|
||||
LOG_NEWS: Literal[56]
|
||||
LOG_NOTICE: Literal[5]
|
||||
LOG_NOWAIT: Literal[16]
|
||||
LOG_ODELAY: Literal[4]
|
||||
LOG_PERROR: Literal[32]
|
||||
LOG_PID: Literal[1]
|
||||
LOG_SYSLOG: Literal[40]
|
||||
LOG_USER: Literal[8]
|
||||
LOG_UUCP: Literal[64]
|
||||
LOG_WARNING: Literal[4]
|
||||
def LOG_MASK(pri: int, /) -> int: ...
|
||||
def LOG_UPTO(pri: int, /) -> int: ...
|
||||
def closelog() -> None: ...
|
||||
def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ...
|
||||
def setlogmask(maskpri: int, /) -> int: ...
|
||||
@overload
|
||||
def syslog(priority: int, message: str) -> None: ...
|
||||
@overload
|
||||
def syslog(message: str) -> None: ...
|
||||
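A small sketch of how the `syslog` module above is typically driven on non-Windows platforms (identifiers and messages are illustrative):

```python
import sys

if sys.platform != "win32":
    import syslog

    # Open a handle, mask out anything less severe than INFO, then emit messages.
    syslog.openlog(ident="demo-app", logoption=syslog.LOG_PID, facility=syslog.LOG_USER)
    syslog.setlogmask(syslog.LOG_UPTO(syslog.LOG_INFO))
    syslog.syslog(syslog.LOG_WARNING, "disk usage above threshold")
    syslog.syslog("defaults to LOG_INFO priority")
    syslog.closelog()
```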
@@ -1,80 +0,0 @@
|
||||
from typing import Literal
|
||||
|
||||
# These are not actually bools. See #4669
|
||||
NO: bool
|
||||
YES: bool
|
||||
TRUE: bool
|
||||
FALSE: bool
|
||||
ON: bool
|
||||
OFF: bool
|
||||
N: Literal["n"]
|
||||
S: Literal["s"]
|
||||
W: Literal["w"]
|
||||
E: Literal["e"]
|
||||
NW: Literal["nw"]
|
||||
SW: Literal["sw"]
|
||||
NE: Literal["ne"]
|
||||
SE: Literal["se"]
|
||||
NS: Literal["ns"]
|
||||
EW: Literal["ew"]
|
||||
NSEW: Literal["nsew"]
|
||||
CENTER: Literal["center"]
|
||||
NONE: Literal["none"]
|
||||
X: Literal["x"]
|
||||
Y: Literal["y"]
|
||||
BOTH: Literal["both"]
|
||||
LEFT: Literal["left"]
|
||||
TOP: Literal["top"]
|
||||
RIGHT: Literal["right"]
|
||||
BOTTOM: Literal["bottom"]
|
||||
RAISED: Literal["raised"]
|
||||
SUNKEN: Literal["sunken"]
|
||||
FLAT: Literal["flat"]
|
||||
RIDGE: Literal["ridge"]
|
||||
GROOVE: Literal["groove"]
|
||||
SOLID: Literal["solid"]
|
||||
HORIZONTAL: Literal["horizontal"]
|
||||
VERTICAL: Literal["vertical"]
|
||||
NUMERIC: Literal["numeric"]
|
||||
CHAR: Literal["char"]
|
||||
WORD: Literal["word"]
|
||||
BASELINE: Literal["baseline"]
|
||||
INSIDE: Literal["inside"]
|
||||
OUTSIDE: Literal["outside"]
|
||||
SEL: Literal["sel"]
|
||||
SEL_FIRST: Literal["sel.first"]
|
||||
SEL_LAST: Literal["sel.last"]
|
||||
END: Literal["end"]
|
||||
INSERT: Literal["insert"]
|
||||
CURRENT: Literal["current"]
|
||||
ANCHOR: Literal["anchor"]
|
||||
ALL: Literal["all"]
|
||||
NORMAL: Literal["normal"]
|
||||
DISABLED: Literal["disabled"]
|
||||
ACTIVE: Literal["active"]
|
||||
HIDDEN: Literal["hidden"]
|
||||
CASCADE: Literal["cascade"]
|
||||
CHECKBUTTON: Literal["checkbutton"]
|
||||
COMMAND: Literal["command"]
|
||||
RADIOBUTTON: Literal["radiobutton"]
|
||||
SEPARATOR: Literal["separator"]
|
||||
SINGLE: Literal["single"]
|
||||
BROWSE: Literal["browse"]
|
||||
MULTIPLE: Literal["multiple"]
|
||||
EXTENDED: Literal["extended"]
|
||||
DOTBOX: Literal["dotbox"]
|
||||
UNDERLINE: Literal["underline"]
|
||||
PIESLICE: Literal["pieslice"]
|
||||
CHORD: Literal["chord"]
|
||||
ARC: Literal["arc"]
|
||||
FIRST: Literal["first"]
|
||||
LAST: Literal["last"]
|
||||
BUTT: Literal["butt"]
|
||||
PROJECTING: Literal["projecting"]
|
||||
ROUND: Literal["round"]
|
||||
BEVEL: Literal["bevel"]
|
||||
MITER: Literal["miter"]
|
||||
MOVETO: Literal["moveto"]
|
||||
SCROLL: Literal["scroll"]
|
||||
UNITS: Literal["units"]
|
||||
PAGES: Literal["pages"]
|
||||
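For orientation, a tiny sketch of where these string constants are consumed (illustrative, requires a display, and not part of the diff):

```python
import tkinter

root = tkinter.Tk()
# Widget options and geometry managers take the string constants above.
label = tkinter.Label(root, text="hello", relief=tkinter.RIDGE)
label.pack(side=tkinter.LEFT, fill=tkinter.BOTH, expand=True, anchor=tkinter.CENTER)
root.update_idletasks()
root.destroy()
```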
@@ -1,28 +0,0 @@
|
||||
import sys
|
||||
from _typeshed import ReadableBuffer
|
||||
from typing import Literal, overload
|
||||
|
||||
if sys.platform == "win32":
|
||||
SND_APPLICATION: Literal[128]
|
||||
SND_FILENAME: Literal[131072]
|
||||
SND_ALIAS: Literal[65536]
|
||||
SND_LOOP: Literal[8]
|
||||
SND_MEMORY: Literal[4]
|
||||
SND_PURGE: Literal[64]
|
||||
SND_ASYNC: Literal[1]
|
||||
SND_NODEFAULT: Literal[2]
|
||||
SND_NOSTOP: Literal[16]
|
||||
SND_NOWAIT: Literal[8192]
|
||||
|
||||
MB_ICONASTERISK: Literal[64]
|
||||
MB_ICONEXCLAMATION: Literal[48]
|
||||
MB_ICONHAND: Literal[16]
|
||||
MB_ICONQUESTION: Literal[32]
|
||||
MB_OK: Literal[0]
|
||||
def Beep(frequency: int, duration: int) -> None: ...
|
||||
# Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible
|
||||
@overload
|
||||
def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ...
|
||||
@overload
|
||||
def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ...
|
||||
def MessageBeep(type: int = 0) -> None: ...
|
||||
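A usage sketch for the `PlaySound` overloads above on Windows (the WAV path is illustrative):

```python
import sys

if sys.platform == "win32":
    import winsound

    # A str is treated as a filename when SND_FILENAME is set ...
    winsound.PlaySound(r"C:\Windows\Media\chimes.wav", winsound.SND_FILENAME | winsound.SND_ASYNC)
    # ... whereas SND_MEMORY (the Literal[4] overload) expects an in-memory WAV buffer.
    with open(r"C:\Windows\Media\chimes.wav", "rb") as f:
        winsound.PlaySound(f.read(), winsound.SND_MEMORY)
    winsound.MessageBeep(winsound.MB_ICONASTERISK)
```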
crates/red_knot_python_semantic/Cargo.toml (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
[package]
|
||||
name = "red_knot_python_semantic"
|
||||
version = "0.0.0"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
homepage = { workspace = true }
|
||||
documentation = { workspace = true }
|
||||
repository = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
ruff_db = { workspace = true }
|
||||
ruff_index = { workspace = true }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_python_stdlib = { workspace = true }
|
||||
ruff_text_size = { workspace = true }
|
||||
|
||||
bitflags = { workspace = true }
|
||||
camino = { workspace = true }
|
||||
compact_str = { workspace = true }
|
||||
countme = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
ordermap = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
hashbrown = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
path-slash = { workspace = true }
|
||||
walkdir = { workspace = true }
|
||||
zip = { workspace = true, features = ["zstd", "deflate"] }
|
||||
|
||||
[dev-dependencies]
|
||||
anyhow = { workspace = true }
|
||||
insta = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
walkdir = { workspace = true }
|
||||
zip = { workspace = true }
|
||||
ruff_python_parser = { workspace = true }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
crates/red_knot_python_semantic/README.md (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
# Red Knot
|
||||
|
||||
Semantic analysis for the red-knot project.
|
||||
|
||||
## Vendored types for the stdlib
|
||||
|
||||
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_python_semantic/vendor/typeshed`. The file `crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
|
||||
|
||||
The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
|
||||
crates/red_knot_python_semantic/build.rs (new file, 87 lines)
@@ -0,0 +1,87 @@
|
||||
//! Build script to package our vendored typeshed files
|
||||
//! into a zip archive that can be included in the Ruff binary.
|
||||
//!
|
||||
//! This script should be automatically run at build time
|
||||
//! whenever the script itself changes, or whenever any files
|
||||
//! in `crates/red_knot_python_semantic/vendor/typeshed` change.
|
||||
|
||||
use std::fs::File;
|
||||
use std::path::Path;
|
||||
|
||||
use path_slash::PathExt;
|
||||
use zip::result::ZipResult;
|
||||
use zip::write::{FileOptions, ZipWriter};
|
||||
use zip::CompressionMethod;
|
||||
|
||||
const TYPESHED_SOURCE_DIR: &str = "vendor/typeshed";
|
||||
const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";
|
||||
|
||||
/// Recursively zip the contents of an entire directory.
|
||||
///
|
||||
/// This routine is adapted from a recipe at
|
||||
/// <https://github.com/zip-rs/zip-old/blob/5d0f198124946b7be4e5969719a7f29f363118cd/examples/write_dir.rs>
|
||||
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
|
||||
let mut zip = ZipWriter::new(writer);
|
||||
|
||||
// Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
|
||||
// [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM) which complicates the build
|
||||
// by a lot. Deflated compression is slower but it shouldn't matter much for the WASM use case
|
||||
// (WASM itself is already slower than a native build for a specific platform).
|
||||
// We can't use `#[cfg(...)]` here because the target-arch in a build script is the
|
||||
// architecture of the system running the build script and not the architecture of the build-target.
|
||||
// That's why we use the `TARGET` environment variable here.
|
||||
let method = if std::env::var("TARGET").unwrap().contains("wasm32") {
|
||||
CompressionMethod::Deflated
|
||||
} else {
|
||||
CompressionMethod::Zstd
|
||||
};
|
||||
|
||||
let options = FileOptions::default()
|
||||
.compression_method(method)
|
||||
.unix_permissions(0o644);
|
||||
|
||||
for entry in walkdir::WalkDir::new(directory_path) {
|
||||
let dir_entry = entry.unwrap();
|
||||
let absolute_path = dir_entry.path();
|
||||
let normalized_relative_path = absolute_path
|
||||
.strip_prefix(Path::new(directory_path))
|
||||
.unwrap()
|
||||
.to_slash()
|
||||
.expect("Unexpected non-utf8 typeshed path!");
|
||||
|
||||
// Write file or directory explicitly
|
||||
// Some unzip tools unzip files with directory paths correctly, some do not!
|
||||
if absolute_path.is_file() {
|
||||
println!("adding file {absolute_path:?} as {normalized_relative_path:?} ...");
|
||||
zip.start_file(normalized_relative_path, options)?;
|
||||
let mut f = File::open(absolute_path)?;
|
||||
std::io::copy(&mut f, &mut zip).unwrap();
|
||||
} else if !normalized_relative_path.is_empty() {
|
||||
// Only if not root! Avoids path spec / warning
|
||||
// and mapname conversion failed error on unzip
|
||||
println!("adding dir {absolute_path:?} as {normalized_relative_path:?} ...");
|
||||
zip.add_directory(normalized_relative_path, options)?;
|
||||
}
|
||||
}
|
||||
zip.finish()
|
||||
}
|
||||
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed={TYPESHED_SOURCE_DIR}");
|
||||
assert!(
|
||||
Path::new(TYPESHED_SOURCE_DIR).is_dir(),
|
||||
"Where is typeshed?"
|
||||
);
|
||||
let out_dir = std::env::var("OUT_DIR").unwrap();
|
||||
|
||||
// N.B. Deliberately using `format!()` instead of `Path::join()` here,
|
||||
// so that we use `/` as a path separator on all platforms.
|
||||
// That enables us to load the typeshed zip at compile time in `module.rs`
|
||||
// (otherwise we'd have to dynamically determine the exact path to the typeshed zip
|
||||
// based on the default path separator for the specific platform we're on,
|
||||
// which can't be done at compile time.)
|
||||
let zipped_typeshed_location = format!("{out_dir}{TYPESHED_ZIP_LOCATION}");
|
||||
|
||||
let zipped_typeshed = File::create(zipped_typeshed_location).unwrap();
|
||||
zip_dir(TYPESHED_SOURCE_DIR, zipped_typeshed).unwrap();
|
||||
}
|
||||
crates/red_knot_python_semantic/src/ast_node_ref.rs (new file, 163 lines)
@@ -0,0 +1,163 @@
|
||||
use std::hash::Hash;
|
||||
use std::ops::Deref;
|
||||
|
||||
use ruff_db::parsed::ParsedModule;
|
||||
|
||||
/// Ref-counted owned reference to an AST node.
|
||||
///
|
||||
/// The type holds an owned reference to the node's ref-counted [`ParsedModule`].
|
||||
/// Holding on to the node's [`ParsedModule`] guarantees that the reference to the
|
||||
/// node must still be valid.
|
||||
///
|
||||
/// Holding on to any [`AstNodeRef`] prevents the [`ParsedModule`] from being released.
|
||||
///
|
||||
/// ## Equality
|
||||
/// Two `AstNodeRef` are considered equal if their wrapped nodes are equal.
|
||||
#[derive(Clone)]
|
||||
pub struct AstNodeRef<T> {
|
||||
/// Owned reference to the node's [`ParsedModule`].
|
||||
///
|
||||
/// The node's reference is guaranteed to remain valid as long as its enclosing
|
||||
/// [`ParsedModule`] is alive.
|
||||
_parsed: ParsedModule,
|
||||
|
||||
/// Pointer to the referenced node.
|
||||
node: std::ptr::NonNull<T>,
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
impl<T> AstNodeRef<T> {
|
||||
/// Creates a new `AstNodeRef` that references `node`. The `parsed` is the [`ParsedModule`] to
|
||||
/// which the `AstNodeRef` belongs.
|
||||
///
|
||||
/// ## Safety
|
||||
/// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
|
||||
/// [`ParsedModule`] to which `node` belongs. It's the caller's responsibility to ensure that
|
||||
/// the invariant `node belongs to parsed` is upheld.
|
||||
|
||||
pub(super) unsafe fn new(parsed: ParsedModule, node: &T) -> Self {
|
||||
Self {
|
||||
_parsed: parsed,
|
||||
node: std::ptr::NonNull::from(node),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a reference to the wrapped node.
|
||||
pub fn node(&self) -> &T {
|
||||
// SAFETY: Holding on to `parsed` ensures that the AST to which `node` belongs is still
|
||||
// alive and not moved.
|
||||
unsafe { self.node.as_ref() }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Deref for AstNodeRef<T> {
|
||||
type Target = T;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.node()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> std::fmt::Debug for AstNodeRef<T>
|
||||
where
|
||||
T: std::fmt::Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_tuple("AstNodeRef").field(&self.node()).finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> PartialEq for AstNodeRef<T>
|
||||
where
|
||||
T: PartialEq,
|
||||
{
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.node().eq(other.node())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Eq for AstNodeRef<T> where T: Eq {}
|
||||
|
||||
impl<T> Hash for AstNodeRef<T>
|
||||
where
|
||||
T: Hash,
|
||||
{
|
||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||
self.node().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
unsafe impl<T> Send for AstNodeRef<T> where T: Send {}
|
||||
#[allow(unsafe_code)]
|
||||
unsafe impl<T> Sync for AstNodeRef<T> where T: Sync {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::ast_node_ref::AstNodeRef;
|
||||
use ruff_db::parsed::ParsedModule;
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_python_parser::parse_unchecked_source;
|
||||
|
||||
#[test]
|
||||
#[allow(unsafe_code)]
|
||||
fn equality() {
|
||||
let parsed_raw = parse_unchecked_source("1 + 2", PySourceType::Python);
|
||||
let parsed = ParsedModule::new(parsed_raw.clone());
|
||||
|
||||
let stmt = &parsed.syntax().body[0];
|
||||
|
||||
let node1 = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
|
||||
let node2 = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
|
||||
|
||||
assert_eq!(node1, node2);
|
||||
|
||||
// Compare from different trees
|
||||
let cloned = ParsedModule::new(parsed_raw);
|
||||
let stmt_cloned = &cloned.syntax().body[0];
|
||||
let cloned_node = unsafe { AstNodeRef::new(cloned.clone(), stmt_cloned) };
|
||||
|
||||
assert_eq!(node1, cloned_node);
|
||||
|
||||
let other_raw = parse_unchecked_source("2 + 2", PySourceType::Python);
|
||||
let other = ParsedModule::new(other_raw);
|
||||
|
||||
let other_stmt = &other.syntax().body[0];
|
||||
let other_node = unsafe { AstNodeRef::new(other.clone(), other_stmt) };
|
||||
|
||||
assert_ne!(node1, other_node);
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
#[test]
|
||||
fn inequality() {
|
||||
let parsed_raw = parse_unchecked_source("1 + 2", PySourceType::Python);
|
||||
let parsed = ParsedModule::new(parsed_raw.clone());
|
||||
|
||||
let stmt = &parsed.syntax().body[0];
|
||||
let node = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
|
||||
|
||||
let other_raw = parse_unchecked_source("2 + 2", PySourceType::Python);
|
||||
let other = ParsedModule::new(other_raw);
|
||||
|
||||
let other_stmt = &other.syntax().body[0];
|
||||
let other_node = unsafe { AstNodeRef::new(other.clone(), other_stmt) };
|
||||
|
||||
assert_ne!(node, other_node);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[allow(unsafe_code)]
|
||||
fn debug() {
|
||||
let parsed_raw = parse_unchecked_source("1 + 2", PySourceType::Python);
|
||||
let parsed = ParsedModule::new(parsed_raw.clone());
|
||||
|
||||
let stmt = &parsed.syntax().body[0];
|
||||
|
||||
let stmt_node = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
|
||||
|
||||
let debug = format!("{stmt_node:?}");
|
||||
|
||||
assert_eq!(debug, format!("AstNodeRef({stmt:?})"));
|
||||
}
|
||||
}
|
||||
crates/red_knot_python_semantic/src/builtins.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::resolve_module;
|
||||
use crate::semantic_index::global_scope;
|
||||
use crate::semantic_index::symbol::ScopeId;
|
||||
use crate::Db;
|
||||
|
||||
/// Salsa query to get the builtins scope.
|
||||
///
|
||||
/// Can return None if a custom typeshed is used that is missing `builtins.pyi`.
|
||||
#[salsa::tracked]
|
||||
pub(crate) fn builtins_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
|
||||
let builtins_name =
|
||||
ModuleName::new_static("builtins").expect("Expected 'builtins' to be a valid module name");
|
||||
let builtins_file = resolve_module(db, builtins_name)?.file();
|
||||
Some(global_scope(db, builtins_file))
|
||||
}
|
||||
crates/red_knot_python_semantic/src/db.rs (new file, 105 lines)
@@ -0,0 +1,105 @@
|
||||
use ruff_db::{Db as SourceDb, Upcast};
|
||||
|
||||
/// Database giving access to semantic information about a Python program.
|
||||
#[salsa::db]
|
||||
pub trait Db: SourceDb + Upcast<dyn SourceDb> {}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::module_resolver::vendored_typeshed_stubs;
|
||||
use ruff_db::files::Files;
|
||||
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
|
||||
use ruff_db::vendored::VendoredFileSystem;
|
||||
use ruff_db::{Db as SourceDb, Upcast};
|
||||
|
||||
use super::Db;
|
||||
|
||||
#[salsa::db]
|
||||
pub(crate) struct TestDb {
|
||||
storage: salsa::Storage<Self>,
|
||||
files: Files,
|
||||
system: TestSystem,
|
||||
vendored: VendoredFileSystem,
|
||||
events: std::sync::Arc<std::sync::Mutex<Vec<salsa::Event>>>,
|
||||
}
|
||||
|
||||
impl TestDb {
|
||||
pub(crate) fn new() -> Self {
|
||||
Self {
|
||||
storage: salsa::Storage::default(),
|
||||
system: TestSystem::default(),
|
||||
vendored: vendored_typeshed_stubs().clone(),
|
||||
events: std::sync::Arc::default(),
|
||||
files: Files::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Takes the salsa events.
|
||||
///
|
||||
/// ## Panics
|
||||
/// If there are any pending salsa snapshots.
|
||||
pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
|
||||
let inner = Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");
|
||||
|
||||
let events = inner.get_mut().unwrap();
|
||||
std::mem::take(&mut *events)
|
||||
}
|
||||
|
||||
/// Clears the salsa events.
|
||||
///
|
||||
/// ## Panics
|
||||
/// If there are any pending salsa snapshots.
|
||||
pub(crate) fn clear_salsa_events(&mut self) {
|
||||
self.take_salsa_events();
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithTestSystem for TestDb {
|
||||
fn test_system(&self) -> &TestSystem {
|
||||
&self.system
|
||||
}
|
||||
|
||||
fn test_system_mut(&mut self) -> &mut TestSystem {
|
||||
&mut self.system
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl SourceDb for TestDb {
|
||||
fn vendored(&self) -> &VendoredFileSystem {
|
||||
&self.vendored
|
||||
}
|
||||
|
||||
fn system(&self) -> &dyn System {
|
||||
&self.system
|
||||
}
|
||||
|
||||
fn files(&self) -> &Files {
|
||||
&self.files
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn SourceDb> for TestDb {
|
||||
fn upcast(&self) -> &(dyn SourceDb + 'static) {
|
||||
self
|
||||
}
|
||||
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl Db for TestDb {}
|
||||
|
||||
#[salsa::db]
|
||||
impl salsa::Database for TestDb {
|
||||
fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
|
||||
let event = event();
|
||||
tracing::trace!("event: {event:?}");
|
||||
let mut events = self.events.lock().unwrap();
|
||||
events.push(event);
|
||||
}
|
||||
}
|
||||
}
|
||||
crates/red_knot_python_semantic/src/lib.rs (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
use std::hash::BuildHasherDefault;
|
||||
|
||||
use rustc_hash::FxHasher;
|
||||
|
||||
pub use db::Db;
|
||||
pub use module_name::ModuleName;
|
||||
pub use module_resolver::{resolve_module, system_module_search_paths, vendored_typeshed_stubs};
|
||||
pub use semantic_model::{HasTy, SemanticModel};
|
||||
|
||||
pub mod ast_node_ref;
|
||||
mod builtins;
|
||||
mod db;
|
||||
mod module_name;
|
||||
mod module_resolver;
|
||||
mod node_key;
|
||||
pub mod semantic_index;
|
||||
mod semantic_model;
|
||||
pub mod types;
|
||||
|
||||
type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;
|
||||
crates/red_knot_python_semantic/src/module_name.rs (new file, 198 lines)
@@ -0,0 +1,198 @@
|
||||
use std::fmt;
|
||||
use std::ops::Deref;
|
||||
|
||||
use compact_str::{CompactString, ToCompactString};
|
||||
|
||||
use ruff_python_stdlib::identifiers::is_identifier;
|
||||
|
||||
/// A module name, e.g. `foo.bar`.
|
||||
///
|
||||
/// Always normalized to the absolute form (never a relative module name, i.e., never `.foo`).
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
|
||||
pub struct ModuleName(compact_str::CompactString);
|
||||
|
||||
impl ModuleName {
|
||||
/// Creates a new module name for `name`. Returns `Some` if `name` is a valid, absolute
|
||||
/// module name and `None` otherwise.
|
||||
///
|
||||
/// The module name is invalid if:
|
||||
///
|
||||
/// * The name is empty
|
||||
/// * The name is relative
|
||||
/// * The name ends with a `.`
|
||||
/// * The name contains a sequence of multiple dots
|
||||
/// * A component of a name (the part between two dots) isn't a valid python identifier.
|
||||
#[inline]
|
||||
#[must_use]
|
||||
pub fn new(name: &str) -> Option<Self> {
|
||||
Self::is_valid_name(name).then(|| Self(CompactString::from(name)))
|
||||
}
|
||||
|
||||
/// Creates a new module name for `name` where `name` is a static string.
|
||||
/// Returns `Some` if `name` is a valid, absolute module name and `None` otherwise.
|
||||
///
|
||||
/// The module name is invalid if:
|
||||
///
|
||||
/// * The name is empty
|
||||
/// * The name is relative
|
||||
/// * The name ends with a `.`
|
||||
/// * The name contains a sequence of multiple dots
|
||||
/// * A component of a name (the part between two dots) isn't a valid python identifier.
|
||||
///
|
||||
/// ## Examples
|
||||
///
|
||||
/// ```
|
||||
/// use red_knot_python_semantic::ModuleName;
|
||||
///
|
||||
/// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
|
||||
/// assert_eq!(ModuleName::new_static(""), None);
|
||||
/// assert_eq!(ModuleName::new_static("..foo"), None);
|
||||
/// assert_eq!(ModuleName::new_static(".foo"), None);
|
||||
/// assert_eq!(ModuleName::new_static("foo."), None);
|
||||
/// assert_eq!(ModuleName::new_static("foo..bar"), None);
|
||||
/// assert_eq!(ModuleName::new_static("2000"), None);
|
||||
/// ```
|
||||
#[inline]
|
||||
#[must_use]
|
||||
pub fn new_static(name: &'static str) -> Option<Self> {
|
||||
Self::is_valid_name(name).then(|| Self(CompactString::const_new(name)))
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn is_valid_name(name: &str) -> bool {
|
||||
!name.is_empty() && name.split('.').all(is_identifier)
|
||||
}
|
||||
|
||||
/// An iterator over the components of the module name:
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use red_knot_python_semantic::ModuleName;
|
||||
///
|
||||
/// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
|
||||
/// ```
|
||||
#[must_use]
|
||||
pub fn components(&self) -> impl DoubleEndedIterator<Item = &str> {
|
||||
self.0.split('.')
|
||||
}
|
||||
|
||||
/// The name of this module's immediate parent, if it has a parent.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use red_knot_python_semantic::ModuleName;
|
||||
///
|
||||
/// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
|
||||
/// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
|
||||
/// assert_eq!(ModuleName::new_static("root").unwrap().parent(), None);
|
||||
/// ```
|
||||
#[must_use]
|
||||
pub fn parent(&self) -> Option<ModuleName> {
|
||||
let (parent, _) = self.0.rsplit_once('.')?;
|
||||
Some(Self(parent.to_compact_string()))
|
||||
}
|
||||
|
||||
/// Returns `true` if the name starts with `other`.
|
||||
///
|
||||
/// This is equivalent to checking if `self` is a sub-module of `other`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use red_knot_python_semantic::ModuleName;
|
||||
///
|
||||
/// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
|
||||
///
|
||||
/// assert!(!ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("bar").unwrap()));
|
||||
/// assert!(!ModuleName::new_static("foo_bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
|
||||
/// ```
|
||||
#[must_use]
|
||||
pub fn starts_with(&self, other: &ModuleName) -> bool {
|
||||
let mut self_components = self.components();
|
||||
let other_components = other.components();
|
||||
|
||||
for other_component in other_components {
|
||||
if self_components.next() != Some(other_component) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
#[inline]
|
||||
pub fn as_str(&self) -> &str {
|
||||
&self.0
|
||||
}
|
||||
|
||||
/// Construct a [`ModuleName`] from a sequence of parts.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use red_knot_python_semantic::ModuleName;
|
||||
///
|
||||
/// assert_eq!(&*ModuleName::from_components(["a"]).unwrap(), "a");
|
||||
/// assert_eq!(&*ModuleName::from_components(["a", "b"]).unwrap(), "a.b");
|
||||
/// assert_eq!(&*ModuleName::from_components(["a", "b", "c"]).unwrap(), "a.b.c");
|
||||
///
|
||||
/// assert_eq!(ModuleName::from_components(["a-b"]), None);
|
||||
/// assert_eq!(ModuleName::from_components(["a", "a-b"]), None);
|
||||
/// assert_eq!(ModuleName::from_components(["a", "b", "a-b-c"]), None);
|
||||
/// ```
|
||||
#[must_use]
|
||||
pub fn from_components<'a>(components: impl IntoIterator<Item = &'a str>) -> Option<Self> {
|
||||
let mut components = components.into_iter();
|
||||
let first_part = components.next()?;
|
||||
if !is_identifier(first_part) {
|
||||
return None;
|
||||
}
|
||||
let name = if let Some(second_part) = components.next() {
|
||||
if !is_identifier(second_part) {
|
||||
return None;
|
||||
}
|
||||
let mut name = format!("{first_part}.{second_part}");
|
||||
for part in components {
|
||||
if !is_identifier(part) {
|
||||
return None;
|
||||
}
|
||||
name.push('.');
|
||||
name.push_str(part);
|
||||
}
|
||||
CompactString::from(&name)
|
||||
} else {
|
||||
CompactString::from(first_part)
|
||||
};
|
||||
Some(Self(name))
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for ModuleName {
|
||||
type Target = str;
|
||||
|
||||
#[inline]
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<str> for ModuleName {
|
||||
fn eq(&self, other: &str) -> bool {
|
||||
self.as_str() == other
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<ModuleName> for str {
|
||||
fn eq(&self, other: &ModuleName) -> bool {
|
||||
self == other.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ModuleName {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
crates/red_knot_python_semantic/src/module_resolver/mod.rs (new file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
use std::iter::FusedIterator;
|
||||
|
||||
pub(crate) use module::Module;
|
||||
pub use resolver::resolve_module;
|
||||
use ruff_db::system::SystemPath;
|
||||
pub use typeshed::vendored_typeshed_stubs;
|
||||
|
||||
use crate::Db;
|
||||
use resolver::{module_resolution_settings, SearchPathIterator};
|
||||
|
||||
mod module;
|
||||
mod path;
|
||||
mod resolver;
|
||||
mod state;
|
||||
mod typeshed;
|
||||
|
||||
#[cfg(test)]
|
||||
mod testing;
|
||||
|
||||
/// Returns an iterator over all search paths pointing to a system path
|
||||
pub fn system_module_search_paths(db: &dyn Db) -> SystemModuleSearchPathsIter {
|
||||
SystemModuleSearchPathsIter {
|
||||
inner: module_resolution_settings(db).search_paths(db),
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SystemModuleSearchPathsIter<'db> {
|
||||
inner: SearchPathIterator<'db>,
|
||||
}
|
||||
|
||||
impl<'db> Iterator for SystemModuleSearchPathsIter<'db> {
|
||||
type Item = &'db SystemPath;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
loop {
|
||||
let next = self.inner.next()?;
|
||||
|
||||
if let Some(system_path) = next.as_system_path() {
|
||||
return Some(system_path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FusedIterator for SystemModuleSearchPathsIter<'_> {}
|
||||
@@ -0,0 +1,79 @@
|
||||
use std::fmt::Formatter;
|
||||
use std::sync::Arc;
|
||||
|
||||
use ruff_db::files::File;
|
||||
|
||||
use super::path::SearchPath;
|
||||
use crate::module_name::ModuleName;
|
||||
|
||||
/// Representation of a Python module.
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub struct Module {
|
||||
inner: Arc<ModuleInner>,
|
||||
}
|
||||
|
||||
impl Module {
|
||||
pub(crate) fn new(
|
||||
name: ModuleName,
|
||||
kind: ModuleKind,
|
||||
search_path: SearchPath,
|
||||
file: File,
|
||||
) -> Self {
|
||||
Self {
|
||||
inner: Arc::new(ModuleInner {
|
||||
name,
|
||||
kind,
|
||||
search_path,
|
||||
file,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// The absolute name of the module (e.g. `foo.bar`)
|
||||
pub fn name(&self) -> &ModuleName {
|
||||
&self.inner.name
|
||||
}
|
||||
|
||||
/// The file containing the source code that defines this module
|
||||
pub fn file(&self) -> File {
|
||||
self.inner.file
|
||||
}
|
||||
|
||||
/// The search path from which the module was resolved.
|
||||
pub(crate) fn search_path(&self) -> &SearchPath {
|
||||
&self.inner.search_path
|
||||
}
|
||||
|
||||
/// Determine whether this module is a single-file module or a package
|
||||
pub fn kind(&self) -> ModuleKind {
|
||||
self.inner.kind
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Module {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("Module")
|
||||
.field("name", &self.name())
|
||||
.field("kind", &self.kind())
|
||||
.field("file", &self.file())
|
||||
.field("search_path", &self.search_path())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq)]
|
||||
struct ModuleInner {
|
||||
name: ModuleName,
|
||||
kind: ModuleKind,
|
||||
search_path: SearchPath,
|
||||
file: File,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub enum ModuleKind {
|
||||
/// A single-file module (e.g. `foo.py` or `foo.pyi`)
|
||||
Module,
|
||||
|
||||
/// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
|
||||
Package,
|
||||
}
|
||||
crates/red_knot_python_semantic/src/module_resolver/path.rs (new file, 1099 lines; diff suppressed because it is too large)
crates/red_knot_python_semantic/src/module_resolver/resolver.rs (new file, 1705 lines; diff suppressed because it is too large)
crates/red_knot_python_semantic/src/module_resolver/state.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
|
||||
use ruff_db::program::TargetVersion;
|
||||
use ruff_db::vendored::VendoredFileSystem;
|
||||
|
||||
use super::typeshed::LazyTypeshedVersions;
|
||||
use crate::db::Db;
|
||||
|
||||
pub(crate) struct ResolverState<'db> {
|
||||
pub(crate) db: &'db dyn Db,
|
||||
pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
|
||||
pub(crate) target_version: TargetVersion,
|
||||
}
|
||||
|
||||
impl<'db> ResolverState<'db> {
|
||||
pub(crate) fn new(db: &'db dyn Db, target_version: TargetVersion) -> Self {
|
||||
Self {
|
||||
db,
|
||||
typeshed_versions: LazyTypeshedVersions::new(),
|
||||
target_version,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn vendored(&self) -> &VendoredFileSystem {
|
||||
self.db.vendored()
|
||||
}
|
||||
}
|
||||
crates/red_knot_python_semantic/src/module_resolver/testing.rs (new file, 294 lines)
@@ -0,0 +1,294 @@
|
||||
use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
|
||||
use ruff_db::vendored::VendoredPathBuf;
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
|
||||
/// A test case for the module resolver.
|
||||
///
|
||||
/// You generally shouldn't construct instances of this struct directly;
|
||||
/// instead, use the [`TestCaseBuilder`].
|
||||
pub(crate) struct TestCase<T> {
|
||||
pub(crate) db: TestDb,
|
||||
pub(crate) src: SystemPathBuf,
|
||||
pub(crate) stdlib: T,
|
||||
// Most test cases only ever need a single `site-packages` directory,
|
||||
// so this is a single directory instead of a `Vec` of directories,
|
||||
// like it is in `ruff_db::Program`.
|
||||
pub(crate) site_packages: SystemPathBuf,
|
||||
pub(crate) target_version: TargetVersion,
|
||||
}
|
||||
|
||||
/// A `(file_name, file_contents)` tuple
|
||||
pub(crate) type FileSpec = (&'static str, &'static str);
|
||||
|
||||
/// Specification for a typeshed mock to be created as part of a test
|
||||
#[derive(Debug, Clone, Copy, Default)]
|
||||
pub(crate) struct MockedTypeshed {
|
||||
/// The stdlib files to be created in the typeshed mock
|
||||
pub(crate) stdlib_files: &'static [FileSpec],
|
||||
|
||||
/// The contents of the `stdlib/VERSIONS` file
|
||||
/// to be created in the typeshed mock
|
||||
pub(crate) versions: &'static str,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct VendoredTypeshed;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct UnspecifiedTypeshed;
|
||||
|
||||
/// A builder for a module-resolver test case.
|
||||
///
|
||||
/// The builder takes care of creating a [`TestDb`]
|
||||
/// instance, applying the module resolver settings,
|
||||
/// and creating mock directories for the stdlib, `site-packages`,
|
||||
/// first-party code, etc.
|
||||
///
|
||||
/// For simple tests that do not involve typeshed,
|
||||
/// test cases can be created as follows:
|
||||
///
|
||||
/// ```rs
|
||||
/// let test_case = TestCaseBuilder::new()
|
||||
/// .with_src_files(...)
|
||||
/// .build();
|
||||
///
|
||||
/// let test_case2 = TestCaseBuilder::new()
|
||||
/// .with_site_packages_files(...)
|
||||
/// .build();
|
||||
/// ```
|
||||
///
|
||||
/// Any test can also specify the target Python version that should be used
|
||||
/// in the module resolver settings:
|
||||
///
|
||||
/// ```rs
|
||||
/// let test_case = TestCaseBuilder::new()
|
||||
/// .with_src_files(...)
|
||||
/// .with_target_version(...)
|
||||
/// .build();
|
||||
/// ```
|
||||
///
|
||||
/// For tests checking that standard-library module resolution is working
|
||||
/// correctly, you should usually create a [`MockedTypeshed`] instance
|
||||
/// and pass it to the [`TestCaseBuilder::with_custom_typeshed`] method.
|
||||
/// If you need to check something that involves the vendored typeshed stubs
|
||||
/// we include as part of the binary, you can instead use the
|
||||
/// [`TestCaseBuilder::with_vendored_typeshed`] method.
|
||||
/// For either of these, you should almost always try to be explicit
|
||||
/// about the Python version you want to be specified in the module-resolver
|
||||
/// settings for the test:
|
||||
///
|
||||
/// ```rs
|
||||
/// const TYPESHED: MockedTypeshed = MockedTypeshed { ... };
|
||||
///
|
||||
/// let test_case = resolver_test_case()
|
||||
/// .with_custom_typeshed(TYPESHED)
|
||||
/// .with_target_version(...)
|
||||
/// .build();
|
||||
///
|
||||
/// let test_case2 = resolver_test_case()
|
||||
/// .with_vendored_typeshed()
|
||||
/// .with_target_version(...)
|
||||
/// .build();
|
||||
/// ```
|
||||
///
|
||||
/// If you do not call either of those methods, the `stdlib` field
/// on the [`TestCase`] instance created by `.build()` will be set
/// to `()`.
|
||||
pub(crate) struct TestCaseBuilder<T> {
|
||||
typeshed_option: T,
|
||||
target_version: TargetVersion,
|
||||
first_party_files: Vec<FileSpec>,
|
||||
site_packages_files: Vec<FileSpec>,
|
||||
}
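// Illustrative sketch (not part of the diff): a concrete use of the builder above.
// The file names, file contents, and VERSIONS data here are invented for illustration.
#[test]
fn builder_usage_sketch() {
    const TYPESHED: MockedTypeshed = MockedTypeshed {
        stdlib_files: &[("functools.pyi", "def update_wrapper(): ...")],
        versions: "functools: 3.8-",
    };

    let TestCase { src, stdlib, .. } = TestCaseBuilder::new()
        .with_src_files(&[("main.py", "import functools")])
        .with_custom_typeshed(TYPESHED)
        .with_target_version(TargetVersion::Py38)
        .build();

    // Per the builder above, the mock directories are rooted at fixed paths:
    assert_eq!(src, SystemPathBuf::from("/src"));
    assert_eq!(stdlib, SystemPathBuf::from("/typeshed/stdlib"));
}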
|
||||
|
||||
impl<T> TestCaseBuilder<T> {
|
||||
/// Specify files to be created in the `src` mock directory
|
||||
pub(crate) fn with_src_files(mut self, files: &[FileSpec]) -> Self {
|
||||
self.first_party_files.extend(files.iter().copied());
|
||||
self
|
||||
}
|
||||
|
||||
/// Specify files to be created in the `site-packages` mock directory
|
||||
pub(crate) fn with_site_packages_files(mut self, files: &[FileSpec]) -> Self {
|
||||
self.site_packages_files.extend(files.iter().copied());
|
||||
self
|
||||
}
|
||||
|
||||
/// Specify the target Python version the module resolver should assume
|
||||
pub(crate) fn with_target_version(mut self, target_version: TargetVersion) -> Self {
|
||||
self.target_version = target_version;
|
||||
self
|
||||
}
|
||||
|
||||
fn write_mock_directory(
|
||||
db: &mut TestDb,
|
||||
location: impl AsRef<SystemPath>,
|
||||
files: impl IntoIterator<Item = FileSpec>,
|
||||
) -> SystemPathBuf {
|
||||
let root = location.as_ref().to_path_buf();
|
||||
// Make sure to create the directory even if the list of files is empty:
|
||||
db.memory_file_system().create_directory_all(&root).unwrap();
|
||||
db.write_files(
|
||||
files
|
||||
.into_iter()
|
||||
.map(|(relative_path, contents)| (root.join(relative_path), contents)),
|
||||
)
|
||||
.unwrap();
|
||||
root
|
||||
}
|
||||
}
|
||||
|
||||
impl TestCaseBuilder<UnspecifiedTypeshed> {
|
||||
pub(crate) fn new() -> TestCaseBuilder<UnspecifiedTypeshed> {
|
||||
Self {
|
||||
typeshed_option: UnspecifiedTypeshed,
|
||||
target_version: TargetVersion::default(),
|
||||
first_party_files: vec![],
|
||||
site_packages_files: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
/// Use the vendored stdlib stubs included in the Ruff binary for this test case
|
||||
pub(crate) fn with_vendored_typeshed(self) -> TestCaseBuilder<VendoredTypeshed> {
|
||||
let TestCaseBuilder {
|
||||
typeshed_option: _,
|
||||
target_version,
|
||||
first_party_files,
|
||||
site_packages_files,
|
||||
} = self;
|
||||
TestCaseBuilder {
|
||||
typeshed_option: VendoredTypeshed,
|
||||
target_version,
|
||||
first_party_files,
|
||||
site_packages_files,
|
||||
}
|
||||
}
|
||||
|
||||
/// Use a mock typeshed directory for this test case
|
||||
pub(crate) fn with_custom_typeshed(
|
||||
self,
|
||||
typeshed: MockedTypeshed,
|
||||
) -> TestCaseBuilder<MockedTypeshed> {
|
||||
let TestCaseBuilder {
|
||||
typeshed_option: _,
|
||||
target_version,
|
||||
first_party_files,
|
||||
site_packages_files,
|
||||
} = self;
|
||||
TestCaseBuilder {
|
||||
typeshed_option: typeshed,
|
||||
target_version,
|
||||
first_party_files,
|
||||
site_packages_files,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn build(self) -> TestCase<()> {
|
||||
let TestCase {
|
||||
db,
|
||||
src,
|
||||
stdlib: _,
|
||||
site_packages,
|
||||
target_version,
|
||||
} = self.with_custom_typeshed(MockedTypeshed::default()).build();
|
||||
TestCase {
|
||||
db,
|
||||
src,
|
||||
stdlib: (),
|
||||
site_packages,
|
||||
target_version,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TestCaseBuilder<MockedTypeshed> {
|
||||
pub(crate) fn build(self) -> TestCase<SystemPathBuf> {
|
||||
let TestCaseBuilder {
|
||||
typeshed_option,
|
||||
target_version,
|
||||
first_party_files,
|
||||
site_packages_files,
|
||||
} = self;
|
||||
|
||||
let mut db = TestDb::new();
|
||||
|
||||
let site_packages =
|
||||
Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
|
||||
let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
|
||||
let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);
|
||||
|
||||
Program::new(
|
||||
&db,
|
||||
target_version,
|
||||
SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: src.clone(),
|
||||
custom_typeshed: Some(typeshed.clone()),
|
||||
site_packages: vec![site_packages.clone()],
|
||||
},
|
||||
);
|
||||
|
||||
TestCase {
|
||||
db,
|
||||
src,
|
||||
stdlib: typeshed.join("stdlib"),
|
||||
site_packages,
|
||||
target_version,
|
||||
}
|
||||
}
|
||||
|
||||
fn build_typeshed_mock(db: &mut TestDb, typeshed_to_build: &MockedTypeshed) -> SystemPathBuf {
|
||||
let typeshed = SystemPathBuf::from("/typeshed");
|
||||
let MockedTypeshed {
|
||||
stdlib_files,
|
||||
versions,
|
||||
} = typeshed_to_build;
|
||||
Self::write_mock_directory(
|
||||
db,
|
||||
typeshed.join("stdlib"),
|
||||
stdlib_files
|
||||
.iter()
|
||||
.copied()
|
||||
.chain(std::iter::once(("VERSIONS", *versions))),
|
||||
);
|
||||
typeshed
|
||||
}
|
||||
}
|
||||
|
||||
impl TestCaseBuilder<VendoredTypeshed> {
|
||||
pub(crate) fn build(self) -> TestCase<VendoredPathBuf> {
|
||||
let TestCaseBuilder {
|
||||
typeshed_option: VendoredTypeshed,
|
||||
target_version,
|
||||
first_party_files,
|
||||
site_packages_files,
|
||||
} = self;
|
||||
|
||||
let mut db = TestDb::new();
|
||||
|
||||
let site_packages =
|
||||
Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
|
||||
let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
|
||||
|
||||
Program::new(
|
||||
&db,
|
||||
target_version,
|
||||
SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: src.clone(),
|
||||
custom_typeshed: None,
|
||||
site_packages: vec![site_packages.clone()],
|
||||
},
|
||||
);
|
||||
|
||||
TestCase {
|
||||
db,
|
||||
src,
|
||||
stdlib: VendoredPathBuf::from("stdlib"),
|
||||
site_packages,
|
||||
target_version,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
pub use self::vendored::vendored_typeshed_stubs;
|
||||
pub(super) use self::versions::{
|
||||
parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsParseError,
|
||||
TypeshedVersionsQueryResult,
|
||||
};
|
||||
|
||||
mod vendored;
|
||||
mod versions;
|
||||
@@ -0,0 +1,99 @@
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
use ruff_db::vendored::VendoredFileSystem;
|
||||
|
||||
// The file path here is hardcoded in this crate's `build.rs` script.
|
||||
// Luckily this crate will fail to build if this file isn't available at build time.
|
||||
static TYPESHED_ZIP_BYTES: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
|
||||
|
||||
pub fn vendored_typeshed_stubs() -> &'static VendoredFileSystem {
|
||||
static VENDORED_TYPESHED_STUBS: Lazy<VendoredFileSystem> =
|
||||
Lazy::new(|| VendoredFileSystem::new_static(TYPESHED_ZIP_BYTES).unwrap());
|
||||
&VENDORED_TYPESHED_STUBS
|
||||
}
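// Illustrative sketch (not part of the diff): reading one of the bundled stubs
// back out of the vendored file system returned above.
#[test]
fn vendored_stubs_usage_sketch() {
    let stubs = vendored_typeshed_stubs();
    let source = stubs.read_to_string("stdlib/functools.pyi").unwrap();
    assert!(source.contains("def update_wrapper("));
}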
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::io::{self, Read};
|
||||
use std::path::Path;
|
||||
|
||||
use ruff_db::vendored::VendoredPath;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn typeshed_zip_created_at_build_time() {
|
||||
let mut typeshed_zip_archive =
|
||||
zip::ZipArchive::new(io::Cursor::new(TYPESHED_ZIP_BYTES)).unwrap();
|
||||
|
||||
let mut functools_module_stub = typeshed_zip_archive
|
||||
.by_name("stdlib/functools.pyi")
|
||||
.unwrap();
|
||||
assert!(functools_module_stub.is_file());
|
||||
|
||||
let mut functools_module_stub_source = String::new();
|
||||
functools_module_stub
|
||||
.read_to_string(&mut functools_module_stub_source)
|
||||
.unwrap();
|
||||
|
||||
assert!(functools_module_stub_source.contains("def update_wrapper("));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn typeshed_vfs_consistent_with_vendored_stubs() {
|
||||
let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
|
||||
let vendored_typeshed_stubs = vendored_typeshed_stubs();
|
||||
|
||||
let mut empty_iterator = true;
|
||||
for entry in walkdir::WalkDir::new(&vendored_typeshed_dir).min_depth(1) {
|
||||
empty_iterator = false;
|
||||
let entry = entry.unwrap();
|
||||
let absolute_path = entry.path();
|
||||
let file_type = entry.file_type();
|
||||
|
||||
let relative_path = absolute_path
|
||||
.strip_prefix(&vendored_typeshed_dir)
|
||||
.unwrap_or_else(|_| {
|
||||
panic!("Expected {absolute_path:?} to be a child of {vendored_typeshed_dir:?}")
|
||||
});
|
||||
|
||||
let vendored_path = <&VendoredPath>::try_from(relative_path)
|
||||
.unwrap_or_else(|_| panic!("Expected {relative_path:?} to be valid UTF-8"));
|
||||
|
||||
assert!(
|
||||
vendored_typeshed_stubs.exists(vendored_path),
|
||||
"Expected {vendored_path:?} to exist in the `VendoredFileSystem`!
|
||||
|
||||
Vendored file system:
|
||||
|
||||
{vendored_typeshed_stubs:#?}
|
||||
"
|
||||
);
|
||||
|
||||
let vendored_path_kind = vendored_typeshed_stubs
|
||||
.metadata(vendored_path)
|
||||
.unwrap_or_else(|_| {
|
||||
panic!(
|
||||
"Expected metadata for {vendored_path:?} to be retrievable from the `VendoredFileSystem!
|
||||
|
||||
Vendored file system:
|
||||
|
||||
{vendored_typeshed_stubs:#?}
|
||||
"
|
||||
)
|
||||
})
|
||||
.kind();
|
||||
|
||||
assert_eq!(
|
||||
vendored_path_kind.is_directory(),
|
||||
file_type.is_dir(),
|
||||
"{vendored_path:?} had type {vendored_path_kind:?}, inconsistent with fs path {relative_path:?}: {file_type:?}"
|
||||
);
|
||||
}
|
||||
|
||||
assert!(
|
||||
!empty_iterator,
|
||||
"Expected there to be at least one file or directory in the vendored typeshed stubs!"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,817 @@
|
||||
use std::cell::OnceCell;
|
||||
use std::collections::BTreeMap;
|
||||
use std::fmt;
|
||||
use std::num::{NonZeroU16, NonZeroUsize};
|
||||
use std::ops::{RangeFrom, RangeInclusive};
|
||||
use std::str::FromStr;
|
||||
|
||||
use once_cell::sync::Lazy;
|
||||
use ruff_db::program::TargetVersion;
|
||||
use ruff_db::system::SystemPath;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
|
||||
use crate::db::Db;
|
||||
use crate::module_name::ModuleName;
|
||||
|
||||
use super::vendored::vendored_typeshed_stubs;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);
|
||||
|
||||
impl<'db> LazyTypeshedVersions<'db> {
|
||||
#[must_use]
|
||||
pub(crate) fn new() -> Self {
|
||||
Self(OnceCell::new())
|
||||
}
|
||||
|
||||
/// Query whether a module exists at runtime in the stdlib on a certain Python version.
|
||||
///
|
||||
/// Simply probing whether a file exists in typeshed is insufficient for this question,
|
||||
/// as a module in the stdlib may have been added in Python 3.10, but the typeshed stub
|
||||
/// will still be available (either in a custom typeshed dir or in our vendored copy)
|
||||
/// even if the user specified Python 3.8 as the target version.
|
||||
///
|
||||
/// For top-level modules and packages, the VERSIONS file can always provide an unambiguous answer
|
||||
/// as to whether the module exists on the specified target version. However, VERSIONS does not
|
||||
/// provide comprehensive information on all submodules, meaning that this method sometimes
|
||||
/// returns [`TypeshedVersionsQueryResult::MaybeExists`].
|
||||
/// See [`TypeshedVersionsQueryResult`] for more details.
|
||||
#[must_use]
|
||||
pub(crate) fn query_module(
|
||||
&self,
|
||||
db: &'db dyn Db,
|
||||
module: &ModuleName,
|
||||
stdlib_root: Option<&SystemPath>,
|
||||
target_version: TargetVersion,
|
||||
) -> TypeshedVersionsQueryResult {
|
||||
let versions = self.0.get_or_init(|| {
|
||||
let versions_path = if let Some(system_path) = stdlib_root {
|
||||
system_path.join("VERSIONS")
|
||||
} else {
|
||||
return &VENDORED_VERSIONS;
|
||||
};
|
||||
let Ok(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
|
||||
todo!(
|
||||
"Still need to figure out how to handle VERSIONS files being deleted \
|
||||
from custom typeshed directories! Expected a file to exist at {versions_path}"
|
||||
)
|
||||
};
|
||||
// TODO(Alex/Micha): If VERSIONS is invalid,
|
||||
// this should invalidate not just the specific module resolution we're currently attempting,
|
||||
// but all type inference that depends on any standard-library types.
|
||||
// Unwrapping here is not correct...
|
||||
parse_typeshed_versions(db, versions_file).as_ref().unwrap()
|
||||
});
|
||||
versions.query_module(module, PyVersion::from(target_version))
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::tracked(return_ref)]
|
||||
pub(crate) fn parse_typeshed_versions(
|
||||
db: &dyn Db,
|
||||
versions_file: File,
|
||||
) -> Result<TypeshedVersions, TypeshedVersionsParseError> {
|
||||
// TODO: Handle IO errors
|
||||
let file_content = versions_file
|
||||
.read_to_string(db.upcast())
|
||||
.unwrap_or_default();
|
||||
file_content.parse()
|
||||
}
|
||||
|
||||
static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
|
||||
TypeshedVersions::from_str(
|
||||
&vendored_typeshed_stubs()
|
||||
.read_to_string("stdlib/VERSIONS")
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap()
|
||||
});
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||
pub(crate) struct TypeshedVersionsParseError {
|
||||
line_number: Option<NonZeroU16>,
|
||||
reason: TypeshedVersionsParseErrorKind,
|
||||
}
|
||||
|
||||
impl fmt::Display for TypeshedVersionsParseError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let TypeshedVersionsParseError {
|
||||
line_number,
|
||||
reason,
|
||||
} = self;
|
||||
if let Some(line_number) = line_number {
|
||||
write!(
|
||||
f,
|
||||
"Error while parsing line {line_number} of typeshed's VERSIONS file: {reason}"
|
||||
)
|
||||
} else {
|
||||
write!(f, "Error while parsing typeshed's VERSIONS file: {reason}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for TypeshedVersionsParseError {
|
||||
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
|
||||
if let TypeshedVersionsParseErrorKind::IntegerParsingFailure { err, .. } = &self.reason {
|
||||
Some(err)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||
pub(super) enum TypeshedVersionsParseErrorKind {
|
||||
TooManyLines(NonZeroUsize),
|
||||
UnexpectedNumberOfColons,
|
||||
InvalidModuleName(String),
|
||||
UnexpectedNumberOfHyphens,
|
||||
UnexpectedNumberOfPeriods(String),
|
||||
IntegerParsingFailure {
|
||||
version: String,
|
||||
err: std::num::ParseIntError,
|
||||
},
|
||||
}
|
||||
|
||||
impl fmt::Display for TypeshedVersionsParseErrorKind {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::TooManyLines(num_lines) => write!(
|
||||
f,
|
||||
"File has too many lines ({num_lines}); maximum allowed is {}",
|
||||
NonZeroU16::MAX
|
||||
),
|
||||
Self::UnexpectedNumberOfColons => {
|
||||
f.write_str("Expected every non-comment line to have exactly one colon")
|
||||
}
|
||||
Self::InvalidModuleName(name) => write!(
|
||||
f,
|
||||
"Expected all components of '{name}' to be valid Python identifiers"
|
||||
),
|
||||
Self::UnexpectedNumberOfHyphens => {
|
||||
f.write_str("Expected every non-comment line to have exactly one '-' character")
|
||||
}
|
||||
Self::UnexpectedNumberOfPeriods(format) => write!(
|
||||
f,
|
||||
"Expected all versions to be in the form {{MAJOR}}.{{MINOR}}; got '{format}'"
|
||||
),
|
||||
Self::IntegerParsingFailure { version, err } => write!(
|
||||
f,
|
||||
"Failed to convert '{version}' to a pair of integers due to {err}",
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub(crate) struct TypeshedVersions(FxHashMap<ModuleName, PyVersionRange>);
|
||||
|
||||
impl TypeshedVersions {
|
||||
#[must_use]
|
||||
fn exact(&self, module_name: &ModuleName) -> Option<&PyVersionRange> {
|
||||
self.0.get(module_name)
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn query_module(
|
||||
&self,
|
||||
module: &ModuleName,
|
||||
target_version: PyVersion,
|
||||
) -> TypeshedVersionsQueryResult {
|
||||
if let Some(range) = self.exact(module) {
|
||||
if range.contains(target_version) {
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
} else {
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
}
|
||||
} else {
|
||||
let mut module = module.parent();
|
||||
while let Some(module_to_try) = module {
|
||||
if let Some(range) = self.exact(&module_to_try) {
|
||||
return {
|
||||
if range.contains(target_version) {
|
||||
TypeshedVersionsQueryResult::MaybeExists
|
||||
} else {
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
}
|
||||
};
|
||||
}
|
||||
module = module_to_try.parent();
|
||||
}
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Possible answers [`LazyTypeshedVersions::query_module()`] could give to the question:
|
||||
/// "Does this module exist in the stdlib at runtime on a certain target version?"
|
||||
#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
|
||||
pub(crate) enum TypeshedVersionsQueryResult {
|
||||
/// The module definitely exists in the stdlib at runtime on the user-specified target version.
|
||||
///
|
||||
/// For example:
|
||||
/// - The target version is Python 3.8
|
||||
/// - We're querying whether the `asyncio.tasks` module exists in the stdlib
|
||||
/// - The VERSIONS file contains the line `asyncio.tasks: 3.8-`
|
||||
Exists,
|
||||
|
||||
/// The module definitely does not exist in the stdlib on the user-specified target version.
|
||||
///
|
||||
/// For example:
|
||||
/// - We're querying whether the `foo` module exists in the stdlib
|
||||
/// - There is no top-level `foo` module in VERSIONS
|
||||
///
|
||||
/// OR:
|
||||
/// - The target version is Python 3.8
|
||||
/// - We're querying whether the module `importlib.abc` exists in the stdlib
|
||||
/// - The VERSIONS file contains the line `importlib.abc: 3.10-`,
|
||||
/// indicating that the module was added in 3.10
|
||||
///
|
||||
/// OR:
|
||||
/// - The target version is Python 3.8
|
||||
/// - We're querying whether the module `collections.abc` exists in the stdlib
|
||||
/// - The VERSIONS file does not contain any information about the `collections.abc` submodule,
|
||||
/// but *does* contain the line `collections: 3.10-`,
|
||||
/// indicating that the entire `collections` package was added in Python 3.10.
|
||||
DoesNotExist,
|
||||
|
||||
/// The module potentially exists in the stdlib and, if it does,
|
||||
/// it definitely exists on the user-specified target version.
|
||||
///
|
||||
/// This variant is only relevant for submodules,
|
||||
/// for which the typeshed VERSIONS file does not provide comprehensive information.
|
||||
/// (The VERSIONS file is guaranteed to provide information about all top-level stdlib modules and packages,
|
||||
/// but not necessarily about all submodules within each top-level package.)
|
||||
///
|
||||
/// For example:
|
||||
/// - The target version is Python 3.8
|
||||
/// - We're querying whether the `asyncio.staggered` module exists in the stdlib
|
||||
/// - The typeshed VERSIONS file contains the line `asyncio: 3.8-`,
|
||||
/// indicating that the `asyncio` package was added in Python 3.8,
|
||||
/// but does not contain any explicit information about the `asyncio.staggered` submodule.
|
||||
MaybeExists,
|
||||
}
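// Illustrative sketch (not part of the diff): how the three variants above arise
// for a VERSIONS file that contains the single line `asyncio: 3.8-`.
#[test]
fn query_result_sketch() {
    let versions = TypeshedVersions::from_str("asyncio: 3.8-").unwrap();
    let asyncio = ModuleName::new_static("asyncio").unwrap();
    let staggered = ModuleName::new_static("asyncio.staggered").unwrap();

    // Exact entry, target version in range.
    assert_eq!(
        versions.query_module(&asyncio, TargetVersion::Py38.into()),
        TypeshedVersionsQueryResult::Exists
    );
    // Exact entry, target version below the range.
    assert_eq!(
        versions.query_module(&asyncio, TargetVersion::Py37.into()),
        TypeshedVersionsQueryResult::DoesNotExist
    );
    // No exact entry for the submodule, but its parent package is in range.
    assert_eq!(
        versions.query_module(&staggered, TargetVersion::Py38.into()),
        TypeshedVersionsQueryResult::MaybeExists
    );
}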
|
||||
|
||||
impl FromStr for TypeshedVersions {
|
||||
type Err = TypeshedVersionsParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let mut map = FxHashMap::default();
|
||||
|
||||
for (line_index, line) in s.lines().enumerate() {
|
||||
// humans expect line numbers to be 1-indexed
|
||||
let line_number = NonZeroUsize::new(line_index.saturating_add(1)).unwrap();
|
||||
|
||||
let Ok(line_number) = NonZeroU16::try_from(line_number) else {
|
||||
return Err(TypeshedVersionsParseError {
|
||||
line_number: None,
|
||||
reason: TypeshedVersionsParseErrorKind::TooManyLines(line_number),
|
||||
});
|
||||
};
|
||||
|
||||
let Some(content) = line.split('#').map(str::trim).next() else {
|
||||
continue;
|
||||
};
|
||||
if content.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut parts = content.split(':').map(str::trim);
|
||||
let (Some(module_name), Some(rest), None) = (parts.next(), parts.next(), parts.next())
|
||||
else {
|
||||
return Err(TypeshedVersionsParseError {
|
||||
line_number: Some(line_number),
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons,
|
||||
});
|
||||
};
|
||||
|
||||
let Some(module_name) = ModuleName::new(module_name) else {
|
||||
return Err(TypeshedVersionsParseError {
|
||||
line_number: Some(line_number),
|
||||
reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
|
||||
module_name.to_string(),
|
||||
),
|
||||
});
|
||||
};
|
||||
|
||||
match PyVersionRange::from_str(rest) {
|
||||
Ok(version) => map.insert(module_name, version),
|
||||
Err(reason) => {
|
||||
return Err(TypeshedVersionsParseError {
|
||||
line_number: Some(line_number),
|
||||
reason,
|
||||
})
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Ok(Self(map))
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for TypeshedVersions {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let sorted_items: BTreeMap<&ModuleName, &PyVersionRange> = self.0.iter().collect();
|
||||
for (module_name, range) in sorted_items {
|
||||
writeln!(f, "{module_name}: {range}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
|
||||
enum PyVersionRange {
|
||||
AvailableFrom(RangeFrom<PyVersion>),
|
||||
AvailableWithin(RangeInclusive<PyVersion>),
|
||||
}
|
||||
|
||||
impl PyVersionRange {
|
||||
#[must_use]
|
||||
fn contains(&self, version: PyVersion) -> bool {
|
||||
match self {
|
||||
Self::AvailableFrom(inner) => inner.contains(&version),
|
||||
Self::AvailableWithin(inner) => inner.contains(&version),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for PyVersionRange {
|
||||
type Err = TypeshedVersionsParseErrorKind;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let mut parts = s.split('-').map(str::trim);
|
||||
match (parts.next(), parts.next(), parts.next()) {
|
||||
(Some(lower), Some(""), None) => Ok(Self::AvailableFrom((lower.parse()?)..)),
|
||||
(Some(lower), Some(upper), None) => {
|
||||
Ok(Self::AvailableWithin((lower.parse()?)..=(upper.parse()?)))
|
||||
}
|
||||
_ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
|
||||
}
|
||||
}
|
||||
}
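// Illustrative sketch (not part of the diff): the two range forms accepted above.
#[test]
fn version_range_parse_sketch() {
    assert_eq!(
        PyVersionRange::from_str("3.8-").unwrap(),
        PyVersionRange::AvailableFrom(PyVersion { major: 3, minor: 8 }..)
    );
    assert_eq!(
        PyVersionRange::from_str("2.7-3.10").unwrap(),
        PyVersionRange::AvailableWithin(
            PyVersion { major: 2, minor: 7 }..=PyVersion { major: 3, minor: 10 }
        )
    );
}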
|
||||
|
||||
impl fmt::Display for PyVersionRange {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::AvailableFrom(range_from) => write!(f, "{}-", range_from.start),
|
||||
Self::AvailableWithin(range_inclusive) => {
|
||||
write!(f, "{}-{}", range_inclusive.start(), range_inclusive.end())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||
struct PyVersion {
|
||||
major: u8,
|
||||
minor: u8,
|
||||
}
|
||||
|
||||
impl FromStr for PyVersion {
|
||||
type Err = TypeshedVersionsParseErrorKind;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let mut parts = s.split('.').map(str::trim);
|
||||
let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
|
||||
return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
|
||||
s.to_string(),
|
||||
));
|
||||
};
|
||||
let major = match u8::from_str(major) {
|
||||
Ok(major) => major,
|
||||
Err(err) => {
|
||||
return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
|
||||
version: s.to_string(),
|
||||
err,
|
||||
})
|
||||
}
|
||||
};
|
||||
let minor = match u8::from_str(minor) {
|
||||
Ok(minor) => minor,
|
||||
Err(err) => {
|
||||
return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
|
||||
version: s.to_string(),
|
||||
err,
|
||||
})
|
||||
}
|
||||
};
|
||||
Ok(Self { major, minor })
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for PyVersion {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let PyVersion { major, minor } = self;
|
||||
write!(f, "{major}.{minor}")
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TargetVersion> for PyVersion {
|
||||
fn from(value: TargetVersion) -> Self {
|
||||
match value {
|
||||
TargetVersion::Py37 => PyVersion { major: 3, minor: 7 },
|
||||
TargetVersion::Py38 => PyVersion { major: 3, minor: 8 },
|
||||
TargetVersion::Py39 => PyVersion { major: 3, minor: 9 },
|
||||
TargetVersion::Py310 => PyVersion {
|
||||
major: 3,
|
||||
minor: 10,
|
||||
},
|
||||
TargetVersion::Py311 => PyVersion {
|
||||
major: 3,
|
||||
minor: 11,
|
||||
},
|
||||
TargetVersion::Py312 => PyVersion {
|
||||
major: 3,
|
||||
minor: 12,
|
||||
},
|
||||
TargetVersion::Py313 => PyVersion {
|
||||
major: 3,
|
||||
minor: 13,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::num::{IntErrorKind, NonZeroU16};
|
||||
use std::path::Path;
|
||||
|
||||
use insta::assert_snapshot;
|
||||
use ruff_db::program::TargetVersion;
|
||||
|
||||
use super::*;
|
||||
|
||||
const TYPESHED_STDLIB_DIR: &str = "stdlib";
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
const ONE: Option<NonZeroU16> = Some(unsafe { NonZeroU16::new_unchecked(1) });
|
||||
|
||||
impl TypeshedVersions {
|
||||
#[must_use]
|
||||
fn contains_exact(&self, module: &ModuleName) -> bool {
|
||||
self.exact(module).is_some()
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_parse_vendored_versions_file() {
|
||||
let versions_data = include_str!(concat!(
|
||||
env!("CARGO_MANIFEST_DIR"),
|
||||
"/vendor/typeshed/stdlib/VERSIONS"
|
||||
));
|
||||
|
||||
let versions = TypeshedVersions::from_str(versions_data).unwrap();
|
||||
assert!(versions.len() > 100);
|
||||
assert!(versions.len() < 1000);
|
||||
|
||||
let asyncio = ModuleName::new_static("asyncio").unwrap();
|
||||
let asyncio_staggered = ModuleName::new_static("asyncio.staggered").unwrap();
|
||||
let audioop = ModuleName::new_static("audioop").unwrap();
|
||||
|
||||
assert!(versions.contains_exact(&asyncio));
|
||||
assert_eq!(
|
||||
versions.query_module(&asyncio, TargetVersion::Py310.into()),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
|
||||
assert!(versions.contains_exact(&asyncio_staggered));
|
||||
assert_eq!(
|
||||
versions.query_module(&asyncio_staggered, TargetVersion::Py38.into()),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
versions.query_module(&asyncio_staggered, TargetVersion::Py37.into()),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
|
||||
assert!(versions.contains_exact(&audioop));
|
||||
assert_eq!(
|
||||
versions.query_module(&audioop, TargetVersion::Py312.into()),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
versions.query_module(&audioop, TargetVersion::Py313.into()),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn typeshed_versions_consistent_with_vendored_stubs() {
|
||||
const VERSIONS_DATA: &str = include_str!("../../../vendor/typeshed/stdlib/VERSIONS");
|
||||
let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
|
||||
let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();
|
||||
|
||||
let mut empty_iterator = true;
|
||||
|
||||
let stdlib_stubs_path = vendored_typeshed_dir.join(TYPESHED_STDLIB_DIR);
|
||||
|
||||
for entry in std::fs::read_dir(&stdlib_stubs_path).unwrap() {
|
||||
empty_iterator = false;
|
||||
let entry = entry.unwrap();
|
||||
let absolute_path = entry.path();
|
||||
|
||||
let relative_path = absolute_path
|
||||
.strip_prefix(&stdlib_stubs_path)
|
||||
.unwrap_or_else(|_| panic!("Expected path to be a child of {stdlib_stubs_path:?} but found {absolute_path:?}"));
|
||||
|
||||
let relative_path_str = relative_path.as_os_str().to_str().unwrap_or_else(|| {
|
||||
panic!("Expected all typeshed paths to be valid UTF-8; got {relative_path:?}")
|
||||
});
|
||||
if relative_path_str == "VERSIONS" {
|
||||
continue;
|
||||
}
|
||||
|
||||
let top_level_module = if let Some(extension) = relative_path.extension() {
|
||||
// It was a file; strip off the file extension to get the module name:
|
||||
let extension = extension
|
||||
.to_str()
|
||||
.unwrap_or_else(|| panic!("Expected all file extensions to be UTF-8; was not true for {relative_path:?}"));
|
||||
|
||||
relative_path_str
|
||||
.strip_suffix(extension)
|
||||
.and_then(|string| string.strip_suffix('.')).unwrap_or_else(|| {
|
||||
panic!("Expected path {relative_path_str:?} to end with computed extension {extension:?}")
|
||||
})
|
||||
} else {
|
||||
// It was a directory; no need to do anything to get the module name
|
||||
relative_path_str
|
||||
};
|
||||
|
||||
let top_level_module = ModuleName::new(top_level_module)
|
||||
.unwrap_or_else(|| panic!("{top_level_module:?} was not a valid module name!"));
|
||||
|
||||
assert!(vendored_typeshed_versions.contains_exact(&top_level_module));
|
||||
}
|
||||
|
||||
assert!(
|
||||
!empty_iterator,
|
||||
"Expected there to be at least one file or directory in the vendored typeshed stubs"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_parse_mock_versions_file() {
|
||||
const VERSIONS: &str = "\
|
||||
# a comment
|
||||
# some more comment
|
||||
# yet more comment
|
||||
|
||||
|
||||
# and some more comment
|
||||
|
||||
bar: 2.7-3.10
|
||||
|
||||
# more comment
|
||||
bar.baz: 3.1-3.9
|
||||
foo: 3.8- # trailing comment
|
||||
";
|
||||
let parsed_versions = TypeshedVersions::from_str(VERSIONS).unwrap();
|
||||
assert_eq!(parsed_versions.len(), 3);
|
||||
assert_snapshot!(parsed_versions.to_string(), @r###"
|
||||
bar: 2.7-3.10
|
||||
bar.baz: 3.1-3.9
|
||||
foo: 3.8-
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_within_range_parsed_correctly() {
|
||||
let parsed_versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
|
||||
let bar = ModuleName::new_static("bar").unwrap();
|
||||
|
||||
assert!(parsed_versions.contains_exact(&bar));
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar, TargetVersion::Py37.into()),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar, TargetVersion::Py310.into()),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar, TargetVersion::Py311.into()),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_from_range_parsed_correctly() {
|
||||
let parsed_versions = TypeshedVersions::from_str("foo: 3.8-").unwrap();
|
||||
let foo = ModuleName::new_static("foo").unwrap();
|
||||
|
||||
assert!(parsed_versions.contains_exact(&foo));
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&foo, TargetVersion::Py37.into()),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&foo, TargetVersion::Py38.into()),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&foo, TargetVersion::Py311.into()),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn explicit_submodule_parsed_correctly() {
|
||||
let parsed_versions = TypeshedVersions::from_str("bar.baz: 3.1-3.9").unwrap();
|
||||
let bar_baz = ModuleName::new_static("bar.baz").unwrap();
|
||||
|
||||
assert!(parsed_versions.contains_exact(&bar_baz));
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar_baz, TargetVersion::Py37.into()),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar_baz, TargetVersion::Py39.into()),
|
||||
TypeshedVersionsQueryResult::Exists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar_baz, TargetVersion::Py310.into()),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn implicit_submodule_queried_correctly() {
|
||||
let parsed_versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
|
||||
let bar_eggs = ModuleName::new_static("bar.eggs").unwrap();
|
||||
|
||||
assert!(!parsed_versions.contains_exact(&bar_eggs));
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar_eggs, TargetVersion::Py37.into()),
|
||||
TypeshedVersionsQueryResult::MaybeExists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar_eggs, TargetVersion::Py310.into()),
|
||||
TypeshedVersionsQueryResult::MaybeExists
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&bar_eggs, TargetVersion::Py311.into()),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nonexistent_module_queried_correctly() {
|
||||
let parsed_versions = TypeshedVersions::from_str("eggs: 3.8-").unwrap();
|
||||
let spam = ModuleName::new_static("spam").unwrap();
|
||||
|
||||
assert!(!parsed_versions.contains_exact(&spam));
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&spam, TargetVersion::Py37.into()),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
assert_eq!(
|
||||
parsed_versions.query_module(&spam, TargetVersion::Py313.into()),
|
||||
TypeshedVersionsQueryResult::DoesNotExist
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_huge_versions_file() {
|
||||
let offset = 100;
|
||||
let too_many = u16::MAX as usize + offset;
|
||||
|
||||
let mut massive_versions_file = String::new();
|
||||
for i in 0..too_many {
|
||||
massive_versions_file.push_str(&format!("x{i}: 3.8-\n"));
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str(&massive_versions_file),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: None,
|
||||
reason: TypeshedVersionsParseErrorKind::TooManyLines(
|
||||
NonZeroUsize::new(too_many + 1 - offset).unwrap()
|
||||
)
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_typeshed_versions_bad_colon_number() {
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo 3.7"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons
|
||||
})
|
||||
);
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo:: 3.7"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_typeshed_versions_non_identifier_modules() {
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("not!an!identifier!: 3.7"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
|
||||
"not!an!identifier!".to_string()
|
||||
)
|
||||
})
|
||||
);
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("(also_not).(an_identifier): 3.7"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
|
||||
"(also_not).(an_identifier)".to_string()
|
||||
)
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_typeshed_versions_bad_hyphen_number() {
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo: 3.8"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
|
||||
})
|
||||
);
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo: 3.8--"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
|
||||
})
|
||||
);
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo: 3.8--3.9"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_typeshed_versions_bad_period_number() {
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo: 38-"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods("38".to_string())
|
||||
})
|
||||
);
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo: 3..8-"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
|
||||
"3..8".to_string()
|
||||
)
|
||||
})
|
||||
);
|
||||
assert_eq!(
|
||||
TypeshedVersions::from_str("foo: 3.8-3..11"),
|
||||
Err(TypeshedVersionsParseError {
|
||||
line_number: ONE,
|
||||
reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
|
||||
"3..11".to_string()
|
||||
)
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_typeshed_versions_non_digits() {
|
||||
let err = TypeshedVersions::from_str("foo: 1.two-").unwrap_err();
|
||||
assert_eq!(err.line_number, ONE);
|
||||
let TypeshedVersionsParseErrorKind::IntegerParsingFailure { version, err } = err.reason
|
||||
else {
|
||||
panic!()
|
||||
};
|
||||
assert_eq!(version, "1.two".to_string());
|
||||
assert_eq!(*err.kind(), IntErrorKind::InvalidDigit);
|
||||
|
||||
let err = TypeshedVersions::from_str("foo: 3.8-four.9").unwrap_err();
|
||||
assert_eq!(err.line_number, ONE);
|
||||
let TypeshedVersionsParseErrorKind::IntegerParsingFailure { version, err } = err.reason
|
||||
else {
|
||||
panic!()
|
||||
};
|
||||
assert_eq!(version, "four.9".to_string());
|
||||
assert_eq!(*err.kind(), IntErrorKind::InvalidDigit);
|
||||
}
|
||||
}
|
||||
crates/red_knot_python_semantic/src/node_key.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
use ruff_python_ast::{AnyNodeRef, NodeKind};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
/// Compact key for a node for use in a hash map.
|
||||
///
|
||||
/// Compares two nodes by their kind and text range.
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub(super) struct NodeKey {
|
||||
kind: NodeKind,
|
||||
range: TextRange,
|
||||
}
|
||||
|
||||
impl NodeKey {
|
||||
pub(super) fn from_node<'a, N>(node: N) -> Self
|
||||
where
|
||||
N: Into<AnyNodeRef<'a>>,
|
||||
{
|
||||
let node = node.into();
|
||||
NodeKey {
|
||||
kind: node.kind(),
|
||||
range: node.range(),
|
||||
}
|
||||
}
|
||||
}
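// Illustrative sketch (not part of the diff): using `NodeKey` as a hash-map key.
// `node` is assumed to be a reference to some AST node obtained from a parsed module;
// the map type is likewise only an assumption for this example.
fn _node_key_usage_sketch(node: AnyNodeRef<'_>) {
    let range = node.range();
    let key = NodeKey::from_node(node);

    let mut spans: std::collections::HashMap<NodeKey, TextRange> =
        std::collections::HashMap::new();
    spans.insert(key, range);
    assert_eq!(spans.get(&key), Some(&range));
}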
|
||||
Some files were not shown because too many files have changed in this diff.